language
stringclasses
6 values
original_string
stringlengths
25
887k
text
stringlengths
25
887k
Python
def check_invoice_integrity(row):
    """
    Rheader-helper to check and report invoice integrity.

    Args:
        row: the invoice record

    Returns:
        integrity check result (SPAN)
    """
    T = current.T

    billing = current.s3db.fin_VoucherBilling(row.billing_id)
    try:
        valid = billing.check_invoice(row.id)
    except ValueError:
        valid = False

    if valid:
        return SPAN(T("Ok"),
                    I(_class="fa fa-check"),
                    _class = "record-integrity-ok",
                    )

    # Alert the user, and render a broken-integrity indicator
    current.response.error = T("This invoice may be invalid - please contact the administrator")
    return SPAN(T("Failed"),
                I(_class="fa fa-exclamation-triangle"),
                _class = "record-integrity-broken",
                )
Python
def can_cancel_debit(debit):
    """
    Check whether the current user is entitled to cancel a certain
    voucher debit:
        * User must have the VOUCHER_PROVIDER role for the organisation
          that originally accepted the voucher (not even ADMIN-role can
          override this requirement)

    Args:
        debit: the debit (Row, must contain the debit pe_id)

    Returns:
        True|False
    """
    auth = current.auth

    user = auth.user
    if not user:
        # Not logged-in => can never cancel
        return False

    # Look up the VOUCHER_PROVIDER role ID
    gtable = auth.settings.table_group
    role = current.db(gtable.uuid == "VOUCHER_PROVIDER").select(
                        gtable.id,
                        cache = current.s3db.cache,
                        limitby = (0, 1),
                        ).first()
    if not role:
        return False

    # The user must have the role for at least one realm
    realms = user.realms
    if role.id not in realms:
        return False
    role_realms = realms[role.id]

    if not role_realms:
        # User has a site-wide VOUCHER_PROVIDER role, however
        # for cancellation of debits they must be affiliated
        # with the debit owner organisation
        role_realms = current.s3db.pr_default_realms(user["pe_id"])

    return debit.pe_id in role_realms
Python
def workflow_tag_represent(options, none=None):
    """
    Color-coded and icon-supported representation of facility approval
    workflow tags.

    Args:
        options: the tag options as dict {value: label}
        none: treat None-values like this option (str)

    Returns:
        a representation function (value, row=None)
    """
    icons = {"REVISE": "fa fa-exclamation-triangle",
             "REJECT": "fa fa-exclamation-triangle",
             "REVIEW": "fa fa-hourglass",
             "APPROVED": "fa fa-check",
             "COMPLETE": "fa fa-check",
             "N/A": "fa fa-minus-circle",
             "N": "fa fa-minus-circle",
             "Y": "fa fa-check",
             }

    css_classes = {"REVISE": "workflow-red",
                   "REJECT": "workflow-red",
                   "REVIEW": "workflow-amber",
                   "APPROVED": "workflow-green",
                   "COMPLETE": "workflow-green",
                   "N/A": "workflow-grey",
                   "N": "workflow-red",
                   "Y": "workflow-green",
                   }

    def represent(value, row=None):
        # Map None to the configured substitute option, if any
        if value is None and none:
            value = none

        widget = DIV(_class="approve-workflow")

        css = css_classes.get(value)
        if css:
            widget.add_class(css)

        icon = icons.get(value)
        if icon:
            widget.append(I(_class=icon))

        widget.append(options.get(value, "-"))
        return widget

    return represent
Python
def applicable_org_types(organisation_id, group=None, represent=False):
    """
    Look up organisation types by OrgGroup-tag

    Args:
        organisation_id: the record ID of an existing organisation
        group: alternatively, the organisation group name
        represent: include type labels in the result

    Returns:
        a list of organisation type IDs, for filtering, or
        a dict {type_id: label}, for selecting
    """
    db = current.db
    s3db = current.s3db

    ttable = s3db.org_organisation_type_tag

    # Org types the record is currently linked to (only relevant
    # when an organisation_id is given)
    current_types = set()

    if organisation_id:
        # Look up the org groups of this record
        gtable = s3db.org_group
        mtable = s3db.org_group_membership
        join = gtable.on(gtable.id == mtable.group_id)
        query = (mtable.organisation_id == organisation_id) & \
                (mtable.deleted == False)
        rows = db(query).select(gtable.name, join=join)
        groups = {row.name for row in rows}
        q = (ttable.value.belongs(groups))

        # Look up the org types the record is currently linked to
        ltable = s3db.org_organisation_organisation_type
        query = (ltable.organisation_id == organisation_id) & \
                (ltable.deleted == False)
        rows = db(query).select(ltable.organisation_type_id)
        current_types = {row.organisation_type_id for row in rows}

    elif group:
        # Use group name as-is
        q = (ttable.value == group)

    else:
        # Neither organisation nor group given => no applicable types
        # (previously this fell through with q unbound => NameError)
        return {} if represent else []

    # Look up all types tagged for this group
    query = (ttable.tag == "OrgGroup") & q & \
            (ttable.deleted == False)
    rows = db(query).select(ttable.organisation_type_id,
                            cache = s3db.cache,
                            )
    type_ids = {row.organisation_type_id for row in rows}

    # Add the org types the record is currently linked to
    type_ids |= current_types

    if represent:
        labels = ttable.organisation_type_id.represent
        if hasattr(labels, "bulk"):
            # Pre-fetch all labels in a single query
            labels.bulk(list(type_ids))
        output = {str(t): labels(t) for t in type_ids}
    else:
        output = list(type_ids)

    return output
Python
def update_daily_report(site_id, result_date, disease_id):
    """
    Update daily testing activity report (without subtotals per
    demographic) - called when a new individual test result is
    registered.

    Args:
        site_id: the test station site ID
        result_date: the result date of the test
        disease_id: the disease ID
    """
    db = current.db
    s3db = current.s3db

    table = s3db.disease_case_diagnostics

    # Count individual results, grouped by result type
    query = (table.site_id == site_id) & \
            (table.disease_id == disease_id) & \
            (table.result_date == result_date) & \
            (table.deleted == False)
    num_results = table.id.count()
    results = db(query).select(table.result,
                               num_results,
                               groupby = table.result,
                               )
    total, positive = 0, 0
    for result in results:
        count = result[num_results]
        total += count
        if result.disease_case_diagnostics.result == "POS":
            positive += count

    # Look up the daily report
    rtable = s3db.disease_testing_report
    query = (rtable.site_id == site_id) & \
            (rtable.disease_id == disease_id) & \
            (rtable.date == result_date) & \
            (rtable.deleted == False)
    report = db(query).select(rtable.id,
                              rtable.tests_total,
                              rtable.tests_positive,
                              limitby = (0, 1),
                              ).first()

    if not report:
        # No report for this date yet => create one from the counts
        report = {"site_id": site_id,
                  "disease_id": disease_id,
                  "date": result_date,
                  "tests_total": total,
                  "tests_positive": positive,
                  }
        report_id = rtable.insert(**report)
        if report_id:
            current.auth.s3_set_record_owner(rtable, report_id)
            report["id"] = report_id
            s3db.onaccept(rtable, report, method="create")
    elif report.tests_total < total or report.tests_positive < positive:
        # Update the existing report if the actual numbers are greater
        report.update_record(tests_total = total,
                             tests_positive = positive,
                             )
Python
def update_daily_report_by_demographic(site_id, result_date, disease_id):
    """
    Update daily testing activity report (with subtotals per demographic)
    - called when a new individual test result is registered

    Args:
        site_id: the test station site ID
        result_date: the result date of the test
        disease_id: the disease ID
    """

    db = current.db
    s3db = current.s3db
    set_record_owner = current.auth.s3_set_record_owner

    table = s3db.disease_case_diagnostics
    rtable = s3db.disease_testing_report
    dtable = s3db.disease_testing_demographic

    # Count individual results by demographic and result
    query = (table.site_id == site_id) & \
            (table.disease_id == disease_id) & \
            (table.result_date == result_date) & \
            (table.deleted == False)
    cnt = table.id.count()
    rows = db(query).select(table.demographic_id,
                            table.result,
                            cnt,
                            groupby = (table.demographic_id, table.result),
                            )

    # Generate recorded-subtotals matrix
    # - subtotals: {demographic_id: {"tests_total": n, "tests_positive": n}}
    subtotals = {}
    total = positive = 0
    for row in rows:
        record = row.disease_case_diagnostics
        demographic_id = record.demographic_id
        item = subtotals.get(demographic_id)
        if not item:
            item = subtotals[demographic_id] = {"tests_total": 0,
                                                "tests_positive": 0,
                                                }
        num = row[cnt]
        total += num
        item["tests_total"] += num
        if record.result == "POS":
            positive += num
            item["tests_positive"] += num

    # Look up the daily report
    query = (rtable.site_id == site_id) & \
            (rtable.disease_id == disease_id) & \
            (rtable.date == result_date) & \
            (rtable.deleted == False)
    report = db(query).select(rtable.id,
                              rtable.tests_total,
                              rtable.tests_positive,
                              limitby = (0, 1),
                              ).first()

    if not report:
        # Create a report with the recorded totals
        report = {"site_id": site_id,
                  "disease_id": disease_id,
                  "date": result_date,
                  "tests_total": total,
                  "tests_positive": positive,
                  }
        report["id"] = report_id = rtable.insert(**report)
        if report_id:
            set_record_owner(rtable, report_id)
            s3db.onaccept(rtable, report, method="create")

            # Add subtotals per demographic
            for demographic_id, item in subtotals.items():
                subtotal = {"report_id": report_id,
                            "demographic_id": demographic_id,
                            "tests_total": item["tests_total"],
                            "tests_positive": item["tests_positive"]
                            }
                subtotal_id = subtotal["id"] = dtable.insert(**subtotal)
                set_record_owner(dtable, subtotal_id)
                # We've already set the correct totals in the report:
                #s3db.onaccept(dtable, subtotal, method="create")

    else:
        # Update the existing report with revised subtotals
        report_id = report.id

        # Get all current (reported) subtotals of this report
        # - newest first, so the first match per demographic is the
        #   most recently modified subtotal
        query = (dtable.report_id == report_id) & \
                (dtable.deleted == False)
        rows = db(query).select(dtable.id,
                                dtable.demographic_id,
                                dtable.tests_total,
                                dtable.tests_positive,
                                orderby = ~dtable.modified_on,
                                )

        # For each demographic, determine the recorded and reported subtotals
        for demographic_id, item in subtotals.items():

            # Recorded totals
            recorded_total = item["tests_total"]
            recorded_positive = item["tests_positive"]

            # Reported totals
            last_report = None
            reported_total = reported_positive = 0
            for row in rows:
                if row.demographic_id == demographic_id:
                    reported_total += row.tests_total
                    reported_positive += row.tests_positive
                    if not last_report:
                        last_report = row

            if not last_report:
                # No subtotal for this demographic yet => create one
                subtotal = {"report_id": report_id,
                            "demographic_id": demographic_id,
                            "tests_total": recorded_total,
                            "tests_positive": recorded_positive,
                            }
                subtotal_id = subtotal["id"] = dtable.insert(**subtotal)
                set_record_owner(dtable, subtotal_id)
                # We do this in-bulk at the end:
                #s3db.onaccept(dtable, subtotal, method="create")

            elif reported_total < recorded_total or \
                 reported_positive < recorded_positive:
                # Update the last subtotal with the differences
                # NOTE(review): tests_total is bumped by at least 1 while
                # tests_positive uses a 0 floor - presumably deliberate
                # (a new result was registered), confirm against caller
                last_report.update_record(
                    tests_total = last_report.tests_total + \
                                  max(recorded_total - reported_total, 1),
                    tests_positive = last_report.tests_positive + \
                                     max(recorded_positive - reported_positive, 0),
                    )

        # Get subtotals for all demographics under this report
        query = (dtable.report_id == report_id) & \
                (dtable.deleted == False)
        total = dtable.tests_total.sum()
        positive = dtable.tests_positive.sum()
        row = db(query).select(total, positive).first()

        # Update the overall report from the summed subtotals
        query = (rtable.id == report_id) & \
                (rtable.deleted == False)
        db(query).update(tests_total = row[total],
                         tests_positive = row[positive],
                         )
Python
def rlp_holidays(start, end):
    """
    Date rules set for holidays in RLP

    Args:
        start: the start date
        end: the end date

    Returns:
        a dateutil.rrule rule set for all holidays within the interval
    """
    rules = rrule.rruleset()

    def yearly(**kwargs):
        # Shorthand: add an annual rule limited to the interval
        rules.rrule(rrule.rrule(rrule.YEARLY,
                                dtstart = start,
                                until = end,
                                **kwargs))

    # Fixed-date holidays
    yearly(bymonth=1, bymonthday=1)
    yearly(bymonth=5, bymonthday=1)
    yearly(bymonth=10, bymonthday=3)
    yearly(bymonth=11, bymonthday=1)
    yearly(bymonth=12, bymonthday=(25, 26))

    # Easter-related holidays:
    # (Karfreitag, Ostermontag, Christi Himmelfahrt, Pfingstmontag, Fronleichnam)
    yearly(byeaster=(-2, 1, 39, 50, 60))

    # Exclude holidays falling on weekends
    rules.exrule(rrule.rrule(rrule.WEEKLY,
                             dtstart = start,
                             until = end,
                             byweekday = (rrule.SA, rrule.SU),
                             ))

    return rules
Python
def render_list(self, value, labels, show_link=True):
    """
    Helper method to render list-type representations from
    bulk()-results.

    Args:
        value: the list
        labels: the labels as returned from bulk()
        show_link: render references as links, should be the same as
                   used with bulk()

    Returns:
        a UL instance, or "" for an empty list
    """
    show_link = show_link and self.show_link
    default = self.default

    items = [v for v in value if v is not None]
    if not items:
        return ""

    if show_link:
        # Keep original order when rendering linked labels
        rendered = (labels[v] if v in labels else default
                    for v in items)
    else:
        # Plain labels are sorted alphabetically
        rendered = sorted(s3_str(labels[v]) if v in labels else default
                          for v in items)

    html = UL(_class="service-list")
    for label in rendered:
        html.append(LI(label))
    return html
Python
def lookup_rows(self, key, values, fields=None):
    """
    Custom lookup method for organisation rows, does a left join with
    the parent organisation. Parameters key and fields are not used,
    but are kept for API compatibility reasons.

    Args:
        values: the organisation IDs
    """
    db = current.db
    s3db = current.s3db

    otable = s3db.org_organisation

    count = len(values)
    if count == 1:
        query = (otable.id == values[0])
    else:
        query = (otable.id.belongs(values))
    rows = db(query).select(otable.id,
                            otable.name,
                            limitby = (0, count),
                            )

    if self.show_type:
        # Look up the type links of all organisations in one query
        ltable = s3db.org_organisation_organisation_type
        if count == 1:
            query = (ltable.organisation_id == values[0])
        else:
            query = (ltable.organisation_id.belongs(values))
        query &= (ltable.deleted == False)
        links = db(query).select(ltable.organisation_id,
                                 ltable.organisation_type_id,
                                 )

        # Build the organisation_id => {type_ids} map
        all_types = set()
        org_types = self.org_types = {}
        for link in links:
            type_id = link.organisation_type_id
            all_types.add(type_id)
            org_types.setdefault(link.organisation_id, set()).add(type_id)

        # Look up the names of all types found
        if all_types:
            ttable = s3db.org_organisation_type
            types = db(ttable.id.belongs(all_types)).select(
                            ttable.id,
                            ttable.name,
                            limitby = (0, len(all_types)),
                            )
            self.type_names = {t.id: t.name for t in types}

    return rows
Python
def apply_method(self, r, **attr):
    """
    Generate a PDF of an Invoice

    Args:
        r: the CRUDRequest instance
        attr: controller attributes
    """
    # Only GET requests for existing records in PDF format are valid
    if r.representation != "pdf":
        r.error(415, current.ERROR.BAD_FORMAT)
    if not r.record or r.http != "GET":
        r.error(400, current.ERROR.BAD_REQUEST)

    T = current.T

    # Filename to include invoice number if available
    invoice_no = r.record.invoice_no

    from core import DataExporter
    return DataExporter.pdf(r.resource,
                            request = r,
                            method = "read",
                            pdf_title = T("Invoice"),
                            pdf_filename = invoice_no if invoice_no else None,
                            pdf_header = self.invoice_header,
                            pdf_callback = self.invoice,
                            pdf_footer = self.invoice_footer,
                            pdf_hide_comments = True,
                            pdf_header_padding = 12,
                            pdf_orientation = "Portrait",
                            pdf_table_autogrow = "B",
                            **attr)
Python
def lookup_header_data(invoice):
    """
    Look up data for the invoice header

    Args:
        invoice: the invoice record

    Returns:
        dict with header data
    """
    db = current.db
    s3db = current.s3db

    data = {}

    btable = s3db.fin_voucher_billing
    ptable = s3db.fin_voucher_program
    otable = s3db.org_organisation
    ftable = s3db.org_facility
    ltable = s3db.gis_location
    ctable = s3db.pr_contact

    # Look up the billing date
    query = (btable.id == invoice.billing_id)
    billing = db(query).select(btable.date,
                               limitby = (0, 1),
                               ).first()
    if billing:
        data["billing_date"] = billing.date

    # Use the program admin org as "payers"
    query = (ptable.id == invoice.program_id)
    join = otable.on(otable.id == ptable.organisation_id)
    admin_org = db(query).select(otable.name,
                                 join = join,
                                 limitby = (0, 1),
                                 ).first()
    if admin_org:
        data["payers"] = admin_org.name

    # Look up details of the invoicing party
    query = (otable.pe_id == invoice.pe_id) & \
            (otable.deleted == False)
    organisation = db(query).select(otable.id,
                                    otable.name,
                                    limitby = (0, 1),
                                    ).first()
    if organisation:
        data["organisation"] = organisation.name

        # Email address
        query = (ctable.pe_id == invoice.pe_id) & \
                (ctable.contact_method == "EMAIL") & \
                (ctable.deleted == False)
        email = db(query).select(ctable.value,
                                 limitby = (0, 1),
                                 ).first()
        if email:
            data["email"] = email.value

        # Facility address
        # - guarded by organisation-lookup, since the query needs the
        #   organisation record ID
        query = (ftable.organisation_id == organisation.id) & \
                (ftable.obsolete == False) & \
                (ftable.deleted == False)
        left = ltable.on(ltable.id == ftable.location_id)
        facility = db(query).select(ftable.email,
                                    ltable.addr_street,
                                    ltable.addr_postcode,
                                    ltable.L3,
                                    ltable.L4,
                                    left = left,
                                    limitby = (0, 1),
                                    orderby = ftable.created_on,
                                    ).first()
        if facility:
            if not data.get("email"):
                # Fall back to the facility email when the organisation
                # has no EMAIL contact
                # (fixed: condition was inverted, overwriting an existing
                # contact email instead of filling in the gap)
                data["email"] = facility.org_facility.email
            location = facility.gis_location
            data["addr_street"] = location.addr_street or "-"
            data["addr_postcode"] = location.addr_postcode or "-"
            data["addr_place"] = location.L4 or location.L3 or "-"

    return data
Python
def lookup_body_data(invoice):
    """
    Look up additional data for invoice body

    Args:
        invoice: the invoice record

    Returns:
        dict with invoice data ("title" and "unit" of the program),
        empty if the program could not be found
    """

    ptable = current.s3db.fin_voucher_program

    query = (ptable.id == invoice.program_id) & \
            (ptable.deleted == False)
    program = current.db(query).select(ptable.id,
                                       ptable.name,
                                       ptable.unit,
                                       limitby = (0, 1),
                                       ).first()

    return {"title": program.name, "unit": program.unit} if program else {}
Python
def apply_method(self, r, **attr):
    """
    Generate a PDF of a Claim

    Args:
        r: the CRUDRequest instance
        attr: controller attributes
    """

    # Only GET requests for an existing record in PDF format are valid
    if r.representation != "pdf":
        r.error(415, current.ERROR.BAD_FORMAT)
    if not r.record or r.http != "GET":
        r.error(400, current.ERROR.BAD_REQUEST)

    # Filename to include invoice number if available
    invoice_no = self.invoice_number(r.record)

    from core import DataExporter

    # Fixed export options for the claim document
    options = {"request": r,
               "method": "read",
               "pdf_title": current.T("Compensation Claim"),
               "pdf_filename": invoice_no if invoice_no else None,
               "pdf_header": self.claim_header,
               "pdf_callback": self.claim,
               "pdf_footer": self.claim_footer,
               "pdf_hide_comments": True,
               "pdf_header_padding": 12,
               "pdf_orientation": "Portrait",
               "pdf_table_autogrow": "B",
               }

    # Duplicate keywords in attr raise TypeError, like the explicit call would
    return DataExporter.pdf(r.resource, **options, **attr)
Python
def lookup_header_data(claim):
    """
    Look up data for the claim header

    Args:
        claim: the claim record

    Returns:
        dict with header data (billing_date, invoice_no,
        invoice_date, payers, organisation, email, addr_street,
        addr_postcode, addr_place - each key only present if the
        corresponding lookup succeeded)
    """

    db = current.db
    s3db = current.s3db

    data = {}

    btable = s3db.fin_voucher_billing
    itable = s3db.fin_voucher_invoice
    ptable = s3db.fin_voucher_program
    otable = s3db.org_organisation
    ftable = s3db.org_facility
    ltable = s3db.gis_location
    ctable = s3db.pr_contact

    # Look up the billing date
    query = (btable.id == claim.billing_id)
    billing = db(query).select(btable.date,
                               limitby = (0, 1),
                               ).first()
    if billing:
        data["billing_date"] = billing.date

    # Look up invoice details
    if claim.invoice_id:
        query = (itable.id == claim.invoice_id)
        invoice = db(query).select(itable.date,
                                   itable.invoice_no,
                                   limitby = (0, 1),
                                   ).first()
        if invoice:
            data["invoice_no"] = invoice.invoice_no
            data["invoice_date"] = invoice.date

    # Use the program admin org as "payers"
    query = (ptable.id == claim.program_id)
    join = otable.on(otable.id == ptable.organisation_id)
    admin_org = db(query).select(otable.name,
                                 join = join,
                                 limitby = (0, 1),
                                 ).first()
    if admin_org:
        data["payers"] = admin_org.name

    # Look up details of the invoicing party
    query = (otable.pe_id == claim.pe_id) & \
            (otable.deleted == False)
    organisation = db(query).select(otable.id,
                                    otable.name,
                                    limitby = (0, 1),
                                    ).first()
    if organisation:
        data["organisation"] = organisation.name

    # Email address of the invoicing party
    query = (ctable.pe_id == claim.pe_id) & \
            (ctable.contact_method == "EMAIL") & \
            (ctable.deleted == False)
    email = db(query).select(ctable.value,
                             limitby = (0, 1),
                             ).first()
    if email:
        data["email"] = email.value

    # Facility address
    # - guard against missing organisation (previously raised
    #   AttributeError on organisation.id)
    if organisation:
        query = (ftable.organisation_id == organisation.id) & \
                (ftable.obsolete == False) & \
                (ftable.deleted == False)
        left = ltable.on(ltable.id == ftable.location_id)
        facility = db(query).select(ftable.email,
                                    ltable.addr_street,
                                    ltable.addr_postcode,
                                    ltable.L3,
                                    ltable.L4,
                                    left = left,
                                    limitby = (0, 1),
                                    orderby = ftable.created_on,
                                    ).first()
        if facility:
            if not data.get("email"):
                # Fallback: use the facility email when no EMAIL
                # contact was found for the organisation
                # (condition was inverted before, overwriting the
                # found email and never filling a missing one)
                data["email"] = facility.org_facility.email
            location = facility.gis_location
            data["addr_street"] = location.addr_street or "-"
            data["addr_postcode"] = location.addr_postcode or "-"
            data["addr_place"] = location.L4 or location.L3 or "-"

    return data
Python
def lookup_body_data(claim):
    """
    Look up additional data for claim body

    Args:
        claim: the claim record

    Returns:
        dict with claim data ("title" and "unit" of the program),
        empty if the program could not be found
    """

    ptable = current.s3db.fin_voucher_program

    query = (ptable.id == claim.program_id) & \
            (ptable.deleted == False)
    program = current.db(query).select(ptable.id,
                                       ptable.name,
                                       ptable.unit,
                                       limitby = (0, 1),
                                       ).first()

    return {"title": program.name, "unit": program.unit} if program else {}
Python
def represent_row_html(self, row):
    """
    Represent a row with contact information, styleable HTML

    Args:
        row: the Row

    Returns:
        DIV with the manager name and a details table
        (date of birth, email, phone)
    """

    T = current.T

    repr_div = DIV(SPAN(s3_fullname(row),
                        _class = "manager-name",
                        ),
                   _class = "manager-repr",
                   )

    # Date of birth may not be present in the row
    dob = getattr(row, "date_of_birth", None)
    dob_repr = self.table.date_of_birth.represent(dob) if dob else "-"

    # Contact details are pre-looked-up per pe_id, subject to
    # the show_email/show_phone flags
    pe_id = row.pe_id
    email = self._email.get(pe_id) if self.show_email else None
    phone = self._phone.get(pe_id) if self.show_phone else None

    email_repr = A(email, _href="mailto:%s" % email) if email else "-"
    phone_repr = phone if phone else "-"

    details = TABLE(TR(TH("%s:" % T("Date of Birth")),
                       TD(dob_repr),
                       _class = "manager-dob"
                       ),
                    TR(TH(ICON("mail")),
                       TD(email_repr),
                       _class = "manager-email"
                       ),
                    TR(TH(ICON("phone")),
                       TD(phone_repr),
                       _class = "manager-phone",
                       ),
                    _class = "manager-details",
                    )
    repr_div.append(details)

    return repr_div
Python
def __search(self):
    """
    Process filters in POST, interprets URL filter expressions
    in POST vars (if multipart), or from JSON request body (if
    not multipart or $search=ajax).

    Note:
        Overrides CRUDRequest method as GET (r.http) to trigger
        the correct method handlers, but will not change
        current.request.env.request_method.
    """

    get_vars = self.get_vars
    content_type = self.env.get("content_type") or ""

    mode = get_vars.get("$search")

    # Override request method
    if mode:
        self.http = "GET"

    # Retrieve filters from request body
    # - three sources, depending on content type and mode
    if content_type == "application/x-www-form-urlencoded":
        # Read POST vars (e.g. from S3.gis.refreshLayer)
        filters = self.post_vars
        decode = None
    elif mode == "ajax" or content_type[:10] != "multipart/":
        # Read body JSON (e.g. from $.searchS3)
        body = self.body
        body.seek(0)
        # Decode request body (=bytes stream) into a str
        # - minor performance advantage by avoiding the need for
        #   json.loads to detect the encoding
        s = body.read().decode("utf-8")
        try:
            filters = json.loads(s)
        except ValueError:
            # Malformed JSON => ignore, no filters
            filters = {}
        if not isinstance(filters, dict):
            filters = {}
        decode = None
    else:
        # Read POST vars JSON (e.g. from $.searchDownloadS3)
        # - individual values are JSON-encoded, hence per-key decode
        filters = self.post_vars
        decode = json.loads

    # Move filters into GET vars
    # - work on copies, then replace the originals at the end
    get_vars = Storage(get_vars)
    post_vars = Storage(self.post_vars)

    # NOTE(review): assumes "$search" is present in GET vars
    # (KeyError otherwise) - presumably guaranteed by the caller
    del get_vars["$search"]
    for k, v in filters.items():
        k0 = k[0]
        # Accept $filter, $$-prefixed keys, bbox, and filter
        # expressions (field selectors with ".", or parenthesized
        # component aliases), but not _-prefixed keys
        if k == "$filter" or k[0:2] == "$$" or k == "bbox" or \
           k0 != "_" and ("." in k or k0 == "(" and ")" in k):
            try:
                value = decode(v) if decode else v
            except ValueError:
                # Undecodable value => skip this filter
                continue
            # Catch any non-str values
            if type(value) is list:
                value = [s3_str(item)
                         if not isinstance(item, str) else item
                         for item in value
                         ]
            elif type(value) is not str:
                value = s3_str(value)
            get_vars[s3_str(k)] = value
            # Remove filter expression from POST vars
            if k in post_vars:
                del post_vars[k]

    # Override self.get_vars and self.post_vars
    self.get_vars = get_vars
    self.post_vars = post_vars

    # Update combined vars
    self.vars = get_vars.copy()
    self.vars.update(self.post_vars)
Python
def default_methods(self):
    """
    Default method handlers as dict {method: handler}

    Returns:
        dict mapping method names to their handler classes/functions

    Note:
        The mapping is built once and cached in self._default_methods;
        imports are deferred until first use.
    """

    methods = self._default_methods
    if not methods:
        # First call => build and cache the mapping
        from .methods import RESTful, S3Filter, S3GroupedItemsReport, \
                             S3HierarchyCRUD, S3Map, S3Merge, S3MobileCRUD, \
                             S3Organizer, S3Profile, S3Report, S3Summary, \
                             TimePlot, S3XForms, SpreadsheetImporter

        methods = {"deduplicate": S3Merge,
                   "fields": RESTful,
                   "filter": S3Filter,
                   "grouped": S3GroupedItemsReport,
                   "hierarchy": S3HierarchyCRUD,
                   "import": SpreadsheetImporter,
                   "map": S3Map,
                   "mform": S3MobileCRUD,
                   "options": RESTful,
                   "organize": S3Organizer,
                   "profile": S3Profile,
                   "report": S3Report,
                   "summary": S3Summary,
                   "sync": current.sync,
                   "timeplot": TimePlot,
                   "xform": S3XForms,
                   }

        # "copy" redirects to a create-request pre-populated from
        # the current record
        methods["copy"] = lambda r, **attr: redirect(URL(args = "create",
                                                         vars = {"from_record": r.id},
                                                         ))

        from .msg import S3Compose
        methods["compose"] = S3Compose

        from .ui import search_ac
        methods["search_ac"] = search_ac

        # CMS handler is optional (module may not be present)
        try:
            from s3db.cms import S3CMS
        except ImportError:
            current.log.error("S3CMS default method not found")
        else:
            methods["cms"] = S3CMS

        self._default_methods = methods

    return methods
Python
def factory(self, **args):
    """
    Generate a new request for the same resource

    Args:
        args: arguments for the request constructor

    Returns:
        a new CRUDRequest instance derived from this one
    """

    # Pass this request as prototype for the new one
    args["r"] = self

    return crud_request(**args)
Python
def transformable(self, method=None):
    """
    Check the request for a transformable format

    Args:
        method: "import" for import methods, otherwise None

    Returns:
        True if the requested format is transformable
    """

    # Interactive formats are never transformed
    if self.representation in ("html", "aadata", "popup", "iframe"):
        return False

    # Transformable if a stylesheet exists, or for native S3XML
    # (which needs no stylesheet)
    stylesheet = self.stylesheet(method=method, skip_error=True)
    if stylesheet or self.representation == "xml":
        return True

    return False
Python
def actuate_link(self, component_id=None):
    """
    Determine whether to actuate a link or not

    Args:
        component_id: the component_id (if not self.component_id)

    Returns:
        True if the link shall be actuated
    """

    if not component_id:
        component_id = self.component_id

    component = self.component
    if not component:
        # No component => nothing to actuate
        return False

    if not component.link:
        # Not a link-table component => always actuate
        return True

    # Single-record perspective?
    single = component_id != None

    actuate = component.actuate
    if "linked" in self.get_vars:
        # URL override of the actuation mode
        linked = self.get_vars.get("linked", False)
        actuate = "replace" if linked in ("true", "True") else "hide"

    if actuate == "link":
        # Actuate for single record, except for deletion (inverted)
        if self.method != "delete" and self.http != "DELETE":
            return single
        return not single
    if actuate == "replace":
        return True
    #if actuate == "embed":
    #    raise NotImplementedError
    return False
Python
def url(self,
        id = None,
        component = None,
        component_id = None,
        target = None,
        method = None,
        representation = None,
        vars = None,
        host = None,
        ):
    """
    Returns the URL of this request, use parameters to override
    current requests attributes:
        - None to keep current attribute (default)
        - 0 or "" to set attribute to NONE
        - value to use explicit value

    Args:
        id: the master record ID
        component: the component name
        component_id: the component ID
        target: the target record ID (choose automatically)
        method: the URL method
        representation: the representation for the URL
        vars: the URL query variables
        host: string to force absolute URL with host (True means http_host)

    Notes:
        - changing the master record ID resets the component ID
        - removing the target record ID sets the method to None
        - removing the method sets the target record ID to None
        - [] as id will be replaced by the "[id]" wildcard
    """

    if vars is None:
        vars = self.get_vars
    elif vars and isinstance(vars, str):
        # We've come from a dataTable_vars which has the vars as
        # a JSON string, but with the wrong quotation marks
        vars = json.loads(vars.replace("'", "\""))

    # The representation goes into the URL path, not the query
    if "format" in vars:
        del vars["format"]

    args = []

    cname = self.component_name

    # target
    # - a target ID addresses the component record if there is a
    #   (matching) component, otherwise the master record
    if target is not None:
        if cname and (component is None or component == cname):
            component_id = target
        else:
            id = target

    # method
    default_method = False
    if method is None:
        default_method = True
        method = self.method
    elif method == "":
        # Switch to list? (= method="" and no explicit target ID)
        if component_id is None:
            if self.component_id is not None:
                component_id = 0
            elif not self.component:
                if id is None:
                    if self.id is not None:
                        id = 0
        method = None

    # id
    # - 0/"" removes the ID, []/"[id]"/"*" means wildcard;
    #   changing the master ID resets the component ID
    if id is None:
        id = self.id
    elif id in (0, ""):
        id = None
    elif id in ([], "[id]", "*"):
        id = "[id]"
        component_id = 0
    elif str(id) != str(self.id):
        component_id = 0

    # component
    # - switching or removing the component resets the component ID
    if component is None:
        component = cname
    elif component == "":
        component = None
    if cname and cname != component or not component:
        component_id = 0

    # component_id
    if component_id is None:
        component_id = self.component_id
    elif component_id == 0:
        component_id = None
        if self.component_id and default_method:
            method = None

    # Removing the master record ID also drops a default method
    if id is None and self.id and \
       (not component or not component_id) and default_method:
        method = None

    # Assemble the URL args in canonical order
    if id:
        args.append(id)
    if component:
        args.append(component)
    if component_id:
        args.append(component_id)
    if method:
        args.append(method)

    # representation
    if representation is None:
        representation = self.representation
    elif representation == "":
        representation = self.DEFAULT_REPRESENTATION

    f = self.function
    if not representation == self.DEFAULT_REPRESENTATION:
        # Non-default format => append as extension to the last
        # URL arg, or to the function name if there are no args
        if len(args) > 0:
            args[-1] = "%s.%s" % (args[-1], representation)
        else:
            f = "%s.%s" % (f, representation)

    return URL(r=self,
               c=self.controller,
               f=f,
               args=args,
               vars=vars,
               host=host)
Python
def target(self):
    """
    Get the target table of the current request

    Returns:
        tuple of (prefix, name, table, tablename) of the target
        resource of this request

    TODO:
        update for link table support
    """

    component = self.component
    if component is None:
        # No component => master resource is the target
        return (self.prefix, self.name, self.table, self.tablename)

    # Link-table component without link actuation => the link
    # table itself is the target
    link = component.link
    resource = link if link and not self.actuate_link() else component

    return (resource.prefix,
            resource.name,
            resource.table,
            resource.tablename,
            )
Python
def viewing(self):
    """
    Parse the "viewing" URL parameter, frequently used for
    perspective discrimination and processing in prep

    Returns:
        tuple (tablename, record_id) if "viewing" is set and
        valid ("tablename.id"), otherwise None
    """

    expr = self.get_vars.get("viewing")
    if expr is None:
        return None

    # Expected format: "tablename.record_id"
    try:
        tablename, record_id = expr.split(".")
    except (AttributeError, ValueError):
        return None

    # Record ID must be an integer
    try:
        record_id = int(record_id)
    except (TypeError, ValueError):
        return None

    return tablename, record_id
Python
def stylesheet(self, method=None, skip_error=False):
    """
    Find the XSLT stylesheet for this request

    Args:
        method: "import" for data imports, else None
        skip_error: do not raise an HTTP error status
                    if the stylesheet cannot be found

    Returns:
        the stylesheet (file path, file handle, or URL),
        or None if not found/required
    """

    representation = self.representation

    # Native S3XML?
    if representation == "xml":
        # No transformation required
        return None

    # External stylesheet specified?
    if "transform" in self.vars:
        return self.vars["transform"]

    component = self.component
    resourcename = component.name if component else self.name

    # Stylesheet attached to the request?
    # - an uploaded file named "<resourcename>.<extension>"
    extension = self.XSLT_EXTENSION
    filename = "%s.%s" % (resourcename, extension)
    if filename in self.post_vars:
        p = self.post_vars[filename]
        import cgi
        if isinstance(p, cgi.FieldStorage) and p.filename:
            return p.file

    # Look for stylesheet in file system
    # - stylesheets are named "import.<ext>" or "export.<ext>"
    folder = self.folder
    if method != "import":
        method = "export"
    stylesheet = None

    # Custom transformation stylesheet in template?
    if not stylesheet:
        formats = current.deployment_settings.get_xml_formats()
        if isinstance(formats, dict) and representation in formats:
            stylesheets = formats[representation]
            if isinstance(stylesheets, str) and stylesheets:
                # Path relative to modules/templates, with a
                # "formats" sub-directory appended
                stylesheets = stylesheets.split("/") + ["formats"]
                path = os.path.join("modules", "templates", *stylesheets)
                filename = "%s.%s" % (method, extension)
                stylesheet = os.path.join(folder, path, representation, filename)

    # Transformation stylesheet at standard location?
    if not stylesheet:
        path = self.XSLT_PATH
        filename = "%s.%s" % (method, extension)
        stylesheet = os.path.join(folder, path, representation, filename)

    if not os.path.exists(stylesheet):
        # Not found => raise 501 unless the caller opted out
        if not skip_error:
            self.error(501, "%s: %s" % (current.ERROR.BAD_TEMPLATE,
                                        stylesheet,
                                        ))
        stylesheet = None

    return stylesheet
Python
def customise_resource(self, tablename=None): """ Invoke the customization callback for a resource. Args: tablename: the tablename of the resource; if called without tablename it will invoke the callbacks for the target resources of this request: - master - active component - active link table (in this order) Example: Resource customization functions can be defined like: def customise_resource_my_table(r, tablename): current.s3db.configure(tablename, my_custom_setting = "example") settings.customise_resource_my_table = \ customise_resource_my_table Notes: - the hook itself can call r.customise_resource in order to cascade customizations as necessary - if a table is customised that is not currently loaded, then it will be loaded for this process """ if tablename is None: # Customise the current target resource(s) customise = self.customise_resource customise(self.resource.tablename) if self.component: customise(self.component.tablename) if self.link: customise(self.link.tablename) return s3db = current.s3db # Note: must load the model first, otherwise it would override # the custom settings when loaded later if not s3db.customised(tablename) and s3db.table(tablename, db_only=True): customise = current.deployment_settings.customise_resource(tablename) if customise: customise(self, tablename) s3db.customised(tablename, True)
def customise_resource(self, tablename=None):
    """
    Invoke the customization callback for a resource.

    Args:
        tablename: the tablename of the resource; if called without
                   tablename it will invoke the callbacks for the target
                   resources of this request:
                        - master
                        - active component
                        - active link table
                   (in this order)

    Example:
        Resource customization functions can be defined like:

            def customise_resource_my_table(r, tablename):

                current.s3db.configure(tablename,
                                       my_custom_setting = "example")

            settings.customise_resource_my_table = \
                customise_resource_my_table

    Notes:
        - the hook itself can call r.customise_resource in order
          to cascade customizations as necessary
        - if a table is customised that is not currently loaded, then
          it will be loaded for this process
    """

    if tablename is None:
        # Without tablename, customise all current target resources
        # in order: master, then component, then link table
        targets = [self.resource.tablename]
        if self.component:
            targets.append(self.component.tablename)
        if self.link:
            targets.append(self.link.tablename)
        for target in targets:
            self.customise_resource(target)
        return

    s3db = current.s3db

    # Load the model first, so that a later model load cannot
    # override the custom settings applied by the hook
    if not s3db.customised(tablename) and \
       s3db.table(tablename, db_only=True):
        hook = current.deployment_settings.customise_resource(tablename)
        if hook:
            hook(self, tablename)
        s3db.customised(tablename, True)
Python
def crud_request(*args, **kwargs):
    """
    Helper function to generate CRUDRequest instances

    Args:
        args: arguments for the CRUDRequest
        kwargs: keyword arguments for the CRUDRequest

    Keyword Args:
        catch_errors: if set to False, errors will be raised
                      instead of returned to the client, useful
                      for optional sub-requests, or if the caller
                      implements fallbacks

    Returns:
        the CRUDRequest

    Raises:
        HTTP: on request errors in non-interactive formats,
              unless catch_errors is False
    """

    catch_errors = kwargs.pop("catch_errors", True)

    error = message = None
    try:
        r = CRUDRequest(*args, **kwargs)
    # Idiomatic exception capture instead of legacy sys.exc_info()
    except (AttributeError, SyntaxError) as e:
        if catch_errors is False:
            raise
        # Invalid request
        error, message = 400, e
    except KeyError as e:
        if catch_errors is False:
            raise
        # Resource/method not found
        error, message = 404, e

    if error:
        # Extract a human-readable message from the exception
        if hasattr(message, "message"):
            # Custom exception classes with a message attribute
            message = message.message
        elif hasattr(message, "args"):
            message = message.args[0] if message.args else None
        message = s3_str(message) if message else "Unknown Error (%s)" % error

        if current.auth.permission.format == "html":
            # Interactive request => redirect to index with error notice
            current.session.error = message
            redirect(URL(f="index"))
        else:
            # Non-interactive => respond with a JSON error message
            headers = {"Content-Type": "application/json"}
            current.log.error(message)
            raise HTTP(error,
                       body = current.xml.json_message(success = False,
                                                       statuscode = error,
                                                       message = message,
                                                       ),
                       web2py_error = message,
                       **headers)
    return r
def crud_request(*args, **kwargs):
    """
    Helper function to generate CRUDRequest instances

    Args:
        args: arguments for the CRUDRequest
        kwargs: keyword arguments for the CRUDRequest

    Keyword Args:
        catch_errors: if set to False, errors will be raised
                      instead of returned to the client, useful
                      for optional sub-requests, or if the caller
                      implements fallbacks

    Returns:
        the CRUDRequest

    Raises:
        HTTP: on request errors in non-interactive formats,
              unless catch_errors is False
    """

    catch_errors = kwargs.pop("catch_errors", True)

    error = message = None
    try:
        r = CRUDRequest(*args, **kwargs)
    # Idiomatic exception capture instead of legacy sys.exc_info()
    except (AttributeError, SyntaxError) as e:
        if catch_errors is False:
            raise
        # Invalid request
        error, message = 400, e
    except KeyError as e:
        if catch_errors is False:
            raise
        # Resource/method not found
        error, message = 404, e

    if error:
        # Extract a human-readable message from the exception
        if hasattr(message, "message"):
            # Custom exception classes with a message attribute
            message = message.message
        elif hasattr(message, "args"):
            message = message.args[0] if message.args else None
        message = s3_str(message) if message else "Unknown Error (%s)" % error

        if current.auth.permission.format == "html":
            # Interactive request => redirect to index with error notice
            current.session.error = message
            redirect(URL(f="index"))
        else:
            # Non-interactive => respond with a JSON error message
            headers = {"Content-Type": "application/json"}
            current.log.error(message)
            raise HTTP(error,
                       body = current.xml.json_message(success = False,
                                                       statuscode = error,
                                                       message = message,
                                                       ),
                       web2py_error = message,
                       **headers)
    return r
Python
def crud_controller(prefix=None, resourcename=None, **attr):
    """
    Helper function to apply CRUD methods

    Args:
        prefix: the application prefix
        resourcename: the resource name (without prefix)
        attr: additional keyword parameters

    Keyword Args:
        Any keyword parameters will be copied into the output dict
        (provided that the output is a dict). If a keyword parameter
        is callable, then it will be invoked, and its return value
        will be added to the output dict instead. The callable receives
        the CRUDRequest as its first and only parameter.

    Returns:
        the request output (a dict for view rendering, or any other
        output produced by the applied method handler)

    CRUD can be configured per table using:

        s3db.configure(tablename, **attr)

    *** Redirection:

        create_next             URL to redirect to after a record has been created
        update_next             URL to redirect to after a record has been updated
        delete_next             URL to redirect to after a record has been deleted

    *** Form configuration:

        list_fields             list of names of fields to include into list views
        subheadings             Sub-headings (see separate documentation)
        listadd                 Enable/Disable add-form in list views

    *** CRUD configuration:

        editable                Allow/Deny record updates in this table
        deletable               Allow/Deny record deletions in this table
        insertable              Allow/Deny record insertions into this table
        copyable                Allow/Deny record copying within this table

    *** Callbacks:

        create_onvalidation     Function for additional record validation on create
        create_onaccept         Function after successful record insertion

        update_onvalidation     Function for additional record validation on update
        update_onaccept         Function after successful record update

        onvalidation            Fallback for both create_onvalidation and update_onvalidation
        onaccept                Fallback for both create_onaccept and update_onaccept

        ondelete                Function after record deletion
    """

    auth = current.auth
    s3db = current.s3db

    request = current.request
    response = current.response
    s3 = response.s3
    settings = current.deployment_settings

    # Parse the request
    dynamic = attr.get("dynamic")
    if dynamic:
        # Dynamic table controller
        c = request.controller
        f = request.function
        attr = settings.customise_controller("%s_%s" % (c, f), **attr)
        from core import DYNAMIC_PREFIX, s3_get_extension
        r = crud_request(DYNAMIC_PREFIX,
                         dynamic,
                         f = "%s/%s" % (f, dynamic),
                         args = request.args[1:],
                         extension = s3_get_extension(request),
                         )
    else:
        # Customise Controller from Template
        attr = settings.customise_controller(
                    "%s_%s" % (prefix or request.controller,
                               resourcename or request.function,
                               ),
                    **attr)
        r = crud_request(prefix, resourcename)

    # Customize target resource(s) from Template
    r.customise_resource()

    # List of methods rendering datatables with default action buttons
    dt_methods = (None, "datatable", "datatable_f", "summary", "list")

    # List of methods rendering datatables with custom action buttons,
    # => for these, s3.actions must not be touched, see below
    # (defining here allows postp to add a custom method to the list)
    s3.action_methods = ("import",
                         "review",
                         "approve",
                         "reject",
                         "deduplicate",
                         )

    # Execute the request
    output = r(**attr)

    method = r.method
    if isinstance(output, dict) and method in dt_methods:

        if s3.actions is None:

            # Add default action buttons
            # NOTE(review): prefix/name are rebound here to the
            # request target's values, shadowing the parameters
            prefix, name, table, tablename = r.target()
            authorised = auth.s3_has_permission("update", tablename)

            # If a component has components itself, then action buttons
            # can be forwarded to the native controller by setting native=True
            if r.component and s3db.has_components(table):
                native = output.get("native", False)
            else:
                native = False

            # Get table config
            get_config = s3db.get_config
            listadd = get_config(tablename, "listadd", True)

            # Which is the standard open-action?
            if settings.get_ui_open_read_first():
                # Always read, irrespective permissions
                editable = False
            else:
                editable = get_config(tablename, "editable", True)
                if editable and \
                   auth.permission.ownership_required("update", table):
                    # User cannot edit all records in the table
                    if settings.get_ui_auto_open_update():
                        # Decide automatically per-record (implicit method)
                        editable = "auto"
                    else:
                        # Always open read first (explicit read)
                        editable = False

            deletable = get_config(tablename, "deletable", True)
            copyable = get_config(tablename, "copyable", False)

            # URL to open the resource
            from .methods import S3CRUD
            open_url = S3CRUD._linkto(r,
                                      authorised = authorised,
                                      update = editable,
                                      native = native)("[id]")

            # Add action buttons for Open/Delete/Copy as appropriate
            S3CRUD.action_buttons(r,
                                  deletable = deletable,
                                  copyable = copyable,
                                  editable = editable,
                                  read_url = open_url,
                                  update_url = open_url
                                  # To use modals
                                  #update_url = "%s.popup?refresh=list" % open_url
                                  )

            # Override Add-button, link to native controller and put
            # the primary key into get_vars for automatic linking
            if native and not listadd and \
               auth.s3_has_permission("create", tablename):
                label = get_crud_string(tablename, "label_create")
                component = r.resource.components[name]
                fkey = "%s.%s" % (name, component.fkey)
                get_vars_copy = request.get_vars.copy()
                get_vars_copy.update({fkey: r.record[component.fkey]})
                url = URL(prefix, name,
                          args = ["create"],
                          vars = get_vars_copy,
                          )
                add_btn = A(label,
                            _href = url,
                            _class = "action-btn",
                            )
                output.update(add_btn = add_btn)

    elif method not in s3.action_methods:
        # Custom method without custom buttons => reset actions
        s3.actions = None

    return output
def crud_controller(prefix=None, resourcename=None, **attr):
    """
    Helper function to apply CRUD methods

    Args:
        prefix: the application prefix
        resourcename: the resource name (without prefix)
        attr: additional keyword parameters

    Keyword Args:
        Any keyword parameters will be copied into the output dict
        (provided that the output is a dict). If a keyword parameter
        is callable, then it will be invoked, and its return value
        will be added to the output dict instead. The callable receives
        the CRUDRequest as its first and only parameter.

    Returns:
        the request output (a dict for view rendering, or any other
        output produced by the applied method handler)

    CRUD can be configured per table using:

        s3db.configure(tablename, **attr)

    *** Redirection:

        create_next             URL to redirect to after a record has been created
        update_next             URL to redirect to after a record has been updated
        delete_next             URL to redirect to after a record has been deleted

    *** Form configuration:

        list_fields             list of names of fields to include into list views
        subheadings             Sub-headings (see separate documentation)
        listadd                 Enable/Disable add-form in list views

    *** CRUD configuration:

        editable                Allow/Deny record updates in this table
        deletable               Allow/Deny record deletions in this table
        insertable              Allow/Deny record insertions into this table
        copyable                Allow/Deny record copying within this table

    *** Callbacks:

        create_onvalidation     Function for additional record validation on create
        create_onaccept         Function after successful record insertion

        update_onvalidation     Function for additional record validation on update
        update_onaccept         Function after successful record update

        onvalidation            Fallback for both create_onvalidation and update_onvalidation
        onaccept                Fallback for both create_onaccept and update_onaccept

        ondelete                Function after record deletion
    """

    auth = current.auth
    s3db = current.s3db

    request = current.request
    response = current.response
    s3 = response.s3
    settings = current.deployment_settings

    # Parse the request
    dynamic = attr.get("dynamic")
    if dynamic:
        # Dynamic table controller
        c = request.controller
        f = request.function
        attr = settings.customise_controller("%s_%s" % (c, f), **attr)
        from core import DYNAMIC_PREFIX, s3_get_extension
        r = crud_request(DYNAMIC_PREFIX,
                         dynamic,
                         f = "%s/%s" % (f, dynamic),
                         args = request.args[1:],
                         extension = s3_get_extension(request),
                         )
    else:
        # Customise Controller from Template
        attr = settings.customise_controller(
                    "%s_%s" % (prefix or request.controller,
                               resourcename or request.function,
                               ),
                    **attr)
        r = crud_request(prefix, resourcename)

    # Customize target resource(s) from Template
    r.customise_resource()

    # List of methods rendering datatables with default action buttons
    dt_methods = (None, "datatable", "datatable_f", "summary", "list")

    # List of methods rendering datatables with custom action buttons,
    # => for these, s3.actions must not be touched, see below
    # (defining here allows postp to add a custom method to the list)
    s3.action_methods = ("import",
                         "review",
                         "approve",
                         "reject",
                         "deduplicate",
                         )

    # Execute the request
    output = r(**attr)

    method = r.method
    if isinstance(output, dict) and method in dt_methods:

        if s3.actions is None:

            # Add default action buttons
            # NOTE(review): prefix/name are rebound here to the
            # request target's values, shadowing the parameters
            prefix, name, table, tablename = r.target()
            authorised = auth.s3_has_permission("update", tablename)

            # If a component has components itself, then action buttons
            # can be forwarded to the native controller by setting native=True
            if r.component and s3db.has_components(table):
                native = output.get("native", False)
            else:
                native = False

            # Get table config
            get_config = s3db.get_config
            listadd = get_config(tablename, "listadd", True)

            # Which is the standard open-action?
            if settings.get_ui_open_read_first():
                # Always read, irrespective permissions
                editable = False
            else:
                editable = get_config(tablename, "editable", True)
                if editable and \
                   auth.permission.ownership_required("update", table):
                    # User cannot edit all records in the table
                    if settings.get_ui_auto_open_update():
                        # Decide automatically per-record (implicit method)
                        editable = "auto"
                    else:
                        # Always open read first (explicit read)
                        editable = False

            deletable = get_config(tablename, "deletable", True)
            copyable = get_config(tablename, "copyable", False)

            # URL to open the resource
            from .methods import S3CRUD
            open_url = S3CRUD._linkto(r,
                                      authorised = authorised,
                                      update = editable,
                                      native = native)("[id]")

            # Add action buttons for Open/Delete/Copy as appropriate
            S3CRUD.action_buttons(r,
                                  deletable = deletable,
                                  copyable = copyable,
                                  editable = editable,
                                  read_url = open_url,
                                  update_url = open_url
                                  # To use modals
                                  #update_url = "%s.popup?refresh=list" % open_url
                                  )

            # Override Add-button, link to native controller and put
            # the primary key into get_vars for automatic linking
            if native and not listadd and \
               auth.s3_has_permission("create", tablename):
                label = get_crud_string(tablename, "label_create")
                component = r.resource.components[name]
                fkey = "%s.%s" % (name, component.fkey)
                get_vars_copy = request.get_vars.copy()
                get_vars_copy.update({fkey: r.record[component.fkey]})
                url = URL(prefix, name,
                          args = ["create"],
                          vars = get_vars_copy,
                          )
                add_btn = A(label,
                            _href = url,
                            _class = "action-btn",
                            )
                output.update(add_btn = add_btn)

    elif method not in s3.action_methods:
        # Custom method without custom buttons => reset actions
        s3.actions = None

    return output
Python
def profile_rheader(r, tabs=None):
    """
    Custom rheader for default/person

    Args:
        r: the CRUDRequest
        tabs: the tabs to render (overridden for pr_person)

    Returns:
        the resource header, or None if not applicable
    """

    if r.representation != "html":
        # Resource headers only used in interactive views
        return None

    tablename, record = s3_rheader_resource(r)
    if tablename != r.tablename:
        # Request is for a different resource => instantiate it
        resource = current.s3db.resource(tablename, id=record.id)
    else:
        resource = r.resource

    rheader = None
    rheader_fields = []

    if record:
        T = current.T

        if tablename == "pr_person":
            tabs = [(T("Person Details"), None),
                    (T("User Account"), "user_profile"),
                    (T("Contact Information"), "contacts"),
                    ]
            # No header fields, tabs only
            rheader_fields = []

        rheader = S3ResourceHeader(rheader_fields, tabs)(r,
                                                         table = resource.table,
                                                         record = record,
                                                         )
    return rheader
def profile_rheader(r, tabs=None):
    """
    Custom rheader for default/person

    Args:
        r: the CRUDRequest
        tabs: the tabs to render (overridden for pr_person)

    Returns:
        the resource header, or None if not applicable
    """

    if r.representation != "html":
        # Resource headers only used in interactive views
        return None

    tablename, record = s3_rheader_resource(r)
    if tablename != r.tablename:
        # Request is for a different resource => instantiate it
        resource = current.s3db.resource(tablename, id=record.id)
    else:
        resource = r.resource

    rheader = None
    rheader_fields = []

    if record:
        T = current.T

        if tablename == "pr_person":
            tabs = [(T("Person Details"), None),
                    (T("User Account"), "user_profile"),
                    (T("Contact Information"), "contacts"),
                    ]
            # No header fields, tabs only
            rheader_fields = []

        rheader = S3ResourceHeader(rheader_fields, tabs)(r,
                                                         table = resource.table,
                                                         record = record,
                                                         )
    return rheader
Python
def s3_fieldmethod(name, f, represent=None, search_field=None):
    """
    Helper to attach a representation method to a Field.Method.

    Args:
        name: the field name
        f: the field method
        represent: the representation function
        search_field: the field to use for searches
                      - only used by datatable_filter currently
                      - can only be a single field in the same table currently

    Returns:
        the Field.Method
    """

    if represent is None and search_field is None:
        # No handler required
        return Field.Method(name, f)

    class Handler:
        """ Handler binding the row to the method call """
        def __init__(self, method, row):
            self.method = method
            self.row = row
        def __call__(self, *args, **kwargs):
            return self.method(self.row, *args, **kwargs)

    if represent is not None:
        # Objects exposing a bulk-method can be class attributes
        # directly, plain functions must be wrapped as staticmethod
        # to prevent implicit binding
        if hasattr(represent, "bulk"):
            Handler.represent = represent
        else:
            Handler.represent = staticmethod(represent)

    if search_field is not None:
        Handler.search_field = search_field

    return Field.Method(name, f, handler=Handler)
def s3_fieldmethod(name, f, represent=None, search_field=None):
    """
    Helper to attach a representation method to a Field.Method.

    Args:
        name: the field name
        f: the field method
        represent: the representation function
        search_field: the field to use for searches
                      - only used by datatable_filter currently
                      - can only be a single field in the same table currently

    Returns:
        the Field.Method
    """

    if represent is None and search_field is None:
        # No handler required
        return Field.Method(name, f)

    class Handler:
        """ Handler binding the row to the method call """
        def __init__(self, method, row):
            self.method = method
            self.row = row
        def __call__(self, *args, **kwargs):
            return self.method(self.row, *args, **kwargs)

    if represent is not None:
        # Objects exposing a bulk-method can be class attributes
        # directly, plain functions must be wrapped as staticmethod
        # to prevent implicit binding
        if hasattr(represent, "bulk"):
            Handler.represent = represent
        else:
            Handler.represent = staticmethod(represent)

    if search_field is not None:
        Handler.search_field = search_field

    return Field.Method(name, f, handler=Handler)
Python
def dummy(fname="dummy_id", ftype="integer"):
    """
    Provide a dummy reusable field; for safe defaults in models

    Args:
        fname: the dummy field name
        ftype: the dummy field type

    Returns:
        a callable with the same signature as a reusable field
    """
    def reusable_field(name=fname, **attr):
        # Extra keyword arguments are accepted for signature
        # compatibility, but ignored
        return Field(name, ftype,
                     readable = False,
                     writable = False,
                     )
    return reusable_field
def dummy(fname="dummy_id", ftype="integer"):
    """
    Provide a dummy reusable field; for safe defaults in models

    Args:
        fname: the dummy field name
        ftype: the dummy field type

    Returns:
        a callable with the same signature as a reusable field
    """
    def reusable_field(name=fname, **attr):
        # Extra keyword arguments are accepted for signature
        # compatibility, but ignored
        return Field(name, ftype,
                     readable = False,
                     writable = False,
                     )
    return reusable_field
Python
def uuid():
    """
    Universally unique record identifier according to RFC4122, as URN
    (e.g. "urn:uuid:fd8f97ab-1252-4d62-9982-8e3f3025307f"); uuids are
    mandatory for synchronization (incl. EdenMobile)
    """
    meta = {"type": s3uuid,
            "default": "",
            "length": 128,
            "notnull": True,
            "unique": True,
            "readable": False,
            "writable": False,
            }
    return Field("uuid", **meta)
def uuid():
    """
    Universally unique record identifier according to RFC4122, as URN
    (e.g. "urn:uuid:fd8f97ab-1252-4d62-9982-8e3f3025307f"); uuids are
    mandatory for synchronization (incl. EdenMobile)
    """
    meta = {"type": s3uuid,
            "default": "",
            "length": 128,
            "notnull": True,
            "unique": True,
            "readable": False,
            "writable": False,
            }
    return Field("uuid", **meta)
Python
def deleted():
    """
    Deletion status (True=record is deleted)
    """
    meta = {"default": False,
            "readable": False,
            "writable": False,
            }
    return Field("deleted", "boolean", **meta)
def deleted():
    """
    Deletion status (True=record is deleted)
    """
    meta = {"default": False,
            "readable": False,
            "writable": False,
            }
    return Field("deleted", "boolean", **meta)
Python
def deleted_fk():
    """
    Foreign key values of this record before deletion (foreign keys
    are set to None during deletion to derestrict constraints)
    """
    # Field type deliberately left as default ("text" type is
    # commented out in the original)
    meta = {"readable": False,
            "writable": False,
            }
    return Field("deleted_fk", **meta)
def deleted_fk():
    """
    Foreign key values of this record before deletion (foreign keys
    are set to None during deletion to derestrict constraints)
    """
    # Field type deliberately left as default ("text" type is
    # commented out in the original)
    meta = {"readable": False,
            "writable": False,
            }
    return Field("deleted_fk", **meta)
Python
def deleted_rb():
    """
    De-duplication: ID of the record that has replaced this record
    """
    meta = {"readable": False,
            "writable": False,
            }
    return Field("deleted_rb", "integer", **meta)
def deleted_rb():
    """
    De-duplication: ID of the record that has replaced this record
    """
    meta = {"readable": False,
            "writable": False,
            }
    return Field("deleted_rb", "integer", **meta)
Python
def created_on():
    """
    Date/time when the record was created
    """
    meta = {"default": datetime.datetime.utcnow,
            "readable": False,
            "writable": False,
            }
    return Field("created_on", "datetime", **meta)
def created_on():
    """
    Date/time when the record was created
    """
    meta = {"default": datetime.datetime.utcnow,
            "readable": False,
            "writable": False,
            }
    return Field("created_on", "datetime", **meta)
Python
def modified_on():
    """
    Date/time when the record was last modified
    """
    # "update" refreshes the timestamp on every record update
    meta = {"default": datetime.datetime.utcnow,
            "update": datetime.datetime.utcnow,
            "readable": False,
            "writable": False,
            }
    return Field("modified_on", "datetime", **meta)
def modified_on():
    """
    Date/time when the record was last modified
    """
    # "update" refreshes the timestamp on every record update
    meta = {"default": datetime.datetime.utcnow,
            "update": datetime.datetime.utcnow,
            "readable": False,
            "writable": False,
            }
    return Field("modified_on", "datetime", **meta)
Python
def created_by(cls):
    """
    Auth_user ID of the user who created the record
    """
    meta = {"requires": None,
            "default": cls._current_user(),
            "represent": cls._represent_user(),
            "ondelete": "RESTRICT",
            "readable": False,
            "writable": False,
            }
    return Field("created_by", current.auth.settings.table_user, **meta)
def created_by(cls):
    """
    Auth_user ID of the user who created the record
    """
    meta = {"requires": None,
            "default": cls._current_user(),
            "represent": cls._represent_user(),
            "ondelete": "RESTRICT",
            "readable": False,
            "writable": False,
            }
    return Field("created_by", current.auth.settings.table_user, **meta)
Python
def modified_by(cls):
    """
    Auth_user ID of the last user who modified the record
    """
    current_user = cls._current_user()
    # "update" re-captures the user on every record update
    meta = {"requires": None,
            "default": current_user,
            "update": current_user,
            "represent": cls._represent_user(),
            "ondelete": "RESTRICT",
            "readable": False,
            "writable": False,
            }
    return Field("modified_by", current.auth.settings.table_user, **meta)
def modified_by(cls):
    """
    Auth_user ID of the last user who modified the record
    """
    current_user = cls._current_user()
    # "update" re-captures the user on every record update
    meta = {"requires": None,
            "default": current_user,
            "update": current_user,
            "represent": cls._represent_user(),
            "ondelete": "RESTRICT",
            "readable": False,
            "writable": False,
            }
    return Field("modified_by", current.auth.settings.table_user, **meta)
Python
def approved_by(cls):
    """
    Auth_user ID of the user who has approved the record:
        - None means unapproved
        - 0 means auto-approved
    """
    meta = {"requires": None,
            "represent": cls._represent_user(),
            "readable": False,
            "writable": False,
            }
    return Field("approved_by", "integer", **meta)
def approved_by(cls):
    """
    Auth_user ID of the user who has approved the record:
        - None means unapproved
        - 0 means auto-approved
    """
    meta = {"requires": None,
            "represent": cls._represent_user(),
            "readable": False,
            "writable": False,
            }
    return Field("approved_by", "integer", **meta)
Python
def owned_by_user(cls):
    """
    Auth_user ID of the user owning the record
    """
    meta = {"requires": None,
            "default": cls._current_user(),
            "represent": cls._represent_user(),
            "ondelete": "RESTRICT",
            "readable": False,
            "writable": False,
            }
    return Field("owned_by_user", current.auth.settings.table_user, **meta)
def owned_by_user(cls):
    """
    Auth_user ID of the user owning the record
    """
    meta = {"requires": None,
            "default": cls._current_user(),
            "represent": cls._represent_user(),
            "ondelete": "RESTRICT",
            "readable": False,
            "writable": False,
            }
    return Field("owned_by_user", current.auth.settings.table_user, **meta)
Python
def owned_by_group():
    """
    Auth_group ID of the user role owning the record
    """
    meta = {"default": None,
            "requires": None,
            "represent": auth_group_represent,
            "readable": False,
            "writable": False,
            }
    return Field("owned_by_group", "integer", **meta)
def owned_by_group():
    """
    Auth_group ID of the user role owning the record
    """
    meta = {"default": None,
            "requires": None,
            "represent": auth_group_represent,
            "readable": False,
            "writable": False,
            }
    return Field("owned_by_group", "integer", **meta)
Python
def realm_entity():
    """
    PE ID of the entity managing the record
    """
    # Lambda representation so the PR model is not loaded prematurely
    meta = {"default": None,
            "requires": None,
            "represent": lambda pe_id: \
                         current.s3db.pr_pentity_represent(pe_id),
            "readable": False,
            "writable": False,
            }
    return Field("realm_entity", "integer", **meta)
def realm_entity():
    """
    PE ID of the entity managing the record
    """
    # Lambda representation so the PR model is not loaded prematurely
    meta = {"default": None,
            "requires": None,
            "represent": lambda pe_id: \
                         current.s3db.pr_pentity_represent(pe_id),
            "readable": False,
            "writable": False,
            }
    return Field("realm_entity", "integer", **meta)
Python
def all_meta_fields(cls):
    """
    Standard meta fields for all tables

    Returns:
        tuple of Fields
    """
    # Field factories in canonical order
    factories = (cls.uuid,
                 cls.mci,
                 cls.deleted,
                 cls.deleted_fk,
                 cls.deleted_rb,
                 cls.created_on,
                 cls.created_by,
                 cls.modified_on,
                 cls.modified_by,
                 cls.approved_by,
                 cls.owned_by_user,
                 cls.owned_by_group,
                 cls.realm_entity,
                 )
    return tuple(make() for make in factories)
def all_meta_fields(cls):
    """
    Standard meta fields for all tables

    Returns:
        tuple of Fields
    """
    # Field factories in canonical order
    factories = (cls.uuid,
                 cls.mci,
                 cls.deleted,
                 cls.deleted_fk,
                 cls.deleted_rb,
                 cls.created_on,
                 cls.created_by,
                 cls.modified_on,
                 cls.modified_by,
                 cls.approved_by,
                 cls.owned_by_user,
                 cls.owned_by_group,
                 cls.realm_entity,
                 )
    return tuple(make() for make in factories)
Python
def sync_meta_fields(cls):
    """
    Meta-fields required for sync

    Returns:
        tuple of Fields
    """
    # Field factories in canonical order
    factories = (cls.uuid,
                 cls.mci,
                 cls.deleted,
                 cls.deleted_fk,
                 cls.deleted_rb,
                 cls.created_on,
                 cls.modified_on,
                 )
    return tuple(make() for make in factories)
def sync_meta_fields(cls):
    """
    Meta-fields required for sync

    Returns:
        tuple of Fields
    """
    # Field factories in canonical order
    factories = (cls.uuid,
                 cls.mci,
                 cls.deleted,
                 cls.deleted_fk,
                 cls.deleted_rb,
                 cls.created_on,
                 cls.modified_on,
                 )
    return tuple(make() for make in factories)
Python
def _current_user():
    """
    Get the user ID of the currently logged-in user

    Returns:
        auth_user ID, or None if not logged in
    """
    if not current.auth.is_logged_in():
        return None
    # Read from the session rather than current.auth.user,
    # to support impersonation
    return current.session.auth.user.id
def _current_user():
    """
    Get the user ID of the currently logged-in user

    Returns:
        auth_user ID, or None if not logged in
    """
    if not current.auth.is_logged_in():
        return None
    # Read from the session rather than current.auth.user,
    # to support impersonation
    return current.session.auth.user.id
Python
def s3_all_meta_field_names():
    """
    Shortcut commonly used to include/exclude meta fields

    Returns:
        tuple of field names (the module-level constant
        ALL_META_FIELD_NAMES)
    """
    return ALL_META_FIELD_NAMES
def s3_all_meta_field_names():
    """
    Shortcut commonly used to include/exclude meta fields

    Returns:
        tuple of field names (the module-level constant
        ALL_META_FIELD_NAMES)
    """
    return ALL_META_FIELD_NAMES
Python
def s3_role_required():
    """
    Role Required to access a resource
    - used by GIS for map layer permissions management
    """

    T = current.T
    gtable = current.auth.settings.table_group

    represent = S3Represent(lookup="auth_group", fields=["role"])
    tooltip = DIV(_class = "tooltip",
                  _title = "%s|%s" % (T("Role Required"),
                                      T("If this record should be restricted then select which role is required to access the record here."),
                                      ),
                  )

    return Field("role_required", gtable,
                 sortby = "role",
                 requires = IS_EMPTY_OR(
                                IS_ONE_OF(current.db, "auth_group.id",
                                          represent,
                                          zero = T("Public"))),
                 represent = represent,
                 label = T("Role Required"),
                 comment = tooltip,
                 ondelete = "RESTRICT",
                 )
def s3_role_required():
    """
    Role Required to access a resource
    - used by GIS for map layer permissions management
    """

    T = current.T
    gtable = current.auth.settings.table_group

    represent = S3Represent(lookup="auth_group", fields=["role"])
    tooltip = DIV(_class = "tooltip",
                  _title = "%s|%s" % (T("Role Required"),
                                      T("If this record should be restricted then select which role is required to access the record here."),
                                      ),
                  )

    return Field("role_required", gtable,
                 sortby = "role",
                 requires = IS_EMPTY_OR(
                                IS_ONE_OF(current.db, "auth_group.id",
                                          represent,
                                          zero = T("Public"))),
                 represent = represent,
                 label = T("Role Required"),
                 comment = tooltip,
                 ondelete = "RESTRICT",
                 )
Python
def s3_roles_permitted(name="roles_permitted", **attr):
    """
    List of Roles Permitted to access a resource
    - used by CMS
    """

    T = current.T
    represent = S3Represent(lookup="auth_group", fields=["role"])

    # Defaults, applied only where the caller did not override
    defaults = {
        "label": T("Roles Permitted"),
        "sortby": "role",
        "represent": represent,
        "requires": IS_EMPTY_OR(IS_ONE_OF(current.db,
                                          "auth_group.id",
                                          represent,
                                          multiple=True)),
        "comment": DIV(_class="tooltip",
                       _title="%s|%s" % (T("Roles Permitted"),
                                         T("If this record should be restricted then select which role(s) are permitted to access the record here."))),
        "ondelete": "RESTRICT",
        }
    for key, value in defaults.items():
        attr.setdefault(key, value)

    return Field(name, "list:reference auth_group", **attr)
def s3_roles_permitted(name="roles_permitted", **attr):
    """
    List of Roles Permitted to access a resource
    - used by CMS
    """

    T = current.T
    represent = S3Represent(lookup="auth_group", fields=["role"])

    # Defaults, applied only where the caller did not override
    defaults = {
        "label": T("Roles Permitted"),
        "sortby": "role",
        "represent": represent,
        "requires": IS_EMPTY_OR(IS_ONE_OF(current.db,
                                          "auth_group.id",
                                          represent,
                                          multiple=True)),
        "comment": DIV(_class="tooltip",
                       _title="%s|%s" % (T("Roles Permitted"),
                                         T("If this record should be restricted then select which role(s) are permitted to access the record here."))),
        "ondelete": "RESTRICT",
        }
    for key, value in defaults.items():
        attr.setdefault(key, value)

    return Field(name, "list:reference auth_group", **attr)
Python
def s3_date(name="date", **attr):
    """
    Return a standard date-field

    Args:
        name: the field name

    Keyword Args:
        default: the field default, can be specified as "now" for
                 current date, or as Python date
        past: number of selectable past months
        future: number of selectable future months
        widget: the form widget for the field, can be specified
                as "date" for S3DateWidget, "calendar" for
                S3CalendarWidget, or as a web2py FormWidget,
                defaults to "calendar"
        calendar: the calendar to use for this widget, defaults
                  to current.calendar
        start_field: CSS selector for the start field for interval
                     selection
        default_interval: the default interval
        default_explicit: whether the user must click the field to
                          set the default, or whether it will
                          automatically be set when the value for
                          start_field is set
        set_min: CSS selector for another date/time widget to
                 dynamically set the minimum selectable date/time to
                 the value selected in this widget
        set_max: CSS selector for another date/time widget to
                 dynamically set the maximum selectable date/time to
                 the value selected in this widget
        month_selector: allow direct selection of month

    Returns:
        the date Field

    Notes:
        - other S3ReusableField keywords are also supported (in
          addition to the above)
        - calendar-option requires widget="calendar" (default),
          otherwise Gregorian calendar is enforced for the field
        - set_min/set_max only supported for widget="calendar"
          (default)
        - interval options currently not supported by
          S3CalendarWidget, only available with widget="date"
        - start_field and default_interval should be given together
        - sets a default field label "Date" => use label-keyword to
          override if necessary
        - sets a default validator IS_UTC_DATE => use
          requires-keyword to override if necessary
        - sets a default representation S3DateTime.date_represent =>
          use represent-keyword to override if necessary
    """

    attributes = dict(attr)

    # Calendar
    calendar = attributes.pop("calendar", None)

    # Past and future options
    past = attributes.pop("past", None)
    future = attributes.pop("future", None)

    # Label
    if "label" not in attributes:
        attributes["label"] = current.T("Date")

    # Widget-specific options (=not intended for S3ReusableField)
    WIDGET_OPTIONS = ("start_field",
                      "default_interval",
                      "default_explicit",
                      "set_min",
                      "set_max",
                      "month_selector",
                      )

    # Widget
    widget = attributes.get("widget", "calendar")
    widget_options = {}
    if widget == "date":
        # Legacy: S3DateWidget
        # @todo: deprecate (once S3CalendarWidget supports all legacy options)

        # Must use Gregorian calendar
        calendar = "Gregorian"

        # Past/future options
        if past is not None:
            widget_options["past"] = past
        if future is not None:
            widget_options["future"] = future

        # Supported additional widget options; unsupported widget
        # options are removed from the field attributes either way
        SUPPORTED_OPTIONS = ("start_field",
                             "default_interval",
                             "default_explicit",
                             )
        for option in WIDGET_OPTIONS:
            if option in attributes:
                if option in SUPPORTED_OPTIONS:
                    widget_options[option] = attributes[option]
                del attributes[option]

        widget = S3DateWidget(**widget_options)

    elif widget == "calendar":
        # Default: calendar widget
        widget_options["calendar"] = calendar

        # Past/future options
        if past is not None:
            widget_options["past_months"] = past
        if future is not None:
            widget_options["future_months"] = future

        # Supported additional widget options; unsupported widget
        # options are removed from the field attributes either way
        SUPPORTED_OPTIONS = ("set_min",
                             "set_max",
                             "month_selector",
                             )
        for option in WIDGET_OPTIONS:
            if option in attributes:
                if option in SUPPORTED_OPTIONS:
                    widget_options[option] = attributes[option]
                del attributes[option]

        widget = S3CalendarWidget(**widget_options)

    else:
        # Custom widget => drop all widget options
        for option in WIDGET_OPTIONS:
            attributes.pop(option, None)

    attributes["widget"] = widget

    # Default value ("now" => today's date, UTC)
    now = current.request.utcnow.date()
    if attributes.get("default") == "now":
        attributes["default"] = now

    # Representation
    if "represent" not in attributes:
        attributes["represent"] = lambda dt: \
                                  S3DateTime.date_represent(dt,
                                                            utc=True,
                                                            calendar=calendar,
                                                            )

    # Validator
    if "requires" not in attributes:

        if past is None and future is None:
            requires = IS_UTC_DATE(calendar=calendar)
        else:
            # Limit the selectable range by past/future months
            from dateutil.relativedelta import relativedelta
            minimum = maximum = None
            if past is not None:
                minimum = now - relativedelta(months = past)
            if future is not None:
                maximum = now + relativedelta(months = future)
            requires = IS_UTC_DATE(calendar=calendar,
                                   minimum=minimum,
                                   maximum=maximum,
                                   )

        empty = attributes.pop("empty", None)
        if empty is False:
            attributes["requires"] = requires
        else:
            # Default: allow empty values
            attributes["requires"] = IS_EMPTY_OR(requires)

    return Field(name, "date", **attributes)
def s3_date(name="date", **attr):
    """
    Return a standard date-field

    Args:
        name: the field name

    Keyword Args:
        default: the field default, can be specified as "now" for
                 current date, or as Python date
        past: number of selectable past months
        future: number of selectable future months
        widget: the form widget for the field, can be specified as
                "date" for S3DateWidget, "calendar" for S3CalendarWidget,
                or as a web2py FormWidget, defaults to "calendar"
        calendar: the calendar to use for this widget, defaults to
                  current.calendar
        start_field: CSS selector for the start field for interval selection
        default_interval: the default interval
        default_explicit: whether the user must click the field to set
                          the default, or whether it will automatically
                          be set when the value for start_field is set
        set_min: CSS selector for another date/time widget to dynamically
                 set the minimum selectable date/time to the value selected
                 in this widget
        set_max: CSS selector for another date/time widget to dynamically
                 set the maximum selectable date/time to the value selected
                 in this widget
        month_selector: allow direct selection of month

    Notes:
        - other S3ReusableField keywords are also supported (in addition
          to the above)
        - calendar-option requires widget="calendar" (default), otherwise
          Gregorian calendar is enforced for the field
        - set_min/set_max only supported for widget="calendar" (default)
        - interval options currently not supported by S3CalendarWidget,
          only available with widget="date"
        - start_field and default_interval should be given together
        - sets a default field label "Date" => use label-keyword to
          override if necessary
        - sets a default validator IS_UTC_DATE => use requires-keyword
          to override if necessary
        - sets a default representation S3DateTime.date_represent => use
          represent-keyword to override if necessary
    """

    attributes = dict(attr)

    # Calendar
    calendar = attributes.pop("calendar", None)

    # Past and future options
    past = attributes.pop("past", None)
    future = attributes.pop("future", None)

    # Label
    if "label" not in attributes:
        attributes["label"] = current.T("Date")

    # Widget-specific options (=not intended for S3ReusableField)
    WIDGET_OPTIONS = ("start_field",
                      "default_interval",
                      "default_explicit",
                      "set_min",
                      "set_max",
                      "month_selector",
                      )

    # Widget
    widget = attributes.get("widget", "calendar")
    widget_options = {}
    if widget == "date":
        # Legacy: S3DateWidget
        # @todo: deprecate (once S3CalendarWidget supports all legacy options)

        # Must use Gregorian calendar
        calendar = "Gregorian"

        # Past/future options
        if past is not None:
            widget_options["past"] = past
        if future is not None:
            widget_options["future"] = future

        # Supported additional widget options
        SUPPORTED_OPTIONS = ("start_field",
                             "default_interval",
                             "default_explicit",
                             )
        for option in WIDGET_OPTIONS:
            if option in attributes:
                if option in SUPPORTED_OPTIONS:
                    widget_options[option] = attributes[option]
                del attributes[option]

        widget = S3DateWidget(**widget_options)

    elif widget == "calendar":
        # Default: calendar widget
        widget_options["calendar"] = calendar

        # Past/future options
        if past is not None:
            widget_options["past_months"] = past
        if future is not None:
            widget_options["future_months"] = future

        # Supported additional widget options
        SUPPORTED_OPTIONS = ("set_min",
                             "set_max",
                             "month_selector",
                             )
        for option in WIDGET_OPTIONS:
            if option in attributes:
                if option in SUPPORTED_OPTIONS:
                    widget_options[option] = attributes[option]
                del attributes[option]

        widget = S3CalendarWidget(**widget_options)

    else:
        # Drop all widget options
        for option in WIDGET_OPTIONS:
            attributes.pop(option, None)

    attributes["widget"] = widget

    # Default value
    now = current.request.utcnow.date()
    if attributes.get("default") == "now":
        attributes["default"] = now

    # Representation
    if "represent" not in attributes:
        attributes["represent"] = lambda dt: \
                                  S3DateTime.date_represent(dt,
                                                            utc=True,
                                                            calendar=calendar,
                                                            )

    # Validator
    if "requires" not in attributes:

        if past is None and future is None:
            requires = IS_UTC_DATE(calendar=calendar)
        else:
            # Limit the selectable range relative to today
            from dateutil.relativedelta import relativedelta
            minimum = maximum = None
            if past is not None:
                minimum = now - relativedelta(months = past)
            if future is not None:
                maximum = now + relativedelta(months = future)
            requires = IS_UTC_DATE(calendar=calendar,
                                   minimum=minimum,
                                   maximum=maximum,
                                   )

        empty = attributes.pop("empty", None)
        if empty is False:
            attributes["requires"] = requires
        else:
            # Default
            attributes["requires"] = IS_EMPTY_OR(requires)

    return Field(name, "date", **attributes)
Python
def config(self):
    """
    The applicable GIS configuration (lazy property)

    Returns:
        the current GIS configuration
    """
    # TODO make overridable
    return current.gis.get_config()
Python
def uri(self):
    """
    A universal identifier for this instance

    Returns:
        the instance URL (str), with the applicable GIS
        configuration IDs as query variable
    """
    # TODO handle missing config.ids
    cfg = self.config
    if cfg.ids:
        query = {"config": ",".join(map(str, cfg.ids))}
    else:
        query = {}
    return URL(c="gis", f="context", args=[], vars=query, host=True)
Python
def add_layer(self, instance_type, **kwargs):
    """
    Adds (appends) a layer to this map

    Args:
        instance_type: the layer instance type (table name)

    Keyword Args:
        name: the layer title
        description: the layer description
        *: other layer parameters

    Returns:
        the newly added Layer

    Raises:
        ValueError: if the instance type is not supported
    """
    # Fail with a descriptive error for unknown types rather than
    # the obscure TypeError from calling None
    layer_class = LAYERS.get(instance_type)
    if layer_class is None:
        raise ValueError("unsupported layer type: %s" % instance_type)

    layer = layer_class(**kwargs)
    self.layers.append(layer)
    return layer
Python
def context(self):
    """
    Returns the web services context (MapContext) for this map

    Returns:
        the MapContext, with one MapResource per layer
    """

    ctx = MapContext(self.uri)

    for layer in self.layers:

        # Determine the folder for the layer
        folder = layer.option("folder")
        if folder:
            folder = folder.lower().replace(" ", "_")
        if layer.option("base_layer"):
            folder = "/base"
        elif folder and folder[0] != "/":
            folder = "/%s" % folder
        elif not folder:
            folder = "/overlays"

        resource = MapResource(layer.uri,
                               layer.name,
                               active = layer.option("active"),
                               folder = folder,
                               )
        # Fall back to the context's modified_on if the layer has none
        resource.modified_on = layer.attr.get("modified_on", ctx.modified_on)

        # Add the layer's service offerings (if any)
        offerings = layer.offerings
        if offerings:
            for offering in offerings:
                resource.add_offering(offering)

        ctx.append(resource)

    return ctx
Python
def viewer(self):
    """
    Configures and returns a viewer for this map

    Returns:
        None (not yet implemented)
    """
    # TODO implement
    return None
Python
def uri(self):
    """
    A universal resource identifier for this layer

    Returns:
        the identifier as str
    """
    # TODO handle missing layer_id
    layer_id = self.layer_id
    return URL(c="gis", f="layer_entity", args=[layer_id], host=True)
Python
def offerings(self):
    """
    Web service offerings providing layer data

    Returns:
        list of Offering instances; None in this base implementation
        (to be overridden by layer types that provide services)
    """
    return None
Python
def dvr_home():
    """
    Do not redirect to person-controller

    Returns:
        the view context (dict) with the module name
    """
    return {"module_name": current.T("Case Consulting"),
            }
Python
def dvr_case_onaccept(form):
    """
    Additional custom-onaccept for dvr_case to:
        * Force-update the realm entity of the person record:
          - the organisation managing the case is the realm-owner,
            but the person record is written first, so we need to
            update it after writing the case
          - the case can be transferred to another organisation/branch,
            and then the person record needs to be transferred to that
            same realm as well
        * Update the Population of all Shelters
        * Update the Location of the person record:
          - if the Case is linked to a Site then use that for the
            Location of the Person
          - otherwise use the Private Address

    Args:
        form: the FORM
    """

    try:
        form_vars = form.vars
    except AttributeError:
        return
    record_id = form_vars.id
    if not record_id:
        # Nothing we can do
        return

    db = current.db
    s3db = current.s3db

    # Update the Population of all Shelters
    from .cr import cr_shelter_population
    cr_shelter_population()

    # Get the Person ID & Site ID for this case
    person_id = form_vars.person_id
    if not person_id or "site_id" not in form_vars:
        # Reload the record
        ctable = s3db.dvr_case
        case = db(ctable.id == record_id).select(ctable.person_id,
                                                 ctable.site_id,
                                                 limitby = (0, 1),
                                                 ).first()
        if case:
            person_id = case.person_id
            site_id = case.site_id
    else:
        site_id = form_vars.site_id

    if person_id:

        set_realm_entity = current.auth.set_realm_entity

        # Configure components to inherit realm_entity
        # from the person record
        s3db.configure("pr_person",
                       realm_components = ("case_activity",
                                           "case_details",
                                           "dvr_flag",
                                           "case_language",
                                           "case_note",
                                           "residence_status",
                                           "address",
                                           "contact",
                                           "contact_emergency",
                                           "group_membership",
                                           "image",
                                           "person_details",
                                           "person_tag",
                                           ),
                       )
        # Force-update the realm entity for the person
        set_realm_entity("pr_person", person_id, force_update=True)

        # Configure components to inherit realm entity
        # from the case activity record
        s3db.configure("dvr_case_activity",
                       realm_components = ("case_activity_need",
                                           "case_activity_update",
                                           "response_action",
                                           ),
                       )
        # Force-update the realm entity for all case activities
        # linked to the person_id
        atable = s3db.dvr_case_activity
        query = (atable.person_id == person_id)
        set_realm_entity(atable, query, force_update=True)

        # Update the person's location_id
        ptable = s3db.pr_person
        location_id = None

        if site_id:
            # Use the Shelter's Address
            stable = s3db.org_site
            site = db(stable.site_id == site_id).select(stable.location_id,
                                                        limitby = (0, 1),
                                                        ).first()
            if site:
                location_id = site.location_id
        else:
            # Use the Private Address (no need to filter by address type as only
            # 'Current Address' is exposed)
            # NB If this is a New/Modified Address then this won't be caught here
            # - we use pr_address_onaccept to catch those
            atable = s3db.pr_address
            query = (ptable.id == person_id) & \
                    (ptable.pe_id == atable.pe_id) & \
                    (atable.deleted == False)
            address = db(query).select(atable.location_id,
                                       limitby = (0, 1),
                                       ).first()
            if address:
                location_id = address.location_id

        db(ptable.id == person_id).update(location_id = location_id,
                                          # Indirect update by system rule,
                                          # do not change modified_* fields:
                                          modified_on = ptable.modified_on,
                                          modified_by = ptable.modified_by,
                                          )
Python
def note_date_dt_orderby(field, direction, orderby, left_joins):
    """
    When sorting notes by date, use created_on to maintain
    consistent order of multiple notes on the same date

    Args:
        field: the date field (provides the table name)
        direction: the sorting direction expression suffix
        orderby: the list of orderby-expressions to extend
        left_joins: the list of required left joins (unused here)
    """
    tn = field.tablename
    expr = "%s.date%s,%s.created_on%s" % (tn, direction, tn, direction)
    orderby.append(expr)
Python
def configure_case_activity_filters(r,
                                    ui_options,
                                    use_priority = False,
                                    emergencies = False,
                                    ):
    """
    Configure filters for case activity list

    Args:
        r: the CRUDRequest
        ui_options: the UI options
        use_priority: expose the priority
        emergencies: list is prefiltered for emergency-priority
    """

    resource = r.resource

    from core import TextFilter, OptionsFilter

    T = current.T
    db = current.db
    s3db = current.s3db

    # Sector filter options
    # - field options are configured in dvr_case_activity_sector
    sector_id = resource.table.sector_id
    sector_options = {k: v for k, v in sector_id.requires.options() if k}

    # Status filter options + defaults, status list field
    if ui_options.get("activity_closure"):
        stable = s3db.dvr_case_activity_status
        query = (stable.deleted == False)
        rows = db(query).select(stable.id,
                                stable.name,
                                stable.is_closed,
                                cache = s3db.cache,
                                orderby = stable.workflow_position,
                                )
        status_filter_options = OrderedDict((row.id, T(row.name)) for row in rows)
        # Default to all open statuses
        status_filter_defaults = [row.id for row in rows if not row.is_closed]
        status_filter = OptionsFilter("status_id",
                                      options = status_filter_options,
                                      cols = 3,
                                      default = status_filter_defaults,
                                      sort = False,
                                      )
    else:
        status_filter = None

    # Filter widgets
    filter_widgets = [TextFilter(["person_id$pe_label",
                                  "person_id$first_name",
                                  "person_id$last_name",
                                  "need_details",
                                  ],
                                 label = T("Search"),
                                 ),
                      OptionsFilter("person_id$person_details.nationality",
                                    label = T("Client Nationality"),
                                    hidden = True,
                                    ),
                      ]
    if sector_id.readable:
        filter_widgets.insert(1, OptionsFilter("sector_id",
                                               hidden = True,
                                               options = sector_options,
                                               ))
    if status_filter:
        filter_widgets.insert(1, status_filter)

    # Priority filter (unless pre-filtered to emergencies anyway)
    if use_priority and not emergencies:
        field = resource.table.priority
        priority_opts = OrderedDict(field.requires.options())
        priority_filter = OptionsFilter("priority",
                                        options = priority_opts,
                                        cols = 4,
                                        sort = False,
                                        )
        filter_widgets.insert(2, priority_filter)

    # Can the user see cases from more than one org?
    from ..helpers import case_read_multiple_orgs
    multiple_orgs = case_read_multiple_orgs()[0]
    if multiple_orgs:
        # Add org-filter widget
        filter_widgets.insert(1, OptionsFilter("person_id$dvr_case.organisation_id"))

    # Person responsible filter
    if not r.get_vars.get("mine"):
        filter_widgets.insert(2, OptionsFilter("human_resource_id"))

    # Reconfigure table
    resource.configure(filter_widgets = filter_widgets,
                       )
Python
def configure_case_activity_sector(r, table, case_root_org):
    """
    Configures the case activity sector_id field

    Args:
        r: the CRUDRequest
        table: the case activity table
        case_root_org: the ID of the case root organisation
    """

    db = current.db
    s3db = current.s3db

    field = table.sector_id
    field.comment = None

    if case_root_org:

        # Limit the sector selection to those of the case root org
        ltable = s3db.org_sector_organisation
        query = (ltable.organisation_id == case_root_org) & \
                (ltable.deleted == False)
        rows = db(query).select(ltable.sector_id)
        sector_ids = set(row.sector_id for row in rows)

        # Default sector (only if unambiguous)
        if len(sector_ids) == 1:
            default_sector_id = rows.first().sector_id
        else:
            default_sector_id = None

        # Include the sector_id of the current record (if any)
        record = None
        component = r.component
        if not component:
            if r.tablename == "dvr_case_activity":
                record = r.record
        elif component.tablename == "dvr_case_activity" and r.component_id:
            query = table.id == r.component_id
            record = db(query).select(table.sector_id,
                                      limitby = (0, 1),
                                      ).first()
        if record and record.sector_id:
            sector_ids.add(record.sector_id)

        # Set selectable sectors
        subset = db(s3db.org_sector.id.belongs(sector_ids))
        field.requires = IS_EMPTY_OR(IS_ONE_OF(subset, "org_sector.id",
                                               field.represent,
                                               ))

        # Default selection?
        if len(sector_ids) == 1 and default_sector_id:
            # Single option => set as default and hide selector
            field.default = default_sector_id
            field.readable = field.writable = False
Python
def configure_case_activity_subject(r,
                                    table,
                                    case_root_org,
                                    person_id,
                                    use_need = False,
                                    use_subject = False,
                                    autolink = False,
                                    ):
    """
    Configures the subject field(s) for case activities
        - need_id, or simple free-text subject

    Args:
        r: the CRUDRequest
        table: the case activity table
        case_root_org: the ID of the case root organisation
        person_id: the person ID of the case
        use_need: activities use need types
        use_subject: activities use free-text subject field
        autolink: whether response actions shall be automatically
                  linked to case activities
    """

    T = current.T
    db = current.db
    s3db = current.s3db

    if use_need:
        # Are we looking at a particular case activity?
        if r.tablename != "dvr_case_activity":
            activity_id = r.component_id
        else:
            activity_id = r.id

        # Expose need_id
        field = table.need_id
        field.label = T("Counseling Reason")
        field.readable = True
        # With autolink, the need type of an existing activity must
        # not be changed (the links depend on it)
        field.writable = not activity_id or not autolink

        # Limit to org-specific need types
        ntable = s3db.dvr_need
        if case_root_org:
            query = (ntable.organisation_id == case_root_org)
        else:
            query = None

        # With autolink, prevent multiple activities per need type
        if autolink:
            joinq = (table.need_id == ntable.id) & \
                    (table.person_id == person_id) & \
                    (table.deleted == False)
            if activity_id:
                joinq &= (table.id != activity_id)
            left = table.on(joinq)
            q = (table.id == None)
            query = query & q if query else q
        else:
            left = None

        if query:
            field.requires = IS_ONE_OF(db(query), "dvr_need.id",
                                       field.represent,
                                       left = left,
                                       )

    if use_subject:
        # Expose simple free-text subject
        field = table.subject
        field.readable = field.writable = True
        requires = IS_LENGTH(512, minsize=1)
        if use_need:
            # Subject optional when using needs
            requires = IS_EMPTY_OR(requires)
        field.requires = requires
Python
def configure_inline_responses(person_id,
                               human_resource_id,
                               hr_represent,
                               use_theme = False,
                               ):
    """
    Configures the inline-responses for case activity form
        - can be either response_action or response_action_theme

    Args:
        person_id: the person ID of the case
        human_resource_id: the HR-ID of the consultant in charge
        hr_represent: representation function for human_resource_id
        use_theme: use theme(s) with responses

    Returns:
        S3SQLInlineComponent
    """

    T = current.T
    db = current.db
    s3db = current.s3db

    settings = current.deployment_settings

    rtable = s3db.dvr_response_action

    from core import S3SQLInlineComponent, S3SQLVerticalSubFormLayout

    if use_theme and settings.get_dvr_response_themes_details():
        # Expose response_action_theme inline

        # Filter action_id in inline response_themes to same beneficiary
        ltable = s3db.dvr_response_action_theme
        field = ltable.action_id
        dbset = db(rtable.person_id == person_id) if person_id else db
        field.requires = IS_EMPTY_OR(IS_ONE_OF(dbset, "dvr_response_action.id",
                                               field.represent,
                                               orderby = ~rtable.start_date,
                                               sort = False,
                                               ))

        # Inline-component
        inline_responses = S3SQLInlineComponent(
                                "response_action_theme",
                                fields = ["action_id",
                                          "theme_id",
                                          "comments",
                                          ],
                                label = T("Themes"),
                                orderby = "action_id",
                                )
    else:
        # Expose response_action inline

        # Set the person_id for inline responses (does not happen
        # automatically since using case_activity_id as component key)
        if person_id:
            field = rtable.person_id
            field.default = person_id

        # Configure consultant in charge
        field = rtable.human_resource_id
        field.default = human_resource_id
        field.represent = hr_represent
        field.widget = field.comment = None

        # Require explicit unit in hours-widget above 4 hours
        from core import S3HoursWidget
        field = rtable.hours
        field.widget = S3HoursWidget(precision=2, explicit_above=4)

        # Add custom callback to validate inline responses
        s3db.add_custom_callback("dvr_response_action",
                                 "onvalidation",
                                 response_action_onvalidation,
                                 )

        # Inline-component
        response_theme_ids = "response_theme_ids" if use_theme else None
        response_action_fields = ["start_date",
                                  response_theme_ids,
                                  "comments",
                                  "human_resource_id",
                                  "status_id",
                                  "hours",
                                  ]
        if settings.get_dvr_response_due_date():
            response_action_fields.insert(-2, "date_due")
        if settings.get_dvr_response_types():
            response_action_fields.insert(1, "response_type_id")

        inline_responses = S3SQLInlineComponent(
                                "response_action",
                                fields = response_action_fields,
                                label = T("Actions"),
                                layout = S3SQLVerticalSubFormLayout,
                                explicit_add = T("Add Action"),
                                )
    return inline_responses
Python
def response_action_onvalidation(form):
    """
    Onvalidation for response actions:
        - enforce hours for closed-statuses (org-specific UI option)

    Args:
        form: the FORM
    """

    ui_options = get_ui_options()
    if ui_options.get("response_effort_required"):

        db = current.db
        s3db = current.s3db
        form_vars = form.vars

        # Get the new status
        if "status_id" in form_vars:
            status_id = form_vars.status_id
        else:
            status_id = s3db.dvr_response_action.status_id.default

        try:
            hours = form_vars.hours
        except AttributeError:
            # No hours field in form, so no point validating it
            return

        if hours is None:
            # If new status is closed, require hours
            stable = s3db.dvr_response_status
            query = (stable.id == status_id)
            status = db(query).select(stable.is_closed,
                                      limitby = (0, 1),
                                      ).first()
            if status and status.is_closed:
                form.errors["hours"] = current.T("Please specify the effort spent")
Python
def response_date_dt_orderby(field, direction, orderby, left_joins):
    """
        When sorting response actions by date, use created_on to maintain
        consistent order of multiple response actions on the same date

        Args:
            field: the Field being sorted by (start_date)
            direction: the sort direction suffix (e.g. " asc"/" desc")
            orderby: list of orderby-expressions to append to
            left_joins: list of left joins (unused here, part of the
                        dt_orderby hook signature)
    """

    sorting = {"table": field.tablename,
               "direction": direction,
               }
    # Secondary sort by created_on for stable order within the same date
    orderby.append("%(table)s.start_date%(direction)s," \
                   "%(table)s.created_on%(direction)s" % sorting)
def configure_response_action_tab(person_id,
                                  ui_options,
                                  response_type = None,
                                  use_due_date = False,
                                  use_theme = False,
                                  themes_details = False,
                                  ):
    """
        Configures response_action tab of case file

        Args:
            person_id: the person ID of the case
            ui_options: the UI options for the organisation
            response_type: the response_type field (selector)
            use_due_date: use a separate due-date
            use_theme: use response action themes
            themes_details: enter details per theme
    """

    T = current.T
    db = current.db
    s3db = current.s3db

    table = s3db.dvr_response_action

    ui_options_get = ui_options.get

    date_due = "date_due" if use_due_date else None

    # Hide person_id (already have the rheader context)
    field = table.person_id
    field.readable = field.writable = False

    if themes_details:
        list_fields = ["start_date",
                       (T("Themes"), "dvr_response_action_theme.id"),
                       "human_resource_id",
                       "hours",
                       "status_id",
                       ]
        pdf_fields = ["start_date",
                      #"human_resource_id",
                      (T("Themes"), "dvr_response_action_theme.id"),
                      ]
    else:
        # Show case_activity_id
        field = table.case_activity_id
        field.readable = True

        # Adjust representation to perspective
        if ui_options_get("activity_use_need"):
            field.label = T("Counseling Reason")
            show_as = "need"
        else:
            field.label = T("Subject")
            show_as = "subject"

        use_subject = ui_options_get("activity_use_subject")
        represent = s3db.dvr_CaseActivityRepresent(show_as = show_as,
                                                   show_link = True,
                                                   show_subject = use_subject,
                                                   )
        field.represent = represent

        if not ui_options_get("response_activity_autolink"):
            # Make activity selectable
            field.writable = True

            # Selectable options to include date
            represent = s3db.dvr_CaseActivityRepresent(show_as = show_as,
                                                       show_link = True,
                                                       show_subject = use_subject,
                                                       show_date = True,
                                                       )

            # Limit to activities of the same case
            atable = s3db.dvr_case_activity
            dbset = db(atable.person_id == person_id)
            field.requires = IS_ONE_OF(dbset, "dvr_case_activity.id",
                                       represent,
                                       orderby = ~db.dvr_case_activity.start_date,
                                       sort = False,
                                       )

            # Allow in-popup creation of new activities for the case
            from s3layouts import S3PopupLink
            field.comment = S3PopupLink(label = T("Create Counseling Reason"),
                                        c = "dvr",
                                        f = "case_activity",
                                        vars = {"~.person_id": person_id,
                                                "prefix": "dvr/person/%s" % person_id,
                                                "parent": "response_action",
                                                },
                                        )
        else:
            field.writable = False

        # Adapt list-fields to perspective
        theme_ids = "response_theme_ids" if use_theme else None
        list_fields = ["case_activity_id",
                       response_type,
                       theme_ids,
                       "comments",
                       "human_resource_id",
                       date_due,
                       "start_date",
                       "hours",
                       "status_id",
                       ]
        pdf_fields = ["start_date",
                      #"human_resource_id",
                      "case_activity_id",
                      response_type,
                      theme_ids,
                      "comments",
                      ]

    s3db.configure("dvr_response_action",
                   filter_widgets = None,
                   list_fields = list_fields,
                   pdf_fields = pdf_fields,
                   )
def cleanup_public_registry():
    """
        Automatically mark test stations as obsolete (and thus remove
        them from the public registry) when they have failed to submit
        daily activity reports for more than four weeks; + notify
        OrgAdmins about deactivation

        Returns:
            error message, or None if successful
    """

    db = current.db
    s3db = current.s3db

    ftable = s3db.org_facility
    ttable = s3db.org_site_tag
    otable = s3db.org_organisation
    gtable = s3db.org_group
    mtable = s3db.org_group_membership
    rtable = s3db.disease_testing_report
    ltable = s3db.gis_location

    today = datetime.datetime.utcnow().date()
    four_weeks_ago = today - datetime.timedelta(days=28)

    from .config import TESTSTATIONS
    join = [ttable.on((ttable.site_id == ftable.site_id) & \
                      (ttable.tag == "PUBLIC") & \
                      (ttable.deleted == False)),
            otable.on((otable.id == ftable.organisation_id)),
            gtable.on((mtable.organisation_id == otable.id) & \
                      (mtable.deleted == False) & \
                      (gtable.id == mtable.group_id) & \
                      (gtable.name == TESTSTATIONS)),
            ]
    # Left join with recent reports: rtable.id==None below means
    # "no activity report within the last four weeks"
    left = [rtable.on((rtable.site_id == ftable.site_id) & \
                      (rtable.date >= four_weeks_ago) & \
                      (rtable.deleted == False)),
            ltable.on((ltable.id == ftable.location_id)),
            ]
    query = (rtable.id == None) & \
            (ttable.value == "Y") & \
            (ftable.created_on < four_weeks_ago) & \
            (ftable.obsolete == False) & \
            (ftable.deleted == False)
    rows = db(query).select(ftable.id,
                            ftable.name,
                            otable.id,
                            otable.pe_id,
                            otable.name,
                            ltable.L1,
                            #ltable.L2,
                            ltable.L3,
                            ltable.L4,
                            ltable.addr_street,
                            ltable.addr_postcode,
                            join = join,
                            left = left,
                            )
    if not rows:
        return None
    else:
        current.log.info("%s test facilities found obsolete" % len(rows))

    from .helpers import get_role_emails
    from .notifications import CMSNotifications
    from core import s3_str

    # Loop-invariant helper: represent missing values as "-"
    reprstr = lambda v: s3_str(v) if v else "-"

    errors = []
    update_super = s3db.update_super
    for row in rows:
        organisation = row.org_organisation
        facility = row.org_facility
        location = row.gis_location

        # Mark facility as obsolete
        facility.update_record(obsolete = True)
        update_super(ftable, facility)

        # Prepare data for notification template
        place = location.L4 if location.L4 else location.L3
        if location.L1:
            place = "%s (%s)" % (place, location.L1)
        data = {"organisation": reprstr(organisation.name),
                "facility": reprstr(facility.name),
                "address": reprstr(location.addr_street),
                "postcode": reprstr(location.addr_postcode),
                "place": place,
                }

        # Notify all OrgAdmins
        contacts = get_role_emails("ORG_ADMIN", pe_id=organisation.pe_id)
        if contacts:
            error = CMSNotifications.send(contacts,
                                          "FacilityObsolete",
                                          data,
                                          module = "org",
                                          resource = "facility",
                                          )
        else:
            error = "No contacts found"
        if error:
            msg = "Could not notify %s (%s)" % (organisation.name, error)
            current.log.error(msg)
            errors.append(msg)

    return "\n".join(errors) if errors else None
def mandatory(self):
    """
        Mandatory objects defined by this model, regardless whether
        enabled or disabled; default implementation, to be overridden
        by subclasses as needed

        Returns:
            None (no mandatory objects by default)
    """
    return None
def model(self):
    """
        Defines all tables in this model, to be implemented by
        subclasses; default implementation defines nothing

        Returns:
            None (no table definitions by default)
    """
    return None
def defaults(self):
    """
        Definitions of model globals (response.s3.*) if the model
        has been disabled in deployment settings, to be implemented
        by subclasses; default implementation defines nothing

        Returns:
            None (no defaults by default)
    """
    return None
def module_map(self):
    """
        Map of modules by prefix, for faster access (lazy property)

        Returns:
            dict {prefix: [module, ...]} mapping table-name prefixes
            to the modules that define models for them
    """

    mmap = self._module_map
    if mmap is None:
        # First access => build the map and cache it on the instance
        mmap = self._module_map = {}

        # Package locations (s3db plus any template-configured packages)
        packages = ["s3db"]
        models = current.deployment_settings.get_base_models()
        if models:
            if isinstance(models, str):
                models = [models]
            if isinstance(models, (tuple, list)):
                for name in models:
                    if isinstance(name, str) and name not in packages:
                        packages.append(name)

        # Map all modules
        for package in packages:
            try:
                p = __import__(package, fromlist=("DEFAULT",))
            except ImportError:
                current.log.error("DataModel cannot import package %s" % package)
                continue
            # Collect all sub-modules of the package, keyed by name;
            # later packages append, so templates can extend s3db
            for k, v in p.__dict__.items():
                if type(v) is MODULE_TYPE:
                    if k not in mmap:
                        mmap[k] = [v]
                    else:
                        mmap[k].append(v)

    return mmap
def customised(tablename, update=None):
    """
        Check (or mark) that customisations for a table model have
        been run

        Args:
            tablename: the name of the table
            update: True to mark that customisations have been run

        Returns:
            True|False whether customisations have been run
    """

    tables = current.s3db._customised

    if update is not None:
        tables[tablename] = bool(update)

    return tables.get(tablename, False)
def table(cls, tablename, default=None, db_only=False):
    """
        Helper function to load a table or other named object
        from models

        Args:
            tablename: the table name (or name of the object)
            default: the default value to return if not found
            db_only: find only tables, not other objects
    """

    s3 = current.response.s3
    if s3 is None:
        s3 = current.response.s3 = Storage()
    s3db = current.s3db

    if not db_only:
        if tablename in s3:
            # Cached in response.s3
            return s3[tablename]
        elif tablename in s3db.classes:
            # Known module-level object (e.g. a class)
            return s3db.classes[tablename].__dict__[tablename]

    db = current.db

    # Table already defined?
    try:
        return getattr(db, tablename)
    except AttributeError:
        pass

    found = None

    prefix = tablename.split("_", 1)[0]
    if prefix == DYNAMIC_PREFIX:
        # Dynamic table model
        try:
            found = DynamicTableModel(tablename).table
        except AttributeError:
            pass
    else:
        # Search the models for the prefix; empty tuple if unknown prefix
        modules = s3db.module_map.get(prefix, ())
        for module in modules:
            names = module.__all__
            s3models = module.__dict__

            if not db_only and tablename in names:
                # A name defined at module level (e.g. a class)
                s3db.classes[tablename] = module
                found = s3models[tablename]
            else:
                # A name defined in a DataModel: instantiating the
                # model defines the table (picked up below)
                for n in names:
                    model = s3models[n]
                    if hasattr(model, "_edenmodel") and \
                       hasattr(model, "names") and \
                       tablename in model.names:
                        model(prefix)
                        break

    if found:
        return found

    # Re-check the caches/db after model instantiation
    if not db_only and tablename in s3:
        found = s3[tablename]
    elif hasattr(db, tablename):
        found = getattr(db, tablename)
    elif getattr(db, "_lazy_tables") and \
         tablename in getattr(db, "_LAZY_TABLES"):
        found = getattr(db, tablename)
    else:
        found = default

    return found
def has(cls, name):
    """
        Check whether name is available with s3db.table(); does just
        a name lookup, without loading any models

        Args:
            name: the name

        Returns:
            truthy if the name is available, falsy otherwise
    """

    s3 = current.response.s3
    if s3 is None:
        s3 = current.response.s3 = Storage()
    s3db = current.s3db

    if name in s3 or name in s3db.classes:
        return True
    if hasattr(current.db, name):
        return True

    found = False

    prefix = name.split("_", 1)[0]
    if prefix == DYNAMIC_PREFIX:
        try:
            found = DynamicTableModel(name).table
        except AttributeError:
            pass
    else:
        # Empty tuple if unknown prefix
        modules = s3db.module_map.get(prefix, ())
        for module in modules:
            names = module.__all__
            if name in names:
                # Name defined at module level
                found = True
                break
            # Name defined in a DataModel?
            s3models = module.__dict__
            for n in names:
                model = s3models[n]
                if hasattr(model, "_edenmodel") and \
                   hasattr(model, "names") and \
                   name in model.names:
                    found = True
                    break
            if found:
                # Stop searching further modules once found
                break

    return found
def load(cls, prefix):
    """
        Helper function to load all DataModels in a module

        Args:
            prefix: the module prefix
    """

    s3 = current.response.s3
    if s3 is None:
        s3 = current.response.s3 = Storage()

    modules = current.s3db.module_map.get(prefix)
    if not modules:
        return

    for module in modules:
        for n in module.__all__:
            model = module.__dict__[n]
            if type(model).__name__ == "type" and \
               issubclass(model, DataModel):
                # Instantiate the DataModel => defines its tables
                model(prefix)
            elif n.startswith("%s_" % prefix):
                # Other module-level object => cache in response.s3
                s3[n] = model
def load_all_models(cls):
    """
        Helper function to load all models
    """

    s3 = current.response.s3
    if s3.all_models_loaded:
        # Already loaded
        return

    # Flag for subsequent model code (e.g. redeclaration warnings)
    s3.load_all_models = True

    # Load models
    for prefix in current.s3db.module_map:
        cls.load(prefix)

    # Define Scheduler tables
    # - already done during Scheduler().init() run during S3Task().init() in models/tasks.py
    #settings = current.deployment_settings
    #current.s3task.scheduler.define_tables(current.db,
    #                                       migrate = settings.get_base_migrate())

    # Define sessions table
    if current.deployment_settings.get_base_session_db():
        # Copied from https://github.com/web2py/web2py/blob/master/gluon/globals.py#L895
        # Not DRY, but no easy way to make it so
        current.db.define_table("web2py_session",
                                Field("locked", "boolean", default=False),
                                Field("client_ip", length=64),
                                Field("created_datetime", "datetime",
                                      default=current.request.now),
                                Field("modified_datetime", "datetime"),
                                Field("unique_key", length=64),
                                Field("session_data", "blob"),
                                )

    # Don't do this again within the current request cycle
    s3.load_all_models = False
    s3.all_models_loaded = True
def define_table(tablename, *fields, **args):
    """
        Same as db.define_table except that it does not repeat
        a table definition if the table is already defined.

        Args:
            tablename: the table name
            fields: the Fields for the table
            args: keyword arguments for db.define_table

        Returns:
            the Table (existing or newly defined)
    """

    db = current.db
    if hasattr(db, tablename):
        # Already defined => return the existing Table
        table = getattr(db, tablename)
    else:
        table = db.define_table(tablename, *fields, **args)
    return table
def resource(tablename, *args, **kwargs):
    """
        Wrapper for the CRUDResource constructor to realize
        the global s3db.resource() method

        Args:
            tablename: the table name (or a Table/component alias)
            args/kwargs: passed through to CRUDResource

        Returns:
            a CRUDResource instance
    """

    from ..resource import CRUDResource
    return CRUDResource(tablename, *args, **kwargs)
def configure(cls, tablename, **attr):
    """
        Update the extra configuration of a table

        Args:
            tablename: the name of the table (or the Table itself)
            attr: dict of attributes to update
    """

    config = current.model["config"]

    # Accept either a Table instance or a table name
    tn = tablename._tablename if type(tablename) is Table else tablename
    if tn not in config:
        config[tn] = {}
    config[tn].update(attr)
def clear_config(cls, tablename, *keys):
    """
        Removes configuration attributes of a resource

        Args:
            tablename: the resource DB table (or its name)
            keys: keys of attributes to remove (maybe multiple);
                  if omitted, the entire configuration is removed
    """

    config = current.model["config"]

    # Accept either a Table instance or a table name
    tn = tablename._tablename if type(tablename) is Table else tablename
    if tn in config:
        if not keys:
            # No keys given => drop the whole table configuration
            del config[tn]
        else:
            table_config = config[tn]
            for k in keys:
                table_config.pop(k, None)
def add_custom_callback(cls, tablename, hook, cb, method=None):
    """
        Generic method to append a custom onvalidation|onaccept
        callback to the originally configured callback chain,
        for use in customise_* in templates

        Args:
            tablename: the table name
            hook: the main hook ("onvalidation"|"onaccept")
            cb: the custom callback function
            method: the sub-hook ("create"|"update"|None)

        Example:
            # Add a create-onvalidation callback for the pr_person
            # table, while retaining any existing onvalidation:
            s3db.add_custom_callback("pr_person",
                                     "onvalidation",
                                     my_create_onvalidation,
                                     method = "create",
                                     )
    """

    def extend(this, new):
        # Append new to the existing callback chain (if not already in it)
        if isinstance(this, (tuple, list)):
            this = list(this)
        elif this is not None:
            this = [this]
        else:
            this = []
        if new not in this:
            this.append(new)
        return this

    # Capture the currently configured callbacks for all sub-hooks
    callbacks = {}
    for m in ("create", "update", None):
        key = "%s_%s" % (m, hook) if m else hook
        callbacks[m] = cls.get_config(tablename, key)

    if method is None:
        # Extend the generic hook, and any method-specific hooks
        # already configured (they override the generic one)
        generic_cb = callbacks[None]
        if generic_cb:
            callbacks[None] = extend(generic_cb, cb)
        else:
            callbacks[None] = cb
        for m in ("create", "update"):
            current_cb = callbacks[m]
            if current_cb:
                callbacks[m] = extend(current_cb, cb)
    else:
        # Extend only the requested method-specific hook; if none is
        # configured yet, inherit the generic chain first
        current_cb = callbacks[method]
        if current_cb:
            callbacks[method] = extend(current_cb, cb)
        else:
            callbacks[method] = extend(callbacks[None], cb)

    # Write back all non-empty callback chains
    settings = {}
    for m, setting in callbacks.items():
        if setting:
            key = "%s_%s" % (m, hook) if m else hook
            settings[key] = setting
    cls.configure(tablename, **settings)
def onaccept(cls, table, record, method="create"):
    """
        Helper to run the onaccept routine for a record

        Args:
            table: the Table
            record: the FORM or the Row to pass to the callback
            method: the method ("create"|"update")
    """

    if hasattr(table, "_tablename"):
        tablename = table._tablename
    else:
        tablename = table

    # Method-specific onaccept takes precedence over generic onaccept
    onaccept = cls.get_config(tablename, "%s_onaccept" % method,
               cls.get_config(tablename, "onaccept"))
    if onaccept:
        if "vars" not in record:
            # Wrap a plain Row/dict as a pseudo-FORM
            record = Storage(vars = Storage(record),
                             errors = Storage(),
                             )
        callback(onaccept, record, tablename=tablename)
def onvalidation(cls, table, record, method="create"):
    """
        Helper to run the onvalidation routine for a record

        Args:
            table: the Table
            record: the FORM or the Row to validate
            method: the method ("create"|"update")

        Returns:
            the validation errors (Storage)
    """

    if hasattr(table, "_tablename"):
        tablename = table._tablename
    else:
        tablename = table

    # Method-specific onvalidation takes precedence over generic one
    onvalidation = cls.get_config(tablename, "%s_onvalidation" % method,
                   cls.get_config(tablename, "onvalidation"))
    if "vars" not in record:
        # Wrap a plain Row/dict as a pseudo-FORM
        record = Storage(vars=Storage(record), errors=Storage())
    if onvalidation:
        callback(onvalidation, record, tablename=tablename)
    return record.errors
def add_components(cls, master, **links): """ Configure component links for a master table. Args: master: the name of the master table links: component link configurations """ components = current.model["components"] load_all_models = current.response.s3.load_all_models master = master._tablename if type(master) is Table else master hooks = components.get(master) if hooks is None: hooks = {} for tablename, ll in links.items(): name = tablename.split("_", 1)[1] if not isinstance(ll, (tuple, list)): ll = [ll] for link in ll: if isinstance(link, str): alias = name pkey = None fkey = link linktable = None lkey = None rkey = None actuate = None autodelete = False autocomplete = None defaults = None multiple = True filterby = None # @ToDo: use these as fallback for RHeader Tabs on Web App # (see S3ComponentTab.__init__) label = None plural = None elif isinstance(link, dict): alias = link.get("name", name) joinby = link.get("joinby") if not joinby: continue linktable = link.get("link") linktable = linktable._tablename \ if type(linktable) is Table else linktable if load_all_models: # Warn for redeclaration of components (different table # under the same alias) - this is wrong most of the time, # even though it would produce valid+consistent results: if alias in hooks and hooks[alias].tablename != tablename: current.log.warning("Redeclaration of component (%s.%s)" % (master, alias)) # Ambiguous aliases can cause accidental deletions and # other serious integrity problems, so we warn for ambiguous # aliases (not raising exceptions just yet because there # are a number of legacy cases), # Currently only logging during load_all_models to not # completely submerge other important log messages if linktable and alias == linktable.split("_", 1)[1]: # @todo: fix legacy cases (e.g. 
renaming the link tables) # @todo: raise Exception once all legacy cases are fixed current.log.warning("Ambiguous link/component alias (%s.%s)" % (master, alias)) if alias == master.split("_", 1)[1]: # No legacy cases, so crash to prevent introduction of any raise SyntaxError("Ambiguous master/component alias (%s.%s)" % (master, alias)) pkey = link.get("pkey") if linktable is None: lkey = None rkey = None fkey = joinby else: lkey = joinby rkey = link.get("key") if not rkey: continue fkey = link.get("fkey") actuate = link.get("actuate") autodelete = link.get("autodelete", False) autocomplete = link.get("autocomplete") defaults = link.get("defaults") multiple = link.get("multiple", True) filterby = link.get("filterby") label = link.get("label") plural = link.get("plural") else: continue component = Storage(tablename = tablename, pkey = pkey, fkey = fkey, linktable = linktable, lkey = lkey, rkey = rkey, actuate = actuate, autodelete = autodelete, autocomplete = autocomplete, defaults = defaults, multiple = multiple, filterby = filterby, label = label, plural = plural, ) hooks[alias] = component components[master] = hooks
def add_components(cls, master, **links): """ Configure component links for a master table. Args: master: the name of the master table links: component link configurations """ components = current.model["components"] load_all_models = current.response.s3.load_all_models master = master._tablename if type(master) is Table else master hooks = components.get(master) if hooks is None: hooks = {} for tablename, ll in links.items(): name = tablename.split("_", 1)[1] if not isinstance(ll, (tuple, list)): ll = [ll] for link in ll: if isinstance(link, str): alias = name pkey = None fkey = link linktable = None lkey = None rkey = None actuate = None autodelete = False autocomplete = None defaults = None multiple = True filterby = None # @ToDo: use these as fallback for RHeader Tabs on Web App # (see S3ComponentTab.__init__) label = None plural = None elif isinstance(link, dict): alias = link.get("name", name) joinby = link.get("joinby") if not joinby: continue linktable = link.get("link") linktable = linktable._tablename \ if type(linktable) is Table else linktable if load_all_models: # Warn for redeclaration of components (different table # under the same alias) - this is wrong most of the time, # even though it would produce valid+consistent results: if alias in hooks and hooks[alias].tablename != tablename: current.log.warning("Redeclaration of component (%s.%s)" % (master, alias)) # Ambiguous aliases can cause accidental deletions and # other serious integrity problems, so we warn for ambiguous # aliases (not raising exceptions just yet because there # are a number of legacy cases), # Currently only logging during load_all_models to not # completely submerge other important log messages if linktable and alias == linktable.split("_", 1)[1]: # @todo: fix legacy cases (e.g. 
renaming the link tables) # @todo: raise Exception once all legacy cases are fixed current.log.warning("Ambiguous link/component alias (%s.%s)" % (master, alias)) if alias == master.split("_", 1)[1]: # No legacy cases, so crash to prevent introduction of any raise SyntaxError("Ambiguous master/component alias (%s.%s)" % (master, alias)) pkey = link.get("pkey") if linktable is None: lkey = None rkey = None fkey = joinby else: lkey = joinby rkey = link.get("key") if not rkey: continue fkey = link.get("fkey") actuate = link.get("actuate") autodelete = link.get("autodelete", False) autocomplete = link.get("autocomplete") defaults = link.get("defaults") multiple = link.get("multiple", True) filterby = link.get("filterby") label = link.get("label") plural = link.get("plural") else: continue component = Storage(tablename = tablename, pkey = pkey, fkey = fkey, linktable = linktable, lkey = lkey, rkey = rkey, actuate = actuate, autodelete = autodelete, autocomplete = autocomplete, defaults = defaults, multiple = multiple, filterby = filterby, label = label, plural = plural, ) hooks[alias] = component components[master] = hooks
Python
def add_dynamic_components(cls, tablename, exclude=None): """ Helper function to look up and declare dynamic components for a table; called by get_components if dynamic_components is configured for the table Args: tablename: the table name exclude: names to exclude (static components) """ mtable = cls.table(tablename) if mtable is None: return if cls.get_config(tablename, "dynamic_components_loaded"): # Already loaded return ttable = cls.table("s3_table") ftable = cls.table("s3_field") join = ttable.on(ttable.id == ftable.table_id) query = (ftable.master == tablename) & \ (ftable.component_key == True) & \ (ftable.deleted == False) rows = current.db(query).select(ftable.name, ftable.field_type, ftable.component_alias, ftable.settings, ttable.name, join = join, ) # Don't do this again during the same request cycle cls.configure(tablename, dynamic_components_loaded=True) components = {} for row in rows: hook = {} ctable = row["s3_table"] ctablename = ctable.name default_alias = ctablename.split("_", 1)[-1] field = row["s3_field"] alias = field.component_alias if not alias: alias = default_alias if exclude and alias in exclude: continue if alias != default_alias: hook["name"] = alias hook["joinby"] = field.name settings = field.settings if settings: multiple = settings.get("component_multiple", DEFAULT) if multiple is not DEFAULT: hook["multiple"] = multiple # Get the primary key field_type = field.field_type if field_type[:10] == "reference ": ktablename = field_type.split(" ", 1)[1] if "." in ktablename: ktablename, pkey = ktablename.split(".", 1)[1] if pkey and pkey != mtable._id.name: hook["pkey"] = pkey components[ctablename] = hook if components: cls.add_components(tablename, **components)
def add_dynamic_components(cls, tablename, exclude=None): """ Helper function to look up and declare dynamic components for a table; called by get_components if dynamic_components is configured for the table Args: tablename: the table name exclude: names to exclude (static components) """ mtable = cls.table(tablename) if mtable is None: return if cls.get_config(tablename, "dynamic_components_loaded"): # Already loaded return ttable = cls.table("s3_table") ftable = cls.table("s3_field") join = ttable.on(ttable.id == ftable.table_id) query = (ftable.master == tablename) & \ (ftable.component_key == True) & \ (ftable.deleted == False) rows = current.db(query).select(ftable.name, ftable.field_type, ftable.component_alias, ftable.settings, ttable.name, join = join, ) # Don't do this again during the same request cycle cls.configure(tablename, dynamic_components_loaded=True) components = {} for row in rows: hook = {} ctable = row["s3_table"] ctablename = ctable.name default_alias = ctablename.split("_", 1)[-1] field = row["s3_field"] alias = field.component_alias if not alias: alias = default_alias if exclude and alias in exclude: continue if alias != default_alias: hook["name"] = alias hook["joinby"] = field.name settings = field.settings if settings: multiple = settings.get("component_multiple", DEFAULT) if multiple is not DEFAULT: hook["multiple"] = multiple # Get the primary key field_type = field.field_type if field_type[:10] == "reference ": ktablename = field_type.split(" ", 1)[1] if "." in ktablename: ktablename, pkey = ktablename.split(".", 1)[1] if pkey and pkey != mtable._id.name: hook["pkey"] = pkey components[ctablename] = hook if components: cls.add_components(tablename, **components)
Python
def parse_hook(cls, table, alias, hook=None): """ Parse a component configuration, loading all necessary table models and applying defaults Args: table: the master table alias: the component alias hook: the component configuration (if already known) Returns: the component description (Storage {key: value}) """ load = cls.table if hook is None: table, hooks = cls.get_hooks(table, names=[alias]) if hooks and alias in hooks: hook = hooks[alias] else: return None tn = hook.tablename lt = hook.linktable ctable = load(tn) if ctable is None: return None if lt: ltable = load(lt) if ltable is None: return None else: ltable = None prefix, name = tn.split("_", 1) component = Storage(defaults=hook.defaults, multiple=hook.multiple, tablename=tn, table=ctable, prefix=prefix, name=name, alias=alias, label=hook.label, plural=hook.plural, ) if hook.supertable is not None: joinby = hook.supertable._id.name else: joinby = hook.fkey if hook.pkey is None: if hook.supertable is not None: component.pkey = joinby else: component.pkey = table._id.name else: component.pkey = hook.pkey if ltable is not None: if hook.actuate: component.actuate = hook.actuate else: component.actuate = "link" component.linktable = ltable if hook.fkey is None: component.fkey = ctable._id.name else: component.fkey = hook.fkey component.lkey = hook.lkey component.rkey = hook.rkey component.autocomplete = hook.autocomplete component.autodelete = hook.autodelete else: component.linktable = None component.fkey = hook.fkey component.lkey = component.rkey = None component.actuate = None component.autocomplete = None component.autodelete = None if hook.filterby is not None: component.filterby = hook.filterby return component
def parse_hook(cls, table, alias, hook=None): """ Parse a component configuration, loading all necessary table models and applying defaults Args: table: the master table alias: the component alias hook: the component configuration (if already known) Returns: the component description (Storage {key: value}) """ load = cls.table if hook is None: table, hooks = cls.get_hooks(table, names=[alias]) if hooks and alias in hooks: hook = hooks[alias] else: return None tn = hook.tablename lt = hook.linktable ctable = load(tn) if ctable is None: return None if lt: ltable = load(lt) if ltable is None: return None else: ltable = None prefix, name = tn.split("_", 1) component = Storage(defaults=hook.defaults, multiple=hook.multiple, tablename=tn, table=ctable, prefix=prefix, name=name, alias=alias, label=hook.label, plural=hook.plural, ) if hook.supertable is not None: joinby = hook.supertable._id.name else: joinby = hook.fkey if hook.pkey is None: if hook.supertable is not None: component.pkey = joinby else: component.pkey = table._id.name else: component.pkey = hook.pkey if ltable is not None: if hook.actuate: component.actuate = hook.actuate else: component.actuate = "link" component.linktable = ltable if hook.fkey is None: component.fkey = ctable._id.name else: component.fkey = hook.fkey component.lkey = hook.lkey component.rkey = hook.rkey component.autocomplete = hook.autocomplete component.autodelete = hook.autodelete else: component.linktable = None component.fkey = hook.fkey component.lkey = component.rkey = None component.actuate = None component.autocomplete = None component.autodelete = None if hook.filterby is not None: component.filterby = hook.filterby return component
Python
def __filter_hooks(cls, components, hooks, names=None, supertable=None): """ DRY Helper method to filter component hooks Args: components: components already found, dict {alias: component} hooks: component hooks to filter, dict {alias: hook} names: the names (=aliases) to include supertable: the super-table name to set for the component Returns: set of names that could not be found, or None if names was None """ for alias in hooks: if alias in components or \ names is not None and alias not in names: continue hook = hooks[alias] hook["supertable"] = supertable components[alias] = hook return set(names) - set(hooks) if names is not None else None
def __filter_hooks(cls, components, hooks, names=None, supertable=None): """ DRY Helper method to filter component hooks Args: components: components already found, dict {alias: component} hooks: component hooks to filter, dict {alias: hook} names: the names (=aliases) to include supertable: the super-table name to set for the component Returns: set of names that could not be found, or None if names was None """ for alias in hooks: if alias in components or \ names is not None and alias not in names: continue hook = hooks[alias] hook["supertable"] = supertable components[alias] = hook return set(names) - set(hooks) if names is not None else None
Python
def has_components(cls, table): """ Checks whether there are components defined for a table Args: table: the table or table name """ components = current.model["components"] load = cls.table # Get tablename and table if type(table) is Table: tablename = table._tablename else: tablename = table table = load(tablename) if table is None: return False # Attach dynamic components if cls.get_config(tablename, "dynamic_components"): cls.add_dynamic_components(tablename) # Get table hooks hooks = {} filter_hooks = cls.__filter_hooks h = components.get(tablename, None) if h: filter_hooks(hooks, h) if len(hooks): return True # Check for super-components # FIXME: add dynamic components for super-table? supertables = cls.get_config(tablename, "super_entity") if supertables: if not isinstance(supertables, (list, tuple)): supertables = [supertables] for s in supertables: if isinstance(s, str): s = load(s) if s is None: continue h = components.get(s._tablename, None) if h: filter_hooks(hooks, h, supertable=s) if len(hooks): return True # No components found return False
def has_components(cls, table): """ Checks whether there are components defined for a table Args: table: the table or table name """ components = current.model["components"] load = cls.table # Get tablename and table if type(table) is Table: tablename = table._tablename else: tablename = table table = load(tablename) if table is None: return False # Attach dynamic components if cls.get_config(tablename, "dynamic_components"): cls.add_dynamic_components(tablename) # Get table hooks hooks = {} filter_hooks = cls.__filter_hooks h = components.get(tablename, None) if h: filter_hooks(hooks, h) if len(hooks): return True # Check for super-components # FIXME: add dynamic components for super-table? supertables = cls.get_config(tablename, "super_entity") if supertables: if not isinstance(supertables, (list, tuple)): supertables = [supertables] for s in supertables: if isinstance(s, str): s = load(s) if s is None: continue h = components.get(s._tablename, None) if h: filter_hooks(hooks, h, supertable=s) if len(hooks): return True # No components found return False
Python
def hierarchy_link(cls, tablename): """ Get the alias of the component that represents the parent node in a hierarchy (for link-table based hierarchies) Args: tablename: the table name Returns: the alias of the hierarchy parent component """ if not cls.table(tablename, db_only=True): return None hierarchy_link = cls.get_config(tablename, "hierarchy_link") if not hierarchy_link: hierarchy = cls.get_config(tablename, "hierarchy") if hierarchy and "." in hierarchy: alias = hierarchy.rsplit(".", 1)[0] if "__link" in alias: hierarchy_link = alias.rsplit("__link", 1)[0] return hierarchy_link
def hierarchy_link(cls, tablename): """ Get the alias of the component that represents the parent node in a hierarchy (for link-table based hierarchies) Args: tablename: the table name Returns: the alias of the hierarchy parent component """ if not cls.table(tablename, db_only=True): return None hierarchy_link = cls.get_config(tablename, "hierarchy_link") if not hierarchy_link: hierarchy = cls.get_config(tablename, "hierarchy") if hierarchy and "." in hierarchy: alias = hierarchy.rsplit(".", 1)[0] if "__link" in alias: hierarchy_link = alias.rsplit("__link", 1)[0] return hierarchy_link
Python
def super_entity(cls, tablename, key, types, *fields, **args): """ Define a super-entity table Args: tablename: the tablename key: name of the primary key types: a dictionary of instance types fields: any shared fields args: table arguments (e.g. migrate) """ db = current.db if db._dbname == "postgres": sequence_name = "%s_%s_seq" % (tablename, key) else: sequence_name = None table = db.define_table(tablename, Field(key, "id", readable=False, writable=False), Field("deleted", "boolean", readable=False, writable=False, default=False), Field("instance_type", represent = lambda opt: \ types.get(opt, opt) or \ current.messages["NONE"], readable=False, writable=False), Field("uuid", length=128, readable=False, writable=False), sequence_name=sequence_name, *fields, **args) return table
def super_entity(cls, tablename, key, types, *fields, **args): """ Define a super-entity table Args: tablename: the tablename key: name of the primary key types: a dictionary of instance types fields: any shared fields args: table arguments (e.g. migrate) """ db = current.db if db._dbname == "postgres": sequence_name = "%s_%s_seq" % (tablename, key) else: sequence_name = None table = db.define_table(tablename, Field(key, "id", readable=False, writable=False), Field("deleted", "boolean", readable=False, writable=False, default=False), Field("instance_type", represent = lambda opt: \ types.get(opt, opt) or \ current.messages["NONE"], readable=False, writable=False), Field("uuid", length=128, readable=False, writable=False), sequence_name=sequence_name, *fields, **args) return table
Python
def super_key(cls, supertable, default=None): """ Get the name of the key for a super-entity Args: supertable: the super-entity table """ if supertable is None and default: return default if isinstance(supertable, str): supertable = cls.table(supertable) try: return supertable._id.name except AttributeError: pass raise SyntaxError("No id-type key found in %s" % supertable._tablename)
def super_key(cls, supertable, default=None): """ Get the name of the key for a super-entity Args: supertable: the super-entity table """ if supertable is None and default: return default if isinstance(supertable, str): supertable = cls.table(supertable) try: return supertable._id.name except AttributeError: pass raise SyntaxError("No id-type key found in %s" % supertable._tablename)
Python
def super_link(cls, name, supertable, label = None, comment = None, represent = None, orderby = None, sort = True, filterby = None, filter_opts = None, not_filterby = None, not_filter_opts = None, instance_types = None, realms = None, updateable = False, groupby = None, script = None, widget = None, empty = True, default = DEFAULT, ondelete = "CASCADE", readable = False, writable = False, ): """ Get a foreign key field for a super-entity Args: supertable: the super-entity table label: label for the field comment: comment for the field readable: set the field readable represent: set a representation function for the field """ if isinstance(supertable, str): supertable = cls.table(supertable) if supertable is None: if name is not None: return Field(name, "integer", readable = False, writable = False, ) else: raise SyntaxError("Undefined super-entity") try: key = supertable._id.name except AttributeError: raise SyntaxError("No id-type key found in %s" % supertable._tablename) if name is not None and name != key: raise SyntaxError("Primary key %s not found in %s" % (name, supertable._tablename)) requires = IS_ONE_OF(current.db, "%s.%s" % (supertable._tablename, key), represent, orderby = orderby, sort = sort, groupby = groupby, filterby = filterby, filter_opts = filter_opts, instance_types = instance_types, realms = realms, updateable = updateable, not_filterby = not_filterby, not_filter_opts = not_filter_opts, ) if empty: requires = IS_EMPTY_OR(requires) # Add the script into the comment if script: if comment: comment = TAG[""](comment, S3ScriptItem(script=script)) else: comment = S3ScriptItem(script=script) return Field(key, supertable, default = default, requires = requires, readable = readable, writable = writable, label = label, comment = comment, represent = represent, widget = widget, ondelete = ondelete, )
def super_link(cls, name, supertable, label = None, comment = None, represent = None, orderby = None, sort = True, filterby = None, filter_opts = None, not_filterby = None, not_filter_opts = None, instance_types = None, realms = None, updateable = False, groupby = None, script = None, widget = None, empty = True, default = DEFAULT, ondelete = "CASCADE", readable = False, writable = False, ): """ Get a foreign key field for a super-entity Args: supertable: the super-entity table label: label for the field comment: comment for the field readable: set the field readable represent: set a representation function for the field """ if isinstance(supertable, str): supertable = cls.table(supertable) if supertable is None: if name is not None: return Field(name, "integer", readable = False, writable = False, ) else: raise SyntaxError("Undefined super-entity") try: key = supertable._id.name except AttributeError: raise SyntaxError("No id-type key found in %s" % supertable._tablename) if name is not None and name != key: raise SyntaxError("Primary key %s not found in %s" % (name, supertable._tablename)) requires = IS_ONE_OF(current.db, "%s.%s" % (supertable._tablename, key), represent, orderby = orderby, sort = sort, groupby = groupby, filterby = filterby, filter_opts = filter_opts, instance_types = instance_types, realms = realms, updateable = updateable, not_filterby = not_filterby, not_filter_opts = not_filter_opts, ) if empty: requires = IS_EMPTY_OR(requires) # Add the script into the comment if script: if comment: comment = TAG[""](comment, S3ScriptItem(script=script)) else: comment = S3ScriptItem(script=script) return Field(key, supertable, default = default, requires = requires, readable = readable, writable = writable, label = label, comment = comment, represent = represent, widget = widget, ondelete = ondelete, )
Python
def update_super(cls, table, record): """ Updates the super-entity links of an instance record Args: table: the instance table record: the instance record """ get_config = cls.get_config # Get all super-entities of this table tablename = original_tablename(table) supertables = get_config(tablename, "super_entity") if not supertables: return False # Get the record record_id = record.get("id", None) if not record_id: return False # Find all super-tables, super-keys and shared fields if not isinstance(supertables, (list, tuple)): supertables = [supertables] updates = [] fields = [] has_deleted = "deleted" in table.fields has_uuid = "uuid" in table.fields for s in supertables: # Get the supertable and the corresponding superkey if type(s) is not Table: s = cls.table(s) if s is None: continue tn = s._tablename key = cls.super_key(s) protected = [key] # Fields in the supertable that shall not be treated as # shared fields (i.e. must not be overridden by instance # values) not_shared = get_config(tn, "no_shared_fields") if isinstance(not_shared, (tuple, list)): protected.extend(not_shared) # Shared fields shared = get_config(tablename, "%s_fields" % tn) if shared: # Instance table specifies a specific field mapping # {superfield: instfield} for this supertable shared = {fn: shared[fn] for fn in shared if fn not in protected and \ fn in s.fields and \ shared[fn] in table.fields} else: # All fields the supertable and instance table have # in common, except protected fields shared = {fn: fn for fn in s.fields if fn not in protected and \ fn in table.fields} fields.extend(shared.values()) fields.append(key) updates.append((tn, s, key, shared)) # Get the record data db = current.db ogetattr = object.__getattribute__ if has_deleted: fields.append("deleted") if has_uuid: fields.append("uuid") fields = [ogetattr(table, fn) for fn in list(set(fields))] _record = db(table.id == record_id).select(limitby=(0, 1), *fields).first() if not _record: return False super_keys = {} for tn, 
s, key, shared in updates: data = Storage([(fn, _record[shared[fn]]) for fn in shared]) data.instance_type = tablename if has_deleted: data.deleted = _record.get("deleted", False) if has_uuid: data.uuid = _record.get("uuid", None) # Do we already have a super-record? skey = ogetattr(_record, key) if skey: query = (s[key] == skey) row = db(query).select(s._id, limitby=(0, 1)).first() else: row = None if row: # Update the super-entity record db(s._id == skey).update(**data) super_keys[key] = skey data[key] = skey form = Storage(vars=data) onaccept = get_config(tn, "update_onaccept", get_config(tn, "onaccept", None)) if onaccept: onaccept(form) else: # Insert a new super-entity record k = s.insert(**data) if k: super_keys[key] = k data[key] = k onaccept = get_config(tn, "create_onaccept", get_config(tn, "onaccept", None)) if onaccept: form = Storage(vars=data) onaccept(form) # Update the super_keys in the record if super_keys: # System update => don't update modified_by/on if "modified_on" in table.fields: super_keys["modified_by"] = table.modified_by super_keys["modified_on"] = table.modified_on db(table.id == record_id).update(**super_keys) record.update(super_keys) return True
def update_super(cls, table, record): """ Updates the super-entity links of an instance record Args: table: the instance table record: the instance record """ get_config = cls.get_config # Get all super-entities of this table tablename = original_tablename(table) supertables = get_config(tablename, "super_entity") if not supertables: return False # Get the record record_id = record.get("id", None) if not record_id: return False # Find all super-tables, super-keys and shared fields if not isinstance(supertables, (list, tuple)): supertables = [supertables] updates = [] fields = [] has_deleted = "deleted" in table.fields has_uuid = "uuid" in table.fields for s in supertables: # Get the supertable and the corresponding superkey if type(s) is not Table: s = cls.table(s) if s is None: continue tn = s._tablename key = cls.super_key(s) protected = [key] # Fields in the supertable that shall not be treated as # shared fields (i.e. must not be overridden by instance # values) not_shared = get_config(tn, "no_shared_fields") if isinstance(not_shared, (tuple, list)): protected.extend(not_shared) # Shared fields shared = get_config(tablename, "%s_fields" % tn) if shared: # Instance table specifies a specific field mapping # {superfield: instfield} for this supertable shared = {fn: shared[fn] for fn in shared if fn not in protected and \ fn in s.fields and \ shared[fn] in table.fields} else: # All fields the supertable and instance table have # in common, except protected fields shared = {fn: fn for fn in s.fields if fn not in protected and \ fn in table.fields} fields.extend(shared.values()) fields.append(key) updates.append((tn, s, key, shared)) # Get the record data db = current.db ogetattr = object.__getattribute__ if has_deleted: fields.append("deleted") if has_uuid: fields.append("uuid") fields = [ogetattr(table, fn) for fn in list(set(fields))] _record = db(table.id == record_id).select(limitby=(0, 1), *fields).first() if not _record: return False super_keys = {} for tn, 
s, key, shared in updates: data = Storage([(fn, _record[shared[fn]]) for fn in shared]) data.instance_type = tablename if has_deleted: data.deleted = _record.get("deleted", False) if has_uuid: data.uuid = _record.get("uuid", None) # Do we already have a super-record? skey = ogetattr(_record, key) if skey: query = (s[key] == skey) row = db(query).select(s._id, limitby=(0, 1)).first() else: row = None if row: # Update the super-entity record db(s._id == skey).update(**data) super_keys[key] = skey data[key] = skey form = Storage(vars=data) onaccept = get_config(tn, "update_onaccept", get_config(tn, "onaccept", None)) if onaccept: onaccept(form) else: # Insert a new super-entity record k = s.insert(**data) if k: super_keys[key] = k data[key] = k onaccept = get_config(tn, "create_onaccept", get_config(tn, "onaccept", None)) if onaccept: form = Storage(vars=data) onaccept(form) # Update the super_keys in the record if super_keys: # System update => don't update modified_by/on if "modified_on" in table.fields: super_keys["modified_by"] = table.modified_by super_keys["modified_on"] = table.modified_on db(table.id == record_id).update(**super_keys) record.update(super_keys) return True
Python
def delete_super(cls, table, record): """ Removes the super-entity links of an instance record Args: table: the instance table record: the instance record Returns: True if successful, otherwise False (caller must roll back the transaction if False is returned!) """ # Must have a record ID record_id = record.get(table._id.name, None) if not record_id: raise RuntimeError("Record ID required for delete_super") # Get all super-tables get_config = cls.get_config supertables = get_config(original_tablename(table), "super_entity") # None? Ok - done! if not supertables: return True if not isinstance(supertables, (list, tuple)): supertables = [supertables] # Get the keys for all super-tables keys = {} load = {} for sname in supertables: stable = cls.table(sname) if isinstance(sname, str) else sname if stable is None: continue key = stable._id.name if key in record: keys[stable._tablename] = (key, record[key]) else: load[stable._tablename] = key # If necessary, load missing keys if load: row = current.db(table._id == record_id).select( table._id, *load.values(), limitby=(0, 1)).first() for sname, key in load.items(): keys[sname] = (key, row[key]) # Delete super-records define_resource = current.s3db.resource update_record = record.update_record for sname in keys: key, value = keys[sname] if not value: # Skip if we don't have a super-key continue # Remove the super key update_record(**{key: None}) # Delete the super record sresource = define_resource(sname, id=value) deleted = sresource.delete(cascade=True, log_errors=True) if not deleted or sresource.error: # Restore the super key # @todo: is this really necessary? => caller must roll back # anyway in this case, which would automatically restore update_record(**{key: value}) return False return True
def delete_super(cls, table, record): """ Removes the super-entity links of an instance record Args: table: the instance table record: the instance record Returns: True if successful, otherwise False (caller must roll back the transaction if False is returned!) """ # Must have a record ID record_id = record.get(table._id.name, None) if not record_id: raise RuntimeError("Record ID required for delete_super") # Get all super-tables get_config = cls.get_config supertables = get_config(original_tablename(table), "super_entity") # None? Ok - done! if not supertables: return True if not isinstance(supertables, (list, tuple)): supertables = [supertables] # Get the keys for all super-tables keys = {} load = {} for sname in supertables: stable = cls.table(sname) if isinstance(sname, str) else sname if stable is None: continue key = stable._id.name if key in record: keys[stable._tablename] = (key, record[key]) else: load[stable._tablename] = key # If necessary, load missing keys if load: row = current.db(table._id == record_id).select( table._id, *load.values(), limitby=(0, 1)).first() for sname, key in load.items(): keys[sname] = (key, row[key]) # Delete super-records define_resource = current.s3db.resource update_record = record.update_record for sname in keys: key, value = keys[sname] if not value: # Skip if we don't have a super-key continue # Remove the super key update_record(**{key: None}) # Delete the super record sresource = define_resource(sname, id=value) deleted = sresource.delete(cascade=True, log_errors=True) if not deleted or sresource.error: # Restore the super key # @todo: is this really necessary? => caller must roll back # anyway in this case, which would automatically restore update_record(**{key: value}) return False return True
Python
def append(self, resource): """ Append a resource to this context """ self.resources.append(resource)
def append(self, resource): """ Append a resource to this context """ self.resources.append(resource)
Python
def as_dict(self): """ Returns the context as JSON-serializable dict """ resources = [r.as_dict() for r in self.resources] context = {"type": "FeatureCollection", "id": self.uri, "properties": { "title": "Eden Map Configuration", "updated": self.modified_on.isoformat() + "Z", "lang": "en", # TODO use session language? }, "features": resources, } return context
def as_dict(self): """ Returns the context as JSON-serializable dict """ resources = [r.as_dict() for r in self.resources] context = {"type": "FeatureCollection", "id": self.uri, "properties": { "title": "Eden Map Configuration", "updated": self.modified_on.isoformat() + "Z", "lang": "en", # TODO use session language? }, "features": resources, } return context
Python
def json(self): """ Returns this context in GeoJSON notation Returns: a str containing a GeoJSON object """ return json.dumps(self.as_dict(), separators=JSONSEPARATORS)
def json(self): """ Returns this context in GeoJSON notation Returns: a str containing a GeoJSON object """ return json.dumps(self.as_dict(), separators=JSONSEPARATORS)
Python
def add_offering(self, offering): """ Add (append) a service offering to this resource Args: offering: the Offering instance """ if offering: self.offerings.append(offering)
def add_offering(self, offering): """ Add (append) a service offering to this resource Args: offering: the Offering instance """ if offering: self.offerings.append(offering)
Python
def as_dict(self): """ Returns the resource object as JSON-serializable dict """ properties = {"title": self.title, "updated": self.modified_on.isoformat() + "Z", "active": bool(self.active), } if self.folder: properties["folder"] = self.folder if self.offerings: properties["offerings"] = [o.as_dict() for o in self.offerings] if self.contents: properties["contents"] = [c.as_dict() for c in self.contents] resource = {"type": "Feature", "id": self.uri, #"geometry": ?, "properties": properties } return resource
def as_dict(self): """ Returns the resource object as JSON-serializable dict """ properties = {"title": self.title, "updated": self.modified_on.isoformat() + "Z", "active": bool(self.active), } if self.folder: properties["folder"] = self.folder if self.offerings: properties["offerings"] = [o.as_dict() for o in self.offerings] if self.contents: properties["contents"] = [c.as_dict() for c in self.contents] resource = {"type": "Feature", "id": self.uri, #"geometry": ?, "properties": properties } return resource