language
stringclasses
6 values
original_string
stringlengths
25
887k
text
stringlengths
25
887k
Python
def data_element(self, variable):
    """
    Overrides FilterWidget.data_element(), constructs multiple
    hidden INPUTs (one per variable) with element IDs of the form
    <id>-<operator>-data (where no operator is translated as "eq").

    Args:
        variable: the variable(s)
    """

    fields = self.field
    if type(fields) is not list:
        # Single field => use the RangeFilter parent class implementation
        return super(DateFilter, self).data_element(variable)

    widget_id = self.attr["_id"]
    operators = self.operator

    elements = []
    for index, selector in enumerate(self.selector.split("|")):
        # First selector uses the first operator, all others the second
        operator = operators[0] if index == 0 else operators[1]
        variable = self._variable(selector, [operator])[0]
        elements.append(INPUT(_type = "hidden",
                              _id = "%s-%s-data" % (widget_id, operator),
                              _class = "filter-widget-data %s-data" % self.css_base,
                              _value = variable,
                              ))
    return elements
def data_element(self, variable):
    """
    Overrides FilterWidget.data_element(), constructs multiple
    hidden INPUTs (one per variable) with element IDs of the form
    <id>-<operator>-data (where no operator is translated as "eq").

    Args:
        variable: the variable(s)
    """

    fields = self.field
    if type(fields) is not list:
        # Single field => use the RangeFilter parent class implementation
        return super(DateFilter, self).data_element(variable)

    widget_id = self.attr["_id"]
    operators = self.operator

    elements = []
    for index, selector in enumerate(self.selector.split("|")):
        # First selector uses the first operator, all others the second
        operator = operators[0] if index == 0 else operators[1]
        variable = self._variable(selector, [operator])[0]
        elements.append(INPUT(_type = "hidden",
                              _id = "%s-%s-data" % (widget_id, operator),
                              _class = "filter-widget-data %s-data" % self.css_base,
                              _value = variable,
                              ))
    return elements
Python
def ajax_options(self, resource):
    """
    Method to Ajax-retrieve the current options of this widget

    Args:
        resource: the CRUDResource
    """

    if not self._auto_range():
        # No automatic min/max determination => nothing to report
        return {}

    minimum, maximum = self._options(resource)

    # Serialize the extremes in ISO format for the client
    iso_format = "%Y-%m-%dT%H:%M:%S"
    if minimum:
        minimum = minimum.strftime(iso_format)
    if maximum:
        maximum = maximum.strftime(iso_format)

    attr = self._attr(resource)
    return {attr["_id"]: {"min": minimum,
                          "max": maximum,
                          }}
def ajax_options(self, resource):
    """
    Method to Ajax-retrieve the current options of this widget

    Args:
        resource: the CRUDResource
    """

    if not self._auto_range():
        # No automatic min/max determination => nothing to report
        return {}

    minimum, maximum = self._options(resource)

    # Serialize the extremes in ISO format for the client
    iso_format = "%Y-%m-%dT%H:%M:%S"
    if minimum:
        minimum = minimum.strftime(iso_format)
    if maximum:
        maximum = maximum.strftime(iso_format)

    attr = self._attr(resource)
    return {attr["_id"]: {"min": minimum,
                          "max": maximum,
                          }}
Python
def _options(self, resource):
    """
    Helper function to retrieve the current options for this
    filter widget

    Args:
        resource: the CRUDResource

    Returns:
        tuple (minimum, maximum) of datetime values, each padded
        by 5 minutes so the extreme values remain selectable
    """

    query = resource.get_query()
    rfilter = resource.rfilter
    if rfilter:
        join = rfilter.get_joins()
        left = rfilter.get_joins(left = True)
    else:
        join = left = None

    fields = self.field
    if type(fields) is list:
        # Separate start/end fields
        srfield = S3ResourceField(resource, fields[0])
        # BUGFIX: was fields[0], which made the end-field identical
        # to the start-field and ignored the actual end field
        erfield = S3ResourceField(resource, fields[1])

        # Include field joins (if fields are in joined tables)
        sjoins = srfield.join
        for tname in sjoins:
            query &= sjoins[tname]
        ejoins = erfield.join
        for tname in ejoins:
            if tname not in sjoins:
                query &= ejoins[tname]

        start_field = srfield.field
        end_field = erfield.field
        row = current.db(query).select(start_field.min(),
                                       start_field.max(),
                                       end_field.max(),
                                       join = join,
                                       left = left,
                                       ).first()
        minimum = row[start_field.min()]
        maximum = row[start_field.max()]
        end_max = row[end_field.max()]
        if end_max:
            # The overall maximum is the later of start-max and end-max
            maximum = max(maximum, end_max)
    else:
        # Single filter field
        rfield = S3ResourceField(resource, fields)

        # Include field joins (if field is in joined table)
        joins = rfield.join
        for tname in joins:
            query &= joins[tname]

        field = rfield.field
        row = current.db(query).select(field.min(),
                                       field.max(),
                                       join = join,
                                       left = left,
                                       ).first()
        minimum = row[field.min()]
        maximum = row[field.max()]

    # Pad the extremes by one widget step (5 minutes) so that
    # the extreme values themselves remain selectable
    minute_step = 5
    timedelta = datetime.timedelta
    if minimum:
        minimum -= timedelta(minutes = minute_step)
    if maximum:
        maximum += timedelta(minutes = minute_step)

    return minimum, maximum
def _options(self, resource):
    """
    Helper function to retrieve the current options for this
    filter widget

    Args:
        resource: the CRUDResource

    Returns:
        tuple (minimum, maximum) of datetime values, each padded
        by 5 minutes so the extreme values remain selectable
    """

    query = resource.get_query()
    rfilter = resource.rfilter
    if rfilter:
        join = rfilter.get_joins()
        left = rfilter.get_joins(left = True)
    else:
        join = left = None

    fields = self.field
    if type(fields) is list:
        # Separate start/end fields
        srfield = S3ResourceField(resource, fields[0])
        # BUGFIX: was fields[0], which made the end-field identical
        # to the start-field and ignored the actual end field
        erfield = S3ResourceField(resource, fields[1])

        # Include field joins (if fields are in joined tables)
        sjoins = srfield.join
        for tname in sjoins:
            query &= sjoins[tname]
        ejoins = erfield.join
        for tname in ejoins:
            if tname not in sjoins:
                query &= ejoins[tname]

        start_field = srfield.field
        end_field = erfield.field
        row = current.db(query).select(start_field.min(),
                                       start_field.max(),
                                       end_field.max(),
                                       join = join,
                                       left = left,
                                       ).first()
        minimum = row[start_field.min()]
        maximum = row[start_field.max()]
        end_max = row[end_field.max()]
        if end_max:
            # The overall maximum is the later of start-max and end-max
            maximum = max(maximum, end_max)
    else:
        # Single filter field
        rfield = S3ResourceField(resource, fields)

        # Include field joins (if field is in joined table)
        joins = rfield.join
        for tname in joins:
            query &= joins[tname]

        field = rfield.field
        row = current.db(query).select(field.min(),
                                       field.max(),
                                       join = join,
                                       left = left,
                                       ).first()
        minimum = row[field.min()]
        maximum = row[field.max()]

    # Pad the extremes by one widget step (5 minutes) so that
    # the extreme values themselves remain selectable
    minute_step = 5
    timedelta = datetime.timedelta
    if minimum:
        minimum -= timedelta(minutes = minute_step)
    if maximum:
        maximum += timedelta(minutes = minute_step)

    return minimum, maximum
Python
def _auto_range(self):
    """
    Whether to automatically determine minimum/maximum selectable
    dates; deployment setting with per-widget override option
    "auto_range"

    Returns:
        bool
    """

    setting = self.opts.get("auto_range")
    if setting is not None:
        # Per-widget override
        return setting
    # Not specified for widget => apply global setting
    return current.deployment_settings.get_search_dates_auto_range()
def _auto_range(self):
    """
    Whether to automatically determine minimum/maximum selectable
    dates; deployment setting with per-widget override option
    "auto_range"

    Returns:
        bool
    """

    setting = self.opts.get("auto_range")
    if setting is not None:
        # Per-widget override
        return setting
    # Not specified for widget => apply global setting
    return current.deployment_settings.get_search_dates_auto_range()
Python
def _format_value(values, variable, timepicker=True):
    """
    Format a selected value in local format as expected by
    the calendar widget

    Args:
        values: the selected values as dict {variable: value}
        variable: the relevant variable
        timepicker: whether the widget uses a time picker

    Returns:
        the formatted value as str
    """

    value = values.get(variable)
    if type(value) is list:
        # Multiple values => use the first one (empty list counts as none)
        value = value[0] if value else None

    # Widget expects a string in local calendar and format
    if isinstance(value, str):
        # URL filter or filter default come as string in
        # Gregorian calendar and ISO format => convert into
        # a datetime
        try:
            dt = s3_decode_iso_datetime(value)
        except ValueError:
            dt = None
    else:
        # Assume datetime (or None)
        dt = value

    if not dt:
        return None

    represent = S3DateTime.datetime_represent if timepicker else \
                S3DateTime.date_represent
    return represent(dt, utc=False)
def _format_value(values, variable, timepicker=True):
    """
    Format a selected value in local format as expected by
    the calendar widget

    Args:
        values: the selected values as dict {variable: value}
        variable: the relevant variable
        timepicker: whether the widget uses a time picker

    Returns:
        the formatted value as str
    """

    value = values.get(variable)
    if type(value) is list:
        # Multiple values => use the first one (empty list counts as none)
        value = value[0] if value else None

    # Widget expects a string in local calendar and format
    if isinstance(value, str):
        # URL filter or filter default come as string in
        # Gregorian calendar and ISO format => convert into
        # a datetime
        try:
            dt = s3_decode_iso_datetime(value)
        except ValueError:
            dt = None
    else:
        # Assume datetime (or None)
        dt = value

    if not dt:
        return None

    represent = S3DateTime.datetime_represent if timepicker else \
                S3DateTime.date_represent
    return represent(dt, utc=False)
Python
def widget(self, resource, values):
    """
    Render this widget as HTML helper object(s)

    Args:
        resource: the resource
        values: the search values from the URL query
    """

    T = current.T

    css_base = self.css_base

    attr = self.attr
    css = attr.get("class")
    attr["_class"] = "%s %s" % (css, css_base) if css else css_base

    input_class = "%s-%s" % (css_base, "input")
    input_labels = self.input_labels

    container = DIV()

    widget_id = attr["_id"]
    selector = self.selector

    opts = self.opts
    lower = opts.get("minimum", 0)
    upper = opts.get("maximum", 120)

    for operator in self.operator:

        input_id = "%s-%s" % (widget_id, operator)

        # Selectable options, with an empty choice first
        choices = [OPTION("", value="")] + \
                  [OPTION("%s" % i, value=i) for i in range(lower, upper + 1)]

        # Input element
        input_box = SELECT(choices,
                           _id = input_id,
                           _class = input_class,
                           )

        variable = self._variable(selector, operator)

        # Populate with the value, if given
        # (if the user has not set any of the limits, values contains [])
        value = values.get(variable)
        if value not in (None, []):
            if type(value) is list:
                value = value[0]
            input_box["_value"] = value
            input_box["value"] = value

        label = input_labels[operator]
        if label:
            label = DIV(LABEL("%s:" % T(input_labels[operator]),
                              _for = input_id,
                              ),
                        _class = "age-filter-label",
                        )

        container.append(DIV(label,
                             DIV(input_box,
                                 _class = "age-filter-widget",
                                 ),
                             _class = "range-filter-field",
                             ))

    container.append(DIV(LABEL(T("Years")),
                         _class = "age-filter-unit",
                         ))

    return container
def widget(self, resource, values):
    """
    Render this widget as HTML helper object(s)

    Args:
        resource: the resource
        values: the search values from the URL query
    """

    T = current.T

    css_base = self.css_base

    attr = self.attr
    css = attr.get("class")
    attr["_class"] = "%s %s" % (css, css_base) if css else css_base

    input_class = "%s-%s" % (css_base, "input")
    input_labels = self.input_labels

    container = DIV()

    widget_id = attr["_id"]
    selector = self.selector

    opts = self.opts
    lower = opts.get("minimum", 0)
    upper = opts.get("maximum", 120)

    for operator in self.operator:

        input_id = "%s-%s" % (widget_id, operator)

        # Selectable options, with an empty choice first
        choices = [OPTION("", value="")] + \
                  [OPTION("%s" % i, value=i) for i in range(lower, upper + 1)]

        # Input element
        input_box = SELECT(choices,
                           _id = input_id,
                           _class = input_class,
                           )

        variable = self._variable(selector, operator)

        # Populate with the value, if given
        # (if the user has not set any of the limits, values contains [])
        value = values.get(variable)
        if value not in (None, []):
            if type(value) is list:
                value = value[0]
            input_box["_value"] = value
            input_box["value"] = value

        label = input_labels[operator]
        if label:
            label = DIV(LABEL("%s:" % T(input_labels[operator]),
                              _for = input_id,
                              ),
                        _class = "age-filter-label",
                        )

        container.append(DIV(label,
                             DIV(input_box,
                                 _class = "age-filter-widget",
                                 ),
                             _class = "range-filter-field",
                             ))

    container.append(DIV(LABEL(T("Years")),
                         _class = "age-filter-unit",
                         ))

    return container
Python
def xml(self):
    """
    Render the Map
    - this is primarily done by inserting a lot of JavaScript
    - CSS loaded as-standard to avoid delays in page loading
    - HTML added in init() as a component
    """

    if not self.setup:
        result = self._setup()
        if result is None:
            if self.error_message:
                self.append(self.error_message)
                return super(MAP, self).xml()
            return ""

    # Add ExtJS
    # @ToDo: Do this conditionally on whether Ext UI is used
    s3_include_ext()

    dumps = json.dumps

    s3 = current.response.s3
    js_global = s3.js_global
    js_global_append = js_global.append

    # Collect i18n labels (deduplicated) into one js_global entry
    i18n_dict = self.i18n
    i18n = []
    i18n_append = i18n.append
    for key, val in i18n_dict.items():
        line = '''i18n.%s="%s"''' % (key, val)
        if line not in i18n:
            i18n_append(line)
    i18n = '''\n'''.join(i18n)
    if i18n not in js_global:
        js_global_append(i18n)

    # Collect S3.gis globals (deduplicated) into one js_global entry
    globals_dict = self.globals
    js_globals = []
    for key, val in globals_dict.items():
        line = '''S3.gis.%s=%s''' % (key, dumps(val, separators=JSONSEPARATORS))
        if line not in js_globals:
            js_globals.append(line)
    js_globals = '''\n'''.join(js_globals)
    if js_globals not in js_global:
        js_global_append(js_globals)

    # Underscore for Popup Templates
    s3_include_underscore()

    debug = s3.debug
    scripts = s3.scripts

    if self.opts.get("color_picker", False):
        if debug:
            script = URL(c="static", f="scripts/spectrum.js")
        else:
            script = URL(c="static", f="scripts/spectrum.min.js")
        if script not in scripts:
            scripts.append(script)

    if debug:
        script = URL(c="static", f="scripts/S3/s3.gis.loader.js")
    else:
        script = URL(c="static", f="scripts/S3/s3.gis.loader.min.js")
    if script not in scripts:
        scripts.append(script)

    callback = self.callback
    map_id = self.id
    options = self.options
    projection = options["projection"]
    try:
        options = dumps(options, separators=JSONSEPARATORS)
    except Exception as exception:
        current.log.error("Map %s failed to initialise" % map_id, exception)

    plugin_callbacks = '''\n'''.join(self.plugin_callbacks)
    if callback:
        if callback == "DEFAULT":
            if map_id == "default_map":
                callback = '''S3.gis.show_map(null,%s)''' % options
            else:
                callback = '''S3.gis.show_map(%s,%s)''' % (map_id, options)
        else:
            # Store options where they can be read by a later show_map()
            js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, options))
            script = URL(c="static", f="scripts/yepnope.1.5.4-min.js")
            if script not in scripts:
                scripts.append(script)
        if plugin_callbacks:
            callback = '''%s\n%s''' % (callback, plugin_callbacks)
        callback = '''function(){%s}''' % callback
    else:
        # Store options where they can be read by a later show_map()
        js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, options))
        if plugin_callbacks:
            callback = '''function(){%s}''' % plugin_callbacks
        else:
            callback = '''null'''

    loader = \
'''s3_gis_loadjs(%(debug)s,%(projection)s,%(callback)s,%(scripts)s)''' \
        % {"debug": "true" if debug else "false",
           "projection": projection,
           "callback": callback,
           "scripts": self.scripts,
           }
    jquery_ready = s3.jquery_ready
    if loader not in jquery_ready:
        jquery_ready.append(loader)

    # Return the HTML
    return super(MAP, self).xml()
def xml(self):
    """
    Render the Map
    - this is primarily done by inserting a lot of JavaScript
    - CSS loaded as-standard to avoid delays in page loading
    - HTML added in init() as a component
    """

    if not self.setup:
        result = self._setup()
        if result is None:
            if self.error_message:
                self.append(self.error_message)
                return super(MAP, self).xml()
            return ""

    # Add ExtJS
    # @ToDo: Do this conditionally on whether Ext UI is used
    s3_include_ext()

    dumps = json.dumps

    s3 = current.response.s3
    js_global = s3.js_global
    js_global_append = js_global.append

    # Collect i18n labels (deduplicated) into one js_global entry
    i18n_dict = self.i18n
    i18n = []
    i18n_append = i18n.append
    for key, val in i18n_dict.items():
        line = '''i18n.%s="%s"''' % (key, val)
        if line not in i18n:
            i18n_append(line)
    i18n = '''\n'''.join(i18n)
    if i18n not in js_global:
        js_global_append(i18n)

    # Collect S3.gis globals (deduplicated) into one js_global entry
    globals_dict = self.globals
    js_globals = []
    for key, val in globals_dict.items():
        line = '''S3.gis.%s=%s''' % (key, dumps(val, separators=JSONSEPARATORS))
        if line not in js_globals:
            js_globals.append(line)
    js_globals = '''\n'''.join(js_globals)
    if js_globals not in js_global:
        js_global_append(js_globals)

    # Underscore for Popup Templates
    s3_include_underscore()

    debug = s3.debug
    scripts = s3.scripts

    if self.opts.get("color_picker", False):
        if debug:
            script = URL(c="static", f="scripts/spectrum.js")
        else:
            script = URL(c="static", f="scripts/spectrum.min.js")
        if script not in scripts:
            scripts.append(script)

    if debug:
        script = URL(c="static", f="scripts/S3/s3.gis.loader.js")
    else:
        script = URL(c="static", f="scripts/S3/s3.gis.loader.min.js")
    if script not in scripts:
        scripts.append(script)

    callback = self.callback
    map_id = self.id
    options = self.options
    projection = options["projection"]
    try:
        options = dumps(options, separators=JSONSEPARATORS)
    except Exception as exception:
        current.log.error("Map %s failed to initialise" % map_id, exception)

    plugin_callbacks = '''\n'''.join(self.plugin_callbacks)
    if callback:
        if callback == "DEFAULT":
            if map_id == "default_map":
                callback = '''S3.gis.show_map(null,%s)''' % options
            else:
                callback = '''S3.gis.show_map(%s,%s)''' % (map_id, options)
        else:
            # Store options where they can be read by a later show_map()
            js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, options))
            script = URL(c="static", f="scripts/yepnope.1.5.4-min.js")
            if script not in scripts:
                scripts.append(script)
        if plugin_callbacks:
            callback = '''%s\n%s''' % (callback, plugin_callbacks)
        callback = '''function(){%s}''' % callback
    else:
        # Store options where they can be read by a later show_map()
        js_global_append('''S3.gis.options["%s"]=%s''' % (map_id, options))
        if plugin_callbacks:
            callback = '''function(){%s}''' % plugin_callbacks
        else:
            callback = '''null'''

    loader = \
'''s3_gis_loadjs(%(debug)s,%(projection)s,%(callback)s,%(scripts)s)''' \
        % {"debug": "true" if debug else "false",
           "projection": projection,
           "callback": callback,
           "scripts": self.scripts,
           }
    jquery_ready = s3.jquery_ready
    if loader not in jquery_ready:
        jquery_ready.append(loader)

    # Return the HTML
    return super(MAP, self).xml()
Python
def xml(self):
    """
    Render the Map
    - this is primarily done by inserting JavaScript
    """

    # Read Map Config
    options = self._options()

    if options is None:
        # No Map Config: Just show error in the DIV
        auth = current.auth

        if auth.s3_has_permission("create", "gis_hierarchy"):
            error_message = DIV(_class = "mapError")
            # Deliberately not T() to save unnecessary load on translators
            error_message.append("Map cannot display without GIS config!")
            # BUGFIX: removed duplicated word ("can can")
            error_message.append(XML(" (You can create one "))
            error_message.append(A("here", _href=URL(c="gis", f="config")))
            error_message.append(")")
        else:
            error_message = DIV(
                "Map cannot display without GIS config!",
                # Deliberately not T() to save unnecessary load on translators
                _class="mapError"
                )
        self.components = [error_message]
        return super(MAP2, self).xml()

    map_id = self.opts.get("id", "default_map")

    options = json.dumps(options, separators=JSONSEPARATORS)

    # Insert the JavaScript
    appname = current.request.application
    s3 = current.response.s3

    # Underscore for Popup Templates
    s3_include_underscore()

    # OpenLayers
    script = "/%s/static/scripts/gis/ol.js" % appname
    if script not in s3.scripts:
        s3.scripts.append(script)

    # S3 GIS
    if s3.debug:
        script = "/%s/static/scripts/S3/s3.ui.gis.js" % appname
    else:
        script = "/%s/static/scripts/S3/s3.ui.gis.min.js" % appname
    if script not in s3.scripts_modules:
        s3.scripts_modules.append(script)

    script = '''$('#%(map_id)s').showMap(%(options)s)''' % {"map_id": map_id,
                                                            "options": options,
                                                            }
    s3.jquery_ready.append(script)

    # Return the HTML
    return super(MAP2, self).xml()
def xml(self):
    """
    Render the Map
    - this is primarily done by inserting JavaScript
    """

    # Read Map Config
    options = self._options()

    if options is None:
        # No Map Config: Just show error in the DIV
        auth = current.auth

        if auth.s3_has_permission("create", "gis_hierarchy"):
            error_message = DIV(_class = "mapError")
            # Deliberately not T() to save unnecessary load on translators
            error_message.append("Map cannot display without GIS config!")
            # BUGFIX: removed duplicated word ("can can")
            error_message.append(XML(" (You can create one "))
            error_message.append(A("here", _href=URL(c="gis", f="config")))
            error_message.append(")")
        else:
            error_message = DIV(
                "Map cannot display without GIS config!",
                # Deliberately not T() to save unnecessary load on translators
                _class="mapError"
                )
        self.components = [error_message]
        return super(MAP2, self).xml()

    map_id = self.opts.get("id", "default_map")

    options = json.dumps(options, separators=JSONSEPARATORS)

    # Insert the JavaScript
    appname = current.request.application
    s3 = current.response.s3

    # Underscore for Popup Templates
    s3_include_underscore()

    # OpenLayers
    script = "/%s/static/scripts/gis/ol.js" % appname
    if script not in s3.scripts:
        s3.scripts.append(script)

    # S3 GIS
    if s3.debug:
        script = "/%s/static/scripts/S3/s3.ui.gis.js" % appname
    else:
        script = "/%s/static/scripts/S3/s3.ui.gis.min.js" % appname
    if script not in s3.scripts_modules:
        s3.scripts_modules.append(script)

    script = '''$('#%(map_id)s').showMap(%(options)s)''' % {"map_id": map_id,
                                                            "options": options,
                                                            }
    s3.jquery_ready.append(script)

    # Return the HTML
    return super(MAP2, self).xml()
Python
def addFeatures(features):
    """
    Add Simple Features to the Draft layer
    - used by S3LocationSelectorWidget

    @todo: obsolete?
    """

    simplify = GIS.simplify

    collected = []
    for feature in features:
        geojson = simplify(feature, output="geojson")
        if not geojson:
            # Nothing usable for this feature
            continue
        collected.append({"type": "Feature",
                          "geometry": json.loads(geojson),
                          })
    return collected
def addFeatures(features):
    """
    Add Simple Features to the Draft layer
    - used by S3LocationSelectorWidget

    @todo: obsolete?
    """

    simplify = GIS.simplify

    collected = []
    for feature in features:
        geojson = simplify(feature, output="geojson")
        if not geojson:
            # Nothing usable for this feature
            continue
        collected.append({"type": "Feature",
                          "geometry": json.loads(geojson),
                          })
    return collected
Python
def addFeatureQueries(feature_queries):
    """
    Add Feature Queries to the map
    - These can be Rows or Storage()

    Note:
        These considerations need to be taken care of before arriving here:
        - Security of data
        - Localisation of name/popup_label
    """

    db = current.db
    s3db = current.s3db
    cache = s3db.cache
    request = current.request
    controller = request.controller
    function = request.function
    fqtable = s3db.gis_feature_query
    mtable = s3db.gis_marker

    auth = current.auth
    auth_user = auth.user
    # BUGFIX: bind s3_make_session_owner unconditionally - it is needed
    # precisely when the user is anonymous (created_by is None), but was
    # previously only assigned for authenticated users, causing a
    # NameError for anonymous feature queries
    s3_make_session_owner = auth.s3_make_session_owner
    if auth_user:
        created_by = auth_user.id
    else:
        # Anonymous
        # @ToDo: A deployment with many Anonymous Feature Queries being
        #        accessed will need to change this design - e.g. use session ID instead
        created_by = None

    layers_feature_query = []
    append = layers_feature_query.append
    for layer in feature_queries:
        name = str(layer["name"])
        _layer = {"name": name}
        name_safe = re.sub(r"\W", "_", name)

        # Lat/Lon via Join or direct?
        try:
            join = hasattr(layer["query"][0].gis_location, "lat")
        except (AttributeError, KeyError):
            # Invalid layer
            continue

        # Push the Features into a temporary table in order to have them accessible via GeoJSON
        # @ToDo: Maintenance Script to clean out old entries (> 24 hours?)
        cname = "%s_%s_%s" % (name_safe, controller, function)
        # Clear old records
        query = (fqtable.name == cname) & \
                (fqtable.created_by == created_by)
        db(query).delete()
        for row in layer["query"]:
            rowdict = {"name": cname}
            if join:
                rowdict["lat"] = row.gis_location.lat
                rowdict["lon"] = row.gis_location.lon
            else:
                rowdict["lat"] = row["lat"]
                rowdict["lon"] = row["lon"]
            if "popup_url" in row:
                rowdict["popup_url"] = row["popup_url"]
            if "popup_label" in row:
                rowdict["popup_label"] = row["popup_label"]
            if "marker" in row:
                rowdict["marker_url"] = URL(c="static", f="img",
                                            args=["markers", row["marker"].image])
                rowdict["marker_height"] = row["marker"].height
                rowdict["marker_width"] = row["marker"].width
            else:
                if "marker_url" in row:
                    rowdict["marker_url"] = row["marker_url"]
                if "marker_height" in row:
                    rowdict["marker_height"] = row["marker_height"]
                if "marker_width" in row:
                    rowdict["marker_width"] = row["marker_width"]
            if "shape" in row:
                rowdict["shape"] = row["shape"]
            if "size" in row:
                rowdict["size"] = row["size"]
            if "colour" in row:
                rowdict["colour"] = row["colour"]
            if "opacity" in row:
                rowdict["opacity"] = row["opacity"]
            record_id = fqtable.insert(**rowdict)
            if not created_by:
                # Make the anonymous record owned by this session
                s3_make_session_owner(fqtable, record_id)

        # URL to retrieve the data
        url = "%s.geojson?feature_query.name=%s&feature_query.created_by=%s" % \
              (URL(c="gis", f="feature_query"),
               cname,
               created_by)
        _layer["url"] = url

        if "active" in layer and not layer["active"]:
            _layer["visibility"] = False

        if "marker" in layer:
            # per-Layer Marker
            marker = layer["marker"]
            if isinstance(marker, int):
                # integer (marker_id) not row
                marker = db(mtable.id == marker).select(mtable.image,
                                                        mtable.height,
                                                        mtable.width,
                                                        limitby = (0, 1),
                                                        cache = cache
                                                        ).first()
            if marker:
                # @ToDo: Single option as Marker.as_json_dict()
                _layer["marker_url"] = marker["image"]
                _layer["marker_height"] = marker["height"]
                _layer["marker_width"] = marker["width"]

        if "opacity" in layer and layer["opacity"] != 1:
            _layer["opacity"] = "%.1f" % layer["opacity"]
        if "cluster_attribute" in layer and \
           layer["cluster_attribute"] != CLUSTER_ATTRIBUTE:
            _layer["cluster_attribute"] = layer["cluster_attribute"]
        if "cluster_distance" in layer and \
           layer["cluster_distance"] != CLUSTER_DISTANCE:
            _layer["cluster_distance"] = layer["cluster_distance"]
        if "cluster_threshold" in layer and \
           layer["cluster_threshold"] != CLUSTER_THRESHOLD:
            _layer["cluster_threshold"] = layer["cluster_threshold"]
        append(_layer)

    return layers_feature_query
def addFeatureQueries(feature_queries):
    """
    Add Feature Queries to the map
    - These can be Rows or Storage()

    Note:
        These considerations need to be taken care of before arriving here:
        - Security of data
        - Localisation of name/popup_label
    """

    db = current.db
    s3db = current.s3db
    cache = s3db.cache
    request = current.request
    controller = request.controller
    function = request.function
    fqtable = s3db.gis_feature_query
    mtable = s3db.gis_marker

    auth = current.auth
    auth_user = auth.user
    # BUGFIX: bind s3_make_session_owner unconditionally - it is needed
    # precisely when the user is anonymous (created_by is None), but was
    # previously only assigned for authenticated users, causing a
    # NameError for anonymous feature queries
    s3_make_session_owner = auth.s3_make_session_owner
    if auth_user:
        created_by = auth_user.id
    else:
        # Anonymous
        # @ToDo: A deployment with many Anonymous Feature Queries being
        #        accessed will need to change this design - e.g. use session ID instead
        created_by = None

    layers_feature_query = []
    append = layers_feature_query.append
    for layer in feature_queries:
        name = str(layer["name"])
        _layer = {"name": name}
        name_safe = re.sub(r"\W", "_", name)

        # Lat/Lon via Join or direct?
        try:
            join = hasattr(layer["query"][0].gis_location, "lat")
        except (AttributeError, KeyError):
            # Invalid layer
            continue

        # Push the Features into a temporary table in order to have them accessible via GeoJSON
        # @ToDo: Maintenance Script to clean out old entries (> 24 hours?)
        cname = "%s_%s_%s" % (name_safe, controller, function)
        # Clear old records
        query = (fqtable.name == cname) & \
                (fqtable.created_by == created_by)
        db(query).delete()
        for row in layer["query"]:
            rowdict = {"name": cname}
            if join:
                rowdict["lat"] = row.gis_location.lat
                rowdict["lon"] = row.gis_location.lon
            else:
                rowdict["lat"] = row["lat"]
                rowdict["lon"] = row["lon"]
            if "popup_url" in row:
                rowdict["popup_url"] = row["popup_url"]
            if "popup_label" in row:
                rowdict["popup_label"] = row["popup_label"]
            if "marker" in row:
                rowdict["marker_url"] = URL(c="static", f="img",
                                            args=["markers", row["marker"].image])
                rowdict["marker_height"] = row["marker"].height
                rowdict["marker_width"] = row["marker"].width
            else:
                if "marker_url" in row:
                    rowdict["marker_url"] = row["marker_url"]
                if "marker_height" in row:
                    rowdict["marker_height"] = row["marker_height"]
                if "marker_width" in row:
                    rowdict["marker_width"] = row["marker_width"]
            if "shape" in row:
                rowdict["shape"] = row["shape"]
            if "size" in row:
                rowdict["size"] = row["size"]
            if "colour" in row:
                rowdict["colour"] = row["colour"]
            if "opacity" in row:
                rowdict["opacity"] = row["opacity"]
            record_id = fqtable.insert(**rowdict)
            if not created_by:
                # Make the anonymous record owned by this session
                s3_make_session_owner(fqtable, record_id)

        # URL to retrieve the data
        url = "%s.geojson?feature_query.name=%s&feature_query.created_by=%s" % \
              (URL(c="gis", f="feature_query"),
               cname,
               created_by)
        _layer["url"] = url

        if "active" in layer and not layer["active"]:
            _layer["visibility"] = False

        if "marker" in layer:
            # per-Layer Marker
            marker = layer["marker"]
            if isinstance(marker, int):
                # integer (marker_id) not row
                marker = db(mtable.id == marker).select(mtable.image,
                                                        mtable.height,
                                                        mtable.width,
                                                        limitby = (0, 1),
                                                        cache = cache
                                                        ).first()
            if marker:
                # @ToDo: Single option as Marker.as_json_dict()
                _layer["marker_url"] = marker["image"]
                _layer["marker_height"] = marker["height"]
                _layer["marker_width"] = marker["width"]

        if "opacity" in layer and layer["opacity"] != 1:
            _layer["opacity"] = "%.1f" % layer["opacity"]
        if "cluster_attribute" in layer and \
           layer["cluster_attribute"] != CLUSTER_ATTRIBUTE:
            _layer["cluster_attribute"] = layer["cluster_attribute"]
        if "cluster_distance" in layer and \
           layer["cluster_distance"] != CLUSTER_DISTANCE:
            _layer["cluster_distance"] = layer["cluster_distance"]
        if "cluster_threshold" in layer and \
           layer["cluster_threshold"] != CLUSTER_THRESHOLD:
            _layer["cluster_threshold"] = layer["cluster_threshold"]
        append(_layer)

    return layers_feature_query
Python
def data_element(self, variable):
    """
    Prototype method to construct the hidden element that holds the
    URL query term corresponding to an input element in the widget.

    Args:
        variable: the URL query variable
    """

    # Multiple variables are joined into a single query string
    value = "&".join(variable) if type(variable) is list else variable

    element_id = "%s-data" % self.attr["_id"]
    return INPUT(_type = "hidden",
                 _id = element_id,
                 _class = "filter-widget-data %s-data" % self.css_base,
                 _value = value,
                 )
def data_element(self, variable):
    """
    Prototype method to construct the hidden element that holds the
    URL query term corresponding to an input element in the widget.

    Args:
        variable: the URL query variable
    """

    # Multiple variables are joined into a single query string
    value = "&".join(variable) if type(variable) is list else variable

    element_id = "%s-data" % self.attr["_id"]
    return INPUT(_type = "hidden",
                 _id = element_id,
                 _class = "filter-widget-data %s-data" % self.css_base,
                 _value = value,
                 )
Python
def _operator(cls, get_vars, selector): """ Helper method to get the operators from the URL query Args: get_vars: the GET vars (a dict) selector: field selector Returns: query operator - None, str or list """ variables = ["%s__%s" % (selector, op) for op in cls.alternatives] slen = len(selector) + 2 operators = [k[slen:] for k in get_vars if k in variables] if not operators: return None elif len(operators) == 1: return operators[0] else: return operators
def _operator(cls, get_vars, selector): """ Helper method to get the operators from the URL query Args: get_vars: the GET vars (a dict) selector: field selector Returns: query operator - None, str or list """ variables = ["%s__%s" % (selector, op) for op in cls.alternatives] slen = len(selector) + 2 operators = [k[slen:] for k in get_vars if k in variables] if not operators: return None elif len(operators) == 1: return operators[0] else: return operators
Python
def html(self, resource, get_vars=None, target=None, alias=None):
    """
    Render this filter form as HTML form.

    Args:
        resource: the CRUDResource
        get_vars: the request GET vars (URL query dict)
        target: the HTML element ID of the target object for
                this filter form (e.g. a datatable)
        alias: the resource alias to use in widgets

    Returns:
        a FORM
    """

    attr = self.attr
    form_id = attr.get("_id") or "filter-form"
    attr["_id"] = form_id

    # Prevent issues with Webkit-based browsers & Back buttons
    attr["_autocomplete"] = "off"

    opts_get = self.opts.get
    settings = current.deployment_settings

    # Form style
    formstyle = opts_get("formstyle", None)
    if not formstyle:
        formstyle = settings.get_ui_filter_formstyle()

    # Filter widgets
    rows = self._render_widgets(resource,
                                get_vars = get_vars or {},
                                alias = alias,
                                formstyle = formstyle,
                                )

    # Filter Manager (load/apply/save filters)
    fm = settings.get_search_filter_manager()
    if fm and opts_get("filter_manager", resource is not None):
        filter_manager = self._render_filters(resource, form_id)
    else:
        filter_manager = None

    # Other filter form controls
    controls = self._render_controls(resource, filter_manager)
    if controls:
        rows.append(formstyle(None, "", controls, ""))

    # Submit elements
    ajax = opts_get("ajax", False)
    submit = opts_get("submit", False)
    if submit:
        # Auto-submit?
        auto_submit = settings.get_ui_filter_auto_submit()
        if auto_submit and opts_get("auto_submit", True):
            script = '''S3.search.filterFormAutoSubmit('%s',%s)''' % \
                     (form_id, auto_submit)
            current.response.s3.jquery_ready.append(script)

        # Custom label and class
        _class = None
        if submit is True:
            label = current.T("Search")
        elif isinstance(submit, (list, tuple)):
            label, _class = submit
        else:
            label = submit

        # Submit button
        submit_button = INPUT(_type = "button",
                              _value = label,
                              _class = "filter-submit",
                              )
        if _class:
            submit_button.add_class(_class)

        # Where to request filtered data from:
        submit_url = opts_get("url", URL(vars={}))

        # Where to request updated options from:
        ajax_url = opts_get("ajaxurl", URL(args=["filter.options"], vars={}))

        # Submit row elements
        submit = TAG[""](submit_button,
                         INPUT(_type = "hidden",
                               _class = "filter-ajax-url",
                               _value = ajax_url,
                               ),
                         INPUT(_type = "hidden",
                               _class = "filter-submit-url",
                               _value = submit_url,
                               ))
        if ajax and target:
            submit.append(INPUT(_type = "hidden",
                                _class = "filter-submit-target",
                                _value = target,
                                ))

        # Append submit row (hidden when auto-submit is active)
        submit_row = formstyle(None, "", submit, "")
        if auto_submit and hasattr(submit_row, "add_class"):
            submit_row.add_class("hide")
        rows.append(submit_row)

    # Filter Manager (load/apply/save filters)
    if filter_manager:
        fmrow = formstyle(None, "", filter_manager, "")
        if hasattr(fmrow, "add_class"):
            fmrow.add_class("hide filter-manager-row")
        rows.append(fmrow)

    # Adapt to formstyle: render a TABLE only if formstyle returns TRs
    if rows:
        elements = rows[0]
        if not isinstance(elements, (list, tuple)):
            elements = elements.elements()
        n = len(elements)
        if n > 0 and elements[0].tag == "tr" or \
           n > 1 and elements[0].tag == "" and elements[1].tag == "tr":
            form = FORM(TABLE(TBODY(rows)), **attr)
        else:
            form = FORM(DIV(rows), **attr)
        if settings.ui.formstyle == "bootstrap":
            # We need to amend the HTML markup to support this CSS framework
            form.add_class("form-horizontal")
        form.add_class("filter-form")
        if ajax:
            form.add_class("filter-ajax")
    else:
        return ""

    # Put a copy of formstyle into the form for access by the view
    form.formstyle = formstyle

    return form
def html(self, resource, get_vars=None, target=None, alias=None):
    """
    Render this filter form as HTML form.

    Args:
        resource: the CRUDResource
        get_vars: the request GET vars (URL query dict)
        target: the HTML element ID of the target object for this
                filter form (e.g. a datatable)
        alias: the resource alias to use in widgets

    Returns:
        a FORM, or "" if there are no filter widgets to render
    """
    attr = self.attr

    # Ensure the form has an element ID (required by client-side scripts)
    form_id = attr.get("_id")
    if not form_id:
        form_id = "filter-form"
    attr["_id"] = form_id

    # Prevent issues with Webkit-based browsers & Back buttons
    attr["_autocomplete"] = "off"

    opts_get = self.opts.get
    settings = current.deployment_settings

    # Form style (widget option overrides the deployment setting)
    formstyle = opts_get("formstyle", None)
    if not formstyle:
        formstyle = settings.get_ui_filter_formstyle()

    # Filter widgets (one row per widget)
    rows = self._render_widgets(resource,
                                get_vars = get_vars or {},
                                alias = alias,
                                formstyle = formstyle,
                                )

    # Filter Manager (load/apply/save filters); opts may suppress it,
    # and it requires a resource by default
    fm = settings.get_search_filter_manager()
    if fm and opts_get("filter_manager", resource is not None):
        filter_manager = self._render_filters(resource, form_id)
    else:
        filter_manager = None

    # Other filter form controls
    controls = self._render_controls(resource, filter_manager)
    if controls:
        rows.append(formstyle(None, "", controls, ""))

    # Submit elements
    ajax = opts_get("ajax", False)
    submit = opts_get("submit", False)
    if submit:
        # Auto-submit?
        auto_submit = settings.get_ui_filter_auto_submit()
        if auto_submit and opts_get("auto_submit", True):
            script = '''S3.search.filterFormAutoSubmit('%s',%s)''' % \
                     (form_id, auto_submit)
            current.response.s3.jquery_ready.append(script)

        # Custom label and class:
        # submit can be True, a label, or a (label, css-class) tuple
        _class = None
        if submit is True:
            label = current.T("Search")
        elif isinstance(submit, (list, tuple)):
            label, _class = submit
        else:
            label = submit

        # Submit button
        submit_button = INPUT(_type = "button",
                              _value = label,
                              _class = "filter-submit",
                              )
        if _class:
            submit_button.add_class(_class)

        # Where to request filtered data from:
        submit_url = opts_get("url", URL(vars={}))

        # Where to request updated options from:
        ajax_url = opts_get("ajaxurl", URL(args=["filter.options"], vars={}))

        # Submit row elements (URLs travel as hidden INPUTs for the
        # client-side filter script)
        submit = TAG[""](submit_button,
                         INPUT(_type = "hidden",
                               _class = "filter-ajax-url",
                               _value = ajax_url,
                               ),
                         INPUT(_type = "hidden",
                               _class = "filter-submit-url",
                               _value = submit_url,
                               ))
        if ajax and target:
            submit.append(INPUT(_type = "hidden",
                                _class = "filter-submit-target",
                                _value = target,
                                ))

        # Append submit row (hidden when auto-submitting)
        submit_row = formstyle(None, "", submit, "")
        if auto_submit and hasattr(submit_row, "add_class"):
            submit_row.add_class("hide")
        rows.append(submit_row)

    # Filter Manager (load/apply/save filters), initially hidden
    if filter_manager:
        fmrow = formstyle(None, "", filter_manager, "")
        if hasattr(fmrow, "add_class"):
            fmrow.add_class("hide filter-manager-row")
        rows.append(fmrow)

    # Adapt to formstyle: render a TABLE only if formstyle returns TRs
    if rows:
        elements = rows[0]
        if not isinstance(elements, (list, tuple)):
            elements = elements.elements()
        n = len(elements)
        # TRs either at top level or wrapped in an empty TAG
        if n > 0 and elements[0].tag == "tr" or \
           n > 1 and elements[0].tag == "" and elements[1].tag == "tr":
            form = FORM(TABLE(TBODY(rows)), **attr)
        else:
            form = FORM(DIV(rows), **attr)
        if settings.ui.formstyle == "bootstrap":
            # We need to amend the HTML markup to support this CSS framework
            form.add_class("form-horizontal")
        form.add_class("filter-form")
        if ajax:
            form.add_class("filter-ajax")
    else:
        # Nothing to render
        return ""

    # Put a copy of formstyle into the form for access by the view
    form.formstyle = formstyle

    return form
Python
def fields(self, resource, get_vars=None, alias=None):
    """
    Render the filter widgets without FORM wrapper, e.g. to
    embed them as fieldset in another form.

    Args:
        resource: the CRUDResource
        get_vars: the request GET vars (URL query dict)
        alias: the resource alias to use in widgets

    Returns:
        a TABLE (if the formstyle produces TRs), otherwise a DIV
        with the rendered filter widgets
    """
    form_id = self.attr.get("_id") or "filter-form"

    opts_get = self.opts.get
    settings = current.deployment_settings

    # Form style (widget option overrides the deployment setting)
    formstyle = opts_get("formstyle", None)
    if not formstyle:
        formstyle = settings.get_ui_filter_formstyle()

    # Filter widgets (one row per widget)
    rows = self._render_widgets(resource,
                                get_vars = get_vars,
                                alias = alias,
                                formstyle = formstyle,
                                )

    # Filter Manager (load/apply/save filters)
    fm = settings.get_search_filter_manager()
    if fm and opts_get("filter_manager", resource is not None):
        filter_manager = self._render_filters(resource, form_id)
    else:
        filter_manager = None

    # Other filter form controls
    controls = self._render_controls(resource, filter_manager)
    if controls:
        rows.append(formstyle(None, "", controls, ""))

    # Filter Manager row, initially hidden
    if filter_manager:
        fmrow = formstyle(None, "", filter_manager, "")
        if hasattr(fmrow, "add_class"):
            fmrow.add_class("hide filter-manager-row")
        rows.append(fmrow)

    # Adapt to formstyle: only render a TABLE if formstyle returns TRs
    if rows:
        elements = rows[0]
        if not isinstance(elements, (list, tuple)):
            elements = elements.elements()
        n = len(elements)
        if n > 0 and elements[0].tag == "tr" or \
           n > 1 and elements[0].tag == "" and elements[1].tag == "tr":
            return TABLE(TBODY(rows))
        return DIV(rows)

    # No widgets to render; an empty DIV is safe to embed
    # (previously this raised UnboundLocalError)
    return DIV()
Python
def _render_controls(self, resource, filter_manager=None):
    """
    Render optional additional filter form controls: advanced
    options toggle, clear filters.

    Args:
        resource: the resource
        filter_manager: the filter manager widget

    Returns:
        a DIV with the controls, or None if there are none
    """
    T = current.T
    controls = []
    opts = self.opts

    advanced = opts.get("advanced", False)
    if advanced:
        _class = "filter-advanced"
        # Default toggle labels; an (on, off[, extra-class]) tuple
        # may override them
        label_off = T("Less Options")
        if advanced is True:
            label = T("More Options")
        elif isinstance(advanced, (list, tuple)):
            # NOTE(fix): second tuple element is the "off" label
            # (was assigned to label, clobbering the "on" label),
            # and len() must wrap the sequence, not the comparison
            label = advanced[0]
            label_off = advanced[1]
            if len(advanced) > 2:
                _class = "%s %s" % (advanced[2], _class)
        else:
            label = advanced
        advanced = A(SPAN(label,
                          data = {"on": label,
                                  "off": label_off,
                                  },
                          _class = "filter-advanced-label",
                          ),
                     ICON("down"),
                     ICON("up", _style = "display:none"),
                     _class = _class,
                     )
        controls.append(advanced)

    clear = opts.get("clear", True)
    if clear:
        _class = "filter-clear"
        # clear can be True, a label, or a (label, css-class) tuple
        if clear is True:
            label = T("Clear Filter")
        elif isinstance(clear, (list, tuple)):
            label = clear[0]
            _class = "%s %s" % (clear[1], _class)
        else:
            label = clear
        clear = A(label, _class=_class)
        clear.add_class("action-lnk")
        controls.append(clear)

    if filter_manager:
        show_fm = A(T("Saved Filters"),
                    _class = "show-filter-manager action-lnk",
                    )
        controls.append(show_fm)

    return DIV(controls, _class="filter-controls") if controls else None
Python
def apply_filter_defaults(request, resource):
    """
    Add default filters to resource, to be called on a multi-record
    view when a filter form is rendered the first time and before
    the view elements get processed; can be overridden in request
    URL with ?default_filters=0

    Args:
        request: the request
        resource: the resource

    Returns:
        dict with default filters (URL vars)
    """
    default_filters = {}

    get_vars = request.get_vars
    if get_vars.get("default_filters") == "0":
        # Skip default filters (e.g. link in report)
        return default_filters

    # Do we have filter defaults for this table?
    tablename = resource.tablename
    filter_defaults = current.response.s3.get("filter_defaults")
    if filter_defaults:
        table_defaults = filter_defaults.get(tablename)
    else:
        table_defaults = None

    # NOTE(fix): guard against a missing filter_widgets config
    # (previously iterating None raised TypeError)
    filter_widgets = resource.get_config("filter_widgets")
    for filter_widget in (filter_widgets or ()):

        # Do not apply defaults of hidden widgets because they are
        # not visible to the user
        # NOTE(fix): check the widget before accessing its opts
        if not filter_widget or filter_widget.opts.get("hidden"):
            continue
        widget_opts = filter_widget.opts

        # Skip widget if there are no defaults
        if table_defaults is None and "default" not in widget_opts:
            continue

        # Use alias in selectors if looking at a component
        filter_widget.alias = resource.alias if resource.parent else None

        # Get all widget variables
        variables = filter_widget.variable(resource, get_vars)
        if type(variables) is not list:
            variables = [variables]

        for variable in variables:

            # Actual filter in get_vars?
            values = filter_widget._values(get_vars, variable)
            if values:
                filter_widget.values[variable] = values
                continue

            # Parse the variable
            selector, operator, invert = S3URLQuery.parse_key(variable)
            if invert:
                operator = "%s!" % operator

            applicable_defaults = None

            if table_defaults and selector in table_defaults:
                # Table default:
                # {selector: {op: value}}, {selector: value}, or {selector: callback}
                applicable_defaults = table_defaults[selector]
            else:
                # Widget default?
                widget_default = widget_opts.get("default", DEFAULT)
                if isinstance(widget_default, dict) and variable in widget_default:
                    # {variable: value}, or {variable: callback}
                    applicable_defaults = {operator: widget_default[variable]}
                elif widget_default is not DEFAULT:
                    # {op: value}, value, or callback
                    applicable_defaults = widget_default

            if callable(applicable_defaults):
                applicable_defaults = applicable_defaults(selector,
                                                          tablename = tablename,
                                                          )
            if isinstance(applicable_defaults, dict):
                default = applicable_defaults.get(operator)
            elif operator in (None, "belongs", "eq", "ne", "like"):
                default = applicable_defaults
            else:
                default = None
            if default is None:
                # Ignore (configure [None] to filter for None)
                continue

            if not isinstance(default, list):
                default = [default]
            filter_widget.values[variable] = [str(v) if v is None else v
                                              for v in default]
            default_filters[variable] = ",".join(s3_str(v) for v in default)

    # Apply to resource
    queries = S3URLQuery.parse(resource, default_filters)
    add_filter = resource.add_filter
    for alias in queries:
        for q in queries[alias]:
            add_filter(q)

    return default_filters
Python
def widget(self, resource, values):
    """
    Render this widget as HTML helper object(s)

    Args:
        resource: the resource
        values: the search values from the URL query

    Returns:
        a TAG[""] with the any/all selector (if applicable), the
        options widget, and a (hidden) "no options available" message
    """
    attr = self._attr(resource)
    opts_get = self.opts.get
    name = attr["_name"]

    # Get the options
    ftype, options = self._options(resource, values=values)
    if options is None:
        # No options: hide the widget, show the no-options message
        options = []
        hide_widget = True
        hide_noopt = ""
    else:
        options = OrderedDict(options)
        hide_widget = False
        hide_noopt = " hide"

    # Any-All-Option: for many-to-many fields the user can
    # search for records containing all the options or any
    # of the options
    if len(options) > 1 and (ftype[:4] == "list" or opts_get("anyall")):
        operator = opts_get("operator", None)
        if operator:
            # Fixed operator
            any_all = ""
        else:
            # User choice (initially set to "all")
            any_all = True
            operator = "contains"

        if operator == "anyof":
            filter_type = "any"
        else:
            filter_type = "all"
        self.operator = operator

        if any_all:
            # Provide a form to prompt the user to choose
            T = current.T
            any_all = DIV(LABEL("%s:" % T("Match")),
                          LABEL(INPUT(_name = "%s_filter" % name,
                                      _id = "%s_filter_any" % name,
                                      _type = "radio",
                                      _value = "any",
                                      value = filter_type,
                                      ),
                                T("Any##filter_options"),
                                _for = "%s_filter_any" % name,
                                ),
                          LABEL(INPUT(_name = "%s_filter" % name,
                                      _id = "%s_filter_all" % name,
                                      _type = "radio",
                                      _value = "all",
                                      value = filter_type,
                                      ),
                                T("All##filter_options"),
                                _for = "%s_filter_all" % name,
                                ),
                          _class = "s3-options-filter-anyall",
                          )
    else:
        any_all = ""

    # Initialize widget: use groupedopts widget if we specify cols,
    # otherwise assume multiselect
    cols = opts_get("cols", None)
    if cols:
        widget_class = "groupedopts-filter-widget"
        w = S3GroupedOptionsWidget(options = options,
                                   multiple = opts_get("multiple", True),
                                   cols = cols,
                                   size = opts_get("size", 12),
                                   help_field = opts_get("help_field"),
                                   sort = opts_get("sort", True),
                                   orientation = opts_get("orientation"),
                                   table = opts_get("table", True),
                                   no_opts = opts_get("no_opts", None),
                                   option_comment = opts_get("option_comment", False),
                                   )
    else:
        widget_class = "multiselect-filter-widget"
        w = S3MultiSelectWidget(search = opts_get("search", "auto"),
                                header = opts_get("header", False),
                                selectedList = opts_get("selectedList", 3),
                                noneSelectedText = opts_get("noneSelectedText", "Select"),
                                multiple = opts_get("multiple", True),
                                )

    # Add widget class and default class
    classes = attr.get("_class", "").split() + [widget_class, self.css_base]
    if hide_widget:
        classes.append("hide")
    attr["_class"] = " ".join(set(classes)) if classes else None

    # Render the widget with a dummy field carrying the options
    dummy_field = Storage(name = name,
                          type = ftype,
                          requires = IS_IN_SET(options, multiple=True),
                          )
    widget = w(dummy_field, values, **attr)

    return TAG[""](any_all,
                   widget,
                   SPAN(self.no_opts,
                        _class = "no-options-available%s" % hide_noopt,
                        ),
                   )
Python
def no_opts(self):
    """
    Get the label for "no options available": the widget option
    "no_opts" if set, otherwise a generic default.

    Returns:
        the label (lazyT)
    """
    return self.opts.no_opts or current.T("No options available")
Python
def ajax_options(self, resource):
    """
    Method to Ajax-retrieve the current options of this widget

    Args:
        resource: the CRUDResource

    Returns:
        dict {widget-id: options}, where options is either a list
        of (value, label) tuples, a grouped-options dict, or an
        {"empty": message} dict when no options are available
    """
    opts = self.opts
    attr = self._attr(resource)
    ftype, options = self._options(resource)

    if options is None:
        # No options available
        options = {attr["_id"]: {"empty": str(self.no_opts)}}
    else:
        # Use groupedopts widget if we specify cols, otherwise
        # assume multiselect
        cols = opts.get("cols", None)
        if cols:
            # Use the widget method to group and sort the options
            widget = S3GroupedOptionsWidget(options = options,
                                            multiple = True,
                                            cols = cols,
                                            size = opts["size"] or 12,
                                            help_field = opts["help_field"],
                                            sort = opts.get("sort", True),
                                            )
            options = {attr["_id"]: widget._options({"type": ftype}, [])}
        else:
            # Multiselect: produce a simple list of tuples
            options = {attr["_id"]: [(k, s3_str(v)) for k, v in options]}

    return options
Python
def _options(self, resource, values=None):
    """
    Helper function to retrieve the current options for this
    filter widget

    Args:
        resource: the CRUDResource
        values: the currently selected values (resp. filter default)

    Returns:
        tuple (ftype, opt_list); opt_list is None when no options
        are available
    """
    opts = self.opts

    # Resolve the filter field
    selector = self.field
    if isinstance(selector, (tuple, list)):
        selector = selector[0]

    if resource is None:
        # Fall back to a configured lookup resource
        rname = opts.get("resource")
        if rname:
            resource = current.s3db.resource(rname)

    # An explicit lookup selector overrides the filter field
    lookup = opts.get("lookup")
    if lookup:
        selector = lookup

    if resource:
        rfield = S3ResourceField(resource, selector)
        field = rfield.field
        ftype = rfield.ftype
    else:
        rfield = field = None
        ftype = "string"

    # Determine available options
    options = opts.options
    if options is not None:
        # Dict {value: label} or a callable returning that dict:
        if callable(options):
            options = options()
        opt_keys = list(options.keys())
    elif resource:
        if ftype == "boolean":
            opt_keys = (True, False)
        else:
            opt_keys = self._lookup_options(resource, rfield)
    else:
        opt_keys = []

    # Make sure the selected options are in the available options
    # (not possible if we have a fixed options dict)
    if options is None and values:
        self._add_selected(opt_keys, values, ftype)

    # No options available?
    if len(opt_keys) < 1 or len(opt_keys) == 1 and not opt_keys[0]:
        return ftype, None

    # Represent the options
    if options is not None:
        if opts.translate:
            # Translate the labels
            T = current.T
            opt_list = [(opt, T(label)) if isinstance(label, str) else (opt, label)
                        for opt, label in options.items()
                        ]
        else:
            opt_list = list(options.items())
    else:
        opt_list = self._represent_options(field, opt_keys)

    # Sort the options
    opt_list, has_none = self._sort_options(opt_list)

    # Add none-option if configured and not in options list yet
    none = opts.none
    if none and not has_none:
        # Add none-option
        if none is True:
            none = current.messages["NONE"]
        opt_list.append((None, none))

    # Browsers automatically select the first option in single-selects,
    # but that doesn't filter the data, so the first option must be
    # empty if we don't have a default:
    if not opts.get("multiple", True) and not self.values:
        opt_list.insert(0, ("", ""))

    return ftype, opt_list
Python
def _lookup_options(self, resource, rfield):
    """
    Lookup the filter options from resource

    Args:
        resource: the CRUDResource to filter
        rfield: the filter field (S3ResourceField)

    Returns:
        list of options (keys only, no represent)
    """
    colname, rows = None, None

    field = rfield.field
    if field and self.opts.reverse_lookup is not False:
        virtual = False

        ktablename, key = s3_get_foreign_key(field, m2m=False)[:2]
        if ktablename:
            ktable = current.s3db.table(ktablename)
            key_field = ktable[key]
            colname = str(key_field)

            # Try a reverse-lookup, i.e. select records from the
            # referenced table that are linked to at least one
            # record in the filtered table
            query = resource.get_query()
            rfilter = resource.rfilter
            if rfilter:
                join = rfilter.get_joins()
                left = rfilter.get_joins(left=True)
            else:
                join = left = None
            query &= (key_field == field) & \
                     current.auth.s3_accessible_query("read", ktable)

            # If the filter field is in a joined table itself,
            # include the join for that table
            joins = rfield.join
            for tname in joins:
                query &= joins[tname]

            opts = self.opts

            # Filter options by location?
            location_filter = opts.get("location_filter")
            if location_filter and "location_id" in ktable:
                location = current.session.s3.location_filter
                if location:
                    query &= (ktable.location_id == location)

            # Filter options by organisation?
            org_filter = opts.get("org_filter")
            if org_filter and "organisation_id" in ktable:
                root_org = current.auth.root_org()
                if root_org:
                    query &= ((ktable.organisation_id == root_org) | \
                              (ktable.organisation_id == None))

            rows = current.db(query).select(key_field,
                                            resource._id.min(),
                                            groupby = key_field,
                                            join = join,
                                            left = left,
                                            )
    else:
        virtual = not bool(field)

    if rows is None:
        # Fall back to regular forward-lookup, i.e. select all
        # unique values in the filter field
        multiple = rfield.ftype[:5] == "list:"
        groupby = field if field and not multiple else None
        rows = resource.select([rfield.selector],
                               limit = None,
                               groupby = groupby,
                               virtual = virtual,
                               as_rows = True,
                               )
        colname = rfield.colname
    else:
        multiple = False

    # Extract option keys from rows
    opt_keys = set()
    if rows:
        for row in rows:
            val = row[colname]
            if virtual and callable(val):
                # Virtual field: evaluate now
                val = val()
            if (multiple or virtual) and isinstance(val, (list, tuple, set)):
                opt_keys.update(val)
            else:
                opt_keys.add(val)

    return list(opt_keys)
Python
def _add_selected(opt_keys, values, ftype): """ Add currently selected values to the options Args: opt_keys: list of option keys to add the values to values: list of currently selected values (resp. defaults) ftype: the field type """ numeric = ftype in ("integer", "id") or ftype[:9] == "reference" for v in values: if numeric and v is not None: try: value = int(v) except ValueError: # not valid for this field type => skip continue else: value = v if value not in opt_keys and \ (not isinstance(value, int) or str(value) not in opt_keys): opt_keys.append(value)
def _add_selected(opt_keys, values, ftype): """ Add currently selected values to the options Args: opt_keys: list of option keys to add the values to values: list of currently selected values (resp. defaults) ftype: the field type """ numeric = ftype in ("integer", "id") or ftype[:9] == "reference" for v in values: if numeric and v is not None: try: value = int(v) except ValueError: # not valid for this field type => skip continue else: value = v if value not in opt_keys and \ (not isinstance(value, int) or str(value) not in opt_keys): opt_keys.append(value)
Python
def _values(get_vars, variable):
    """
    Helper method to get all values of a URL query variable

    Args:
        get_vars: the GET vars (a dict)
        variable: the name of the query variable

    Returns:
        a list of values (empty if the variable is not present)
    """

    if not variable:
        return []

    # Match __eq before checking any other operator
    selector = S3URLQuery.parse_key(variable)[0]
    for key in ("%s__eq" % selector, selector, variable):
        if key in get_vars:
            values = S3URLQuery.parse_value(get_vars[key])
            if not isinstance(values, (list, tuple)):
                # Normalize single value to list
                values = [values]
            return values

    return []
Python
def widget(self, resource, values):
    """
    Render this widget as HTML helper object(s)

    Args:
        resource: the resource
        values: the search values from the URL query

    Returns:
        the widget (DIV instance)
    """

    # Currently selected values (node IDs are integers)
    selected = []
    append = selected.append
    if not isinstance(values, (list, tuple, set)):
        values = [values]
    for v in values:
        if isinstance(v, int) or str(v).isdigit():
            append(v)

    # Resolve the field selector
    rfield = S3ResourceField(resource, self.field)

    # Instantiate the widget
    opts = self.opts
    bulk_select = current.deployment_settings \
                         .get_ui_hierarchy_filter_bulk_select_option()
    if bulk_select is None:
        # No global setting => use widget option
        bulk_select = opts.get("bulk_select", False)

    if opts.get("widget") == "cascade":
        formstyle = current.deployment_settings.get_ui_filter_formstyle()
        w = S3CascadeSelectWidget(lookup = opts.get("lookup"),
                                  formstyle = formstyle,
                                  multiple = opts.get("multiple", True),
                                  filter = opts.get("filter"),
                                  leafonly = opts.get("leafonly", True),
                                  cascade = opts.get("cascade"),
                                  represent = opts.get("represent"),
                                  inline = True,
                                  )
    else:
        w = S3HierarchyWidget(lookup = opts.get("lookup"),
                              multiple = opts.get("multiple", True),
                              filter = opts.get("filter"),
                              leafonly = opts.get("leafonly", True),
                              cascade = opts.get("cascade", False),
                              represent = opts.get("represent"),
                              bulk_select = bulk_select,
                              none = opts.get("none"),
                              )

    # Render the widget
    widget = w(rfield.field, selected, **self._attr(resource))
    widget.add_class(self.css_base)

    return widget
Python
def variable(self, resource, get_vars=None):
    """
    Generate the name for the URL query variable for this
    widget, detect alternative __typeof queries.

    Args:
        resource: the resource
        get_vars: the GET vars (a dict) to inspect for
                  alternative operators resp. __typeof queries

    Returns:
        the URL query variable name (or list of variable names if
        there are multiple operators)
    """

    label, self.selector = self._selector(resource, self.field)
    if not self.selector:
        return None

    if "label" not in self.opts:
        self.opts["label"] = label

    selector = self.selector

    if self.alternatives and get_vars is not None:
        # Get the actual operator from get_vars
        operator = self._operator(get_vars, self.selector)
        if operator:
            self.operator = operator

    variable = self._variable(selector, self.operator)

    if not get_vars or not resource or variable in get_vars:
        return variable

    # Detect and resolve __typeof queries
    resolve = S3ResourceQuery._resolve_hierarchy
    selector = resource.prefix_selector(selector)
    for key, value in list(get_vars.items()):

        if key.startswith(selector):
            selectors, op = S3URLQuery.parse_expression(key)[:2]
        else:
            continue
        if op != "typeof" or len(selectors) != 1:
            continue

        rfield = resource.resolve_selector(selectors[0])
        if rfield.field:
            # Resolve the query value against the hierarchy
            values = S3URLQuery.parse_value(value)
            field, nodeset, none = resolve(rfield.field, values)[1:]
            if field and (nodeset or none):
                if nodeset is None:
                    nodeset = set()
                if none:
                    # None (no parent node) is a valid match
                    nodeset.add(None)
                # Replace the __typeof query by an equivalent
                # __belongs query for this widget's variable
                get_vars.pop(key, None)
                get_vars[variable] = [str(v) for v in nodeset]
            break

    return variable
Python
def defaults(self):
    """
    Safe defaults for model-global names in case module is disabled

    Returns:
        dict of dummy reusable fields keyed by their global names
    """

    dummy = S3ReusableField.dummy

    return {"cms_post_id": dummy("post_id"),
            "cms_tag_id": dummy("tag_id"),
            }
Python
def cms_post_duplicate(item):
    """
    CMS Post Import - Update Detection (primarily for non-blog
    contents such as homepage, module index pages, summary pages,
    or online documentation):
        - same name and series => same post

    Args:
        item: the import item

    TODO
        if no name present => use cms_post_module component
        to identify updates (also requires deduplication of
        cms_post_module component)
    """

    data = item.data

    name = data.get("name")
    series_id = data.get("series_id")

    if not name:
        # Cannot detect duplicates without a name
        return

    table = item.table
    query = (table.name == name) & \
            (table.series_id == series_id)
    duplicate = current.db(query).select(table.id,
                                         limitby=(0, 1)).first()
    if duplicate:
        # Mark the import item as an update of the existing record
        item.id = duplicate.id
        item.method = item.METHOD.UPDATE
Python
def cms_post_onaccept(form):
    """
    Onaccept of a CMS post:
        - set person_id from created_by if not already set
        - handle the case where the page is for a Module home page,
          Resource Summary page or Map Layer (link via get_vars)

    Args:
        form: the FORM
    """

    db = current.db
    s3db = current.s3db
    post_id = form.vars.id

    get_vars = current.request.get_vars
    module = get_vars.get("module", None)
    if module:
        # Link the post to the module/resource context it was created for
        table = db.cms_post_module
        query = (table.module == module)
        resource = get_vars.get("resource", None)
        if resource:
            query &= (table.resource == resource)
            record = get_vars.get("record", None)
            if record:
                # Profile page
                query &= (table.record == record)
            else:
                # Resource Summary page
                query &= (table.record == None)
        else:
            # Module home page
            record = None
            query &= ((table.resource == None) | \
                      (table.resource == "index"))
        result = db(query).update(post_id=post_id)
        if not result:
            # No existing link => create one
            table.insert(post_id=post_id,
                         module=module,
                         resource=resource,
                         record=record,
                         )

    layer_id = get_vars.get("layer_id", None)
    if layer_id:
        # Link the post to a map layer
        table = s3db.cms_post_layer
        query = (table.layer_id == layer_id)
        result = db(query).update(post_id=post_id)
        if not result:
            table.insert(post_id=post_id,
                         layer_id=layer_id,
                         )

    # Read record
    table = db.cms_post
    record = db(table.id == post_id).select(table.person_id,
                                            table.created_by,
                                            limitby=(0, 1)
                                            ).first()
    if record.created_by and not record.person_id:
        # Set from Author
        ptable = s3db.pr_person
        putable = s3db.pr_person_user
        query = (putable.user_id == record.created_by) & \
                (putable.pe_id == ptable.pe_id)
        person = db(query).select(ptable.id,
                                  limitby=(0, 1)
                                  ).first()
        if person:
            db(table.id == post_id).update(person_id=person.id)
Python
def cms_add_bookmark(r, **attr):
    """
    Bookmark a Post

    CRUD method for interactive requests

    Args:
        r: the CRUDRequest
        attr: controller parameters

    Returns:
        JSON message
    """

    post_id = r.id
    user = current.auth.user
    user_id = user and user.id
    if not post_id or not user_id:
        r.error(405, current.ERROR.BAD_METHOD)

    db = current.db
    ltable = db.cms_post_user
    query = (ltable.post_id == post_id) & \
            (ltable.user_id == user_id)
    exists = db(query).select(ltable.id,
                              ltable.deleted,
                              ltable.deleted_fk,
                              limitby=(0, 1)
                              ).first()
    if exists:
        link_id = exists.id
        if exists.deleted:
            # Restore the soft-deleted link, re-applying any
            # preserved foreign keys
            if exists.deleted_fk:
                data = json.loads(exists.deleted_fk)
                data["deleted"] = False
            else:
                data = {"deleted": False}
            db(ltable.id == link_id).update(**data)
    else:
        link_id = ltable.insert(post_id = post_id,
                                user_id = user_id,
                                )

    output = current.xml.json_message(True, 200, current.T("Bookmark Added"))
    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def cms_remove_bookmark(r, **attr):
    """
    Remove a Bookmark for a Post

    CRUD method for interactive requests

    Args:
        r: the CRUDRequest
        attr: controller parameters

    Returns:
        JSON message
    """

    post_id = r.id
    user = current.auth.user
    user_id = user and user.id
    if not post_id or not user_id:
        r.error(405, current.ERROR.BAD_METHOD)

    db = current.db
    ltable = db.cms_post_user
    query = (ltable.post_id == post_id) & \
            (ltable.user_id == user_id)
    exists = db(query).select(ltable.id,
                              ltable.deleted,
                              limitby=(0, 1)
                              ).first()
    if exists and not exists.deleted:
        # Delete via resource to run ondelete hooks and soft-delete
        resource = current.s3db.resource("cms_post_user", id=exists.id)
        resource.delete()

    output = current.xml.json_message(True, 200, current.T("Bookmark Removed"))
    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def cms_add_tag(r, **attr):
    """
    Add a Tag to a Post

    CRUD method for interactive requests
        - designed to be called as an afterTagAdded callback to tag-it.js

    Args:
        r: the CRUDRequest (tag name expected as 3rd URL arg)
        attr: controller parameters

    Returns:
        JSON message
    """

    post_id = r.id
    if not post_id or len(r.args) < 3:
        r.error(405, current.ERROR.BAD_METHOD)

    tag = r.args[2]
    db = current.db
    ttable = db.cms_tag
    ltable = db.cms_tag_post

    # Get or (re-)create the tag itself
    exists = db(ttable.name == tag).select(ttable.id,
                                           ttable.deleted,
                                           ttable.deleted_fk,
                                           limitby=(0, 1)
                                           ).first()
    if exists:
        tag_id = exists.id
        if exists.deleted:
            # Restore the soft-deleted tag
            if exists.deleted_fk:
                data = json.loads(exists.deleted_fk)
                data["deleted"] = False
            else:
                data = {"deleted": False}
            db(ttable.id == tag_id).update(**data)
    else:
        tag_id = ttable.insert(name=tag)

    # Get or (re-)create the tag<>post link
    query = (ltable.tag_id == tag_id) & \
            (ltable.post_id == post_id)
    exists = db(query).select(ltable.id,
                              ltable.deleted,
                              ltable.deleted_fk,
                              limitby=(0, 1)
                              ).first()
    if exists:
        if exists.deleted:
            # Restore the soft-deleted link
            if exists.deleted_fk:
                data = json.loads(exists.deleted_fk)
                data["deleted"] = False
            else:
                data = {"deleted": False}
            db(ltable.id == exists.id).update(**data)
    else:
        ltable.insert(post_id = post_id,
                      tag_id = tag_id,
                      )

    output = current.xml.json_message(True, 200, current.T("Tag Added"))
    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def cms_remove_tag(r, **attr):
    """
    Remove a Tag from a Post

    CRUD method for interactive requests
        - designed to be called as an afterTagRemoved callback to tag-it.js

    Args:
        r: the CRUDRequest (tag name expected as 3rd URL arg)
        attr: controller parameters

    Returns:
        JSON message
    """

    post_id = r.id
    if not post_id or len(r.args) < 3:
        r.error(405, current.ERROR.BAD_METHOD)

    tag = r.args[2]
    db = current.db
    ttable = db.cms_tag

    exists = db(ttable.name == tag).select(ttable.id,
                                           ttable.deleted,
                                           limitby=(0, 1)
                                           ).first()
    if exists:
        tag_id = exists.id
        ltable = db.cms_tag_post
        query = (ltable.tag_id == tag_id) & \
                (ltable.post_id == post_id)
        exists = db(query).select(ltable.id,
                                  ltable.deleted,
                                  limitby=(0, 1)
                                  ).first()
        if exists and not exists.deleted:
            # Delete via resource to run ondelete hooks and soft-delete
            resource = current.s3db.resource("cms_tag_post", id=exists.id)
            resource.delete()

    output = current.xml.json_message(True, 200, current.T("Tag Removed"))
    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def cms_share(r, **attr):
    """
    Share a Post to a Forum

    CRUD method for interactive requests
        - designed to be called via AJAX

    Args:
        r: the CRUDRequest (forum_id expected as 3rd URL arg)
        attr: controller parameters

    Returns:
        JSON message
    """

    post_id = r.id
    if not post_id or len(r.args) < 3:
        r.error(405, current.ERROR.BAD_METHOD)

    db = current.db
    s3db = current.s3db
    auth = current.auth
    forum_id = r.args[2]

    if not auth.s3_has_role("ADMIN"):
        # Check that user is a member of the forum
        mtable = s3db.pr_forum_membership
        ptable = s3db.pr_person
        # Fix: constrain the membership lookup to the target forum,
        # otherwise membership of ANY forum would allow sharing here
        query = (ptable.pe_id == auth.user.pe_id) & \
                (mtable.person_id == ptable.id) & \
                (mtable.forum_id == forum_id)
        member = db(query).select(mtable.id,
                                  limitby = (0, 1)
                                  ).first()
        if not member:
            output = current.xml.json_message(False, 403, current.T("Cannot Share to a Forum unless you are a Member"))
            current.response.headers["Content-Type"] = "application/json"
            return output

    ltable = s3db.cms_post_forum
    query = (ltable.post_id == post_id) & \
            (ltable.forum_id == forum_id)
    exists = db(query).select(ltable.id,
                              limitby=(0, 1)
                              ).first()
    if not exists:
        ltable.insert(post_id = post_id,
                      forum_id = forum_id,
                      )
        # Update modified_on of the forum to allow subscribers to be notified
        db(s3db.pr_forum.id == forum_id).update(modified_on = r.utcnow)

    output = current.xml.json_message(True, 200, current.T("Post Shared"))
    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def cms_unshare(r, **attr):
    """
    Unshare a Post from a Forum

    CRUD method for interactive requests
        - designed to be called via AJAX

    Args:
        r: the CRUDRequest (forum_id expected as 3rd URL arg)
        attr: controller parameters

    Returns:
        JSON message
    """

    post_id = r.id
    if not post_id or len(r.args) < 3:
        r.error(405, current.ERROR.BAD_METHOD)

    db = current.db
    s3db = current.s3db
    forum_id = r.args[2]

    ltable = s3db.cms_post_forum
    query = (ltable.post_id == post_id) & \
            (ltable.forum_id == forum_id)
    exists = db(query).select(ltable.id,
                              ltable.created_by,
                              limitby=(0, 1)
                              ).first()
    if exists:
        auth = current.auth
        if not auth.s3_has_role("ADMIN"):
            # Check that user is the one that shared the Incident
            if exists.created_by != auth.user.id:
                output = current.xml.json_message(False, 403, current.T("Only the Sharer, or Admin, can Unshare"))
                current.response.headers["Content-Type"] = "application/json"
                return output
        # Delete via resource to run ondelete hooks and soft-delete
        resource = s3db.resource("cms_post_forum", id=exists.id)
        resource.delete()

    output = current.xml.json_message(True, 200, current.T("Stopped Sharing Post"))
    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def recipient_onvalidation(form):
    """
    Onvalidation of newsletter recipient:
        - prevent duplication of recipient

    Args:
        form: the FORM
    """

    form_vars = form.vars

    table = current.s3db.cms_newsletter_recipient

    newsletter_id = form_vars.get("newsletter_id")
    if not newsletter_id:
        # Inline-component add => default is set on the field
        newsletter_id = table.newsletter_id.default
    if not newsletter_id:
        return

    pe_id = form_vars.get("pe_id")
    if not pe_id:
        return

    query = (table.newsletter_id == newsletter_id) & \
            (table.pe_id == pe_id) & \
            (table.deleted == False)
    record_id = get_form_record_id(form)
    if record_id:
        # Exclude this record when updating
        query &= (table.id != record_id)

    if current.db(query).select(table.id, limitby=(0, 1)).first():
        form.errors["pe_id"] = current.T("Recipient already registered")
Python
def update_total_recipients(newsletter_id):
    """
    Update the (denormalized) total number of recipients in a
    newsletter record

    Args:
        newsletter_id: the newsletter record ID
    """

    if not newsletter_id:
        return

    db = current.db
    s3db = current.s3db

    # Count the undeleted recipients of this newsletter
    rtable = s3db.cms_newsletter_recipient
    total = rtable.id.count()
    query = (rtable.newsletter_id == newsletter_id) & \
            (rtable.deleted == False)
    row = db(query).select(total).first()
    if row:
        table = s3db.cms_newsletter
        query = (table.id == newsletter_id)
        db(query).update(total_recipients=row[total])
Python
def recipient_onaccept(cls, form):
    """
    Onaccept of newsletter recipients:
        - update the total number of recipients in the newsletter
        - notify new recipients immediately if the newsletter has
          already been sent

    Args:
        cls: the model class
        form: the FORM
    """

    record_id = get_form_record_id(form)
    if not record_id:
        return

    s3db = current.s3db
    rtable = s3db.cms_newsletter_recipient
    ntable = s3db.cms_newsletter
    join = ntable.on(ntable.id == rtable.newsletter_id)
    query = (rtable.id == record_id)
    newsletter = current.db(query).select(ntable.id,
                                          ntable.status,
                                          join = join,
                                          limitby = (0, 1),
                                          ).first()
    if newsletter:
        # Update total number of recipients
        cls.update_total_recipients(newsletter.id)

        # Notify pending recipients immediately when already SENT
        notify = s3db.get_config("cms_newsletter", "notify_recipients")
        if notify and newsletter.status == "SENT":
            notify(newsletter.id)
Python
def recipient_ondelete(cls, row):
    """
    Ondelete of newsletter recipients:
        - update the total number of recipients in the newsletter

    Args:
        cls: the model class
        row: the deleted cms_newsletter_recipient Row
             (must include newsletter_id)
    """

    cls.update_total_recipients(row.newsletter_id)
Python
def newsletter_read_status(row):
    """
    Field method to indicate the (un)read-status of a newsletter

    Args:
        row: the cms_newsletter Row (must include the record id)

    Returns:
        a SPAN with a "new" marker if unread, otherwise ""
    """

    status = ""

    try:
        record_id = row.cms_newsletter.id
    except AttributeError:
        # Field method called without the id => no status
        pass
    else:
        if record_id in cms_unread_newsletters(count=False):
            status = SPAN(current.T("new"), _class="prio prio-amber")

    return status
Python
def update_recipients(r, newsletter_id):
    """
    Updates the recipients to a newsletter according to its
    distribution list

    Args:
        r: the current CRUDRequest
        newsletter_id: the newsletter record_id

    Note:
        This function expects a hook "lookup_recipients" for the
        cms_newsletter table to be configured with a callback
        function that accepts a (filtered) resource, and returns
        the list of pe_ids of all relevant recipients.
    """

    db = current.db
    s3db = current.s3db

    # Lookup existing recipients
    rtable = s3db.cms_newsletter_recipient
    query = (rtable.newsletter_id == newsletter_id) & \
            (rtable.deleted == False)
    rows = db(query).select(rtable.pe_id)
    existing = {row.pe_id for row in rows}

    # Get the lookup-callback
    lookup = s3db.get_config("cms_newsletter", "lookup_recipients")
    if not lookup:
        return

    recipients = set()

    # All saved filters linked to this newsletter's distribution list
    ltable = s3db.cms_newsletter_distribution
    ftable = s3db.pr_filter
    join = ftable.on(ftable.id == ltable.filter_id)
    query = (ltable.newsletter_id == newsletter_id) & \
            (ltable.deleted == False)
    filters = db(query).select(ftable.controller,
                               ftable.function,
                               ftable.resource,
                               ftable.query,
                               ftable.serverside,
                               join = join,
                               )

    permissions = current.auth.permission
    for row in filters:

        # Get the filter query
        queries = row.serverside
        if queries is None:
            # Fallback for backwards-compatibility
            try:
                queries = json.loads(row.query)
            except JSONERRORS:
                queries = []

        # Convert into filter_vars
        filter_vars = {}
        for selector, value in queries:
            v = filter_vars.get(selector)
            if v:
                # Multiple values for the same selector
                filter_vars[selector] = [v, value]
            else:
                filter_vars[selector] = value

        # Apply permissions of the original controller/function
        # where the filter was created - otherwise, the lookup would
        # happen with permissions for the cms/newsletter controller,
        # which may not grant access to the recipient resource at all
        c, f = permissions.controller, permissions.function
        permissions.controller = row.controller
        permissions.function = row.function

        # Instantiate the recipient resource with these filters
        tablename = row.resource
        r.customise_resource(tablename)
        resource = s3db.resource(tablename, vars=filter_vars)

        # Apply callback to look up the recipients for the filtered
        # recipient resource
        pe_ids = lookup(resource)
        if pe_ids:
            recipients |= set(pe_ids)

        # Restore permission controller/function
        permissions.controller, permissions.function = c, f

    # Remove irrelevant recipients
    to_delete = existing - recipients
    if to_delete:
        query = (FS("newsletter_id") == newsletter_id) & \
                (FS("pe_id").belongs(to_delete))
        s3db.resource(rtable, filter=query).delete()
        existing -= to_delete

    # Insert new recipients
    to_add = recipients - existing
    if to_add:
        ids = rtable.bulk_insert({"newsletter_id": newsletter_id,
                                  "pe_id": pe_id,
                                  } for pe_id in to_add)
        # bulk_insert bypasses onaccept => set ownership explicitly
        set_record_owner = current.auth.s3_set_record_owner
        for record_id in ids:
            set_record_owner(rtable, record_id)

    CMSNewsletterModel.update_total_recipients(newsletter_id)
Python
def notify_recipients(cls, newsletter_id):
    """
        Notify newsletter recipients (=send the newsletter per email).

        Args:
            newsletter_id: the newsletter record ID

        Returns:
            error message on failure, otherwise None

        Raises:
            RuntimeError: if the message cannot be composed
                          (e.g. missing subject)

        Note:
            This function applies the "resolve_recipient" callback if
            it is configured for the cms_newsletter table, to find the
            email address(es) for a recipient. If no callback is
            configured, the internal resolve() method is used instead.
    """
    # Customise resource to pick up resolve-callback
    from core import crud_request
    r = crud_request("cms", "newsletter")
    r.customise_resource("cms_newsletter")

    s3db = current.s3db

    # Get all recipients still pending notification
    rtable = s3db.cms_newsletter_recipient
    query = (rtable.newsletter_id == newsletter_id) & \
            (rtable.status == "PENDING") & \
            (rtable.deleted == False)
    recipients = current.db(query).select(rtable.id,
                                          rtable.pe_id,
                                          )
    if not recipients:
        return "No pending recipients"

    # Compose message as (subject, body, attachments, from_address)
    message = cls.compose(newsletter_id)
    if not message:
        raise RuntimeError("Could not compose newsletter message")

    # Resolve-callback to look up the email address(es)
    resolve = s3db.get_config("cms_newsletter", "resolve_recipient")
    if not resolve:
        # Fallback to direct pr_contact lookup
        resolve = cls.resolve

    seen = set()
    send_email = current.msg.send_email
    for recipient in recipients:
        errors = []

        email = resolve(recipient.pe_id)
        if not email:
            recipient.update_record(status = "ERROR",
                                    errors = "No email address found",
                                    )
            continue
        if not isinstance(email, (list, tuple, set)):
            email = [email]

        for mailaddress in email:
            if mailaddress in seen:
                # Never send the same newsletter twice to the same
                # address (e.g. shared mailboxes)
                continue
            seen.add(mailaddress)
            if not send_email(to = mailaddress,
                              subject = message[0],
                              message = message[1],
                              attachments = message[2],
                              from_address = message[3],
                              reply_to = message[3],
                              ):
                errors.append('Failed to send email to "%s"' % mailaddress)

        # Record per-recipient outcome, retaining error details
        if errors:
            recipient.update_record(status = "ERROR",
                                    errors = "\n".join(errors),
                                    )
        else:
            recipient.update_record(status = "NOTIFIED")

    return None
Python
def resolve(pe_id):
    """
        Look up the email address for the given person entity
        from pr_contact.

        Args:
            pe_id: the pe_id of the person entity

        Returns:
            the email address, if any - or None
    """
    ctable = current.s3db.pr_contact
    query = (ctable.pe_id == pe_id) & \
            (ctable.contact_method == "EMAIL") & \
            (ctable.deleted == False)
    # Prefer highest-priority, then most recently created contact
    contact = current.db(query).select(ctable.value,
                                       limitby = (0, 1),
                                       orderby = [ctable.priority,
                                                  ~ctable.created_on,
                                                  ],
                                       ).first()
    return contact.value if contact else None
Python
def cms_newsletter_notify(newsletter_id=None):
    """
        Async task to notify newsletter recipients.

        Args:
            newsletter_id: the newsletter record ID

        Returns:
            error message on failure, otherwise None
    """
    if not newsletter_id:
        return None
    try:
        error = cms_UpdateNewsletter.notify_recipients(newsletter_id)
    except Exception:
        # Reset status, so sending can be retried; commit explicitly
        # because the exception will roll back the transaction
        table = current.s3db.cms_newsletter
        db = current.db
        db(table.id == newsletter_id).update(status="NEW")
        db.commit()
        raise
    return error
Python
def cms_newsletter_actions(newsletter):
    """
        Generates action buttons to manage/send a newsletter.

        Args:
            newsletter: the newsletter record (including id and status)

        Returns:
            a list of action buttons, or None if the newsletter is not
            in status "NEW" or the user lacks update permission
    """
    s3db = current.s3db
    table = s3db.cms_newsletter
    newsletter_id = newsletter.id

    actions = None
    if newsletter.status == "NEW" and \
       current.auth.s3_has_permission("update", table, record_id=newsletter_id):

        T = current.T
        db = current.db

        # Generate formkey to protect the Ajax actions against CSRF
        from core import FormKey
        formkey = FormKey("update-newsletter-%s" % newsletter_id).generate()

        # Confirmation questions (i18n for the client-side script)
        s3 = current.response.s3
        confirm = {"send_newsletter": T("Do you want to send this newsletter?"),
                   "update_recipients": T("Assign recipients from distribution list?"),
                   "remove_recipients": T("Remove all assigned recipients?"),
                   }
        i18n = "\n".join('i18n.%s="%s"' % (k, v) for k, v in confirm.items())
        if i18n not in s3.js_global:
            s3.js_global.append(i18n)

        # Inject script for actions
        appname = current.request.application
        script = "/%s/static/scripts/S3/s3.cms.js" % appname
        if script not in s3.scripts:
            s3.scripts.append(script)

        # Action buttons
        update_btn = A(T("Assign recipients"),
                       _class = "action-btn newsletter-update-btn",
                       _db_id = str(newsletter_id),
                       _title = T("Assign recipients from distribution list"),
                       data = {"key": formkey},
                       )

        # Disable remove/send buttons unless recipients have been added
        rtable = s3db.cms_newsletter_recipient
        query = (rtable.newsletter_id == newsletter_id) & \
                (rtable.deleted == False)
        recipient = db(query).select(rtable.id, limitby=(0, 1)).first()
        if not recipient:
            title = T("No Recipients assigned")
            attr = {"_disabled": "disabled"}
        else:
            title = None
            attr = {"_db_id": str(newsletter_id),
                    "data": {"key": formkey}}

        remove_btn = A(T("Remove recipients"),
                       _class = "action-btn newsletter-remove-btn",
                       _title = title if title else T("Remove all recipients"),
                       **attr)
        send_btn = A(T("Send"),
                     _class = "action-btn newsletter-send-btn",
                     _title = title if title else T("Send this newsletter"),
                     **attr)

        actions = [update_btn, remove_btn, send_btn]

    return actions
Python
def cms_accessible_newsletters():
    """
        Constructs a subquery for newsletters accessible by the
        current user.

        Returns:
            - a subquery (SQL string) for use with belongs(), or
            - an empty set if there is no current user
    """
    db = current.db
    s3db = current.s3db

    auth = current.auth
    if not auth.user:
        return set()

    settings = current.deployment_settings

    from core import accessible_pe_query

    # Newsletters addressed to any person entity the user can access...
    rtable = s3db.cms_newsletter_recipient
    types = settings.get_cms_newsletter_recipient_types()
    query = accessible_pe_query(table = rtable,
                                instance_types = types,
                                c = "cms",
                                f = "newsletter_recipient",
                                )
    # ...or addressed to the user directly
    # (auth.user is guaranteed here by the early return above)
    query |= (rtable.pe_id == auth.user.pe_id)

    return db(query)._select(rtable.newsletter_id,
                             groupby = rtable.newsletter_id,
                             )
Python
def cms_unread_newsletters(count=True, cached=True):
    """
        Finds all newsletters accessible, but not yet confirmed as
        read by the current user.

        Args:
            count: return the number of unread newsletters instead
                   of their record IDs
            cached: use cached lookup results, if available

        Returns:
            the number of unread newsletters, or their record IDs
    """
    db = current.db
    s3db = current.s3db
    auth = current.auth

    ntable = s3db.cms_newsletter
    rtable = s3db.cms_newsletter_receipt

    # Try the session cache first (expire, count, ids)
    expire, number, record_ids = None, None, None
    session = current.session
    if cached and "unread_newsletters" in session.s3:
        try:
            expire, number, record_ids = session.s3.unread_newsletters
        except (ValueError, TypeError):
            # Malformed cache entry => ignore and rebuild
            pass

    now = datetime.datetime.utcnow()
    if expire and expire > now:
        return number if count else record_ids

    if auth.user:
        user_id = auth.user.id
        # Left join on receipts of the current user: a newsletter is
        # unread when no receipt row exists (rtable.id == None)
        left = rtable.on((rtable.newsletter_id == ntable.id) & \
                         (rtable.user_id == user_id) & \
                         (rtable.deleted == False))
        query = (ntable.id.belongs(cms_accessible_newsletters())) & \
                (ntable.status == "SENT") & \
                auth.s3_accessible_query("read", ntable) & \
                (ntable.deleted == False) & \
                (rtable.id == None)
    else:
        # No user => nothing is unread
        query = ntable.id.belongs(set())
        left = None

    rows = db(query).select(ntable.id, left=left)

    # Cache the result for 10 minutes
    expire = now + datetime.timedelta(minutes=10)
    number = len(rows)
    record_ids = [row.id for row in rows]
    session.s3.unread_newsletters = (expire, number, record_ids)

    return number if count else record_ids
Python
def cms_mark_newsletter(newsletter_id=None, read=True):
    """
        Mark one or more newsletters as read/unread by the current user.

        Args:
            newsletter_id: the newsletter record ID (or a list of IDs)
            read: True|False to mark as read or unread

        Returns:
            the number of newsletters marked as read/unread
    """
    db = current.db
    s3db = current.s3db
    auth = current.auth

    if not newsletter_id or not auth.user:
        return 0

    # Get all (undeleted) newsletters matching newsletter_id
    ntable = s3db.cms_newsletter
    if isinstance(newsletter_id, (tuple, list, set)):
        query = (ntable.id.belongs(newsletter_id))
    else:
        query = (ntable.id == newsletter_id)
    query &= (ntable.deleted == False)
    rows = db(query).select(ntable.id)
    newsletter_ids = {row.id for row in rows}
    if not newsletter_ids:
        return 0

    # Get all existing receipts of the current user for those newsletters
    user_id = auth.user.id
    rtable = s3db.cms_newsletter_receipt
    query = (rtable.newsletter_id.belongs(newsletter_ids)) & \
            (rtable.user_id == user_id) & \
            (rtable.deleted == False)
    receipts = db(query).select(rtable.id,
                                rtable.newsletter_id,
                                )

    if read:
        # Add missing receipts (one per newsletter not yet read)
        newsletter_ids -= {row.newsletter_id for row in receipts}
        for nid in newsletter_ids:
            receipt = {"user_id": user_id, "newsletter_id": nid}
            receipt_id = receipt["id"] = rtable.insert(**receipt)
            auth.s3_set_record_owner(rtable, receipt_id)
            s3db.onaccept(rtable, receipt, method="create")
        updated = len(newsletter_ids)
    else:
        # Remove existing receipts (marks as unread again)
        resource = s3db.resource("cms_newsletter_receipt",
                                 id = [row.id for row in receipts],
                                 )
        updated = resource.delete()

    # Invalidate the unread-newsletters session cache
    cms_unread_newsletters(cached=False)

    return updated
Python
def cms_index(module,
              resource = None,
              page_name = None,
              alt_function = None,
              view = None):
    """
        Return a module index page retrieved from CMS
        - or run an alternate function if not found

        Args:
            module: the module prefix
            resource: the resource name (without prefix), optional
            page_name: the page title (defaults to the module's nice name)
            alt_function: name of a controller function to run when no
                          CMS contents exist
            view: path of a custom view template, relative to
                  modules/templates

        Returns:
            dict for the view, with "item" (the page contents) and
            "report" keys - unless alt_function is served, in which
            case an HTTP response is raised
    """
    response = current.response
    settings = current.deployment_settings

    if not page_name:
        page_name = settings.modules[module].get("name_nice", module)
    response.title = page_name

    item = None
    if settings.has_module("cms") and not settings.get_cms_hide_index(module):
        db = current.db
        table = current.s3db.cms_post
        ltable = db.cms_post_module
        query = (ltable.module == module) & \
                (ltable.post_id == table.id) & \
                (table.deleted != True)
        if resource is None:
            # Module homepage: posts without resource, or tagged "index"
            query &= ((ltable.resource == None) | \
                      (ltable.resource == "index"))
        else:
            query &= (ltable.resource == resource)
        _item = db(query).select(table.id,
                                 table.body,
                                 table.title,
                                 limitby=(0, 1)).first()
        # @ToDo: Replace this crude check with?
        #if current.auth.s3_has_permission("update", table, record_id=_item.id):
        auth = current.auth
        ADMIN = auth.get_system_roles().ADMIN
        ADMIN = auth.s3_has_role(ADMIN)
        get_vars = {"module": module}
        if resource:
            get_vars["resource"] = resource
        if _item:
            if _item.title:
                response.title = _item.title
            if ADMIN:
                # Render contents with inline edit-link
                item = DIV(XML(_item.body),
                           BR(),
                           A(current.T("Edit"),
                             _href=URL(c="cms", f="post",
                                       args=[_item.id, "update"],
                                       vars=get_vars),
                             _class="action-btn"))
            else:
                item = XML(_item.body)
        elif ADMIN:
            # No contents yet => offer create-link to admins
            item = DIV(H2(page_name),
                       A(current.T("Edit"),
                         _href=URL(c="cms", f="post",
                                   args="create",
                                   vars=get_vars),
                         _class="action-btn"))

    if not item:
        if alt_function:
            # Serve the alternate controller function
            # Copied from gluon.main serve_controller()
            # (We don't want to re-run models)
            from gluon.compileapp import build_environment, run_controller_in, run_view_in
            request = current.request
            environment = build_environment(request, response, current.session)
            environment["settings"] = settings
            environment["s3db"] = current.s3db
            # Retain certain globals (extend as needed):
            g = globals()
            environment["s3base"] = g.get("s3base")
            environment["s3_redirect_default"] = g.get("s3_redirect_default")
            page = run_controller_in(request.controller, alt_function, environment)
            if isinstance(page, dict):
                response._vars = page
                response._view_environment.update(page)
                run_view_in(response._view_environment)
                page = response.body.getvalue()
            # Set default headers if not set
            from gluon.contenttype import contenttype
            default_headers = [
                ("Content-Type", contenttype("." + request.extension)),
                ("Cache-Control",
                 "no-store, no-cache, must-revalidate, post-check=0, pre-check=0"),
                ("Expires", time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                                          time.gmtime())),
                ("Pragma", "no-cache")]
            for key, value in default_headers:
                response.headers.setdefault(key, value)
            raise HTTP(response.status, page, **response.headers)
        else:
            item = H2(page_name)

    if view is not None:
        view = os.path.join(*(view.split("/")))
        view = os.path.join(current.request.folder, "modules", "templates", view)
        try:
            # Pass view as file not str to work in compiled mode
            response.view = open(view, "rb")
        except IOError:
            raise HTTP(404, "Unable to open Custom View: %s" % view)
    else:
        response.view = "index.html"

    return {"item": item,
            "report": "",
            }
Python
def cms_documentation(r, default_page, default_url):
    """
        Render an online documentation page, to be called from prep.

        Args:
            r: the CRUDRequest
            default_page: the default page name
            default_url: the default URL if no contents found

        Returns:
            dict with "bypass" and "output" keys for the request handler
    """
    row = r.record
    if not row:
        # Find the CMS page by its name (?name= or default)
        name = r.get_vars.get("name", default_page)
        table = r.resource.table
        query = (table.name == name) & (table.deleted != True)
        row = current.db(query).select(table.id,
                                       table.title,
                                       table.body,
                                       limitby=(0, 1)).first()
    if not row:
        if name != default_page:
            # Error - CMS page not found
            r.error(404, current.T("Page not found"),
                    next=URL(args=current.request.args, vars={}),
                    )
        else:
            # No CMS contents for module homepage found at all
            # => redirect to default page (preserving all errors)
            from core import s3_redirect_default
            s3_redirect_default(default_url)

    # Render the page
    from core import S3XMLContents
    return {"bypass": True,
            "output": {"title": row.title,
                       "contents": S3XMLContents(row.body),
                       },
            }
Python
def apply_method(self, r, **attr):
    """
        Entry point to apply cms method to CRUDRequests
        - produces a full page with a Richtext widget

        Args:
            r: the CRUDRequest
            attr: dictionary of parameters for the method handler

    Returns:
            output object to send to the view

        Raises:
            HTTP 405: always - this entry point is not implemented
    """
    # Not Implemented
    r.error(405, current.ERROR.BAD_METHOD)
Python
def widget(self, r, method="cms", widget_id=None, **attr):
    """
        Render a Rich Text widget suitable for use in a page such
        as S3Summary.

        Args:
            r: the CRUDRequest
            method: the widget method
            widget_id: the DOM node ID for the widget
            attr: controller attributes

        Returns:
            the widget contents, or "" if the cms module is disabled

        TODO Support comments
    """
    if not current.deployment_settings.has_module("cms"):
        return ""

    return self.resource_content(r.controller,
                                 r.function,
                                 r.id,
                                 widget_id)
Python
def resource_content(module,
                     resource,
                     record=None,
                     widget_id=None,
                     hide_if_empty=False):
    """
        Render resource-related CMS contents.

        Args:
            module: the module prefix
            resource: the resource name (without prefix)
            record: the record ID (optional)
            widget_id: the DOM node ID for the CMS widget
            hide_if_empty: return an empty string when there is no
                           contents rather than a blank DIV
    """
    db = current.db
    table = current.s3db.cms_post
    ltable = db.cms_post_module
    query = (ltable.module == module) & \
            (ltable.resource == resource) & \
            (ltable.record == record) & \
            (ltable.post_id == table.id) & \
            (table.deleted != True)
    _item = db(query).select(table.id,
                             table.body,
                             limitby=(0, 1)).first()
    # @ToDo: Replace this crude check with?
    #if current.auth.s3_has_permission("update", r.table, record_id=r.id):
    auth = current.auth
    ADMIN = auth.get_system_roles().ADMIN
    ADMIN = auth.s3_has_role(ADMIN)
    if ADMIN:
        if current.response.s3.crud.formstyle == "bootstrap":
            _class = "btn"
        else:
            _class = "action-btn"
        url_vars = {"module": module,
                    "resource": resource,
                    }
        if record:
            url_vars["record"] = record
        if _item:
            # Contents with inline edit-link for admins
            item = DIV(XML(_item.body),
                       A(current.T("Edit"),
                         _href=URL(c="cms", f="post",
                                   args = [_item.id, "update"],
                                   vars = url_vars,
                                   ),
                         _class="%s cms-edit" % _class,
                         ))
        else:
            # No contents yet => offer create-link to admins
            item = A(current.T("Edit"),
                     _href=URL(c="cms", f="post",
                               args = "create",
                               vars = url_vars,
                               ),
                     _class="%s cms-edit" % _class,
                     )
    elif _item:
        item = XML(_item.body)
    else:
        item = ""

    if item != "" or not hide_if_empty:
        output = DIV(item, _id=widget_id, _class="cms_content")
    else:
        output = item
    return output
Python
def cms_configure_newsfeed_post_fields():
    """
    Customize cms_post fields for the Newsfeed / Home Pages

    Returns:
        the cms_post table
    """

    s3db = current.s3db
    s3 = current.response.s3
    settings = current.deployment_settings

    # How to represent the organisation of the post
    org_field = settings.get_cms_organisation()
    if org_field == "created_by$organisation_id":
        current.auth.settings.table_user.organisation_id.represent = \
            s3db.org_organisation_represent
    elif org_field == "post_organisation.organisation_id":
        s3db.cms_post_organisation.organisation_id.label = ""

    # How to represent the organisation group of the post
    org_group_field = settings.get_cms_organisation_group()
    if org_group_field == "created_by$org_group_id":
        current.auth.settings.table_user.org_group_id.represent = \
            s3db.org_organisation_group_represent
    elif org_group_field == "post_organisation_group.group_id":
        s3db.cms_post_organisation_group.group_id.label = ""

    table = s3db.cms_post
    # Drop the IS_EMPTY_OR wrapper => series is mandatory here
    table.series_id.requires = table.series_id.requires.other

    # How to represent the contact person of the post
    contact_field = settings.get_cms_person()
    if contact_field == "created_by":
        table.created_by.represent = s3db.auth_UserRepresent(show_email = False,
                                                             show_link = False)
    elif contact_field == "person_id":
        field = table.person_id
        field.readable = True
        field.writable = True
        field.comment = None
        field.widget = S3AddPersonWidget(controller="pr")

    field = table.location_id
    field.label = ""
    field.represent = s3db.gis_LocationRepresent(sep=" | ")
    # Required
    field.requires = IS_LOCATION()

    list_fields = ["series_id",
                   "location_id",
                   "date",
                   ]
    lappend = list_fields.append

    if settings.get_cms_show_titles():
        lappend("title")
    lappend("body")
    if contact_field:
        lappend(contact_field)
    if org_field:
        lappend(org_field)
    if org_group_field:
        lappend(org_group_field)
    if settings.get_cms_show_attachments():
        lappend("document.file")
    if settings.get_cms_show_links():
        lappend("document.url")
    if settings.get_cms_show_events():
        lappend("event_post.event_id")

    if settings.get_cms_location_click_filters():
        # Clicking a location in a post toggles the corresponding
        # location filter checkboxes
        script = \
'''S3.filter_location=function(d){var cb
for(var p in d){cb=$('input[name="multiselect_post-cms_post_location_id-location-filter-L'+p+'"][value="'+d[p]+'"]')
if(!cb.prop('checked')){cb.click()}}}'''
        s3.jquery_ready.append(script)
        # Which levels of Hierarchy are we using?
        for level in current.gis.get_relevant_hierarchy_levels():
            lappend("location_id$%s" % level)

    if settings.get_cms_show_tags():
        lappend("tag.name")
        application = current.request.application
        if s3.debug:
            s3.scripts.append("/%s/static/scripts/tag-it.js" % application)
        else:
            s3.scripts.append("/%s/static/scripts/tag-it.min.js" % application)
        if current.auth.s3_has_permission("update", current.db.cms_tag_post):
            # Tags are editable inline => sync changes via Ajax
            readonly = '''afterTagAdded:function(event,ui){
if(ui.duringInitialization){return}
var post_id=$(this).attr('data-post_id')
var url=S3.Ap.concat('/cms/post/',post_id,'/add_tag/',ui.tagLabel)
$.getS3(url)
},afterTagRemoved:function(event,ui){
var post_id=$(this).attr('data-post_id')
var url=S3.Ap.concat('/cms/post/',post_id,'/remove_tag/',ui.tagLabel)
$.getS3(url)
},'''
        else:
            readonly = '''readOnly:true'''
        script = \
'''S3.tagit=function(){$('.s3-tags').tagit({autocomplete:{source:'%s'},%s})}
S3.tagit()
S3.redraw_fns.push('tagit')''' % (URL(c="cms", f="tag", args="search_ac.json"),
                                  readonly)
        s3.jquery_ready.append(script)

    s3db.configure("cms_post",
                   list_fields = list_fields,
                   )

    return table
Python
def apply_method(self, r, **attr):
    """
    Applies the method (controller entry point).

    Args:
        r: the CRUDRequest
        attr: controller arguments
    """

    # Only applicable to cms_post; r.error raises HTTP otherwise
    if r.name != "post":
        r.error(405, current.ERROR.BAD_METHOD)

    if r.representation == "html":
        output = self.html(r, **attr)
    #elif r.representation == "pdf":
    #    output = self.pdf(r, **attr)
    #    return output
    #elif r.representation in ("xlsx", "xls"):
    #    output = self.xls(r, **attr)
    #    return output
    else:
        r.error(415, current.ERROR.BAD_FORMAT)

    return output
Python
def apply_method(self, r, **attr):
    """
    Applies the method (controller entry point).

    Args:
        r: the CRUDRequest
        attr: controller arguments
    """

    # JSON is the only supported format; r.error raises HTTP otherwise
    if r.representation != "json":
        r.error(415, current.ERROR.BAD_FORMAT)

    # Return the names of all undeleted tags as a JSON list
    table = current.s3db.cms_tag
    rows = current.db(table.deleted == False).select(table.name)
    output = json.dumps([row.name for row in rows], separators=SEPARATORS)

    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def testPolicy3(self):
    """ Test permission check with policy 3 """

    auth = current.auth

    current.deployment_settings.security.policy = 3
    auth.permission = S3Permission(auth)

    c = "org"
    f = "permission_test"
    tablename = "org_permission_test"

    def permitted(method, record_id=None):
        # Shorthand for the permission check under test
        return auth.s3_has_permission(method, c=c, f=f,
                                      table=tablename,
                                      record_id=record_id)

    # Check anonymous
    auth.s3_impersonate(None)
    self.assertFalse(permitted("read"))

    # Check authenticated
    auth.s3_impersonate("[email protected]")
    self.assertFalse(permitted("read"))

    # Test with TESTREADER
    auth.s3_assign_role(auth.user.id, self.reader)
    self.assertTrue(permitted("read"))
    # Function ACL not applicable in policy 3
    self.assertFalse(permitted("create"))
    self.assertFalse(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("delete", record_id=self.record1))
    auth.s3_remove_role(auth.user.id, self.reader)

    # Test with TESTEDITOR
    auth.s3_assign_role(auth.user.id, self.editor)
    self.assertTrue(permitted("read"))
    self.assertTrue(permitted("create"))
    self.assertTrue(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("delete", record_id=self.record1))
    auth.s3_remove_role(auth.user.id, self.editor)
Python
def testPolicy4(self):
    """ Test permission check with policy 4 """

    auth = current.auth

    current.deployment_settings.security.policy = 4
    auth.permission = S3Permission(auth)

    c = "org"
    f = "permission_test"
    tablename = "org_permission_test"

    def permitted(method, record_id=None):
        # Shorthand for the permission check under test
        return auth.s3_has_permission(method, c=c, f=f,
                                      table=tablename,
                                      record_id=record_id)

    # Check anonymous
    auth.s3_impersonate(None)
    self.assertFalse(permitted("read"))

    # Check authenticated
    auth.s3_impersonate("[email protected]")
    self.assertFalse(permitted("read"))

    # Test with TESTREADER
    auth.s3_assign_role(auth.user.id, self.reader)
    self.assertTrue(permitted("read"))
    # Function ACL overrides controller ACL
    self.assertTrue(permitted("create"))
    self.assertFalse(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("delete", record_id=self.record1))
    auth.s3_remove_role(auth.user.id, self.reader)

    # Test with TESTEDITOR
    auth.s3_assign_role(auth.user.id, self.editor)
    self.assertTrue(permitted("read"))
    self.assertTrue(permitted("create"))
    self.assertTrue(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("delete", record_id=self.record1))
    auth.s3_remove_role(auth.user.id, self.editor)
Python
def testPolicy5(self):
    """ Test permission check with policy 5 """

    auth = current.auth

    current.deployment_settings.security.policy = 5
    auth.permission = S3Permission(auth)

    accessible_url = auth.permission.accessible_url

    c = "org"
    f = "permission_test"
    tablename = "org_permission_test"

    def permitted(method, record_id=None):
        # Shorthand for the permission check under test
        return auth.s3_has_permission(method, c=c, f=f,
                                      table=tablename,
                                      record_id=record_id)

    # Check anonymous
    auth.s3_impersonate(None)
    self.assertFalse(permitted("read"))
    self.assertEqual(accessible_url(c=c, f=f), False)

    # Check authenticated
    auth.s3_impersonate("[email protected]")
    self.assertFalse(permitted("read"))
    self.assertEqual(accessible_url(c=c, f=f), False)

    # Test with TESTREADER
    auth.s3_assign_role(auth.user.id, self.reader)
    self.assertTrue(permitted("read"))
    # Function ACL overrides controller ACL
    self.assertTrue(permitted("create"))
    # Page ACL blocks Table ACL
    self.assertFalse(permitted("update", record_id=self.record1))

    # Toggle page ACL => update permitted while the page ACL allows it
    acl = auth.permission
    acl.update_acl("TESTREADER", c=c, f=f,
                   uacl=acl.READ|acl.CREATE|acl.UPDATE,
                   oacl=acl.READ|acl.CREATE|acl.UPDATE)
    self.assertTrue(permitted("update", record_id=self.record1))
    acl.update_acl("TESTREADER", c=c, f=f,
                   uacl=acl.READ|acl.CREATE,
                   oacl=acl.READ|acl.CREATE|acl.UPDATE)
    self.assertFalse(permitted("update", record_id=self.record1))

    self.assertNotEqual(accessible_url(c=c, f=f), False)
    auth.s3_remove_role(auth.user.id, self.reader)

    # Test with TESTEDITOR
    auth.s3_assign_role(auth.user.id, self.editor)
    self.assertTrue(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("delete", record_id=self.record1))
    auth.s3_remove_role(auth.user.id, self.editor)
Python
def testPolicy6(self):
    """ Test permission check with policy 6 """

    auth = current.auth

    current.deployment_settings.security.policy = 6
    auth.permission = S3Permission(auth)

    c = "org"
    f = "permission_test"
    tablename = "org_permission_test"

    def permitted(method, record_id=None):
        # Shorthand for the permission check under test
        return auth.s3_has_permission(method, c=c, f=f,
                                      table=tablename,
                                      record_id=record_id)

    # Check anonymous
    auth.s3_impersonate(None)
    self.assertFalse(permitted("read"))

    # Check authenticated
    auth.s3_impersonate("[email protected]")
    self.assertFalse(permitted("read"))

    # Test with TESTREADER (universal realm)
    auth.s3_assign_role(auth.user.id, self.reader, for_pe=0)
    self.assertTrue(permitted("read"))
    # Function ACL overrides controller ACL
    self.assertTrue(permitted("create"))
    # Page ACL blocks Table ACL
    self.assertFalse(permitted("update", record_id=self.record1))
    auth.s3_remove_role(auth.user.id, self.reader, for_pe=[])

    # Test with TESTEDITOR with universal realm
    auth.s3_assign_role(auth.user.id, self.editor, for_pe=0)
    self.assertTrue(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("delete", record_id=self.record1))
    self.assertTrue(permitted("update", record_id=self.record2))
    self.assertFalse(permitted("delete", record_id=self.record2))
    auth.s3_remove_role(auth.user.id, self.editor, for_pe=[])

    # Test with TESTEDITOR with limited realm (org[0] only)
    auth.s3_assign_role(auth.user.id, self.editor, for_pe=self.org[0])
    self.assertTrue(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("delete", record_id=self.record1))
    self.assertFalse(permitted("update", record_id=self.record2))
    self.assertFalse(permitted("delete", record_id=self.record2))

    # Extend realm => record2 becomes updatable
    auth.s3_assign_role(auth.user.id, self.editor, for_pe=self.org[1])
    self.assertTrue(permitted("update", record_id=self.record1))
    self.assertTrue(permitted("update", record_id=self.record2))

    # Withdraw role for one realm => record1 no longer updatable
    auth.s3_remove_role(auth.user.id, self.editor, for_pe=self.org[0])
    self.assertFalse(permitted("update", record_id=self.record1))
    self.assertTrue(permitted("update", record_id=self.record2))

    # Withdraw role for all realms => no access at all
    auth.s3_remove_role(auth.user.id, self.editor, for_pe=[])
    self.assertFalse(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("update", record_id=self.record2))
Python
def testPolicy7(self):
    """ Test permission check with policy 7 """

    auth = current.auth
    s3db = current.s3db

    current.deployment_settings.security.policy = 7
    auth.permission = S3Permission(auth)

    c = "org"
    f = "permission_test"
    tablename = "org_permission_test"

    def permitted(method, record_id=None):
        # Shorthand for the permission check under test
        return auth.s3_has_permission(method, c=c, f=f,
                                      table=tablename,
                                      record_id=record_id)

    # Check anonymous
    auth.s3_impersonate(None)
    self.assertFalse(permitted("read"))

    # Check authenticated
    auth.s3_impersonate("[email protected]")
    self.assertFalse(permitted("read"))

    # Test with TESTEDITOR with limited realm (org[0])
    auth.s3_assign_role(auth.user.id, self.editor, for_pe=self.org[0])
    self.assertTrue(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("update", record_id=self.record2))

    # Make org[1] a sub-entity of org[0]
    s3db.pr_add_affiliation(self.org[0], self.org[1], role="TestOrgUnit")
    # Reload realms and test again
    auth.s3_impersonate("[email protected]")
    self.assertTrue(permitted("update", record_id=self.record1))
    # Should now have access
    self.assertTrue(permitted("update", record_id=self.record2))

    s3db.pr_remove_affiliation(self.org[0], self.org[1], role="TestOrgUnit")
    # Make org[0] a sub-entity of org[1]
    s3db.pr_add_affiliation(self.org[1], self.org[0], role="TestOrgUnit")
    # Reload realms
    auth.s3_impersonate("[email protected]")
    self.assertTrue(permitted("update", record_id=self.record1))
    # Should no longer have access
    self.assertFalse(permitted("update", record_id=self.record2))

    # Switch realm to org[1]
    auth.s3_remove_role(auth.user.id, self.editor, for_pe=self.org[0])
    auth.s3_assign_role(auth.user.id, self.editor, for_pe=self.org[1])
    # Reload realms
    auth.s3_impersonate("[email protected]")
    self.assertTrue(permitted("update", record_id=self.record1))
    # Should have access again
    self.assertTrue(permitted("update", record_id=self.record2))

    # Remove org[0] from realm
    s3db.pr_remove_affiliation(self.org[1], self.org[0], role="TestOrgUnit")
    # Reload realms
    auth.s3_impersonate("[email protected]")
    # Should no longer have access
    self.assertFalse(permitted("update", record_id=self.record1))
    self.assertTrue(permitted("update", record_id=self.record2))

    # Withdraw TESTEDITOR role => no access at all
    auth.s3_remove_role(auth.user.id, self.editor, for_pe=[])
    self.assertFalse(permitted("update", record_id=self.record1))
    self.assertFalse(permitted("update", record_id=self.record2))
Python
def testPolicy3(self): """ Test accessible query with policy 3 """ auth = current.auth current.deployment_settings.security.policy = 3 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, NONE) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor) query = accessible_query("read", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor)
def testPolicy3(self): """ Test accessible query with policy 3 """ auth = current.auth current.deployment_settings.security.policy = 3 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, NONE) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor) query = accessible_query("read", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor)
Python
def testPolicy4(self): """ Test accessible query with policy 4 """ auth = current.auth current.deployment_settings.security.policy = 4 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) roles = set(r for r in auth.user.realms if r is not None) OWNED = (((table.owned_by_user == auth.user.id) | \ ((table.owned_by_user == None) & \ (table.owned_by_group == None))) | \ (table.owned_by_group.belongs(roles))) assertEqual(query, OWNED) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, OWNED) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor) query = accessible_query("read", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor)
def testPolicy4(self): """ Test accessible query with policy 4 """ auth = current.auth current.deployment_settings.security.policy = 4 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) roles = set(r for r in auth.user.realms if r is not None) OWNED = (((table.owned_by_user == auth.user.id) | \ ((table.owned_by_user == None) & \ (table.owned_by_group == None))) | \ (table.owned_by_group.belongs(roles))) assertEqual(query, OWNED) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, OWNED) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor) query = accessible_query("read", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor)
Python
def testPolicy5(self): """ Test accessible query with policy 5 """ auth = current.auth current.deployment_settings.security.policy = 5 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) roles = set(r for r in auth.user.realms if r is not None) OWNED = (((table.owned_by_user == auth.user.id) | \ ((table.owned_by_user == None) & \ (table.owned_by_group == None))) | \ (table.owned_by_group.belongs(roles))) assertEqual(query, OWNED) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor) query = accessible_query("read", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor)
def testPolicy5(self): """ Test accessible query with policy 5 """ auth = current.auth current.deployment_settings.security.policy = 5 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) roles = set(r for r in auth.user.realms if r is not None) OWNED = (((table.owned_by_user == auth.user.id) | \ ((table.owned_by_user == None) & \ (table.owned_by_group == None))) | \ (table.owned_by_group.belongs(roles))) assertEqual(query, OWNED) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor) query = accessible_query("read", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("update", table, c=c, f=f) assertEqual(query, ALL) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor)
Python
def testPolicy6(self): """ Test accessible query with policy 6 """ auth = current.auth current.deployment_settings.security.policy = 6 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual #ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader, for_pe=self.org[0]) roles = {2, 3} expected = (((table.realm_entity == self.org[0]) | \ (table.realm_entity == None)) | \ ((((table.owned_by_user == None) & \ (table.owned_by_group == None)) & \ (table.realm_entity == None)) | \ (table.owned_by_group.belongs(roles)))) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, expected) query = accessible_query("update", table, c=c, f=f) expected = ((((table.owned_by_user == auth.user.id) & \ ((table.realm_entity == self.org[0]) | \ (table.realm_entity == None))) | \ (((table.owned_by_user == None) & \ (table.owned_by_group == None)) & \ (table.realm_entity == None))) | \ (((table.owned_by_group == self.reader) & \ (table.realm_entity.belongs([self.org[0]]))) | \ (table.owned_by_group.belongs(roles)))) assertEqual(query, expected) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor, for_pe=self.org[0]) roles = {2, 3} query = accessible_query("read", table, c=c, f=f) expected = (((table.realm_entity == self.org[0]) | \ (table.realm_entity == None)) | \ ((((table.owned_by_user == None) & \ (table.owned_by_group == None)) & \ (table.realm_entity == None)) | \ 
(table.owned_by_group.belongs(roles)))) assertEqual(query, expected) query = accessible_query("update", table, c=c, f=f) assertEqual(query, expected) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor) # Logout auth.s3_impersonate(None)
def testPolicy6(self): """ Test accessible query with policy 6 """ auth = current.auth current.deployment_settings.security.policy = 6 auth.permission = S3Permission(auth) accessible_query = auth.s3_accessible_query c = "org" f = "permission_test" table = current.s3db.org_permission_test assertEqual = self.assertEqual #ALL = (table.id > 0) NONE = (table.id == 0) # Check anonymous auth.s3_impersonate(None) query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Check authenticated auth.s3_impersonate("[email protected]") query = accessible_query("read", table, c=c, f=f) assertEqual(query, NONE) # Test with TESTREADER auth.s3_assign_role(auth.user.id, self.reader, for_pe=self.org[0]) roles = {2, 3} expected = (((table.realm_entity == self.org[0]) | \ (table.realm_entity == None)) | \ ((((table.owned_by_user == None) & \ (table.owned_by_group == None)) & \ (table.realm_entity == None)) | \ (table.owned_by_group.belongs(roles)))) query = accessible_query("read", "org_permission_test", c=c, f=f) assertEqual(query, expected) query = accessible_query("update", table, c=c, f=f) expected = ((((table.owned_by_user == auth.user.id) & \ ((table.realm_entity == self.org[0]) | \ (table.realm_entity == None))) | \ (((table.owned_by_user == None) & \ (table.owned_by_group == None)) & \ (table.realm_entity == None))) | \ (((table.owned_by_group == self.reader) & \ (table.realm_entity.belongs([self.org[0]]))) | \ (table.owned_by_group.belongs(roles)))) assertEqual(query, expected) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.reader) # Test with TESTEDITOR auth.s3_assign_role(auth.user.id, self.editor, for_pe=self.org[0]) roles = {2, 3} query = accessible_query("read", table, c=c, f=f) expected = (((table.realm_entity == self.org[0]) | \ (table.realm_entity == None)) | \ ((((table.owned_by_user == None) & \ (table.owned_by_group == None)) & \ (table.realm_entity == None)) | \ 
(table.owned_by_group.belongs(roles)))) assertEqual(query, expected) query = accessible_query("update", table, c=c, f=f) assertEqual(query, expected) query = accessible_query("delete", table, c=c, f=f) assertEqual(query, NONE) auth.s3_remove_role(auth.user.id, self.editor) # Logout auth.s3_impersonate(None)
Python
def as_dict(self, options=None): """ Output the Layers as a Python dict """ sublayer_dicts = [] append = sublayer_dicts.append sublayers = self.sublayers for sublayer in sublayers: # Read the output dict for this sublayer sublayer_dict = sublayer.as_dict() if sublayer_dict: # Add this layer to the list of layers for this layer type append(sublayer_dict) if sublayer_dicts and options: # Used by Map._setup() options[self.dictname] = sublayer_dicts return sublayer_dicts
def as_dict(self, options=None): """ Output the Layers as a Python dict """ sublayer_dicts = [] append = sublayer_dicts.append sublayers = self.sublayers for sublayer in sublayers: # Read the output dict for this sublayer sublayer_dict = sublayer.as_dict() if sublayer_dict: # Add this layer to the list of layers for this layer type append(sublayer_dict) if sublayer_dicts and options: # Used by Map._setup() options[self.dictname] = sublayer_dicts return sublayer_dicts
Python
def as_javascript(self): """ Output the Layers as global Javascript - suitable for inclusion in the HTML page """ result = self.as_json() if result: return '''S3.gis.%s=%s\n''' % (self.dictname, result) else: return ""
def as_javascript(self): """ Output the Layers as global Javascript - suitable for inclusion in the HTML page """ result = self.as_json() if result: return '''S3.gis.%s=%s\n''' % (self.dictname, result) else: return ""
Python
def __connect(self): """ Connect this instance to the hierarchy """ tablename = self.tablename if tablename : hierarchies = current.model["hierarchies"] if tablename in hierarchies: hierarchy = hierarchies[tablename] self.__theset = hierarchy["nodes"] self.__flags = hierarchy["flags"] else: self.__theset = dict() self.__flags = dict() self.load() hierarchy = {"nodes": self.__theset, "flags": self.__flags} hierarchies[tablename] = hierarchy else: self.__theset = dict() self.__flags = dict() return
def __connect(self): """ Connect this instance to the hierarchy """ tablename = self.tablename if tablename : hierarchies = current.model["hierarchies"] if tablename in hierarchies: hierarchy = hierarchies[tablename] self.__theset = hierarchy["nodes"] self.__flags = hierarchy["flags"] else: self.__theset = dict() self.__flags = dict() self.load() hierarchy = {"nodes": self.__theset, "flags": self.__flags} hierarchies[tablename] = hierarchy else: self.__theset = dict() self.__flags = dict() return
Python
def load(self): """ Try loading the hierarchy from s3_hierarchy """ if not self.config: return tablename = self.tablename if not self.__status("dbstatus", True): # Cancel attempt if DB is known to be dirty self.__status(dirty=True) return htable = current.s3db.s3_hierarchy query = (htable.tablename == tablename) row = current.db(query).select(htable.dirty, htable.hierarchy, limitby = (0, 1) ).first() if row and not row.dirty: data = row.hierarchy theset = self.__theset theset.clear() for node_id, item in data["nodes"].items(): theset[int(node_id)] = {"p": item["p"], "c": item["c"], "s": set(item["s"]) \ if item["s"] else set()} self.__status(dirty = False, dbupdate = None, dbstatus = True) return else: self.__status(dirty = True, dbupdate = None, dbstatus = False if row else None) return
def load(self): """ Try loading the hierarchy from s3_hierarchy """ if not self.config: return tablename = self.tablename if not self.__status("dbstatus", True): # Cancel attempt if DB is known to be dirty self.__status(dirty=True) return htable = current.s3db.s3_hierarchy query = (htable.tablename == tablename) row = current.db(query).select(htable.dirty, htable.hierarchy, limitby = (0, 1) ).first() if row and not row.dirty: data = row.hierarchy theset = self.__theset theset.clear() for node_id, item in data["nodes"].items(): theset[int(node_id)] = {"p": item["p"], "c": item["c"], "s": set(item["s"]) \ if item["s"] else set()} self.__status(dirty = False, dbupdate = None, dbstatus = True) return else: self.__status(dirty = True, dbupdate = None, dbstatus = False if row else None) return
Python
def dirty(cls, tablename): """ Mark this hierarchy as dirty. To be called when the target table gets updated (can be called repeatedly). Args: tablename: the tablename """ s3db = current.s3db if not tablename: return config = s3db.get_config(tablename, "hierarchy") if not config: return hierarchies = current.model["hierarchies"] if tablename in hierarchies: hierarchy = hierarchies[tablename] flags = hierarchy["flags"] else: flags = {} hierarchies[tablename] = {"nodes": dict(), "flags": flags} flags["dirty"] = True dbstatus = flags.get("dbstatus", True) if dbstatus: htable = current.s3db.s3_hierarchy query = (htable.tablename == tablename) row = current.db(query).select(htable.id, htable.dirty, limitby=(0, 1)).first() if not row: htable.insert(tablename=tablename, dirty=True) elif not row.dirty: row.update_record(dirty=True) flags["dbstatus"] = False return
def dirty(cls, tablename): """ Mark this hierarchy as dirty. To be called when the target table gets updated (can be called repeatedly). Args: tablename: the tablename """ s3db = current.s3db if not tablename: return config = s3db.get_config(tablename, "hierarchy") if not config: return hierarchies = current.model["hierarchies"] if tablename in hierarchies: hierarchy = hierarchies[tablename] flags = hierarchy["flags"] else: flags = {} hierarchies[tablename] = {"nodes": dict(), "flags": flags} flags["dirty"] = True dbstatus = flags.get("dbstatus", True) if dbstatus: htable = current.s3db.s3_hierarchy query = (htable.tablename == tablename) row = current.db(query).select(htable.id, htable.dirty, limitby=(0, 1)).first() if not row: htable.insert(tablename=tablename, dirty=True) elif not row.dirty: row.update_record(dirty=True) flags["dbstatus"] = False return
Python
def read(self): """ Rebuild this hierarchy from the target table """ tablename = self.tablename if not tablename: return s3db = current.s3db table = s3db[tablename] pkey = self.pkey fkey = self.fkey ckey = self.ckey fields = [pkey, fkey] if ckey is not None: fields.append(table[ckey]) if "deleted" in table: query = (table.deleted == False) else: query = (table.id > 0) rows = current.db(query).select(left = self.left, *fields) self.__theset.clear() add = self.add cfield = table[ckey] for row in rows: n = row[pkey] p = row[fkey] if ckey: c = row[cfield] else: c = None add(n, parent_id=p, category=c) # Update status: memory is clean, db needs update self.__status(dirty=False, dbupdate=True) # Remove subset self.__roots = None self.__nodes = None return
def read(self): """ Rebuild this hierarchy from the target table """ tablename = self.tablename if not tablename: return s3db = current.s3db table = s3db[tablename] pkey = self.pkey fkey = self.fkey ckey = self.ckey fields = [pkey, fkey] if ckey is not None: fields.append(table[ckey]) if "deleted" in table: query = (table.deleted == False) else: query = (table.id > 0) rows = current.db(query).select(left = self.left, *fields) self.__theset.clear() add = self.add cfield = table[ckey] for row in rows: n = row[pkey] p = row[fkey] if ckey: c = row[cfield] else: c = None add(n, parent_id=p, category=c) # Update status: memory is clean, db needs update self.__status(dirty=False, dbupdate=True) # Remove subset self.__roots = None self.__nodes = None return
Python
def __keys(self): """ Introspect the key fields in the hierarchical table """ tablename = self.tablename if not tablename: return s3db = current.s3db table = s3db[tablename] config = s3db.get_config(tablename, "hierarchy") if not config: return if isinstance(config, tuple): parent, self.__ckey = config[:2] else: parent, self.__ckey = config, None pkey = None fkey = None if parent is None: # Assume self-reference pkey = table._id for field in table: ftype = str(field.type) if ftype[:9] == "reference": key = ftype[10:].split(".") if key[0] == tablename and \ (len(key) == 1 or key[1] == pkey.name): parent = field.name fkey = field break else: resource = s3db.resource(tablename) master = resource.tablename rfield = resource.resolve_selector(parent) ltname = rfield.tname if ltname == master: # Self-reference fkey = rfield.field self.__link = None self.__lkey = None self.__left = None else: # Link table # Use the parent selector to find the link resource alias = parent.split(".%s" % rfield.fname)[0] calias = s3db.get_alias(master, alias) if not calias: # Fall back to link table name alias = ltname.split("_", 1)[1] calias = s3db.get_alias(master, alias) # Load the component and get the link parameters if calias: component = resource.components.get(calias) link = component.link if link: fkey = rfield.field self.__link = ltname self.__lkey = link.fkey self.__left = rfield.left.get(ltname) if not fkey: # No parent field found raise AttributeError("parent link not found") if pkey is None: ftype = str(fkey.type) if ftype[:9] != "reference": # Invalid parent field (not a foreign key) raise SyntaxError("Invalid parent field: " "%s is not a foreign key" % fkey) key = ftype[10:].split(".") if key[0] == tablename: # Self-reference pkey = table._id else: # Super-entity? 
ktable = s3db[key[0]] skey = ktable._id.name if skey != "id" and "instance_type" in ktable: try: pkey = table[skey] except AttributeError: raise SyntaxError("%s is not an instance type of %s" % (tablename, ktable._tablename)) self.__pkey = pkey self.__fkey = fkey return
def __keys(self): """ Introspect the key fields in the hierarchical table """ tablename = self.tablename if not tablename: return s3db = current.s3db table = s3db[tablename] config = s3db.get_config(tablename, "hierarchy") if not config: return if isinstance(config, tuple): parent, self.__ckey = config[:2] else: parent, self.__ckey = config, None pkey = None fkey = None if parent is None: # Assume self-reference pkey = table._id for field in table: ftype = str(field.type) if ftype[:9] == "reference": key = ftype[10:].split(".") if key[0] == tablename and \ (len(key) == 1 or key[1] == pkey.name): parent = field.name fkey = field break else: resource = s3db.resource(tablename) master = resource.tablename rfield = resource.resolve_selector(parent) ltname = rfield.tname if ltname == master: # Self-reference fkey = rfield.field self.__link = None self.__lkey = None self.__left = None else: # Link table # Use the parent selector to find the link resource alias = parent.split(".%s" % rfield.fname)[0] calias = s3db.get_alias(master, alias) if not calias: # Fall back to link table name alias = ltname.split("_", 1)[1] calias = s3db.get_alias(master, alias) # Load the component and get the link parameters if calias: component = resource.components.get(calias) link = component.link if link: fkey = rfield.field self.__link = ltname self.__lkey = link.fkey self.__left = rfield.left.get(ltname) if not fkey: # No parent field found raise AttributeError("parent link not found") if pkey is None: ftype = str(fkey.type) if ftype[:9] != "reference": # Invalid parent field (not a foreign key) raise SyntaxError("Invalid parent field: " "%s is not a foreign key" % fkey) key = ftype[10:].split(".") if key[0] == tablename: # Self-reference pkey = table._id else: # Super-entity? 
ktable = s3db[key[0]] skey = ktable._id.name if skey != "id" and "instance_type" in ktable: try: pkey = table[skey] except AttributeError: raise SyntaxError("%s is not an instance type of %s" % (tablename, ktable._tablename)) self.__pkey = pkey self.__fkey = fkey return
Python
def _represent(self, node_ids=None, renderer=None): """ Represent nodes as labels, the labels are stored in the nodes as attribute "l". Args: node_ids: the node IDs (None for all nodes) renderer: the representation method (falls back to the "name" field in the target table if present) """ theset = self.theset if node_ids is None: node_ids = self.nodes.keys() pending = set() for node_id in node_ids: node = theset.get(node_id) if not node: continue if "l" not in node: pending.add(node_id) if renderer is None: renderer = self.represent if renderer is None: tablename = self.tablename table = current.s3db.table(tablename) if tablename else None if table and "name" in table.fields: self.represent = renderer = S3Represent(lookup = tablename, key = self.pkey.name) else: renderer = s3_str if hasattr(renderer, "bulk"): labels = renderer.bulk(list(pending), list_type = False) for node_id, label in labels.items(): if node_id in theset: theset[node_id]["l"] = label else: for node_id in pending: try: label = renderer(node_id) except: label = s3_str(node_id) theset[node_id]["l"] = label return
def _represent(self, node_ids=None, renderer=None): """ Represent nodes as labels, the labels are stored in the nodes as attribute "l". Args: node_ids: the node IDs (None for all nodes) renderer: the representation method (falls back to the "name" field in the target table if present) """ theset = self.theset if node_ids is None: node_ids = self.nodes.keys() pending = set() for node_id in node_ids: node = theset.get(node_id) if not node: continue if "l" not in node: pending.add(node_id) if renderer is None: renderer = self.represent if renderer is None: tablename = self.tablename table = current.s3db.table(tablename) if tablename else None if table and "name" in table.fields: self.represent = renderer = S3Represent(lookup = tablename, key = self.pkey.name) else: renderer = s3_str if hasattr(renderer, "bulk"): labels = renderer.bulk(list(pending), list_type = False) for node_id, label in labels.items(): if node_id in theset: theset[node_id]["l"] = label else: for node_id in pending: try: label = renderer(node_id) except: label = s3_str(node_id) theset[node_id]["l"] = label return
Python
def label(self, node_id, represent=None): """ Get a label for a node Args: node_id: the node ID represent: the node ID representation method """ theset = self.theset node = theset.get(node_id) if node: if "l" in node: label = node["l"] else: self._represent(node_ids=[node_id], renderer=represent) if "l" in node: label = node["l"] if type(label) is str: label = s3_str(label) return label return None
def label(self, node_id, represent=None): """ Get a label for a node Args: node_id: the node ID represent: the node ID representation method """ theset = self.theset node = theset.get(node_id) if node: if "l" in node: label = node["l"] else: self._represent(node_ids=[node_id], renderer=represent) if "l" in node: label = node["l"] if type(label) is str: label = s3_str(label) return label return None
Python
def repr_expand(self, node_ids, levels=None, represent=None): """ Helper function to represent a set of nodes as lists of their respective ancestors starting by the root node Args: node_ids: the node_ids (iterable) levels: the number of levels to include (counting from root) represent: a representation function for each ancestor Returns: a dict {node_id: ["Label", "Label", ...]} - each label list is padded with "-" to reach the requested number of levels (TODO make this configurable) """ paths = {} all_parents = set() for node_id in node_ids: paths[node_id] = path = self.path(node_id) all_parents |= set(path) self._represent(all_parents, renderer=represent) theset = self.theset result = {} for node_id, path in paths.items(): p = (path + [None] * levels)[:levels] l = [theset[parent]["l"] if parent else "-" for parent in p] result[node_id] = l return result
def repr_expand(self, node_ids, levels=None, represent=None):
    """
    Helper function to represent a set of nodes as lists of their
    respective ancestors starting by the root node

    Args:
        node_ids: the node_ids (iterable)
        levels: the number of levels to include (counting from root);
                defaults to the length of the longest ancestor path
        represent: a representation function for each ancestor

    Returns:
        a dict {node_id: ["Label", "Label", ...]} - each label list
        is padded with "-" to reach the requested number of levels
        (TODO make this configurable)
    """

    # Determine the root paths of all requested nodes
    paths = {}
    all_parents = set()
    for node_id in node_ids:
        paths[node_id] = path = self.path(node_id)
        all_parents |= set(path)

    # BUGFIX: the original crashed with TypeError ([None] * None)
    # when levels was left at its default; default to the longest path
    if levels is None:
        levels = max((len(p) for p in paths.values()), default=0)

    # Make sure all ancestors have labels
    self._represent(all_parents, renderer=represent)

    theset = self.theset
    result = {}
    for node_id, path in paths.items():
        # Pad the path with None up to the requested number of levels
        p = (path + [None] * levels)[:levels]
        result[node_id] = [theset[parent]["l"] if parent else "-"
                           for parent in p]
    return result
Python
def json(self, root=None, represent=None, max_depth=None):
    """
    Represent the hierarchy as JSON-serializable dict

    Args:
        root: the root node ID (or array of root node IDs)
        represent: the representation method
        max_depth: maximum recursion depth

    Returns:
        the hierarchy as dict:
            {node_id: [label, category, subnodes], ...}
    """

    # Make sure all nodes have labels
    self._represent(renderer=represent)

    output = {}
    for node_id in ([root] if root else self.roots):
        subtree = self._json(node_id,
                             represent = represent,
                             max_depth = max_depth,
                             )
        if subtree:
            output[node_id] = subtree
    return output
def json(self, root=None, represent=None, max_depth=None):
    """
    Represent the hierarchy as JSON-serializable dict

    Args:
        root: the root node ID (or array of root node IDs)
        represent: the representation method
        max_depth: maximum recursion depth

    Returns:
        the hierarchy as dict:
            {node_id: [label, category, subnodes], ...}
    """

    # Make sure all nodes have labels
    self._represent(renderer=represent)

    output = {}
    for node_id in ([root] if root else self.roots):
        subtree = self._json(node_id,
                             represent = represent,
                             max_depth = max_depth,
                             )
        if subtree:
            output[node_id] = subtree
    return output
Python
def export_node(self,
                node_id,
                prefix = "_hierarchy",
                depth=None,
                level=0,
                path=None,
                hcol=None,
                columns = None,
                data = None,
                node_list=None):
    """
    Export the hierarchy beneath a node

    Args:
        node_id: the root node
        prefix: prefix for the hierarchy column in the output
        depth: the maximum depth to export
        level: the current recursion level (internal)
        path: the path dict for this node (internal)
        hcol: the hierarchy column in the input data
        columns: the list of columns to export
        data: the input data dict {node_id: row}
        node_list: the output data list (will be appended to)

    Returns:
        the output data list

    TODO pass the input data as list and retain the original
         order when recursing into child nodes?
    """

    if node_list is None:
        node_list = []

    # Do not recurse deeper than depth levels below the root node
    if depth is not None and level > depth:
        return node_list

    # Get the current node
    node = self.nodes.get(node_id)
    if not node:
        return node_list

    # Get the node data
    if data:
        if node_id not in data:
            return node_list
        node_data = data.get(node_id)
    else:
        node_data = {}

    # Generate the path dict if it doesn't exist yet
    if path is None:
        if depth is None:
            depth = self.depth(node_id)
        path = dict(("%s.%s" % (prefix, l), "") for l in range(depth+1))

    # Set the hierarchy column
    label = node_data.get(hcol) if hcol else node_id
    path["%s.%s" % (prefix, level)] = label

    # Add remaining columns to the record dict
    record = dict(path)
    if columns:
        for column in columns:
            # Skip the hierarchy column itself (already in the path);
            # BUGFIX: the original compared the columns *list* against
            # hcol here, which never matched and thus duplicated the
            # hierarchy column into the record
            if column == hcol:
                continue
            record[column] = node_data.get(column)

    # Append the record to the node list
    node_list.append(record)

    # Recurse into child nodes
    children = node["s"]
    for child in children:
        self.export_node(child,
                         prefix = prefix,
                         depth=depth,
                         level=level+1,
                         path=dict(path),
                         hcol=hcol,
                         columns=columns,
                         data=data,
                         node_list=node_list,
                         )
    return node_list
def export_node(self,
                node_id,
                prefix = "_hierarchy",
                depth=None,
                level=0,
                path=None,
                hcol=None,
                columns = None,
                data = None,
                node_list=None):
    """
    Export the hierarchy beneath a node

    Args:
        node_id: the root node
        prefix: prefix for the hierarchy column in the output
        depth: the maximum depth to export
        level: the current recursion level (internal)
        path: the path dict for this node (internal)
        hcol: the hierarchy column in the input data
        columns: the list of columns to export
        data: the input data dict {node_id: row}
        node_list: the output data list (will be appended to)

    Returns:
        the output data list

    TODO pass the input data as list and retain the original
         order when recursing into child nodes?
    """

    if node_list is None:
        node_list = []

    # Do not recurse deeper than depth levels below the root node
    if depth is not None and level > depth:
        return node_list

    # Get the current node
    node = self.nodes.get(node_id)
    if not node:
        return node_list

    # Get the node data
    if data:
        if node_id not in data:
            return node_list
        node_data = data.get(node_id)
    else:
        node_data = {}

    # Generate the path dict if it doesn't exist yet
    if path is None:
        if depth is None:
            depth = self.depth(node_id)
        path = dict(("%s.%s" % (prefix, l), "") for l in range(depth+1))

    # Set the hierarchy column
    label = node_data.get(hcol) if hcol else node_id
    path["%s.%s" % (prefix, level)] = label

    # Add remaining columns to the record dict
    record = dict(path)
    if columns:
        for column in columns:
            # Skip the hierarchy column itself (already in the path);
            # BUGFIX: the original compared the columns *list* against
            # hcol here, which never matched and thus duplicated the
            # hierarchy column into the record
            if column == hcol:
                continue
            record[column] = node_data.get(column)

    # Append the record to the node list
    node_list.append(record)

    # Recurse into child nodes
    children = node["s"]
    for child in children:
        self.export_node(child,
                         prefix = prefix,
                         depth=depth,
                         level=level+1,
                         path=dict(path),
                         hcol=hcol,
                         columns=columns,
                         data=data,
                         node_list=node_list,
                         )
    return node_list
Python
def pr_address_onaccept(form):
    """
    Custom onaccept to set the person's Location to the Private
    Address - unless their case is associated with a Site
    """

    try:
        record_id = form.vars.id
    except AttributeError:
        # Nothing we can do
        return

    db = current.db
    s3db = current.s3db

    # Look up the address record
    atable = db.pr_address
    address = db(atable.id == record_id).select(atable.location_id,
                                                atable.pe_id,
                                                limitby = (0, 1),
                                                ).first()
    try:
        location_id = address.location_id
    except AttributeError:
        # Nothing we can do
        return

    # Check whether the person's case is linked to a site
    ptable = s3db.pr_person
    ctable = s3db.dvr_case
    query = (ptable.pe_id == address.pe_id) & \
            (ptable.id == ctable.person_id)
    case = db(query).select(ctable.site_id,
                            limitby = (0, 1),
                            ).first()

    if case and not case.site_id:
        # No site => use the private address as the person's location
        db(ptable.pe_id == address.pe_id).update(location_id = location_id,
                                                 # Indirect update by system rule,
                                                 # do not change modified_* fields:
                                                 modified_on = ptable.modified_on,
                                                 modified_by = ptable.modified_by,
                                                 )
def pr_address_onaccept(form):
    """
    Custom onaccept to set the person's Location to the Private
    Address - unless their case is associated with a Site
    """

    try:
        record_id = form.vars.id
    except AttributeError:
        # Nothing we can do
        return

    db = current.db
    s3db = current.s3db

    # Look up the address record
    atable = db.pr_address
    address = db(atable.id == record_id).select(atable.location_id,
                                                atable.pe_id,
                                                limitby = (0, 1),
                                                ).first()
    try:
        location_id = address.location_id
    except AttributeError:
        # Nothing we can do
        return

    # Check whether the person's case is linked to a site
    ptable = s3db.pr_person
    ctable = s3db.dvr_case
    query = (ptable.pe_id == address.pe_id) & \
            (ptable.id == ctable.person_id)
    case = db(query).select(ctable.site_id,
                            limitby = (0, 1),
                            ).first()

    if case and not case.site_id:
        # No site => use the private address as the person's location
        db(ptable.pe_id == address.pe_id).update(location_id = location_id,
                                                 # Indirect update by system rule,
                                                 # do not change modified_* fields:
                                                 modified_on = ptable.modified_on,
                                                 modified_by = ptable.modified_by,
                                                 )
Python
def configure_person_tags():
    """
    Configure filtered pr_person_tag components for
    registration numbers:
        - BAMF Registration Number (tag=BAMF)
    """

    # Single filtered component for the BAMF registration number
    bamf = {"name": "bamf",
            "joinby": "person_id",
            "filterby": {"tag": "BAMF"},
            "multiple": False,
            }

    current.s3db.add_components("pr_person",
                                pr_person_tag = (bamf,),
                                )
def configure_person_tags():
    """
    Configure filtered pr_person_tag components for
    registration numbers:
        - BAMF Registration Number (tag=BAMF)
    """

    # Single filtered component for the BAMF registration number
    bamf = {"name": "bamf",
            "joinby": "person_id",
            "filterby": {"tag": "BAMF"},
            "multiple": False,
            }

    current.s3db.add_components("pr_person",
                                pr_person_tag = (bamf,),
                                )
Python
def check_kernel():
    """Ensure that PWD is a kernel directory"""
    # An MSM kernel tree has both of these marker files
    have_maintainers = os.path.isfile('MAINTAINERS')
    have_msm_kconfig = os.path.isfile('arch/arm/mach-msm/Kconfig')
    if not (have_maintainers and have_msm_kconfig):
        fail("This doesn't seem to be an MSM kernel dir")
def check_kernel():
    """Ensure that PWD is a kernel directory"""
    # An MSM kernel tree has both of these marker files
    have_maintainers = os.path.isfile('MAINTAINERS')
    have_msm_kconfig = os.path.isfile('arch/arm/mach-msm/Kconfig')
    if not (have_maintainers and have_msm_kconfig):
        fail("This doesn't seem to be an MSM kernel dir")
Python
def check_build():
    """Ensure that the build directory is present."""
    # exist_ok replaces the racy isdir-check + EEXIST-swallowing dance;
    # it still raises on genuine errors (permissions, existing file)
    os.makedirs(build_dir, exist_ok=True)
def check_build():
    """Ensure that the build directory is present."""
    # exist_ok replaces the racy isdir-check + EEXIST-swallowing dance;
    # it still raises on genuine errors (permissions, existing file)
    os.makedirs(build_dir, exist_ok=True)
Python
def build_threads():
    """Determine the number of build threads requested by the user"""
    # Load average takes precedence over an explicit job count
    if all_options.load_average:
        return all_options.load_average
    jobs = all_options.jobs
    return jobs if jobs else 1
def build_threads():
    """Determine the number of build threads requested by the user"""
    # Load average takes precedence over an explicit job count
    if all_options.load_average:
        return all_options.load_average
    jobs = all_options.jobs
    return jobs if jobs else 1
Python
def scan_configs():
    """Get the full list of defconfigs appropriate for this tree."""
    builders = []

    def add_configs(config_dir, patterns, suffix=''):
        # Each "<name>_defconfig" glob match yields a builder named
        # after the file minus the "_defconfig" suffix
        for pattern in patterns:
            for match in glob.glob(config_dir + pattern):
                name = os.path.basename(match)[:-10] + suffix
                builders.append(Builder(name, match))

    add_configs('arch/arm/configs/',
                ('[fm]sm[0-9]*_defconfig',
                 'apq*_defconfig',
                 'qsd*_defconfig',
                 'mdm*_defconfig',
                 'mpq*_defconfig',
                 ))
    if 'CROSS_COMPILE64' in os.environ:
        add_configs('arch/arm64/configs/',
                    ('msm*_defconfig',),
                    suffix='-64')
    return builders
def scan_configs():
    """Get the full list of defconfigs appropriate for this tree."""
    builders = []

    def add_configs(config_dir, patterns, suffix=''):
        # Each "<name>_defconfig" glob match yields a builder named
        # after the file minus the "_defconfig" suffix
        for pattern in patterns:
            for match in glob.glob(config_dir + pattern):
                name = os.path.basename(match)[:-10] + suffix
                builders.append(Builder(name, match))

    add_configs('arch/arm/configs/',
                ('[fm]sm[0-9]*_defconfig',
                 'apq*_defconfig',
                 'qsd*_defconfig',
                 'mdm*_defconfig',
                 'mpq*_defconfig',
                 ))
    if 'CROSS_COMPILE64' in os.environ:
        add_configs('arch/arm64/configs/',
                    ('msm*_defconfig',),
                    suffix='-64')
    return builders
Python
def request_weather(url):
    """request the weather from openweathermap.org API. Returns a dict
    of the json file"""
    # GET the endpoint and decode the JSON body in one go
    return requests.get(url).json()
def request_weather(url):
    """request the weather from openweathermap.org API. Returns a dict
    of the json file"""
    # GET the endpoint and decode the JSON body in one go
    return requests.get(url).json()
Python
def parse_weather(weather_data_raw):
    """parse the useful weather data from dict of the openweathermap.org
    json data. returns another, slimmed down dict with the chosen
    elements"""
    city = weather_data_raw.get("city")
    parsed_weather = {
        'sunrise': dt.fromtimestamp(city.get("sunrise")).time(),
        'sunset': dt.fromtimestamp(city.get("sunset")).time(),
    }

    tomorrow = dt.today().date() + timedelta(days=1)
    for period in weather_data_raw['list']:
        when = dt.fromtimestamp(period.get("dt"))
        # limiting the parsed weather data to weather for the next day
        if when.date() != tomorrow:
            continue
        # the dict key for each period is a 2-digit 24-hour time,
        # e.g. "15" for 3.00pm
        hour = str(when.time())[:2]
        parsed_weather[hour] = [
            hour,
            round(period.get("main").get("temp")),
            period.get("weather")[0].get("main").center(15),
            str(period.get("clouds").get("all")).zfill(3),
            str(round(period.get("wind").get("speed"))).zfill(3),
        ]
    return parsed_weather
def parse_weather(weather_data_raw):
    """parse the useful weather data from dict of the openweathermap.org
    json data. returns another, slimmed down dict with the chosen
    elements"""
    city = weather_data_raw.get("city")
    parsed_weather = {
        'sunrise': dt.fromtimestamp(city.get("sunrise")).time(),
        'sunset': dt.fromtimestamp(city.get("sunset")).time(),
    }

    tomorrow = dt.today().date() + timedelta(days=1)
    for period in weather_data_raw['list']:
        when = dt.fromtimestamp(period.get("dt"))
        # limiting the parsed weather data to weather for the next day
        if when.date() != tomorrow:
            continue
        # the dict key for each period is a 2-digit 24-hour time,
        # e.g. "15" for 3.00pm
        hour = str(when.time())[:2]
        parsed_weather[hour] = [
            hour,
            round(period.get("main").get("temp")),
            period.get("weather")[0].get("main").center(15),
            str(period.get("clouds").get("all")).zfill(3),
            str(round(period.get("wind").get("speed"))).zfill(3),
        ]
    return parsed_weather
Python
def make_message(parsed_weather):
    """takes a dict of weather data and processes into a message to send
    to whatsapp"""
    # One table row per 3-hour forecast period of the next day
    table = PrettyTable(['T'.center(6), '°C'.center(6), 'description',
                         'c%'.center(3), 'ws'.center(4)])
    for hour in ('00', '03', '06', '09', '12', '15', '18', '21'):
        table.add_row(parsed_weather[hour])
    return table
def make_message(parsed_weather):
    """takes a dict of weather data and processes into a message to send
    to whatsapp"""
    # One table row per 3-hour forecast period of the next day
    table = PrettyTable(['T'.center(6), '°C'.center(6), 'description',
                         'c%'.center(3), 'ws'.center(4)])
    for hour in ('00', '03', '06', '09', '12', '15', '18', '21'):
        table.add_row(parsed_weather[hour])
    return table
Python
def send_message(message):
    """uses twilio to send a whatsapp message to a phone number"""
    print(message)
    # Send via the Twilio WhatsApp sandbox number
    client.messages.create(
        body=message,
        from_='whatsapp:+14155238886',
        to=to_number,
    )
def send_message(message):
    """uses twilio to send a whatsapp message to a phone number"""
    print(message)
    # Send via the Twilio WhatsApp sandbox number
    client.messages.create(
        body=message,
        from_='whatsapp:+14155238886',
        to=to_number,
    )
Python
def has_gpp():
    """ Check if this system has g++ compiler """
    # BUGFIX: 'which' exits 0 when g++ is on PATH; the original command
    # ('bash which g++') tried to run a file named "which" as a bash
    # script in the current directory, so it always failed
    result = os.system('which g++')
    print(f"has_gpp() result = {result}")
    return not result
def has_gpp():
    """ Check if this system has g++ compiler """
    # BUGFIX: 'which' exits 0 when g++ is on PATH; the original command
    # ('bash which g++') tried to run a file named "which" as a bash
    # script in the current directory, so it always failed
    result = os.system('which g++')
    print(f"has_gpp() result = {result}")
    return not result
Python
def build_markdown_cpp_cell(ipynb_cell): """ Save the C++ source code and try to build it """ # Comment out ```'s txt = ipynb_cell['source'].replace('```', '// ```') cpp_file_name = get_filename_in_second_line(txt) if not cpp_file_name: # obtain temporary file name cpp_file_name = get_temp_cpp_filename() cpp_file_basename, _ = os.path.splitext(cpp_file_name) # open the temporary file and write to it with open(cpp_file_name, 'wt') as cpp_file: cpp_file.write(txt) # Build the code # Complex literal example needs C++ 14 # https://www.linuxquestions.org/questions/programming-9/trouble-with-double-complex-numbers-in-c-4175567740/ # https://stackoverflow.com/questions/31965413/compile-c14-code-with-g if re_main_function.findall(txt): # if txt includes the main() function, build execution file # if the C++ source code seems to have build command, use it if "\n// build command" in txt.lower(): compile_command = get_build_command_in_last_line(txt) else: compile_command = "" if not compile_command: compile_command = f"g++ -Wall -g -std=c++14 {cpp_file_name} -o {cpp_file_basename}" compile_result = os.system(compile_command) run_result = os.system(os.path.join(os.curdir, cpp_file_basename)) result = (compile_result or run_result) else: # if txt does not include main() function, just check grammar compile_command = f"g++ -Wall -g -std=c++14 {cpp_file_name} -fsyntax-only" result = os.system(compile_command) # Delete the execution file if os.path.exists(cpp_file_basename): os.remove(cpp_file_basename) result_dict = { 'result': result, 'cpp_filename': cpp_file_name, } return result_dict
def build_markdown_cpp_cell(ipynb_cell): """ Save the C++ source code and try to build it """ # Comment out ```'s txt = ipynb_cell['source'].replace('```', '// ```') cpp_file_name = get_filename_in_second_line(txt) if not cpp_file_name: # obtain temporary file name cpp_file_name = get_temp_cpp_filename() cpp_file_basename, _ = os.path.splitext(cpp_file_name) # open the temporary file and write to it with open(cpp_file_name, 'wt') as cpp_file: cpp_file.write(txt) # Build the code # Complex literal example needs C++ 14 # https://www.linuxquestions.org/questions/programming-9/trouble-with-double-complex-numbers-in-c-4175567740/ # https://stackoverflow.com/questions/31965413/compile-c14-code-with-g if re_main_function.findall(txt): # if txt includes the main() function, build execution file # if the C++ source code seems to have build command, use it if "\n// build command" in txt.lower(): compile_command = get_build_command_in_last_line(txt) else: compile_command = "" if not compile_command: compile_command = f"g++ -Wall -g -std=c++14 {cpp_file_name} -o {cpp_file_basename}" compile_result = os.system(compile_command) run_result = os.system(os.path.join(os.curdir, cpp_file_basename)) result = (compile_result or run_result) else: # if txt does not include main() function, just check grammar compile_command = f"g++ -Wall -g -std=c++14 {cpp_file_name} -fsyntax-only" result = os.system(compile_command) # Delete the execution file if os.path.exists(cpp_file_basename): os.remove(cpp_file_basename) result_dict = { 'result': result, 'cpp_filename': cpp_file_name, } return result_dict
Python
def check_logged_in():
    """
    Check if the user is already logged in.
    :return:
    """
    refresh_token = lookup_option(PAROPT_RT_OPTNAME)
    if refresh_token is None:
        return False
    # Ask the auth service whether the stored token is still valid
    validation = internal_auth_client().oauth2_validate_token(refresh_token)
    return validation['active']
def check_logged_in():
    """
    Check if the user is already logged in.
    :return:
    """
    refresh_token = lookup_option(PAROPT_RT_OPTNAME)
    if refresh_token is None:
        return False
    # Ask the auth service whether the stored token is still valid
    validation = internal_auth_client().oauth2_validate_token(refresh_token)
    return validation['active']
Python
def waitForJob(po: ParoptClient, job_id: str, max_wait: int, sleep_interval=1) -> bool:
    """Wait for job to finish. Raises exception if timesout or fails to get response too many times

    Parameters
    ----------
    po : ParoptClient
        instance of service api
    job_id : str
        id of job
    max_wait : int
        maximum number of minutes to wait. If negative, will wait for 24 hours
    sleep_interval : int
        minutes to sleep between checks if the job is finished

    Returns
    -------
    success : bool
    """
    wait_seconds = SECONDS_IN_DAY if max_wait < 0 else max_wait * 60
    timeout = time.time() + wait_seconds

    n_fails = 0
    while time.time() < timeout and n_fails < MAX_FAILS:
        print(f"Job running, going to sleep for {sleep_interval} minutes...")
        time.sleep(sleep_interval * 60)

        job_res = po.getJob(job_id)
        if job_res.http_status != 200:
            # something unexpected happened
            n_fails += 1
            print("WARNING: Unexpected response:")
            printResponse(job_res)
            continue

        job_data = job_res.data.get('job', {})
        job_status = job_data.get('job_status')
        if job_status == 'finished':
            return True
        if job_status == 'failed':
            raise Exception(f'Server failed to run trials. See error info below (from server):\n'
                            f'{job_data.get("job_exc_info", "")}'.replace('\n', '\n| '))
        # any other status: still running, keep polling

    # failed to finish job in max time or too many fails occurred
    if n_fails == MAX_FAILS:
        raise Exception("Failed to wait for job: Too may failed calls/responses to api")
    raise Exception("Failed to wait for job: Reached maximum timeout")
def waitForJob(po: ParoptClient, job_id: str, max_wait: int, sleep_interval=1) -> bool:
    """Wait for job to finish. Raises exception if timesout or fails to get response too many times

    Parameters
    ----------
    po : ParoptClient
        instance of service api
    job_id : str
        id of job
    max_wait : int
        maximum number of minutes to wait. If negative, will wait for 24 hours
    sleep_interval : int
        minutes to sleep between checks if the job is finished

    Returns
    -------
    success : bool
    """
    wait_seconds = SECONDS_IN_DAY if max_wait < 0 else max_wait * 60
    timeout = time.time() + wait_seconds

    n_fails = 0
    while time.time() < timeout and n_fails < MAX_FAILS:
        print(f"Job running, going to sleep for {sleep_interval} minutes...")
        time.sleep(sleep_interval * 60)

        job_res = po.getJob(job_id)
        if job_res.http_status != 200:
            # something unexpected happened
            n_fails += 1
            print("WARNING: Unexpected response:")
            printResponse(job_res)
            continue

        job_data = job_res.data.get('job', {})
        job_status = job_data.get('job_status')
        if job_status == 'finished':
            return True
        if job_status == 'failed':
            raise Exception(f'Server failed to run trials. See error info below (from server):\n'
                            f'{job_data.get("job_exc_info", "")}'.replace('\n', '\n| '))
        # any other status: still running, keep polling

    # failed to finish job in max time or too many fails occurred
    if n_fails == MAX_FAILS:
        raise Exception("Failed to wait for job: Too may failed calls/responses to api")
    raise Exception("Failed to wait for job: Reached maximum timeout")
Python
def loadYmlJson(file_path: str):
    """Get the given json or yaml file as a dict

    Returns None when the file has neither a yaml/yml nor a json
    extension.
    """
    with open(file_path) as f:
        if file_path.endswith(('.yaml', '.yml')):
            # safe_load avoids arbitrary object construction from
            # untrusted yaml (yaml.load without an explicit Loader
            # is deprecated since PyYAML 5.1)
            return yaml.safe_load(f)
        elif file_path.endswith('.json'):
            return json.load(f)
        else:
            return None
def loadYmlJson(file_path: str):
    """Get the given json or yaml file as a dict

    Returns None when the file has neither a yaml/yml nor a json
    extension.
    """
    with open(file_path) as f:
        if file_path.endswith(('.yaml', '.yml')):
            # safe_load avoids arbitrary object construction from
            # untrusted yaml (yaml.load without an explicit Loader
            # is deprecated since PyYAML 5.1)
            return yaml.safe_load(f)
        elif file_path.endswith('.json'):
            return json.load(f)
        else:
            return None
Python
def do_login_flow():
    """
    Do the globus native client login flow. Saves the token received by the client
    to the Globus configuration file
    """
    native_client = internal_auth_client()

    label = platform.node() or None

    PAROPT_SCOPE = 'https://auth.globus.org/scopes/81fc4156-a623-47f2-93ad-7184118226ba/auth'

    # BUGFIX: request the paropt scope defined above - the original
    # requested FUNCX_SCOPE, leaving PAROPT_SCOPE unused (the stored
    # tokens are the 'paropt_service' tokens, see _store_config)
    native_client.oauth2_start_flow(
        requested_scopes=PAROPT_SCOPE,
        refresh_tokens=True,
        prefill_named_grant=label)
    linkprompt = 'Please log into Globus here'
    safeprint('{0}:\n{1}\n{2}\n{1}\n'
              .format(linkprompt, '-' * len(linkprompt),
                      native_client.oauth2_get_authorize_url()))
    auth_code = input(
        'Enter the resulting Authorization Code here:\n').strip()
    tkn = native_client.oauth2_exchange_code_for_tokens(auth_code)
    # Revoke any previously stored tokens before saving the new ones
    _revoke_current_tokens(native_client)

    _store_config(tkn)
def do_login_flow():
    """
    Do the globus native client login flow. Saves the token received by the client
    to the Globus configuration file
    """
    native_client = internal_auth_client()

    label = platform.node() or None

    PAROPT_SCOPE = 'https://auth.globus.org/scopes/81fc4156-a623-47f2-93ad-7184118226ba/auth'

    # BUGFIX: request the paropt scope defined above - the original
    # requested FUNCX_SCOPE, leaving PAROPT_SCOPE unused (the stored
    # tokens are the 'paropt_service' tokens, see _store_config)
    native_client.oauth2_start_flow(
        requested_scopes=PAROPT_SCOPE,
        refresh_tokens=True,
        prefill_named_grant=label)
    linkprompt = 'Please log into Globus here'
    safeprint('{0}:\n{1}\n{2}\n{1}\n'
              .format(linkprompt, '-' * len(linkprompt),
                      native_client.oauth2_get_authorize_url()))
    auth_code = input(
        'Enter the resulting Authorization Code here:\n').strip()
    tkn = native_client.oauth2_exchange_code_for_tokens(auth_code)
    # Revoke any previously stored tokens before saving the new ones
    _revoke_current_tokens(native_client)

    _store_config(tkn)
Python
def make_authorizer():
    """
    Make a RefreshTokenAuthorizer given the tokens stored on disk

    Returns:
        (RefreshTokenAuthorizer): Tool to generate authorization credentials
    """
    if not check_logged_in():
        safeprint("No authorization credentials present. You must log in")
        do_login_flow()

    # Load the stored tokens needed by the service
    auth_client = internal_auth_client()
    refresh_token = lookup_option(PAROPT_RT_OPTNAME)
    access_token = lookup_option(PAROPT_AT_OPTNAME)
    expires_at = int(lookup_option(PAROPT_AT_EXPIRES_OPTNAME))

    return RefreshTokenAuthorizer(refresh_token,
                                  auth_client,
                                  access_token=access_token,
                                  expires_at=expires_at)
def make_authorizer():
    """
    Make a RefreshTokenAuthorizer given the tokens stored on disk

    Returns:
        (RefreshTokenAuthorizer): Tool to generate authorization credentials
    """
    if not check_logged_in():
        safeprint("No authorization credentials present. You must log in")
        do_login_flow()

    # Load the stored tokens needed by the service
    auth_client = internal_auth_client()
    refresh_token = lookup_option(PAROPT_RT_OPTNAME)
    access_token = lookup_option(PAROPT_AT_OPTNAME)
    expires_at = int(lookup_option(PAROPT_AT_EXPIRES_OPTNAME))

    return RefreshTokenAuthorizer(refresh_token,
                                  auth_client,
                                  access_token=access_token,
                                  expires_at=expires_at)
Python
def logout():
    """
    Remove Globus credentials from configuration file and revokes their authorization
    """
    native_client = internal_auth_client()

    # remove tokens from config and revoke them
    # also, track whether or not we should print the rescind help
    for token_opt in (PAROPT_RT_OPTNAME, PAROPT_AT_OPTNAME):
        token = lookup_option(token_opt)
        if not token:
            # no stored token under this name
            safeprint(('Warning: Found no token named "{}"! '
                       'Recommend rescinding consent').format(token_opt))
            continue
        try:
            native_client.oauth2_revoke_token(token)
        except globus_sdk.NetworkError:
            # if we network error, revocation failed -- print message and
            # abort so that we can revoke later when the network is working
            safeprint(('Failed to reach Globus to revoke tokens. '
                       'Because we cannot revoke these tokens, cancelling '
                       'logout'))
            return
        # we revoked, so it's safe to remove the token
        remove_option(token_opt)

    # remove expiration time, just for cleanliness
    remove_option(PAROPT_AT_EXPIRES_OPTNAME)
def logout():
    """
    Remove Globus credentials from configuration file and revokes their authorization
    """
    native_client = internal_auth_client()

    # remove tokens from config and revoke them
    # also, track whether or not we should print the rescind help
    for token_opt in (PAROPT_RT_OPTNAME, PAROPT_AT_OPTNAME):
        token = lookup_option(token_opt)
        if not token:
            # no stored token under this name
            safeprint(('Warning: Found no token named "{}"! '
                       'Recommend rescinding consent').format(token_opt))
            continue
        try:
            native_client.oauth2_revoke_token(token)
        except globus_sdk.NetworkError:
            # if we network error, revocation failed -- print message and
            # abort so that we can revoke later when the network is working
            safeprint(('Failed to reach Globus to revoke tokens. '
                       'Because we cannot revoke these tokens, cancelling '
                       'logout'))
            return
        # we revoked, so it's safe to remove the token
        remove_option(token_opt)

    # remove expiration time, just for cleanliness
    remove_option(PAROPT_AT_EXPIRES_OPTNAME)
Python
def _revoke_current_tokens(native_client):
    """
    Revoke the tokens associated with a particular scope

    Args:
        native_client (NativeAppAuthClient): Authorization client for scope to be cleared
    """
    # Revoke whichever of the stored tokens actually exist
    tokens = (lookup_option(opt) for opt in (PAROPT_RT_OPTNAME, PAROPT_AT_OPTNAME))
    for token in tokens:
        if token:
            native_client.oauth2_revoke_token(token)
def _revoke_current_tokens(native_client):
    """
    Revoke the tokens associated with a particular scope

    Args:
        native_client (NativeAppAuthClient): Authorization client for scope to be cleared
    """
    # Revoke whichever of the stored tokens actually exist
    tokens = (lookup_option(opt) for opt in (PAROPT_RT_OPTNAME, PAROPT_AT_OPTNAME))
    for token in tokens:
        if token:
            native_client.oauth2_revoke_token(token)
Python
def _store_config(token_response):
    """
    Store the tokens on disk.

    Args:
        token_response (OAuthTokenResponse): Response from a token request
    """
    # Only the paropt_service tokens are persisted
    paropt_tokens = token_response.by_resource_server['paropt_service']
    write_option(PAROPT_RT_OPTNAME, paropt_tokens['refresh_token'])
    write_option(PAROPT_AT_OPTNAME, paropt_tokens['access_token'])
    write_option(PAROPT_AT_EXPIRES_OPTNAME, paropt_tokens['expires_at_seconds'])
def _store_config(token_response):
    """
    Store the tokens on disk.

    Args:
        token_response (OAuthTokenResponse): Response from a token request
    """
    # Only the paropt_service tokens are persisted
    paropt_tokens = token_response.by_resource_server['paropt_service']
    write_option(PAROPT_RT_OPTNAME, paropt_tokens['refresh_token'])
    write_option(PAROPT_AT_OPTNAME, paropt_tokens['access_token'])
    write_option(PAROPT_AT_EXPIRES_OPTNAME, paropt_tokens['expires_at_seconds'])
Python
def forward(self, pred: torch.Tensor, target: torch.Tensor):
    r"""
    Compare a predicted frame sequence against the ground truth using
    this measure's criterion: the per-pixel values are summed over each
    image and then averaged first over frames, then over the batch.

    Args:
        pred (torch.Tensor): The predicted frame sequence as a 5D tensor (batch, frames, c, h, w).
        target (torch.Tensor): The ground truth frame sequence as a 5D tensor (batch, frames, c, h, w)

    Returns:
        The calculated numerical quality assessment.
    """
    for t in (pred, target):
        if t.ndim != 5:
            raise ValueError(f"{self.NAME} expects 5-D inputs!")
    per_pixel = self.criterion(pred, target)
    per_frame = per_pixel.sum(dim=(4, 3, 2))  # -> [batch, frames]
    return per_frame.mean(dim=1).mean(dim=0)  # -> scalar
def forward(self, pred: torch.Tensor, target: torch.Tensor):
    r"""
    Compare a predicted frame sequence against the ground truth using
    this measure's criterion: the per-pixel values are summed over each
    image and then averaged first over frames, then over the batch.

    Args:
        pred (torch.Tensor): The predicted frame sequence as a 5D tensor (batch, frames, c, h, w).
        target (torch.Tensor): The ground truth frame sequence as a 5D tensor (batch, frames, c, h, w)

    Returns:
        The calculated numerical quality assessment.
    """
    for t in (pred, target):
        if t.ndim != 5:
            raise ValueError(f"{self.NAME} expects 5-D inputs!")
    per_pixel = self.criterion(pred, target)
    per_frame = per_pixel.sum(dim=(4, 3, 2))  # -> [batch, frames]
    return per_frame.mean(dim=1).mean(dim=0)  # -> scalar
Python
def reshape_clamp(self, pred: torch.Tensor, target: torch.Tensor):
    r"""
    Merge batch and time dimension of both inputs and map the values
    from [-1, 1] to [0, 1], clamping anything outside that range.

    Args:
        pred (torch.Tensor): The predicted frame sequence as a 5D tensor (batch, frames, c, h, w).
        target (torch.Tensor): The ground truth frame sequence as a 5D tensor (batch, frames, c, h, w)

    Returns:
        the reshaped and clamped pred and target tensors.
    """
    if pred.ndim != 5 or target.ndim != 5:
        raise ValueError(f"{self.NAME} expects 5-D inputs!")

    def _flatten_rescale(x):
        # [b, t, ...] -> [b*t, ...], then [-1., 1.] -> [0., 1.]
        flat = x.reshape(-1, *x.shape[2:])
        return ((flat + 1) / 2).clamp_(min=0.0, max=1.0)

    return _flatten_rescale(pred), _flatten_rescale(target)
def reshape_clamp(self, pred: torch.Tensor, target: torch.Tensor):
    r"""
    Merge batch and time dimension of both inputs and map the values
    from [-1, 1] to [0, 1], clamping anything outside that range.

    Args:
        pred (torch.Tensor): The predicted frame sequence as a 5D tensor (batch, frames, c, h, w).
        target (torch.Tensor): The ground truth frame sequence as a 5D tensor (batch, frames, c, h, w)

    Returns:
        the reshaped and clamped pred and target tensors.
    """
    if pred.ndim != 5 or target.ndim != 5:
        raise ValueError(f"{self.NAME} expects 5-D inputs!")

    def _flatten_rescale(x):
        # [b, t, ...] -> [b*t, ...], then [-1., 1.] -> [0., 1.]
        flat = x.reshape(-1, *x.shape[2:])
        return ((flat + 1) / 2).clamp_(min=0.0, max=1.0)

    return _flatten_rescale(pred), _flatten_rescale(target)
Python
def train_iter(self, config, loader, optimizer, loss_provider, epoch):
    r"""
    PredRNN++'s training iteration utilizes reversed input and keeps track of the number of training iterations
    done so far in order to adjust the sampling schedule. Otherwise, the iteration logic is the same as in the
    default :meth:`train_iter()` function.

    Args:
        config (dict): The configuration dict of the current training run (combines model, dataset and run config)
        loader (DataLoader): Training data is sampled from this loader.
        optimizer (Optimizer): The optimizer to use for weight update calculations.
        loss_provider (PredictionLossProvider): An instance of the :class:`LossProvider` class for flexible loss calculation.
        epoch (int): The current epoch.
    """
    loop = tqdm(loader)
    for data in loop:

        # fwd
        input, targets, actions = self.unpack_data(data, config)
        predictions, model_losses = self(input, pred_frames=config["pred_frames"], actions=actions, train=True)

        # loss
        _, total_loss = loss_provider.get_losses(predictions, targets)
        if model_losses is not None:
            for value in model_losses.values():
                total_loss += value

        # reverse
        if self.reverse_input:
            input_rev, targets_rev, actions_rev = self.unpack_data(data, config, reverse=True)
            # BUGFIX: pass the *reversed* actions to the reverse pass
            # (the original passed the forward-order actions here)
            predictions_rev, model_losses_rev = self(input_rev, pred_frames=config["pred_frames"],
                                                     actions=actions_rev, train=True)

            # reverse_loss
            _, total_loss_rev = loss_provider.get_losses(predictions_rev, targets_rev)
            if model_losses_rev is not None:
                for value in model_losses_rev.values():
                    total_loss_rev += value
            total_loss = (total_loss + total_loss_rev) / 2

        # bwd
        optimizer.zero_grad()
        total_loss.backward()
        optimizer.step()

        # bookkeeping
        self.training_iteration += 1
        loop.set_postfix(loss=total_loss.item())
def train_iter(self, config, loader, optimizer, loss_provider, epoch):
    r"""
    PredRNN++'s training iteration utilizes reversed input and keeps track of the number of
    training iterations done so far in order to adjust the sampling schedule.
    Otherwise, the iteration logic is the same as in the default :meth:`train_iter()` function.

    Args:
        config (dict): The configuration dict of the current training run (combines model, dataset and run config)
        loader (DataLoader): Training data is sampled from this loader.
        optimizer (Optimizer): The optimizer to use for weight update calculations.
        loss_provider (PredictionLossProvider): An instance of the :class:`LossProvider` class for flexible loss calculation.
        epoch (int): The current epoch.
    """
    loop = tqdm(loader)
    for data in loop:

        # forward pass on the original frame order
        input, targets, actions = self.unpack_data(data, config)
        predictions, model_losses = self(input, pred_frames=config["pred_frames"], actions=actions, train=True)

        # prediction loss plus any model-internal auxiliary losses
        _, total_loss = loss_provider.get_losses(predictions, targets)
        if model_losses is not None:
            for value in model_losses.values():
                total_loss += value

        # optional second pass on the time-reversed sequence; both losses are averaged
        if self.reverse_input:
            input_rev, targets_rev, actions_rev = self.unpack_data(data, config, reverse=True)
            # BUGFIX: pair the reversed frames with the reversed actions
            # (previously the forward-order `actions` were passed here)
            predictions_rev, model_losses_rev = self(input_rev, pred_frames=config["pred_frames"],
                                                     actions=actions_rev, train=True)
            _, total_loss_rev = loss_provider.get_losses(predictions_rev, targets_rev)
            if model_losses_rev is not None:
                for value in model_losses_rev.values():
                    total_loss_rev += value
            total_loss = (total_loss + total_loss_rev) / 2

        # backward pass and parameter update
        optimizer.zero_grad()
        total_loss.backward()
        optimizer.step()

        # bookkeeping: the iteration counter drives the scheduled-sampling schedule
        self.training_iteration += 1
        loop.set_postfix(loss=total_loss.item())
Python
def config(self) -> dict:
    r"""
    Returns:
        A dictionary containing the complete dataset configuration, including common attributes
        as well as dataset-specific attributes.
    """
    img_c, img_h, img_w = self.img_shape
    # start from all public attributes, then overlay the derived/fixed entries
    cfg = dict(get_public_attrs(self, "config", non_config_vars=self.NON_CONFIG_VARS))
    cfg.update(
        img_h=img_h,
        img_w=img_w,
        img_c=img_c,
        action_size=self.ACTION_SIZE,
        tensor_value_range=[self.value_range_min, self.value_range_max],
        NAME=self.NAME,
    )
    return cfg
def config(self) -> dict:
    r"""
    Returns:
        A dictionary containing the complete dataset configuration, including common attributes
        as well as dataset-specific attributes.
    """
    img_c, img_h, img_w = self.img_shape
    # start from all public attributes, then overlay the derived/fixed entries
    cfg = dict(get_public_attrs(self, "config", non_config_vars=self.NON_CONFIG_VARS))
    cfg.update(
        img_h=img_h,
        img_w=img_w,
        img_c=img_c,
        action_size=self.ACTION_SIZE,
        tensor_value_range=[self.value_range_min, self.value_range_max],
        NAME=self.NAME,
    )
    return cfg