repo_name (string, 6–100) | path (string, 4–294) | copies (string, 1–5) | size (string, 4–6) | content (string, 606–896k) | license (15 classes)
---|---|---|---|---|---|
tjduigna/exatomic | exatomic/widgets/widget_utils.py | 3 | 9089 | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2018, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
"""
Widget Utilities
#########################
Widget layout and structure.
"""
import six
from collections import OrderedDict
from ipywidgets import VBox, Layout, FloatSlider, IntSlider
# The GUI box is fixed width
_glo = Layout(flex='0 0 240px')
# The folder is relative to the GUI box
_flo = Layout(width='100%', align_items='flex-end')
# All widgets not in a folder have this layout
_wlo = Layout(width='98%')
# HBoxes within the final scene VBox have this layout
_hboxlo = Layout(flex='1 1 auto', width='auto', height='auto')
# The final VBox containing all scenes has this layout
_vboxlo = Layout(flex='1 1 auto', width='auto', height='auto')
# The box to end all boxes
_bboxlo = Layout(flex='1 1 auto', width='auto', height='auto')
# Box layouts above are separate so it is easier to restyle
class _ListDict(object):
"""
Thin wrapper around OrderedDict that allows slicing by position (like list).
Requires string keys.
"""
def values(self):
return self.od.values()
def keys(self):
return self.od.keys()
def items(self):
return self.od.items()
def pop(self, key):
"""Pop value."""
if isinstance(key, six.string_types):
return self.od.pop(key)
return self.od.pop(list(self.od.keys())[key])
def insert(self, idx, key, obj):
"""Insert value at position idx with string key."""
if not isinstance(key, six.string_types):
raise TypeError("Key must be type str")
items = list(self.od.items())
items.insert(idx, (key, obj))
self.od = OrderedDict(items)
def update(self, *args, **kwargs):
"""Update OrderedDict"""
self.od.update(OrderedDict(*args, **kwargs))
def __setitem__(self, key, value):
if not isinstance(key, six.string_types):
raise TypeError('_ListDict keys must be of type str.')
keys = list(self.od.keys())
if key in keys:
self.od[key] = value
else:
items = list(self.od.items())
items.append((key, value))
self.od = OrderedDict(items)
def __getitem__(self, key):
if not isinstance(key, (six.string_types, int, slice)):
raise TypeError('_ListDict slice must be of type str/int/slice.')
if isinstance(key, six.string_types):
return self.od[key]
return list(self.values())[key]
def __init__(self, *args, **kwargs):
self.od = OrderedDict(*args, **kwargs)
if not all((isinstance(key, six.string_types) for key in self.od.keys())):
raise TypeError('_ListDict keys must be of type str.')
def __len__(self):
return len(self.od)
def __repr__(self):
return repr(self.od)
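# A minimal usage sketch of _ListDict (illustrative values only):
#     ld = _ListDict([('a', 1), ('b', 2)])
#     ld.insert(1, 'c', 3)    # keys are now ('a', 'c', 'b')
#     ld['b']                 # 2 -- dict-style lookup by key
#     ld[0]                   # 1 -- list-style lookup by position
#     ld.pop(1)               # 3 -- pops 'c' by position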
class Folder(VBox):
"""A VBox that shows and hides widgets. For proper
indentation, instantiate sub-folders before passing to
super-folders. Should not exist outside of a GUI box."""
# Cannot also have a keys method -- used by ipywidgets
def activate(self, *keys, **kwargs):
"""Activate (show) widgets that are not disabled."""
update = kwargs.pop('update', False)
enable = kwargs.pop('enable', False)
keys = self._get(False, True) if not keys else keys
for key in keys:
obj = self._controls[key]
if enable:
obj.disabled = False
obj.active = True
elif not obj.disabled:
obj.active = True
if update:
self._set_gui()
def deactivate(self, *keys, **kwargs):
"""Deactivate (hide) widgets."""
active = kwargs.pop('active', False)
update = kwargs.pop('update', False)
keys = self._get(True, True) if not keys else keys
for key in keys:
if key == 'main': continue
self._controls[key].active = active
self._controls[key].disabled = True
if update:
self._set_gui()
def insert(self, idx, key, obj, active=True, update=False):
"""Insert widget into Folder, behaves as list.insert ."""
obj.layout.width = str(98 - (self.level + 1) * self.indent) + '%'
self._controls.insert(idx, key, obj)
if active:
self.activate(key, enable=True)
if update:
self._set_gui()
def update(self, objs, relayout=False):
"""Update the Folder widgets, behaves as dict.update."""
if relayout:
self._relayout(objs)
self._controls.update(objs)
def move_to_end(self, *keys):
"""Move widget(s) to the end of the folder."""
try:
for key in keys:
self._controls.move_to_end(key)
except AttributeError:
objs = [self._controls.pop(key) for key in keys]
for key, obj in zip(keys, objs):
self[key] = obj
def pop(self, key):
"""Pop a widget from the folder."""
return self._controls.pop(key)
def _close(self):
"""Close all widgets in the folder, then the folder."""
for widget in self._get():
widget.close()
self.close()
def _get(self, active=True, keys=False):
"""Get the widgets in the folder."""
if keys:
mit = self._controls.items()
if active:
return [key for key, obj in mit if obj.active]
return [key for key, obj in mit if not obj.active]
else:
mit = self._controls.values()
if active:
return [obj for obj in mit if obj.active]
return [obj for obj in mit if not obj.active]
def _set_gui(self):
"""Update the 'view' of the folder."""
if self.show:
self.activate()
self.children = self._get()
else:
self.children = [self._controls['main']]
self.on_displayed(VBox._fire_children_displayed)
def _relayout(self, objs):
"""Set layout for widgets in the folder."""
for obj in objs.values():
obj.layout = self._slo
def _init(self, control, content):
"""Set initial layout of primary button and widgets."""
def _b(b):
self.show = not self.show
self._set_gui()
control.on_click(_b)
control.active = True
control.disabled = False
control.layout = self._plo
self._controls = _ListDict([('main', control)])
if content is not None:
for key, obj in content.items():
if isinstance(obj, Folder):
obj.active = False
continue
obj.layout = self._slo
if not hasattr(obj, 'active'):
obj.active = self.show
if not hasattr(obj, 'disabled'):
obj.disabled = False
self._controls.update(content)
def __setitem__(self, key, obj):
return self._controls.__setitem__(key, obj)
def __getitem__(self, key):
return self._controls.__getitem__(key)
def __init__(self, control, content, **kwargs):
self.show = kwargs.pop('show', False)
self.indent = 5
self.level = kwargs.pop('level', 0)
pw = 98 - self.level * self.indent
self._slo = Layout(width=str(pw - self.indent) + '%')
self._plo = Layout(width=str(pw) + '%')
self._init(control, content)
lo = kwargs.pop('layout', None) or Layout(width='100%', align_items='flex-end')
super(Folder, self).__init__(
children=self._get(), layout=lo, **kwargs)
self.active = True
self.disabled = False
class GUIBox(VBox):
def __init__(self, *args, **kwargs):
#lo = kwargs.pop('layout', None)
kwargs['layout'] = _glo # Force global layout
super(GUIBox, self).__init__(*args, **kwargs)
def gui_field_widgets(uni=False, test=False):
"""Return new widgets for field GUI functionality."""
flims = {'min': 30, 'max': 60,
'value': 30, 'step': 1,
'continuous_update': False}
iso_lims = {'description': 'Iso.',
'continuous_update': False}
if uni:
iso_lims.update({'min': 0.0001, 'max': 0.1,
'value': 0.0005, 'step': 0.0005,
'readout_format': '.4f'})
else:
iso_lims.update({'min': 3.0, 'max': 10.0, 'value': 2.0})
if uni and not test:
iso_lims['value'] = 0.03
alims = {'min': 0.01, 'max': 1.0,
'value': 1.0, 'step': 0.01}
return _ListDict([('alpha', FloatSlider(description='Opacity', **alims)),
('iso', FloatSlider(**iso_lims)),
('nx', IntSlider(description='Nx', **flims)),
('ny', IntSlider(description='Ny', **flims)),
('nz', IntSlider(description='Nz', **flims))])
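# Sketch of how the returned controls might be consumed (the values
# below follow from the limits defined above):
#     flds = gui_field_widgets(uni=True)
#     list(flds.keys())    # ['alpha', 'iso', 'nx', 'ny', 'nz']
#     flds['iso'].value    # 0.03 when uni=True and test=False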
| apache-2.0 |
neurobin/acme-tiny | acme_tiny.py | 1 | 10110 | #!/usr/bin/env python
import argparse, subprocess, json, os, sys, base64, binascii, time, hashlib, re, copy, textwrap, logging
try:
from urllib.request import urlopen # Python 3
except ImportError:
from urllib2 import urlopen # Python 2
#DEFAULT_CA = "https://acme-staging.api.letsencrypt.org"
DEFAULT_CA = "https://acme-v01.api.letsencrypt.org"
VERSION = "0.0.1"
VERSION_INFO="acme_tiny version: "+VERSION
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.StreamHandler())
LOGGER.setLevel(logging.INFO)
def get_chain(url,log):
resp = urlopen(url)
if(resp.getcode() != 200):
log.error("E: Failed to fetch chain (CABUNDLE) from: "+url);sys.exit(1)
return """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format(
"\n".join(textwrap.wrap(base64.b64encode(resp.read()).decode('utf8'), 64)))
def get_crt(account_key, csr, acme_dir, log=LOGGER, CA=DEFAULT_CA):
# helper function base64 encode for jose spec
def _b64(b):
return base64.urlsafe_b64encode(b).decode('utf8').replace("=", "")
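# e.g. _b64(b'\x01\x02') == 'AQI' -- urlsafe base64 with the '='
# padding stripped, as the JOSE spec requires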
# parse account key to get public key
log.info("Parsing account key...")
proc = subprocess.Popen(["openssl", "rsa", "-in", account_key, "-noout", "-text"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode != 0:
raise IOError("OpenSSL Error: {0}".format(err))
pub_hex, pub_exp = re.search(
r"modulus:\n\s+00:([a-f0-9\:\s]+?)\npublicExponent: ([0-9]+)",
out.decode('utf8'), re.MULTILINE|re.DOTALL).groups()
pub_exp = "{0:x}".format(int(pub_exp))
pub_exp = "0{0}".format(pub_exp) if len(pub_exp) % 2 else pub_exp
header = {
"alg": "RS256",
"jwk": {
"e": _b64(binascii.unhexlify(pub_exp.encode("utf-8"))),
"kty": "RSA",
"n": _b64(binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex).encode("utf-8"))),
},
}
accountkey_json = json.dumps(header['jwk'], sort_keys=True, separators=(',', ':'))
thumbprint = _b64(hashlib.sha256(accountkey_json.encode('utf8')).digest())
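# The thumbprint is the urlsafe-base64 SHA-256 digest of the canonical
# JWK JSON; later it is paired with each challenge token as
# "{token}.{thumbprint}" to form the key authorization string.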
# helper function make signed requests
def _send_signed_request(url, payload):
payload64 = _b64(json.dumps(payload).encode('utf8'))
protected = copy.deepcopy(header)
protected["nonce"] = urlopen(CA + "/directory").headers['Replay-Nonce']
protected64 = _b64(json.dumps(protected).encode('utf8'))
proc = subprocess.Popen(["openssl", "dgst", "-sha256", "-sign", account_key],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate("{0}.{1}".format(protected64, payload64).encode('utf8'))
if proc.returncode != 0:
raise IOError("OpenSSL Error: {0}".format(err))
data = json.dumps({
"header": header, "protected": protected64,
"payload": payload64, "signature": _b64(out),
})
try:
resp = urlopen(url, data.encode('utf8'))
return resp.getcode(), resp.read(), resp.info()
except IOError as e:
return getattr(e, "code", None), getattr(e, "read", e.__str__), getattr(e, "info", None)()
crt_info = set([])
# find domains
log.info("Parsing CSR...")
proc = subprocess.Popen(["openssl", "req", "-in", csr, "-noout", "-text"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode != 0:
raise IOError("Error loading {0}: {1}".format(csr, err))
domains = set([])
common_name = re.search(r"Subject:.*? CN=([^\s,;/]+)", out.decode('utf8'))
if common_name is not None:
domains.add(common_name.group(1))
subject_alt_names = re.search(r"X509v3 Subject Alternative Name: \n +([^\n]+)\n", out.decode('utf8'), re.MULTILINE|re.DOTALL)
if subject_alt_names is not None:
for san in subject_alt_names.group(1).split(", "):
if san.startswith("DNS:"):
domains.add(san[4:])
# register the account with the CA
log.info("Registering account...")
code, result, crt_info = _send_signed_request(CA + "/acme/new-reg", {
"resource": "new-reg",
"agreement": "https://letsencrypt.org/documents/LE-SA-v1.0.1-July-27-2015.pdf",
})
if code == 201:
log.info("Registered!")
elif code == 409:
log.info("Already registered!")
else:
raise ValueError("Error registering: {0} {1}".format(code, result))
# verify each domain
for domain in domains:
log.info("Verifying {0}...".format(domain))
# get new challenge
code, result, crt_info = _send_signed_request(CA + "/acme/new-authz", {
"resource": "new-authz",
"identifier": {"type": "dns", "value": domain},
})
if code != 201:
raise ValueError("Error requesting challenges: {0} {1}".format(code, result))
# make the challenge file
challenge = [c for c in json.loads(result.decode('utf8'))['challenges'] if c['type'] == "http-01"][0]
token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge['token'])
keyauthorization = "{0}.{1}".format(token, thumbprint)
wellknown_path = os.path.join(acme_dir, token)
with open(wellknown_path, "w") as wellknown_file:
wellknown_file.write(keyauthorization)
# check that the file is in place
wellknown_url = "http://{0}/.well-known/acme-challenge/{1}".format(domain, token)
try:
resp = urlopen(wellknown_url)
resp_data = resp.read().decode('utf8').strip()
assert resp_data == keyauthorization
except (IOError, AssertionError):
os.remove(wellknown_path)
raise ValueError("Wrote file to {0}, but couldn't download {1}".format(
wellknown_path, wellknown_url))
# notify that the challenge is met
code, result, crt_info = _send_signed_request(challenge['uri'], {
"resource": "challenge",
"keyAuthorization": keyauthorization,
})
if code != 202:
raise ValueError("Error triggering challenge: {0} {1}".format(code, result))
# wait for challenge to be verified
while True:
try:
resp = urlopen(challenge['uri'])
challenge_status = json.loads(resp.read().decode('utf8'))
except IOError as e:
raise ValueError("Error checking challenge: {0} {1}".format(
e.code, json.loads(e.read().decode('utf8'))))
if challenge_status['status'] == "pending":
time.sleep(2)
elif challenge_status['status'] == "valid":
log.info("{0} verified!".format(domain))
os.remove(wellknown_path)
break
else:
raise ValueError("{0} challenge did not pass: {1}".format(
domain, challenge_status))
# get the new certificate
log.info("Signing certificate...")
proc = subprocess.Popen(["openssl", "req", "-in", csr, "-outform", "DER"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
csr_der, err = proc.communicate()
code, result, crt_info = _send_signed_request(CA + "/acme/new-cert", {
"resource": "new-cert",
"csr": _b64(csr_der),
})
if code != 201:
raise ValueError("Error signing certificate: {0} {1}".format(code, result))
chain_url = re.match("\\s*<([^>]+)>;rel=\"up\"",crt_info['Link'])
# return signed certificate!
log.info("Certificate signed!")
return """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format(
"\n".join(textwrap.wrap(base64.b64encode(result).decode('utf8'), 64))), chain_url.group(1)
def main(argv):
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent("""\
This script automates the process of getting a signed TLS certificate from
Let's Encrypt using the ACME protocol. It will need to be run on your server
and have access to your private account key, so PLEASE READ THROUGH IT! It's
only ~200 lines, so it won't take long.
===Example Usage===
python acme_tiny.py --account-key ./account.key --csr ./domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ > signed.crt
===================
""")
)
parser.add_argument("--account-key", required=True, help="path to your Let's Encrypt account private key")
parser.add_argument("--csr", required=True, help="path to your certificate signing request")
parser.add_argument("--acme-dir", required=True, help="path to the .well-known/acme-challenge/ directory")
parser.add_argument("--quiet", action="store_const", const=logging.ERROR, help="suppress output except for errors")
parser.add_argument("--ca", default=DEFAULT_CA, help="certificate authority, default is Let's Encrypt")
parser.add_argument("--cert-file", default="", help="File to write the certificate to. Overwrites if file exists.")
parser.add_argument("--chain-file", default="", help="File to write the certificate to. Overwrites if file exists.")
parser.add_argument("--full-chain",action="store_true", help="Print full chain on stdout.")
parser.add_argument("--version",action="version",version=VERSION_INFO, help="Show version info.")
args = parser.parse_args(argv)
LOGGER.setLevel(args.quiet or LOGGER.level)
signed_crt, chain_url = get_crt(args.account_key, args.csr, args.acme_dir, log=LOGGER, CA=args.ca)
if(args.cert_file):
with open(args.cert_file, "w") as f: f.write(signed_crt)
sys.stdout.write(signed_crt)
chain = get_chain(chain_url,log=LOGGER)
if(args.chain_file):
with open(args.chain_file, "w") as f: f.write(chain)
if(args.full_chain): sys.stdout.write(chain)
if __name__ == "__main__": # pragma: no cover
main(sys.argv[1:])
| mit |
ganeshnalawade/ansible | test/units/_vendor/test_vendor.py | 23 | 2128 | # (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import pkgutil
import pytest
import sys
from units.compat.mock import MagicMock, NonCallableMagicMock, patch
def reset_internal_vendor_package():
import ansible
ansible_vendor_path = os.path.join(os.path.dirname(ansible.__file__), '_vendor')
if ansible_vendor_path in sys.path:
sys.path.remove(ansible_vendor_path)
for pkg in ['ansible._vendor', 'ansible']:
if pkg in sys.modules:
del sys.modules[pkg]
def test_package_path_masking():
from ansible import _vendor
assert hasattr(_vendor, '__path__') and _vendor.__path__ == []
def test_no_vendored():
reset_internal_vendor_package()
with patch.object(pkgutil, 'iter_modules', return_value=[]):
previous_path = list(sys.path)
import ansible
ansible_vendor_path = os.path.join(os.path.dirname(ansible.__file__), '_vendor')
assert ansible_vendor_path not in sys.path
assert sys.path == previous_path
def test_vendored(vendored_pkg_names=None):
if not vendored_pkg_names:
vendored_pkg_names = ['boguspkg']
reset_internal_vendor_package()
with patch.object(pkgutil, 'iter_modules', return_value=list((None, p, None) for p in vendored_pkg_names)):
previous_path = list(sys.path)
import ansible
ansible_vendor_path = os.path.join(os.path.dirname(ansible.__file__), '_vendor')
assert sys.path[0] == ansible_vendor_path
if ansible_vendor_path in previous_path:
previous_path.remove(ansible_vendor_path)
assert sys.path[1:] == previous_path
def test_vendored_conflict():
with pytest.warns(UserWarning) as w:
import pkgutil
import sys
test_vendored(vendored_pkg_names=['sys', 'pkgutil']) # pass a real package we know is already loaded
assert 'pkgutil, sys' in str(w[0].message) # ensure both conflicting modules are listed and sorted
| gpl-3.0 |
sephii/django | django/contrib/admin/templatetags/admin_modify.py | 139 | 2353 | from django import template
register = template.Library()
@register.inclusion_tag('admin/prepopulated_fields_js.html', takes_context=True)
def prepopulated_fields_js(context):
"""
Creates a list of prepopulated_fields that should render JavaScript for
the prepopulated fields for both the admin form and inlines.
"""
prepopulated_fields = []
if 'adminform' in context:
prepopulated_fields.extend(context['adminform'].prepopulated_fields)
if 'inline_admin_formsets' in context:
for inline_admin_formset in context['inline_admin_formsets']:
for inline_admin_form in inline_admin_formset:
if inline_admin_form.original is None:
prepopulated_fields.extend(inline_admin_form.prepopulated_fields)
context.update({'prepopulated_fields': prepopulated_fields})
return context
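# Template usage (sketch): after `{% load admin_modify %}`, the tag is
# invoked as `{% prepopulated_fields_js %}` and renders
# admin/prepopulated_fields_js.html with the context returned above.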
@register.inclusion_tag('admin/submit_line.html', takes_context=True)
def submit_row(context):
"""
Displays the row of buttons for delete and save.
"""
opts = context['opts']
change = context['change']
is_popup = context['is_popup']
save_as = context['save_as']
ctx = {
'opts': opts,
'show_delete_link': (
not is_popup and context['has_delete_permission'] and
change and context.get('show_delete', True)
),
'show_save_as_new': not is_popup and change and save_as,
'show_save_and_add_another': (
context['has_add_permission'] and not is_popup and
(not save_as or context['add'])
),
'show_save_and_continue': not is_popup and context['has_change_permission'],
'is_popup': is_popup,
'show_save': True,
'preserved_filters': context.get('preserved_filters'),
}
if context.get('original') is not None:
ctx['original'] = context['original']
return ctx
@register.filter
def cell_count(inline_admin_form):
"""Returns the number of cells used in a tabular inline"""
count = 1 # Hidden cell with hidden 'id' field
for fieldset in inline_admin_form:
# Loop through all the fields (one per cell)
for line in fieldset:
for field in line:
count += 1
if inline_admin_form.formset.can_delete:
# Delete checkbox
count += 1
return count
| bsd-3-clause |
michaelbrunnbauer/rdf2rdb | settings.py | 1 | 5319 | from datetime import datetime as _datetime
import iso8601 as _iso8601
# set to 'mysql' or 'postgres'
dbtype='mysql'
dbhost=''
dbname='rdf2rdb'
dbuser='rdf2rdb'
dbpasswd=''
# postgresql does not allow to connect to a server without specifying a
# database and does not allow to drop the database you are connected to.
# connect to this database to drop the database specified in dbname when
# command line option --drop is specified:
initial_dbname=''
# mapping from filename extension to RDFLib format
filenameextensions={
'n3':'n3',
'nt':'nt',
}
# replace with http://<yourwebsite>/.well-known/genid/ if suitable
skolem_uri_prefix='skolem:'
functionalproperties=[
'http://www.w3.org/2000/01/rdf-schema#label',
'http://www.w3.org/2000/01/rdf-schema#comment',
]
# multilingual database schemas currently not supported
# triples with language tags not included here will be ignored
# language tag information is lost in database
allowed_language_tags=[None,'en']
# mapping from datatype URI to internal datatype
# default mapping is 'string'
datatypemap={
'http://www.w3.org/2001/XMLSchema#date':'date',
'http://www.w3.org/2001/XMLSchema#dateTime':'datetime',
'http://www.w3.org/2001/XMLSchema#double':'double precision',
'http://www.w3.org/2001/XMLSchema#float':'float',
'http://www.w3.org/2001/XMLSchema#integer':'int',
'http://www.w3.org/2001/XMLSchema#boolean':'boolean',
}
# the sql datatype specified for "string" should support this size
max_string_size=65535
def date_conversion(value):
return value[:10]  # drop timezone if present
# TODO: handle DST correctly
def datetime_conversion(value):
try:
dt = _iso8601.parse_date(value)
except TypeError:
return None
diff = dt - _datetime.now(_iso8601.Utc())
return str(_datetime.now() + diff)
def string_conversion(value):
if len(value)>max_string_size:
return None
return value
def bool_conversion(value):
if value.lower() in ['true','1']:
return '1'
elif value.lower() in ['false','0']:
return '0'
else:
return None
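# Illustrative behaviour of the converters above (hypothetical inputs):
#     date_conversion('2017-03-04T00:00:00+01:00')  # -> '2017-03-04'
#     bool_conversion('TRUE')                       # -> '1'
#     bool_conversion('no')                         # -> None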
# mapping from internal datatype to sql datatype and value conversion function
# default is internal datatype name and unmodified value
datetime_name='datetime' if dbtype=='mysql' else 'timestamp'
sql_datatypemap={
'date': ('date', date_conversion),
'datetime': (datetime_name, datetime_conversion),
'string':('text',string_conversion),
'boolean':('boolean',bool_conversion),
}
# this tool should support IRIs and stores them as UTF8 in the database
# we need indices on IRIs and the maximum index size in MySQL is 1000 bytes
# as every UTF8 character can have up to 3 bytes we cannot support longer IRIs
max_uri_length=333
max_label_length=40
# for stdout
outputencoding='ascii'
dblabel_allowed_characters="_"
dblabel_character_mapping={
' ':'_',
'-':'_',
'.':'_',
';':'_',
':':'_',
}
rdfns=u'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
rdfsns=u'http://www.w3.org/2000/01/rdf-schema#'
owlns=u'http://www.w3.org/2002/07/owl#'
sql_reserved_words=[
"accessible","add","all","alter","analyze","and","as","asc","asensitive","before","between","bigint","binary","blob","both","by","call","cascade","case","change","char","character","check","collate","column","columns","condition","connection","constraint","continue","convert","create","cross","current_date","current_time","current_timestamp","current_user","cursor","database","databases","day_hour","day_microsecond","day_minute","day_second","dec","decimal","declare","default","delayed","delete","desc","describe","deterministic","distinct","distinctrow","div","double","drop","dual","each","else","elseif","enclosed","escaped","exists","exit","explain","false","fetch","fields","float","float4","float8","for","force","foreign","from","fulltext","goto","grant","group","having","high_priority","hour_microsecond","hour_minute","hour_second","if","ignore","in","index","infile","inner","inout","insensitive","insert","int","int1","int2","int3","int4","int8","integer","interval","into","is","iterate","join","key","keys","kill","label","leading","leave","left","like","limit","linear","lines","load","localtime","localtimestamp","lock","long","longblob","longtext","loop","low_priority","match","mediumblob","mediumint","mediumtext","middleint","minute_microsecond","minute_second","mod","modifies","natural","not","no_write_to_binlog","null","numeric","on","optimize","option","optionally","or","order","out","outer","outfile","precision","primary","privileges","procedure","purge","raid0","range","read","reads","read_only","read_write","real","references","regexp","release","rename","repeat","replace","require","restrict","return","revoke","right","rlike","schema","schemas","second_microsecond","select","sensitive","separator","set","show","smallint","soname","spatial","specific","sql","sqlexception","sqlstate","sqlwarning","sql_big_result","sql_calc_found_rows","sql_small_result","ssl","starting","straight_join","table","tables","terminated","then","tinyblob","tinyint","tinytext","to","trailing","trigger","true","undo","union","unique","unlock","unsigned","update","upgrade","usage","use","using","utc_date","utc_time","utc_timestamp","values","varbinary","varchar","varcharacter","varying","when","where","while","with","write","x509","xor","year_month","zerofill","user"
]
| gpl-2.0 |
hdmetor/scikit-learn | sklearn/utils/optimize.py | 135 | 5671 | """
Our own implementation of the Newton algorithm
Unlike the scipy.optimize version, this version of the Newton conjugate
gradient solver uses only one function call to retrieve the
func value, the gradient value and a callable for the Hessian matvec
product. If the function call is very expensive (e.g. for logistic
regression with large design matrix), this approach gives very
significant speedups.
"""
# This is a modified file from scipy.optimize
# Original authors: Travis Oliphant, Eric Jones
# Modifications by Gael Varoquaux, Mathieu Blondel and Tom Dupre la Tour
# License: BSD
import numpy as np
import warnings
from scipy.optimize.linesearch import line_search_wolfe2, line_search_wolfe1
class _LineSearchError(RuntimeError):
pass
def _line_search_wolfe12(f, fprime, xk, pk, gfk, old_fval, old_old_fval,
**kwargs):
"""
Same as line_search_wolfe1, but fall back to line_search_wolfe2 if
suitable step length is not found, and raise an exception if a
suitable step length is not found.
Raises
------
_LineSearchError
If no suitable step size is found
"""
ret = line_search_wolfe1(f, fprime, xk, pk, gfk,
old_fval, old_old_fval,
**kwargs)
if ret[0] is None:
# line search failed: try different one.
ret = line_search_wolfe2(f, fprime, xk, pk, gfk,
old_fval, old_old_fval, **kwargs)
if ret[0] is None:
raise _LineSearchError()
return ret
def _cg(fhess_p, fgrad, maxiter, tol):
"""
Solve iteratively the linear system 'fhess_p . xsupi = -fgrad'
with a conjugate gradient descent.
Parameters
----------
fhess_p : callable
Function that takes the gradient as a parameter and returns the
matrix product of the Hessian and gradient
fgrad : ndarray, shape (n_features,) or (n_features + 1,)
Gradient vector
maxiter : int
Number of CG iterations.
tol : float
Stopping criterion.
Returns
-------
xsupi : ndarray, shape (n_features,) or (n_features + 1,)
Estimated solution
"""
xsupi = np.zeros(len(fgrad), dtype=fgrad.dtype)
ri = fgrad
psupi = -ri
i = 0
dri0 = np.dot(ri, ri)
while i <= maxiter:
if np.sum(np.abs(ri)) <= tol:
break
Ap = fhess_p(psupi)
# check curvature
curv = np.dot(psupi, Ap)
if 0 <= curv <= 3 * np.finfo(np.float64).eps:
break
elif curv < 0:
if i > 0:
break
else:
# fall back to steepest descent direction
xsupi += dri0 / curv * psupi
break
alphai = dri0 / curv
xsupi += alphai * psupi
ri = ri + alphai * Ap
dri1 = np.dot(ri, ri)
betai = dri1 / dri0
psupi = -ri + betai * psupi
i = i + 1
dri0 = dri1 # update np.dot(ri,ri) for next time.
return xsupi
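# A small illustrative call (hypothetical values): with the Hessian
# matvec H.dot and gradient g = [1, 1], the returned vector is the
# Newton descent direction, i.e. the solution of H . d = -g:
#     H = np.diag([2.0, 4.0])
#     d = _cg(H.dot, np.array([1.0, 1.0]), maxiter=10, tol=1e-12)
#     # d == [-0.5, -0.25]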
def newton_cg(grad_hess, func, grad, x0, args=(), tol=1e-4,
maxiter=100, maxinner=200, line_search=True, warn=True):
"""
Minimization of scalar function of one or more variables using the
Newton-CG algorithm.
Parameters
----------
grad_hess : callable
Should return the gradient and a callable returning the matvec product
of the Hessian.
func : callable
Should return the value of the function.
grad : callable
Should return the function value and the gradient. This is used
by the linesearch functions.
x0 : array of float
Initial guess.
args: tuple, optional
Arguments passed to func_grad_hess, func and grad.
tol : float
Stopping criterion. The iteration will stop when
``max{|g_i | i = 1, ..., n} <= tol``
where ``g_i`` is the i-th component of the gradient.
maxiter : int
Number of Newton iterations.
maxinner : int
Number of CG iterations.
line_search: boolean
Whether to use a line search or not.
warn: boolean
Whether to warn when didn't converge.
Returns
-------
xk : ndarray of float
Estimated minimum.
"""
x0 = np.asarray(x0).flatten()
xk = x0
k = 1
if line_search:
old_fval = func(x0, *args)
old_old_fval = None
# Outer loop: our Newton iteration
while k <= maxiter:
# Compute a search direction pk by applying the CG method to
# del2 f(xk) p = - fgrad f(xk) starting from 0.
fgrad, fhess_p = grad_hess(xk, *args)
absgrad = np.abs(fgrad)
if np.max(absgrad) < tol:
break
maggrad = np.sum(absgrad)
eta = min([0.5, np.sqrt(maggrad)])
termcond = eta * maggrad
# Inner loop: solve the Newton update by conjugate gradient, to
# avoid inverting the Hessian
xsupi = _cg(fhess_p, fgrad, maxiter=maxinner, tol=termcond)
alphak = 1.0
if line_search:
try:
alphak, fc, gc, old_fval, old_old_fval, gfkp1 = \
_line_search_wolfe12(func, grad, xk, xsupi, fgrad,
old_fval, old_old_fval, args=args)
except _LineSearchError:
warnings.warn('Line Search failed')
break
xk = xk + alphak * xsupi # upcast if necessary
k += 1
if warn and k > maxiter:
warnings.warn("newton-cg failed to converge. Increase the "
"number of iterations.")
return xk
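# Minimal sketch of a call (hypothetical problem, not part of the
# library): f(x) = 0.5 * x'Ax - b'x has gradient Ax - b and constant
# Hessian A, so the minimizer is A^-1 b.
#     A = np.diag([3.0, 1.0])
#     b = np.array([1.0, 1.0])
#     func = lambda x: 0.5 * x.dot(A).dot(x) - b.dot(x)
#     grad = lambda x: A.dot(x) - b
#     grad_hess = lambda x: (A.dot(x) - b, A.dot)
#     xmin = newton_cg(grad_hess, func, grad, x0=np.zeros(2))
#     # xmin is approximately [1/3, 1]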
| bsd-3-clause |
ardeaf/Reddit-Image-Scraper | redditimagescraper/modules/accessreddit.py | 1 | 2567 | import time
from datetime import datetime
import praw
import prawcore.exceptions
from . import config
# Returns a list of urls posted to the subreddit_name between start_date and end_date.
# The list is in the form: [[url, date_string], [url, date_string], ...]
def subs_to_download(subreddit_name, date_list, exts, verbose):
if verbose:
print("Logging into Reddit.")
login_time = datetime.now()
reddit = praw.Reddit(username=config.username,
password=config.password,
client_secret=config.client_secret,
client_id=config.client_id,
user_agent="redditimagescraper, created by ardeaf")
if verbose:
print("Login complete, took {} seconds.".format((datetime.now() - login_time).total_seconds()))
ret_list = list()
subreddit = reddit.subreddit(subreddit_name)
if verbose:
retrieve_time = datetime.now()
print("Retrieving submissions. Started at {}".format(time.strftime("%H:%M:%S")))
# date_list has start and end time in epoch for each day between the days we are scraping.
for date in date_list:
start_date, end_date = date
if verbose:
print("Retrieving submission urls dated {}".format(
datetime.utcfromtimestamp(start_date).strftime("%m/%d/%Y")))
submissions_request = subreddit.submissions(start=start_date, end=end_date)
while True:
try:
ret_list += [[submission.url,
datetime.utcfromtimestamp(submission.created_utc).strftime('%Y%m%d_%H%M%S')]
for submission in submissions_request
if submission.url.endswith(exts)]
break
# Check if the subreddit exists
except (prawcore.exceptions.Redirect, prawcore.exceptions.BadRequest) as e:
if verbose:
print("\n!! Exception Raised: {}".format(e))
retrieve_time = datetime.now()
subreddit_name = input("{} does not exist. Please re-enter a valid subreddit: ".format(subreddit_name))
if verbose:
delta = (datetime.now() - retrieve_time).total_seconds()
print("Retrieval of submissions from /r/{} took {} seconds. Completed at {}".format(
subreddit_name, str(delta), time.strftime("%H:%M:%S")))
return ret_list
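# e.g. a single returned element might look like (hypothetical values):
#     ['https://i.redd.it/abc123.jpg', '20170102_030405']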
if __name__ == '__main__':
print("Nothing to see here! Move along!")
exit()
| gpl-3.0 |
Shao-Feng/crosswalk-test-suite | webapi/tct-csp-w3c-tests/csp-py/csp_default-src_none_style.py | 25 | 3912 | def main(request, response):
import simplejson as json
f = file('config.json')
source = f.read()
s = json.JSONDecoder().decode(source)
url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
response.headers.set(
"Content-Security-Policy",
"default-src 'none';script-src 'self' 'unsafe-inline'")
response.headers.set(
"X-Content-Security-Policy",
"default-src 'none';script-src 'self' 'unsafe-inline'")
response.headers.set(
"X-WebKit-CSP",
"default-src 'none';script-src 'self' 'unsafe-inline'")
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
Hao, Yunfei <[email protected]>
-->
<html>
<head>
<title>CSP Test: csp_default-src_none_style</title>
<link rel="author" title="Intel" href="http://www.intel.com/"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#default-src"/>
<meta name="flags" content=""/>
<meta name="assert" content="default-src 'none';script-src 'self' 'unsafe-inline'"/>
<meta charset="utf-8"/>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<link rel="stylesheet" type="text/css" href='""" + url1 + """/csp/support/w3c/canvas-index.css'/>
<link rel="stylesheet" type="text/css" href="support/blue-100x100.css"/>
<style>
#test-green {
background-color: green;
}
</style>
</head>
<body>
<div id="log"></div>
<div id="test-blue"></div>
<div id="test-green"></div>
<h3>ext-css:""" + url1 + """/tests/csp/support/w3c/canvas-index.css</h3>
<script>
test(function() {
var div = document.querySelector("h3");
var fix = getComputedStyle(div)["display"];
assert_not_equals(fix, "inline", "style setted incorrectly");
}, document.title + "_blocked_ext");
test(function() {
var div = document.querySelector("#test-blue");
var fix = getComputedStyle(div)["backgroundColor"];
assert_not_equals(fix, "rgb(0, 0, 255)", "style setted incorrectly");
}, document.title + "_blocked_int");
test(function() {
var div = document.querySelector("#test-green");
var fix = getComputedStyle(div)["backgroundColor"];
assert_not_equals(fix, "rgb(0, 128, 0)", "style setted incorrectly");
}, document.title + "_blocked_inline");
</script>
</body>
</html> """
| bsd-3-clause |
rg3915/spark | spark/selenium/selenium_event.py | 1 | 1303 | import time
from random import choice, randint
from decouple import config
from selenium import webdriver
from gen_address import address
from gen_random_values import gen_string, gen_date, convert_date
HOME = config('HOME')
# page = webdriver.Firefox()
page = webdriver.Chrome(executable_path=HOME + '/chromedriver/chromedriver')
page.maximize_window()
time.sleep(0.5)
page.get('http://localhost:8000/login/')
# Login
search = page.find_element_by_id('username')
search.send_keys('admin')
search = page.find_element_by_id('password')
search.send_keys('demodemo')
search = page.find_element_by_class_name('btn-default')
search.click()
page.get('http://localhost:8000/events/')
button = page.find_element_by_class_name('btn-compose')
button.click()
title = choice(['ThinkUP', 'Grupy-SP', 'GDG-SP', 'CSS-SP', 'FrontSP',
'spark', 'Python Sudeste', 'Python Brasil'])
fields = [
['id_title', title],
['id_date_start', convert_date(gen_date(2017, 2017))],
['id_start', '%s:00' % randint(1, 23)],
['id_description', gen_string(30)],
['id_address', address()],
]
for field in fields:
search = page.find_element_by_id(field[0])
search.send_keys(field[1])
time.sleep(0.2)
button = page.find_element_by_class_name('btn-post')
button.click()
page.quit()
| mit |
genome-vendor/cython | build/lib.linux-x86_64-2.6/Cython/Compiler/Errors.py | 8 | 7130 | #
# Errors
#
import sys
from Cython.Utils import open_new_file
import DebugFlags
import Options
class PyrexError(Exception):
pass
class PyrexWarning(Exception):
pass
def context(position):
source = position[0]
assert not (isinstance(source, unicode) or isinstance(source, str)), (
"Please replace filename strings with Scanning.FileSourceDescriptor instances %r" % source)
try:
F = source.get_lines()
except UnicodeDecodeError:
# file has an encoding problem
s = u"[unprintable code]\n"
else:
s = u''.join(F[max(0, position[1]-6):position[1]])
s = u'...\n%s%s^\n' % (s, u' '*(position[2]-1))
s = u'%s\n%s%s\n' % (u'-'*60, s, u'-'*60)
return s
def format_position(position):
if position:
return u"%s:%d:%d: " % (position[0].get_error_description(),
position[1], position[2])
return u''
def format_error(message, position):
if position:
pos_str = format_position(position)
cont = context(position)
message = u'\nError compiling Cython file:\n%s\n%s%s' % (cont, pos_str, message or u'')
return message
class CompileError(PyrexError):
def __init__(self, position = None, message = u""):
self.position = position
self.message_only = message
self.formatted_message = format_error(message, position)
self.reported = False
# Deprecated and withdrawn in 2.6:
# self.message = message
Exception.__init__(self, self.formatted_message)
# Python Exception subclass pickling is broken,
# see http://bugs.python.org/issue1692335
self.args = (position, message)
def __str__(self):
return self.formatted_message
class CompileWarning(PyrexWarning):
def __init__(self, position = None, message = ""):
self.position = position
# Deprecated and withdrawn in 2.6:
# self.message = message
Exception.__init__(self, format_position(position) + message)
class InternalError(Exception):
# If this is ever raised, there is a bug in the compiler.
def __init__(self, message):
self.message_only = message
Exception.__init__(self, u"Internal compiler error: %s"
% message)
class AbortError(Exception):
# Throw this to stop the compilation immediately.
def __init__(self, message):
self.message_only = message
Exception.__init__(self, u"Abort error: %s" % message)
class CompilerCrash(CompileError):
# raised when an unexpected exception occurs in a transform
def __init__(self, pos, context, message, cause, stacktrace=None):
if message:
message = u'\n' + message
else:
message = u'\n'
self.message_only = message
if context:
message = u"Compiler crash in %s%s" % (context, message)
if stacktrace:
import traceback
message += (
u'\n\nCompiler crash traceback from this point on:\n' +
u''.join(traceback.format_tb(stacktrace)))
if cause:
if not stacktrace:
message += u'\n'
message += u'%s: %s' % (cause.__class__.__name__, cause)
CompileError.__init__(self, pos, message)
# Python Exception subclass pickling is broken,
# see http://bugs.python.org/issue1692335
self.args = (pos, context, message, cause, stacktrace)
class NoElementTreeInstalledException(PyrexError):
"""raised when the user enabled options.gdb_debug but no ElementTree
implementation was found
"""
listing_file = None
num_errors = 0
echo_file = None
def open_listing_file(path, echo_to_stderr = 1):
# Begin a new error listing. If path is None, no file
# is opened, the error counter is just reset.
global listing_file, num_errors, echo_file
if path is not None:
listing_file = open_new_file(path)
else:
listing_file = None
if echo_to_stderr:
echo_file = sys.stderr
else:
echo_file = None
num_errors = 0
def close_listing_file():
global listing_file
if listing_file:
listing_file.close()
listing_file = None
def report_error(err):
if error_stack:
error_stack[-1].append(err)
else:
global num_errors
# See Main.py for why dual reporting occurs. Quick fix for now.
if err.reported: return
err.reported = True
try: line = u"%s\n" % err
except UnicodeEncodeError:
# Python <= 2.5 does this for non-ASCII Unicode exceptions
line = format_error(getattr(err, 'message_only', "[unprintable exception message]"),
getattr(err, 'position', None)) + u'\n'
if listing_file:
try: listing_file.write(line)
except UnicodeEncodeError:
listing_file.write(line.encode('ASCII', 'replace'))
if echo_file:
try: echo_file.write(line)
except UnicodeEncodeError:
echo_file.write(line.encode('ASCII', 'replace'))
num_errors = num_errors + 1
if Options.fast_fail:
raise AbortError, "fatal errors"
def error(position, message):
#print "Errors.error:", repr(position), repr(message) ###
if position is None:
raise InternalError(message)
err = CompileError(position, message)
if DebugFlags.debug_exception_on_error: raise Exception(err) # debug
report_error(err)
return err
LEVEL=1 # warn about all errors level 1 or higher
def message(position, message, level=1):
if level < LEVEL:
return
warn = CompileWarning(position, message)
line = "note: %s\n" % warn
if listing_file:
listing_file.write(line)
if echo_file:
echo_file.write(line)
return warn
def warning(position, message, level=0):
if level < LEVEL:
return
if Options.warning_errors and position:
return error(position, message)
warn = CompileWarning(position, message)
line = "warning: %s\n" % warn
if listing_file:
listing_file.write(line)
if echo_file:
echo_file.write(line)
return warn
_warn_once_seen = {}
def warn_once(position, message, level=0):
if level < LEVEL or message in _warn_once_seen:
return
warn = CompileWarning(position, message)
line = "warning: %s\n" % warn
if listing_file:
listing_file.write(line)
if echo_file:
echo_file.write(line)
_warn_once_seen[message] = True
return warn
# These functions can be used to momentarily suppress errors.
error_stack = []
def hold_errors():
error_stack.append([])
def release_errors(ignore=False):
held_errors = error_stack.pop()
if not ignore:
for err in held_errors:
report_error(err)
def held_errors():
return error_stack[-1]
# this module needs a redesign to support parallel cythonisation, but
# for now, the following works at least in sequential compiler runs
def reset():
_warn_once_seen.clear()
del error_stack[:]
| apache-2.0 |
PableraShow/Learn-to-program-with-Python-guide | 02 - Functions, logic and conditionals/more_operations-numbers_and_strings.py | 1 | 2156 | # More Operations
# Numbers and Strings
# You can convert a string to a number (float or int) and
# vice versa using a few simple functions.
print "Ex. 1:", int("3")
print "Ex. 2:", float("3.4")
print "Ex. 3:", str(34)
print "Ex. 4:", str(3.4)
print
# Since the above outputs look exactly the same as they
# would have without the method call, let's look at it
# another way.
int_string = "123"
float_string = "5.8"
int_num = 4
float_num = 7.4
#print "Error:", int_string + int_num
print "Ex. 5:", int(int_string) + int_num
#print "Error:", float_string + float_num
print "Ex. 6:", float(float_string) + float_num
# Note: While strings representing integers can be converted
# into floats, strings representing floats cannot be
# converted into ints.
print "Ex. 7:", float_num + float(int_string)
#print "Error:", int_num + int(float_string)
print "--------"
# There are also additional methods in the documentation
# involving numbers that can be extremely useful.
# abs() returns the absolute value of the number (gets rid
# of any negative signs)
print "Ex. 8:", abs(3.4)
print "Ex. 9:", abs(-2)
print
# max() returns the greatest value in the given arguments,
# while min() returns the smallest.
print "Ex. 10:", max(3, 7, 10, 2)
print "Ex. 11:", max(-4, 2.9, 1, 2.9, -50, 0)
print "Ex. 12:", min(1, 3, 5, 9)
print "Ex. 13:", min(-50, 79.2, -100)
a = 3
b = 4
print "Ex. 14:", max(a, b)
print
# round() rounds the given number to the given number
# of decimal places, or to the nearest whole number if
# only one parameter is given
print "Ex. 15:", round(1.5)
print "Ex. 16:", round(-2.4)
print "Ex. 17:", round(0.123456, 4)
# Still technically rounds, but does not show the extra 0's
print "Ex. 18:", round(4, 5)
print
# round() is very useful when dealing with normal float point
# math errors
x = .9 % .03
print "Ex. 19:", x
# At most there can only be a remainder of 2 decimal places
print "Ex. 20:", round(x, 2)
x = 5.4 % 3
print "Ex. 21:", x
# At most there can only be a remainder of 1 decimal place
print "Ex. 22:", round(x, 1)
| mit |
mrfuxi/django | tests/m2m_recursive/tests.py | 424 | 5410 | from __future__ import unicode_literals
from operator import attrgetter
from django.test import TestCase
from .models import Person
class RecursiveM2MTests(TestCase):
def setUp(self):
self.a, self.b, self.c, self.d = [
Person.objects.create(name=name)
for name in ["Anne", "Bill", "Chuck", "David"]
]
# Anne is friends with Bill and Chuck
self.a.friends.add(self.b, self.c)
# David is friends with Anne and Chuck - add in reverse direction
self.d.friends.add(self.a, self.c)
def test_recursive_m2m_all(self):
""" Test that m2m relations are reported correctly """
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), [
"Bill",
"Chuck",
"David"
],
attrgetter("name"),
ordered=False
)
# Who is friends with Bill?
self.assertQuerysetEqual(
self.b.friends.all(), [
"Anne",
],
attrgetter("name")
)
# Who is friends with Chuck?
self.assertQuerysetEqual(
self.c.friends.all(), [
"Anne",
"David"
],
attrgetter("name"),
ordered=False
)
# Who is friends with David?
self.assertQuerysetEqual(
self.d.friends.all(), [
"Anne",
"Chuck",
],
attrgetter("name"),
ordered=False
)
def test_recursive_m2m_reverse_add(self):
""" Test reverse m2m relation is consistent """
# Bill is already friends with Anne - add Anne again, but in the
# reverse direction
self.b.friends.add(self.a)
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), [
"Bill",
"Chuck",
"David",
],
attrgetter("name"),
ordered=False
)
# Who is friends with Bill?
self.assertQuerysetEqual(
self.b.friends.all(), [
"Anne",
],
attrgetter("name")
)
def test_recursive_m2m_remove(self):
""" Test that we can remove items from an m2m relationship """
# Remove Anne from Bill's friends
self.b.friends.remove(self.a)
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), [
"Chuck",
"David",
],
attrgetter("name"),
ordered=False
)
# Who is friends with Bill?
self.assertQuerysetEqual(
self.b.friends.all(), []
)
def test_recursive_m2m_clear(self):
""" Tests the clear method works as expected on m2m fields """
# Clear Anne's group of friends
self.a.friends.clear()
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), []
)
# Reverse relationships should also be gone
# Who is friends with Chuck?
self.assertQuerysetEqual(
self.c.friends.all(), [
"David",
],
attrgetter("name")
)
# Who is friends with David?
self.assertQuerysetEqual(
self.d.friends.all(), [
"Chuck",
],
attrgetter("name")
)
def test_recursive_m2m_add_via_related_name(self):
""" Tests that we can add m2m relations via the related_name attribute """
# David is idolized by Anne and Chuck - add in reverse direction
self.d.stalkers.add(self.a)
# Who are Anne's idols?
self.assertQuerysetEqual(
self.a.idols.all(), [
"David",
],
attrgetter("name"),
ordered=False
)
# Who is stalking Anne?
self.assertQuerysetEqual(
self.a.stalkers.all(), [],
attrgetter("name")
)
def test_recursive_m2m_add_in_both_directions(self):
""" Check that adding the same relation twice results in a single relation """
# Ann idolizes David
self.a.idols.add(self.d)
# David is idolized by Anne
self.d.stalkers.add(self.a)
# Who are Anne's idols?
self.assertQuerysetEqual(
self.a.idols.all(), [
"David",
],
attrgetter("name"),
ordered=False
)
# As the assertQuerysetEqual uses a set for comparison,
# check we've only got David listed once
self.assertEqual(self.a.idols.all().count(), 1)
def test_recursive_m2m_related_to_self(self):
""" Check the expected behavior when an instance is related to itself """
# Ann idolizes herself
self.a.idols.add(self.a)
# Who are Anne's idols?
self.assertQuerysetEqual(
self.a.idols.all(), [
"Anne",
],
attrgetter("name"),
ordered=False
)
# Who is stalking Anne?
self.assertQuerysetEqual(
self.a.stalkers.all(), [
"Anne",
],
attrgetter("name")
)
| bsd-3-clause |
saurabh6790/frappe | frappe/website/context.py | 1 | 9236 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, os, json
from frappe.website.doctype.website_settings.website_settings import get_website_settings
from frappe.website.router import get_page_context
from frappe.model.document import Document
def get_context(path, args=None):
if args and args.source:
context = args
else:
context = get_page_context(path)
if args:
context.update(args)
if hasattr(frappe.local, 'request'):
# for <body data-path=""> (remove leading slash)
# path could be overriden in render.resolve_from_map
context["path"] = frappe.local.request.path.strip('/ ')
else:
context["path"] = path
context.canonical = frappe.utils.get_url(frappe.utils.escape_html(context.path))
context.route = context.path
context = build_context(context)
# set using frappe.respond_as_web_page
if hasattr(frappe.local, 'response') and frappe.local.response.get('context'):
context.update(frappe.local.response.context)
# to be able to inspect the context dict
# Use the macro "inspect" from macros.html
context._context_dict = context
context.developer_mode = frappe.conf.developer_mode
return context
def update_controller_context(context, controller):
module = frappe.get_module(controller)
if module:
# get config fields
for prop in ("base_template_path", "template", "no_cache", "sitemap",
"condition_field"):
if hasattr(module, prop):
context[prop] = getattr(module, prop)
if hasattr(module, "get_context"):
import inspect
try:
if inspect.getfullargspec(module.get_context).args:
ret = module.get_context(context)
else:
ret = module.get_context()
if ret:
context.update(ret)
except (frappe.PermissionError, frappe.PageDoesNotExistError, frappe.Redirect):
raise
except:
if not any([frappe.flags.in_migrate, frappe.flags.in_website_search_build]):
frappe.errprint(frappe.utils.get_traceback())
if hasattr(module, "get_children"):
context.children = module.get_children(context)
def build_context(context):
"""get_context method of doc or module is supposed to render
content templates and push it into context"""
context = frappe._dict(context)
if not "url_prefix" in context:
context.url_prefix = ""
if context.url_prefix and context.url_prefix[-1]!='/':
context.url_prefix += '/'
# for backward compatibility
context.docs_base_url = '/docs'
context.update(get_website_settings(context))
context.update(frappe.local.conf.get("website_context") or {})
# provide doc
if context.doc:
context.update(context.doc.as_dict())
context.update(context.doc.get_website_properties())
if not context.template:
context.template = context.doc.meta.get_web_template()
if hasattr(context.doc, "get_context"):
ret = context.doc.get_context(context)
if ret:
context.update(ret)
for prop in ("no_cache", "sitemap"):
if not prop in context:
context[prop] = getattr(context.doc, prop, False)
elif context.controller:
# controller based context
update_controller_context(context, context.controller)
# controller context extensions
context_controller_hooks = frappe.get_hooks("extend_website_page_controller_context") or {}
for controller, extension in context_controller_hooks.items():
if isinstance(extension, list):
for ext in extension:
if controller == context.controller:
update_controller_context(context, ext)
else:
update_controller_context(context, extension)
add_metatags(context)
add_sidebar_and_breadcrumbs(context)
# determine templates to be used
if not context.base_template_path:
app_base = frappe.get_hooks("base_template")
context.base_template_path = app_base[-1] if app_base else "templates/base.html"
if context.title_prefix and context.title and not context.title.startswith(context.title_prefix):
context.title = '{0} - {1}'.format(context.title_prefix, context.title)
# apply context from hooks
update_website_context = frappe.get_hooks('update_website_context')
for method in update_website_context:
values = frappe.get_attr(method)(context)
if values:
context.update(values)
return context
def load_sidebar(context, sidebar_json_path):
with open(sidebar_json_path, 'r') as sidebarfile:
try:
sidebar_json = sidebarfile.read()
context.sidebar_items = json.loads(sidebar_json)
context.show_sidebar = 1
except json.decoder.JSONDecodeError:
frappe.throw('Invalid Sidebar JSON at ' + sidebar_json_path)
def get_sidebar_json_path(path, look_for=False):
'''
Get _sidebar.json path from directory path
:param path: path of the current directory
:param look_for: if True, look for _sidebar.json going upwards from given path
:return: _sidebar.json path
'''
if os.path.split(path)[1] == 'www' or path == '/' or not path:
return ''
sidebar_json_path = os.path.join(path, '_sidebar.json')
if os.path.exists(sidebar_json_path):
return sidebar_json_path
else:
if look_for:
return get_sidebar_json_path(os.path.split(path)[0], look_for)
else:
return ''
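# Illustration with a hypothetical path: for .../myapp/www/docs/user and
# look_for=True, the lookup tries docs/user/_sidebar.json, then
# docs/_sidebar.json, and stops when the directory named 'www' is
# reached (the www directory itself is not searched).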
def add_sidebar_and_breadcrumbs(context):
'''Add sidebar and breadcrumbs to context'''
from frappe.website.router import get_page_info_from_template
if context.show_sidebar:
context.no_cache = 1
add_sidebar_data(context)
else:
if context.basepath:
hooks = frappe.get_hooks('look_for_sidebar_json')
look_for_sidebar_json = hooks[0] if hooks else 0
sidebar_json_path = get_sidebar_json_path(
context.basepath,
look_for_sidebar_json
)
if sidebar_json_path:
load_sidebar(context, sidebar_json_path)
if context.add_breadcrumbs and not context.parents:
if context.basepath:
parent_path = os.path.dirname(context.path).rstrip('/')
page_info = get_page_info_from_template(parent_path)
if page_info:
context.parents = [dict(route=parent_path, title=page_info.title)]
def add_sidebar_data(context):
from frappe.utils.user import get_fullname_and_avatar
import frappe.www.list
if context.show_sidebar and context.website_sidebar:
context.sidebar_items = frappe.get_all('Website Sidebar Item',
filters=dict(parent=context.website_sidebar), fields=['title', 'route', '`group`'],
order_by='idx asc')
if not context.sidebar_items:
sidebar_items = frappe.cache().hget('portal_menu_items', frappe.session.user)
if sidebar_items == None:
sidebar_items = []
roles = frappe.get_roles()
portal_settings = frappe.get_doc('Portal Settings', 'Portal Settings')
def add_items(sidebar_items, items):
for d in items:
if d.get('enabled') and ((not d.get('role')) or d.get('role') in roles):
sidebar_items.append(d.as_dict() if isinstance(d, Document) else d)
if not portal_settings.hide_standard_menu:
add_items(sidebar_items, portal_settings.get('menu'))
if portal_settings.custom_menu:
add_items(sidebar_items, portal_settings.get('custom_menu'))
items_via_hooks = frappe.get_hooks('portal_menu_items')
if items_via_hooks:
for i in items_via_hooks: i['enabled'] = 1
add_items(sidebar_items, items_via_hooks)
frappe.cache().hset('portal_menu_items', frappe.session.user, sidebar_items)
context.sidebar_items = sidebar_items
info = get_fullname_and_avatar(frappe.session.user)
context["fullname"] = info.fullname
context["user_image"] = info.avatar
context["user"] = info.name
def add_metatags(context):
tags = frappe._dict(context.get("metatags") or {})
if "og:type" not in tags:
tags["og:type"] = "article"
if "title" not in tags and context.title:
tags["title"] = context.title
title = tags.get("name") or tags.get("title")
if title:
tags["og:title"] = tags["twitter:title"] = title
tags["twitter:card"] = "summary"
if "description" not in tags and context.description:
tags["description"] = context.description
description = tags.get("description")
if description:
tags["og:description"] = tags["twitter:description"] = description
if "image" not in tags and context.image:
tags["image"] = context.image
image = tags.get("image")
if image:
tags["og:image"] = tags["twitter:image"] = tags["image"] = frappe.utils.get_url(image)
tags['twitter:card'] = "summary_large_image"
if "author" not in tags and context.author:
tags["author"] = context.author
tags["og:url"] = tags["url"] = frappe.utils.get_url(context.path)
if "published_on" not in tags and context.published_on:
tags["published_on"] = context.published_on
if "published_on" in tags:
tags["datePublished"] = tags["published_on"]
del tags["published_on"]
tags["language"] = frappe.local.lang or "en"
# Get meta tags from Website Route meta
# they can override the defaults set above
route = context.path
if route == '':
# homepage
route = frappe.db.get_single_value('Website Settings', 'home_page')
route_exists = (route
and not route.endswith(('.js', '.css'))
and frappe.db.exists('Website Route Meta', route))
if route_exists:
website_route_meta = frappe.get_doc('Website Route Meta', route)
for meta_tag in website_route_meta.meta_tags:
d = meta_tag.get_meta_dict()
tags.update(d)
# update tags in context
context.metatags = tags
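# Illustrative sketch (hypothetical context): for context.title = 'Hello',
# context.description = 'World' and context.path = 'hello', the defaults
# above yield roughly
#   {'og:type': 'article', 'title': 'Hello', 'og:title': 'Hello',
#    'twitter:title': 'Hello', 'twitter:card': 'summary',
#    'description': 'World', 'og:description': 'World',
#    'twitter:description': 'World', 'og:url': '<site-url>/hello',
#    'url': '<site-url>/hello', 'language': 'en'}
# before any Website Route Meta overrides are merged in.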
| mit |
cmdunkers/winter_ws | protobuf-2.6.0/gtest/scripts/pump.py | 2471 | 23673 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""pump v0.2.0 - Pretty Useful for Meta Programming.
A tool for preprocessor meta programming. Useful for generating
repetitive boilerplate code. Especially useful for writing C++
classes, functions, macros, and templates that need to work with
varying numbers of arguments.
USAGE:
pump.py SOURCE_FILE
EXAMPLES:
pump.py foo.cc.pump
Converts foo.cc.pump to foo.cc.
GRAMMAR:
CODE ::= ATOMIC_CODE*
ATOMIC_CODE ::= $var ID = EXPRESSION
| $var ID = [[ CODE ]]
| $range ID EXPRESSION..EXPRESSION
| $for ID SEPARATOR [[ CODE ]]
| $($)
| $ID
| $(EXPRESSION)
| $if EXPRESSION [[ CODE ]] ELSE_BRANCH
| [[ CODE ]]
| RAW_CODE
SEPARATOR ::= RAW_CODE | EMPTY
ELSE_BRANCH ::= $else [[ CODE ]]
| $elif EXPRESSION [[ CODE ]] ELSE_BRANCH
| EMPTY
EXPRESSION has Python syntax.
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import sys
TOKEN_TABLE = [
(re.compile(r'\$var\s+'), '$var'),
(re.compile(r'\$elif\s+'), '$elif'),
(re.compile(r'\$else\s+'), '$else'),
(re.compile(r'\$for\s+'), '$for'),
(re.compile(r'\$if\s+'), '$if'),
(re.compile(r'\$range\s+'), '$range'),
(re.compile(r'\$[_A-Za-z]\w*'), '$id'),
(re.compile(r'\$\(\$\)'), '$($)'),
(re.compile(r'\$'), '$'),
(re.compile(r'\[\[\n?'), '[['),
(re.compile(r'\]\]\n?'), ']]'),
]
class Cursor:
"""Represents a position (line and column) in a text file."""
def __init__(self, line=-1, column=-1):
self.line = line
self.column = column
def __eq__(self, rhs):
return self.line == rhs.line and self.column == rhs.column
def __ne__(self, rhs):
return not self == rhs
def __lt__(self, rhs):
return self.line < rhs.line or (
self.line == rhs.line and self.column < rhs.column)
def __le__(self, rhs):
return self < rhs or self == rhs
def __gt__(self, rhs):
return rhs < self
def __ge__(self, rhs):
return rhs <= self
def __str__(self):
if self == Eof():
return 'EOF'
else:
return '%s(%s)' % (self.line + 1, self.column)
def __add__(self, offset):
return Cursor(self.line, self.column + offset)
def __sub__(self, offset):
return Cursor(self.line, self.column - offset)
def Clone(self):
"""Returns a copy of self."""
return Cursor(self.line, self.column)
# Special cursor to indicate the end-of-file.
def Eof():
"""Returns the special cursor to denote the end-of-file."""
return Cursor(-1, -1)
class Token:
"""Represents a token in a Pump source file."""
def __init__(self, start=None, end=None, value=None, token_type=None):
if start is None:
self.start = Eof()
else:
self.start = start
if end is None:
self.end = Eof()
else:
self.end = end
self.value = value
self.token_type = token_type
def __str__(self):
return 'Token @%s: \'%s\' type=%s' % (
self.start, self.value, self.token_type)
def Clone(self):
"""Returns a copy of self."""
return Token(self.start.Clone(), self.end.Clone(), self.value,
self.token_type)
def StartsWith(lines, pos, string):
"""Returns True iff the given position in lines starts with 'string'."""
return lines[pos.line][pos.column:].startswith(string)
def FindFirstInLine(line, token_table):
best_match_start = -1
for (regex, token_type) in token_table:
m = regex.search(line)
if m:
# We found a match for this regex in the line.
if best_match_start < 0 or m.start() < best_match_start:
best_match_start = m.start()
best_match_length = m.end() - m.start()
best_match_token_type = token_type
if best_match_start < 0:
return None
return (best_match_start, best_match_length, best_match_token_type)
def FindFirst(lines, token_table, cursor):
"""Finds the first occurrence of any string in strings in lines."""
start = cursor.Clone()
cur_line_number = cursor.line
for line in lines[start.line:]:
if cur_line_number == start.line:
line = line[start.column:]
m = FindFirstInLine(line, token_table)
if m:
# We found a regex in line.
(start_column, length, token_type) = m
if cur_line_number == start.line:
start_column += start.column
found_start = Cursor(cur_line_number, start_column)
found_end = found_start + length
return MakeToken(lines, found_start, found_end, token_type)
cur_line_number += 1
# We failed to find any token in the remaining lines.
return None
def SubString(lines, start, end):
"""Returns a substring in lines."""
if end == Eof():
end = Cursor(len(lines) - 1, len(lines[-1]))
if start >= end:
return ''
if start.line == end.line:
return lines[start.line][start.column:end.column]
result_lines = ([lines[start.line][start.column:]] +
lines[start.line + 1:end.line] +
[lines[end.line][:end.column]])
return ''.join(result_lines)
def StripMetaComments(str):
"""Strip meta comments from each line in the given string."""
# First, completely remove lines containing nothing but a meta
# comment, including the trailing \n.
str = re.sub(r'^\s*\$\$.*\n', '', str)
# Then, remove meta comments from contentful lines.
return re.sub(r'\s*\$\$.*', '', str)
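# Illustrative sketch: 'Foo();  $$ note\n' becomes 'Foo();\n'. The first
# substitution is not compiled with re.MULTILINE, so its '^' only anchors
# at the very start of the string; comment-only lines further down are
# still removed by the second substitution, whose leading '\s*' can
# consume the preceding newline.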
def MakeToken(lines, start, end, token_type):
"""Creates a new instance of Token."""
return Token(start, end, SubString(lines, start, end), token_type)
def ParseToken(lines, pos, regex, token_type):
line = lines[pos.line][pos.column:]
m = regex.search(line)
if m and not m.start():
return MakeToken(lines, pos, pos + m.end(), token_type)
else:
print 'ERROR: %s expected at %s.' % (token_type, pos)
sys.exit(1)
ID_REGEX = re.compile(r'[_A-Za-z]\w*')
EQ_REGEX = re.compile(r'=')
REST_OF_LINE_REGEX = re.compile(r'.*?(?=$|\$\$)')
OPTIONAL_WHITE_SPACES_REGEX = re.compile(r'\s*')
WHITE_SPACE_REGEX = re.compile(r'\s')
DOT_DOT_REGEX = re.compile(r'\.\.')
def Skip(lines, pos, regex):
line = lines[pos.line][pos.column:]
m = re.search(regex, line)
if m and not m.start():
return pos + m.end()
else:
return pos
def SkipUntil(lines, pos, regex, token_type):
line = lines[pos.line][pos.column:]
m = re.search(regex, line)
if m:
return pos + m.start()
else:
print ('ERROR: %s expected on line %s after column %s.' %
(token_type, pos.line + 1, pos.column))
sys.exit(1)
def ParseExpTokenInParens(lines, pos):
def ParseInParens(pos):
pos = Skip(lines, pos, OPTIONAL_WHITE_SPACES_REGEX)
pos = Skip(lines, pos, r'\(')
pos = Parse(pos)
pos = Skip(lines, pos, r'\)')
return pos
def Parse(pos):
pos = SkipUntil(lines, pos, r'\(|\)', ')')
if SubString(lines, pos, pos + 1) == '(':
pos = Parse(pos + 1)
pos = Skip(lines, pos, r'\)')
return Parse(pos)
else:
return pos
start = pos.Clone()
pos = ParseInParens(pos)
return MakeToken(lines, start, pos, 'exp')
def RStripNewLineFromToken(token):
if token.value.endswith('\n'):
return Token(token.start, token.end, token.value[:-1], token.token_type)
else:
return token
def TokenizeLines(lines, pos):
while True:
found = FindFirst(lines, TOKEN_TABLE, pos)
if not found:
yield MakeToken(lines, pos, Eof(), 'code')
return
if found.start == pos:
prev_token = None
prev_token_rstripped = None
else:
prev_token = MakeToken(lines, pos, found.start, 'code')
prev_token_rstripped = RStripNewLineFromToken(prev_token)
if found.token_type == '$var':
if prev_token_rstripped:
yield prev_token_rstripped
yield found
id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
yield id_token
pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX)
eq_token = ParseToken(lines, pos, EQ_REGEX, '=')
yield eq_token
pos = Skip(lines, eq_token.end, r'\s*')
if SubString(lines, pos, pos + 2) != '[[':
exp_token = ParseToken(lines, pos, REST_OF_LINE_REGEX, 'exp')
yield exp_token
pos = Cursor(exp_token.end.line + 1, 0)
elif found.token_type == '$for':
if prev_token_rstripped:
yield prev_token_rstripped
yield found
id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
yield id_token
pos = Skip(lines, id_token.end, WHITE_SPACE_REGEX)
elif found.token_type == '$range':
if prev_token_rstripped:
yield prev_token_rstripped
yield found
id_token = ParseToken(lines, found.end, ID_REGEX, 'id')
yield id_token
pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX)
dots_pos = SkipUntil(lines, pos, DOT_DOT_REGEX, '..')
yield MakeToken(lines, pos, dots_pos, 'exp')
yield MakeToken(lines, dots_pos, dots_pos + 2, '..')
pos = dots_pos + 2
new_pos = Cursor(pos.line + 1, 0)
yield MakeToken(lines, pos, new_pos, 'exp')
pos = new_pos
elif found.token_type == '$':
if prev_token:
yield prev_token
yield found
exp_token = ParseExpTokenInParens(lines, found.end)
yield exp_token
pos = exp_token.end
elif (found.token_type == ']]' or found.token_type == '$if' or
found.token_type == '$elif' or found.token_type == '$else'):
if prev_token_rstripped:
yield prev_token_rstripped
yield found
pos = found.end
else:
if prev_token:
yield prev_token
yield found
pos = found.end
def Tokenize(s):
"""A generator that yields the tokens in the given string."""
if s != '':
lines = s.splitlines(True)
for token in TokenizeLines(lines, Cursor(0, 0)):
yield token
class CodeNode:
def __init__(self, atomic_code_list=None):
self.atomic_code = atomic_code_list
class VarNode:
def __init__(self, identifier=None, atomic_code=None):
self.identifier = identifier
self.atomic_code = atomic_code
class RangeNode:
def __init__(self, identifier=None, exp1=None, exp2=None):
self.identifier = identifier
self.exp1 = exp1
self.exp2 = exp2
class ForNode:
def __init__(self, identifier=None, sep=None, code=None):
self.identifier = identifier
self.sep = sep
self.code = code
class ElseNode:
def __init__(self, else_branch=None):
self.else_branch = else_branch
class IfNode:
def __init__(self, exp=None, then_branch=None, else_branch=None):
self.exp = exp
self.then_branch = then_branch
self.else_branch = else_branch
class RawCodeNode:
def __init__(self, token=None):
self.raw_code = token
class LiteralDollarNode:
def __init__(self, token):
self.token = token
class ExpNode:
def __init__(self, token, python_exp):
self.token = token
self.python_exp = python_exp
def PopFront(a_list):
head = a_list[0]
a_list[:1] = []
return head
def PushFront(a_list, elem):
a_list[:0] = [elem]
def PopToken(a_list, token_type=None):
token = PopFront(a_list)
if token_type is not None and token.token_type != token_type:
print 'ERROR: %s expected at %s' % (token_type, token.start)
print 'ERROR: %s found instead' % (token,)
sys.exit(1)
return token
def PeekToken(a_list):
if not a_list:
return None
return a_list[0]
def ParseExpNode(token):
python_exp = re.sub(r'([_A-Za-z]\w*)', r'self.GetValue("\1")', token.value)
return ExpNode(token, python_exp)
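# Illustrative sketch: for a token whose value is 'i + n * 2', the
# substitution above produces the Python expression
#   'self.GetValue("i") + self.GetValue("n") * 2'
# which Env.EvalExp() later evaluates with eval() against the current Env.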
def ParseElseNode(tokens):
def Pop(token_type=None):
return PopToken(tokens, token_type)
next = PeekToken(tokens)
if not next:
return None
if next.token_type == '$else':
Pop('$else')
Pop('[[')
code_node = ParseCodeNode(tokens)
Pop(']]')
return code_node
elif next.token_type == '$elif':
Pop('$elif')
exp = Pop('code')
Pop('[[')
code_node = ParseCodeNode(tokens)
Pop(']]')
inner_else_node = ParseElseNode(tokens)
return CodeNode([IfNode(ParseExpNode(exp), code_node, inner_else_node)])
elif not next.value.strip():
Pop('code')
return ParseElseNode(tokens)
else:
return None
def ParseAtomicCodeNode(tokens):
def Pop(token_type=None):
return PopToken(tokens, token_type)
head = PopFront(tokens)
t = head.token_type
if t == 'code':
return RawCodeNode(head)
elif t == '$var':
id_token = Pop('id')
Pop('=')
next = PeekToken(tokens)
if next.token_type == 'exp':
exp_token = Pop()
return VarNode(id_token, ParseExpNode(exp_token))
Pop('[[')
code_node = ParseCodeNode(tokens)
Pop(']]')
return VarNode(id_token, code_node)
elif t == '$for':
id_token = Pop('id')
next_token = PeekToken(tokens)
if next_token.token_type == 'code':
sep_token = next_token
Pop('code')
else:
sep_token = None
Pop('[[')
code_node = ParseCodeNode(tokens)
Pop(']]')
return ForNode(id_token, sep_token, code_node)
elif t == '$if':
exp_token = Pop('code')
Pop('[[')
code_node = ParseCodeNode(tokens)
Pop(']]')
else_node = ParseElseNode(tokens)
return IfNode(ParseExpNode(exp_token), code_node, else_node)
elif t == '$range':
id_token = Pop('id')
exp1_token = Pop('exp')
Pop('..')
exp2_token = Pop('exp')
return RangeNode(id_token, ParseExpNode(exp1_token),
ParseExpNode(exp2_token))
elif t == '$id':
return ParseExpNode(Token(head.start + 1, head.end, head.value[1:], 'id'))
elif t == '$($)':
return LiteralDollarNode(head)
elif t == '$':
exp_token = Pop('exp')
return ParseExpNode(exp_token)
elif t == '[[':
code_node = ParseCodeNode(tokens)
Pop(']]')
return code_node
else:
PushFront(tokens, head)
return None
def ParseCodeNode(tokens):
atomic_code_list = []
while True:
if not tokens:
break
atomic_code_node = ParseAtomicCodeNode(tokens)
if atomic_code_node:
atomic_code_list.append(atomic_code_node)
else:
break
return CodeNode(atomic_code_list)
def ParseToAST(pump_src_text):
"""Convert the given Pump source text into an AST."""
tokens = list(Tokenize(pump_src_text))
code_node = ParseCodeNode(tokens)
return code_node
class Env:
def __init__(self):
self.variables = []
self.ranges = []
def Clone(self):
clone = Env()
clone.variables = self.variables[:]
clone.ranges = self.ranges[:]
return clone
def PushVariable(self, var, value):
# If value looks like an int, store it as an int.
try:
int_value = int(value)
if ('%s' % int_value) == value:
value = int_value
except Exception:
pass
self.variables[:0] = [(var, value)]
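# Illustrative sketch: PushVariable('n', '42') stores the int 42, because
# '%s' % 42 round-trips to '42'; PushVariable('n', '042') keeps the string
# '042', since '%s' % 42 does not reproduce the leading zero.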
def PopVariable(self):
self.variables[:1] = []
def PushRange(self, var, lower, upper):
self.ranges[:0] = [(var, lower, upper)]
def PopRange(self):
self.ranges[:1] = []
def GetValue(self, identifier):
for (var, value) in self.variables:
if identifier == var:
return value
print 'ERROR: meta variable %s is undefined.' % (identifier,)
sys.exit(1)
def EvalExp(self, exp):
try:
result = eval(exp.python_exp)
except Exception, e:
print 'ERROR: caught exception %s: %s' % (e.__class__.__name__, e)
print ('ERROR: failed to evaluate meta expression %s at %s' %
(exp.python_exp, exp.token.start))
sys.exit(1)
return result
def GetRange(self, identifier):
for (var, lower, upper) in self.ranges:
if identifier == var:
return (lower, upper)
print 'ERROR: range %s is undefined.' % (identifier,)
sys.exit(1)
class Output:
def __init__(self):
self.string = ''
def GetLastLine(self):
index = self.string.rfind('\n')
if index < 0:
return ''
return self.string[index + 1:]
def Append(self, s):
self.string += s
def RunAtomicCode(env, node, output):
if isinstance(node, VarNode):
identifier = node.identifier.value.strip()
result = Output()
RunAtomicCode(env.Clone(), node.atomic_code, result)
value = result.string
env.PushVariable(identifier, value)
elif isinstance(node, RangeNode):
identifier = node.identifier.value.strip()
lower = int(env.EvalExp(node.exp1))
upper = int(env.EvalExp(node.exp2))
env.PushRange(identifier, lower, upper)
elif isinstance(node, ForNode):
identifier = node.identifier.value.strip()
if node.sep is None:
sep = ''
else:
sep = node.sep.value
(lower, upper) = env.GetRange(identifier)
for i in range(lower, upper + 1):
new_env = env.Clone()
new_env.PushVariable(identifier, i)
RunCode(new_env, node.code, output)
if i != upper:
output.Append(sep)
elif isinstance(node, RawCodeNode):
output.Append(node.raw_code.value)
elif isinstance(node, IfNode):
cond = env.EvalExp(node.exp)
if cond:
RunCode(env.Clone(), node.then_branch, output)
elif node.else_branch is not None:
RunCode(env.Clone(), node.else_branch, output)
elif isinstance(node, ExpNode):
value = env.EvalExp(node)
output.Append('%s' % (value,))
elif isinstance(node, LiteralDollarNode):
output.Append('$')
elif isinstance(node, CodeNode):
RunCode(env.Clone(), node, output)
else:
print 'ERROR: unexpected AST node:'
print node
sys.exit(1)
def RunCode(env, code_node, output):
for atomic_code in code_node.atomic_code:
RunAtomicCode(env, atomic_code, output)
def IsSingleLineComment(cur_line):
return '//' in cur_line
def IsInPreprocessorDirective(prev_lines, cur_line):
if cur_line.lstrip().startswith('#'):
return True
return prev_lines and prev_lines[-1].endswith('\\')
def WrapComment(line, output):
loc = line.find('//')
before_comment = line[:loc].rstrip()
if before_comment == '':
indent = loc
else:
output.append(before_comment)
indent = len(before_comment) - len(before_comment.lstrip())
prefix = indent*' ' + '// '
max_len = 80 - len(prefix)
comment = line[loc + 2:].strip()
segs = [seg for seg in re.split(r'(\w+\W*)', comment) if seg != '']
cur_line = ''
for seg in segs:
if len((cur_line + seg).rstrip()) < max_len:
cur_line += seg
else:
if cur_line.strip() != '':
output.append(prefix + cur_line.rstrip())
cur_line = seg.lstrip()
if cur_line.strip() != '':
output.append(prefix + cur_line.strip())
def WrapCode(line, line_concat, output):
indent = len(line) - len(line.lstrip())
prefix = indent*' ' # Prefix of the current line
max_len = 80 - indent - len(line_concat) # Maximum length of the current line
new_prefix = prefix + 4*' ' # Prefix of a continuation line
new_max_len = max_len - 4 # Maximum length of a continuation line
# Prefers to wrap a line after a ',' or ';'.
segs = [seg for seg in re.split(r'([^,;]+[,;]?)', line.strip()) if seg != '']
cur_line = '' # The current line without leading spaces.
for seg in segs:
# If the line is still too long, wrap at a space.
while cur_line == '' and len(seg.strip()) > max_len:
seg = seg.lstrip()
split_at = seg.rfind(' ', 0, max_len)
output.append(prefix + seg[:split_at].strip() + line_concat)
seg = seg[split_at + 1:]
prefix = new_prefix
max_len = new_max_len
if len((cur_line + seg).rstrip()) < max_len:
cur_line = (cur_line + seg).lstrip()
else:
output.append(prefix + cur_line.rstrip() + line_concat)
prefix = new_prefix
max_len = new_max_len
cur_line = seg.lstrip()
if cur_line.strip() != '':
output.append(prefix + cur_line.strip())
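# Illustrative sketch (hypothetical input): wrapping a too-long line such
# as
#   Foo(argument_one, argument_two, argument_three, argument_four_is_long);
# prefers to break after a ',' or ';', producing roughly
#   Foo(argument_one, argument_two, argument_three,
#       argument_four_is_long);
# with continuation lines indented four extra spaces relative to the
# original line.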
def WrapPreprocessorDirective(line, output):
WrapCode(line, ' \\', output)
def WrapPlainCode(line, output):
WrapCode(line, '', output)
def IsMultiLineIWYUPragma(line):
return re.search(r'/\* IWYU pragma: ', line)
def IsHeaderGuardIncludeOrOneLineIWYUPragma(line):
return (re.match(r'^#(ifndef|define|endif\s*//)\s*[\w_]+\s*$', line) or
re.match(r'^#include\s', line) or
# Don't break IWYU pragmas, either; that causes iwyu.py problems.
re.search(r'// IWYU pragma: ', line))
def WrapLongLine(line, output):
line = line.rstrip()
if len(line) <= 80:
output.append(line)
elif IsSingleLineComment(line):
if IsHeaderGuardIncludeOrOneLineIWYUPragma(line):
# The style guide made an exception to allow long header guard lines,
# includes and IWYU pragmas.
output.append(line)
else:
WrapComment(line, output)
elif IsInPreprocessorDirective(output, line):
if IsHeaderGuardIncludeOrOneLineIWYUPragma(line):
# The style guide made an exception to allow long header guard lines,
# includes and IWYU pragmas.
output.append(line)
else:
WrapPreprocessorDirective(line, output)
elif IsMultiLineIWYUPragma(line):
output.append(line)
else:
WrapPlainCode(line, output)
def BeautifyCode(string):
lines = string.splitlines()
output = []
for line in lines:
WrapLongLine(line, output)
output2 = [line.rstrip() for line in output]
return '\n'.join(output2) + '\n'
def ConvertFromPumpSource(src_text):
"""Return the text generated from the given Pump source text."""
ast = ParseToAST(StripMetaComments(src_text))
output = Output()
RunCode(Env(), ast, output)
return BeautifyCode(output.string)
def main(argv):
if len(argv) == 1:
print __doc__
sys.exit(1)
file_path = argv[-1]
output_str = ConvertFromPumpSource(file(file_path, 'r').read())
if file_path.endswith('.pump'):
output_file_path = file_path[:-5]
else:
output_file_path = '-'
if output_file_path == '-':
print output_str,
else:
output_file = file(output_file_path, 'w')
output_file.write('// This file was GENERATED by command:\n')
output_file.write('// %s %s\n' %
(os.path.basename(__file__), os.path.basename(file_path)))
output_file.write('// DO NOT EDIT BY HAND!!!\n\n')
output_file.write(output_str)
output_file.close()
if __name__ == '__main__':
main(sys.argv)
| bsd-3-clause |
emilk/sproxel | distro/common/lib/httplib.py | 50 | 49485 | """HTTP/1.1 client library
<intro stuff goes here>
<other stuff, too>
HTTPConnection goes through a number of "states", which define when a client
may legally make another request or fetch the response for a particular
request. This diagram details these state transitions:
(null)
|
| HTTPConnection()
v
Idle
|
| putrequest()
v
Request-started
|
| ( putheader() )* endheaders()
v
Request-sent
|
| response = getresponse()
v
Unread-response [Response-headers-read]
|\____________________
| |
| response.read() | putrequest()
v v
Idle Req-started-unread-response
______/|
/ |
response.read() | | ( putheader() )* endheaders()
v v
Request-started Req-sent-unread-response
|
| response.read()
v
Request-sent
This diagram presents the following rules:
-- a second request may not be started until {response-headers-read}
-- a response [object] cannot be retrieved until {request-sent}
-- there is no differentiation between an unread response body and a
partially read response body
Note: this enforcement is applied by the HTTPConnection class. The
HTTPResponse class does not enforce this state machine, which
implies sophisticated clients may accelerate the request/response
pipeline. Caution should be taken, though: accelerating the states
beyond the above pattern may imply knowledge of the server's
connection-close behavior for certain requests. For example, it
is impossible to tell whether the server will close the connection
UNTIL the response headers have been read; this means that further
requests cannot be placed into the pipeline until it is known that
the server will NOT be closing the connection.
Logical State __state __response
------------- ------- ----------
Idle _CS_IDLE None
Request-started _CS_REQ_STARTED None
Request-sent _CS_REQ_SENT None
Unread-response _CS_IDLE <response_class>
Req-started-unread-response _CS_REQ_STARTED <response_class>
Req-sent-unread-response _CS_REQ_SENT <response_class>
"""
from array import array
import os
import socket
from sys import py3kwarning
from urlparse import urlsplit
import warnings
with warnings.catch_warnings():
if py3kwarning:
warnings.filterwarnings("ignore", ".*mimetools has been removed",
DeprecationWarning)
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
__all__ = ["HTTP", "HTTPResponse", "HTTPConnection",
"HTTPException", "NotConnected", "UnknownProtocol",
"UnknownTransferEncoding", "UnimplementedFileMode",
"IncompleteRead", "InvalidURL", "ImproperConnectionState",
"CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
"BadStatusLine", "error", "responses"]
HTTP_PORT = 80
HTTPS_PORT = 443
_UNKNOWN = 'UNKNOWN'
# connection states
_CS_IDLE = 'Idle'
_CS_REQ_STARTED = 'Request-started'
_CS_REQ_SENT = 'Request-sent'
# status codes
# informational
CONTINUE = 100
SWITCHING_PROTOCOLS = 101
PROCESSING = 102
# successful
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206
MULTI_STATUS = 207
IM_USED = 226
# redirection
MULTIPLE_CHOICES = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
TEMPORARY_REDIRECT = 307
# client error
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
METHOD_NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTHENTICATION_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
REQUEST_ENTITY_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417
UNPROCESSABLE_ENTITY = 422
LOCKED = 423
FAILED_DEPENDENCY = 424
UPGRADE_REQUIRED = 426
# server error
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
INSUFFICIENT_STORAGE = 507
NOT_EXTENDED = 510
# Mapping status codes to official W3C names
responses = {
100: 'Continue',
101: 'Switching Protocols',
200: 'OK',
201: 'Created',
202: 'Accepted',
203: 'Non-Authoritative Information',
204: 'No Content',
205: 'Reset Content',
206: 'Partial Content',
300: 'Multiple Choices',
301: 'Moved Permanently',
302: 'Found',
303: 'See Other',
304: 'Not Modified',
305: 'Use Proxy',
306: '(Unused)',
307: 'Temporary Redirect',
400: 'Bad Request',
401: 'Unauthorized',
402: 'Payment Required',
403: 'Forbidden',
404: 'Not Found',
405: 'Method Not Allowed',
406: 'Not Acceptable',
407: 'Proxy Authentication Required',
408: 'Request Timeout',
409: 'Conflict',
410: 'Gone',
411: 'Length Required',
412: 'Precondition Failed',
413: 'Request Entity Too Large',
414: 'Request-URI Too Long',
415: 'Unsupported Media Type',
416: 'Requested Range Not Satisfiable',
417: 'Expectation Failed',
500: 'Internal Server Error',
501: 'Not Implemented',
502: 'Bad Gateway',
503: 'Service Unavailable',
504: 'Gateway Timeout',
505: 'HTTP Version Not Supported',
}
# maximal amount of data to read at one time in _safe_read
MAXAMOUNT = 1048576
# maximal line length when calling readline().
_MAXLINE = 65536
class HTTPMessage(mimetools.Message):
def addheader(self, key, value):
"""Add header for field key handling repeats."""
prev = self.dict.get(key)
if prev is None:
self.dict[key] = value
else:
combined = ", ".join((prev, value))
self.dict[key] = combined
def addcontinue(self, key, more):
"""Add more field data from a continuation line."""
prev = self.dict[key]
self.dict[key] = prev + "\n " + more
def readheaders(self):
"""Read header lines.
Read header lines up to the entirely blank line that terminates them.
The (normally blank) line that ends the headers is skipped, but not
included in the returned list. If a non-header line ends the headers
(which is an error), an attempt is made to backspace over it; it is
never included in the returned list.
The variable self.status is set to the empty string if all went well,
otherwise it is an error message. The variable self.headers is a
completely uninterpreted list of lines contained in the header (so
printing them will reproduce the header exactly as it appears in the
file).
If multiple header fields with the same name occur, they are combined
according to the rules in RFC 2616 sec 4.2:
Appending each subsequent field-value to the first, each separated
by a comma. The order in which header fields with the same field-name
are received is significant to the interpretation of the combined
field value.
"""
# XXX The implementation overrides the readheaders() method of
# rfc822.Message. The base class design isn't amenable to
# customized behavior here so the method here is a copy of the
# base class code with a few small changes.
self.dict = {}
self.unixfrom = ''
self.headers = hlist = []
self.status = ''
headerseen = ""
firstline = 1
startofline = unread = tell = None
if hasattr(self.fp, 'unread'):
unread = self.fp.unread
elif self.seekable:
tell = self.fp.tell
while True:
if tell:
try:
startofline = tell()
except IOError:
startofline = tell = None
self.seekable = 0
line = self.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("header line")
if not line:
self.status = 'EOF in headers'
break
# Skip unix From name time lines
if firstline and line.startswith('From '):
self.unixfrom = self.unixfrom + line
continue
firstline = 0
if headerseen and line[0] in ' \t':
# XXX Not sure if continuation lines are handled properly
# for http and/or for repeating headers
# It's a continuation line.
hlist.append(line)
self.addcontinue(headerseen, line.strip())
continue
elif self.iscomment(line):
# It's a comment. Ignore it.
continue
elif self.islast(line):
# Note! No pushback here! The delimiter line gets eaten.
break
headerseen = self.isheader(line)
if headerseen:
# It's a legal header line, save it.
hlist.append(line)
self.addheader(headerseen, line[len(headerseen)+1:].strip())
continue
else:
# It's not a header line; throw it back and stop here.
if not self.dict:
self.status = 'No headers'
else:
self.status = 'Non-header line where header expected'
# Try to undo the read.
if unread:
unread(line)
elif tell:
self.fp.seek(startofline)
else:
self.status = self.status + '; bad seek'
break
class HTTPResponse:
# strict: If true, raise BadStatusLine if the status line can't be
# parsed as a valid HTTP/1.0 or 1.1 status line. By default it is
# false because it prevents clients from talking to HTTP/0.9
# servers. Note that a response with a sufficiently corrupted
# status line will look like an HTTP/0.9 response.
# See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.
def __init__(self, sock, debuglevel=0, strict=0, method=None, buffering=False):
if buffering:
# The caller won't be using any sock.recv() calls, so buffering
# is fine and recommended for performance.
self.fp = sock.makefile('rb')
else:
# The buffer size is specified as zero, because the headers of
# the response are read with readline(). If the reads were
# buffered the readline() calls could consume some of the
# response, which may be read via a recv() on the underlying
# socket.
self.fp = sock.makefile('rb', 0)
self.debuglevel = debuglevel
self.strict = strict
self._method = method
self.msg = None
# from the Status-Line of the response
self.version = _UNKNOWN # HTTP-Version
self.status = _UNKNOWN # Status-Code
self.reason = _UNKNOWN # Reason-Phrase
self.chunked = _UNKNOWN # is "chunked" being used?
self.chunk_left = _UNKNOWN # bytes left to read in current chunk
self.length = _UNKNOWN # number of bytes left in response
self.will_close = _UNKNOWN # conn will close at end of response
def _read_status(self):
# Initialize with Simple-Response defaults
line = self.fp.readline()
if self.debuglevel > 0:
print "reply:", repr(line)
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
raise BadStatusLine(line)
try:
[version, status, reason] = line.split(None, 2)
except ValueError:
try:
[version, status] = line.split(None, 1)
reason = ""
except ValueError:
# empty version will cause next test to fail and status
# will be treated as 0.9 response.
version = ""
if not version.startswith('HTTP/'):
if self.strict:
self.close()
raise BadStatusLine(line)
else:
# assume it's a Simple-Response from an 0.9 server
self.fp = LineAndFileWrapper(line, self.fp)
return "HTTP/0.9", 200, ""
# The status code is a three-digit number
try:
status = int(status)
if status < 100 or status > 999:
raise BadStatusLine(line)
except ValueError:
raise BadStatusLine(line)
return version, status, reason
def begin(self):
if self.msg is not None:
# we've already started reading the response
return
# read until we get a non-100 response
while True:
version, status, reason = self._read_status()
if status != CONTINUE:
break
# skip the header from the 100 response
while True:
skip = self.fp.readline(_MAXLINE + 1)
if len(skip) > _MAXLINE:
raise LineTooLong("header line")
skip = skip.strip()
if not skip:
break
if self.debuglevel > 0:
print "header:", skip
self.status = status
self.reason = reason.strip()
if version == 'HTTP/1.0':
self.version = 10
elif version.startswith('HTTP/1.'):
self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
elif version == 'HTTP/0.9':
self.version = 9
else:
raise UnknownProtocol(version)
if self.version == 9:
self.length = None
self.chunked = 0
self.will_close = 1
self.msg = HTTPMessage(StringIO())
return
self.msg = HTTPMessage(self.fp, 0)
if self.debuglevel > 0:
for hdr in self.msg.headers:
print "header:", hdr,
# don't let the msg keep an fp
self.msg.fp = None
# are we using the chunked-style of transfer encoding?
tr_enc = self.msg.getheader('transfer-encoding')
if tr_enc and tr_enc.lower() == "chunked":
self.chunked = 1
self.chunk_left = None
else:
self.chunked = 0
# will the connection close at the end of the response?
self.will_close = self._check_close()
# do we have a Content-Length?
# NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
length = self.msg.getheader('content-length')
if length and not self.chunked:
try:
self.length = int(length)
except ValueError:
self.length = None
else:
if self.length < 0: # ignore nonsensical negative lengths
self.length = None
else:
self.length = None
# does the body have a fixed length? (of zero)
if (status == NO_CONTENT or status == NOT_MODIFIED or
100 <= status < 200 or # 1xx codes
self._method == 'HEAD'):
self.length = 0
# if the connection remains open, and we aren't using chunked, and
# a content-length was not provided, then assume that the connection
# WILL close.
if not self.will_close and \
not self.chunked and \
self.length is None:
self.will_close = 1
def _check_close(self):
conn = self.msg.getheader('connection')
if self.version == 11:
# An HTTP/1.1 proxy is assumed to stay open unless
# explicitly closed.
if conn and "close" in conn.lower():
return True
return False
# Some HTTP/1.0 implementations have support for persistent
# connections, using rules different than HTTP/1.1.
# For older HTTP, Keep-Alive indicates persistent connection.
if self.msg.getheader('keep-alive'):
return False
# At least Akamai returns a "Connection: Keep-Alive" header,
# which was supposed to be sent by the client.
if conn and "keep-alive" in conn.lower():
return False
# Proxy-Connection is a netscape hack.
pconn = self.msg.getheader('proxy-connection')
if pconn and "keep-alive" in pconn.lower():
return False
# otherwise, assume it will close
return True
def close(self):
if self.fp:
self.fp.close()
self.fp = None
def isclosed(self):
# NOTE: it is possible that we will not ever call self.close(). This
# case occurs when will_close is TRUE, length is None, and we
# read up to the last byte, but NOT past it.
#
# IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
# called, meaning self.isclosed() is meaningful.
return self.fp is None
# XXX It would be nice to have readline and __iter__ for this, too.
def read(self, amt=None):
if self.fp is None:
return ''
if self._method == 'HEAD':
self.close()
return ''
if self.chunked:
return self._read_chunked(amt)
if amt is None:
# unbounded read
if self.length is None:
s = self.fp.read()
else:
s = self._safe_read(self.length)
self.length = 0
self.close() # we read everything
return s
if self.length is not None:
if amt > self.length:
# clip the read to the "end of response"
amt = self.length
# we do not use _safe_read() here because this may be a .will_close
# connection, and the user is reading more bytes than will be provided
# (for example, reading in 1k chunks)
s = self.fp.read(amt)
if self.length is not None:
self.length -= len(s)
if not self.length:
self.close()
return s
def _read_chunked(self, amt):
assert self.chunked != _UNKNOWN
chunk_left = self.chunk_left
value = []
while True:
if chunk_left is None:
line = self.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("chunk size")
i = line.find(';')
if i >= 0:
line = line[:i] # strip chunk-extensions
try:
chunk_left = int(line, 16)
except ValueError:
# close the connection as protocol synchronisation is
# probably lost
self.close()
raise IncompleteRead(''.join(value))
if chunk_left == 0:
break
if amt is None:
value.append(self._safe_read(chunk_left))
elif amt < chunk_left:
value.append(self._safe_read(amt))
self.chunk_left = chunk_left - amt
return ''.join(value)
elif amt == chunk_left:
value.append(self._safe_read(amt))
self._safe_read(2) # toss the CRLF at the end of the chunk
self.chunk_left = None
return ''.join(value)
else:
value.append(self._safe_read(chunk_left))
amt -= chunk_left
# we read the whole chunk, get another
self._safe_read(2) # toss the CRLF at the end of the chunk
chunk_left = None
# read and discard trailer up to the CRLF terminator
### note: we shouldn't have any trailers!
while True:
line = self.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("trailer line")
if not line:
# a vanishingly small number of sites EOF without
# sending the trailer
break
if line == '\r\n':
break
# we read everything; close the "file"
self.close()
return ''.join(value)
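# Illustrative sketch of the chunked wire format consumed above:
#
#   '4\r\n'      chunk-size in hex (any ';extension' is stripped)
#   'Wiki\r\n'   chunk data followed by CRLF
#   '5\r\n'
#   'pedia\r\n'
#   '0\r\n'      a zero-size chunk terminates the body
#   '\r\n'       blank line ends the (normally absent) trailers
#
# For this stream, _read_chunked(None) returns 'Wikipedia'.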
def _safe_read(self, amt):
"""Read the number of bytes requested, compensating for partial reads.
Normally, we have a blocking socket, but a read() can be interrupted
by a signal (resulting in a partial read).
Note that we cannot distinguish between EOF and an interrupt when zero
bytes have been read. IncompleteRead() will be raised in this
situation.
This function should be used when <amt> bytes "should" be present for
reading. If the bytes are truly not available (due to EOF), then the
IncompleteRead exception can be used to detect the problem.
"""
# NOTE(gps): As of svn r74426 socket._fileobject.read(x) will never
# return less than x bytes unless EOF is encountered. It now handles
# signal interruptions (socket.error EINTR) internally. This code
# never caught that exception anyways. It seems largely pointless.
# self.fp.read(amt) will work fine.
s = []
while amt > 0:
chunk = self.fp.read(min(amt, MAXAMOUNT))
if not chunk:
raise IncompleteRead(''.join(s), amt)
s.append(chunk)
amt -= len(chunk)
return ''.join(s)
def fileno(self):
return self.fp.fileno()
def getheader(self, name, default=None):
if self.msg is None:
raise ResponseNotReady()
return self.msg.getheader(name, default)
def getheaders(self):
"""Return list of (header, value) tuples."""
if self.msg is None:
raise ResponseNotReady()
return self.msg.items()
class HTTPConnection:
_http_vsn = 11
_http_vsn_str = 'HTTP/1.1'
response_class = HTTPResponse
default_port = HTTP_PORT
auto_open = 1
debuglevel = 0
strict = 0
def __init__(self, host, port=None, strict=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
self.timeout = timeout
self.source_address = source_address
self.sock = None
self._buffer = []
self.__response = None
self.__state = _CS_IDLE
self._method = None
self._tunnel_host = None
self._tunnel_port = None
self._tunnel_headers = {}
self._set_hostport(host, port)
if strict is not None:
self.strict = strict
def set_tunnel(self, host, port=None, headers=None):
""" Sets up the host and the port for the HTTP CONNECT Tunnelling.
The headers argument should be a mapping of extra HTTP headers
to send with the CONNECT request.
"""
self._tunnel_host = host
self._tunnel_port = port
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def _set_hostport(self, host, port):
if port is None:
i = host.rfind(':')
j = host.rfind(']') # ipv6 addresses have [...]
if i > j:
try:
port = int(host[i+1:])
except ValueError:
raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
host = host[:i]
else:
port = self.default_port
if host and host[0] == '[' and host[-1] == ']':
host = host[1:-1]
self.host = host
self.port = port
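# Illustrative sketch: _set_hostport('www.python.org:8080', None) yields
# host 'www.python.org' and port 8080; _set_hostport('[::1]', None) strips
# the brackets and falls back to default_port; a nonnumeric port as in
# 'example.com:abc' raises InvalidURL.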
def set_debuglevel(self, level):
self.debuglevel = level
def _tunnel(self):
self._set_hostport(self._tunnel_host, self._tunnel_port)
self.send("CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port))
for header, value in self._tunnel_headers.iteritems():
self.send("%s: %s\r\n" % (header, value))
self.send("\r\n")
response = self.response_class(self.sock, strict = self.strict,
method = self._method)
(version, code, message) = response._read_status()
if code != 200:
self.close()
raise socket.error("Tunnel connection failed: %d %s" % (code,
message.strip()))
while True:
line = response.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
raise LineTooLong("header line")
if line == '\r\n': break
def connect(self):
"""Connect to the host and port specified in __init__."""
self.sock = socket.create_connection((self.host,self.port),
self.timeout, self.source_address)
if self._tunnel_host:
self._tunnel()
def close(self):
"""Close the connection to the HTTP server."""
if self.sock:
self.sock.close() # close it manually... there may be other refs
self.sock = None
if self.__response:
self.__response.close()
self.__response = None
self.__state = _CS_IDLE
def send(self, data):
"""Send `data' to the server."""
if self.sock is None:
if self.auto_open:
self.connect()
else:
raise NotConnected()
if self.debuglevel > 0:
print "send:", repr(data)
blocksize = 8192
if hasattr(data,'read') and not isinstance(data, array):
if self.debuglevel > 0: print "sending a read()able"
datablock = data.read(blocksize)
while datablock:
self.sock.sendall(datablock)
datablock = data.read(blocksize)
else:
self.sock.sendall(data)
def _output(self, s):
"""Add a line of output to the current request buffer.
Assumes that the line does *not* end with \\r\\n.
"""
self._buffer.append(s)
def _send_output(self, message_body=None):
"""Send the currently buffered request and clear the buffer.
Appends an extra \\r\\n to the buffer.
A message_body may be specified, to be appended to the request.
"""
self._buffer.extend(("", ""))
msg = "\r\n".join(self._buffer)
del self._buffer[:]
# If msg and message_body are sent in a single send() call,
# it will avoid performance problems caused by the interaction
# between delayed ack and the Nagle algorithm.
if isinstance(message_body, str):
msg += message_body
message_body = None
self.send(msg)
if message_body is not None:
#message_body was not a string (i.e. it is a file) and
#we must run the risk of Nagle
self.send(message_body)
def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
"""Send a request to the server.
`method' specifies an HTTP request method, e.g. 'GET'.
`url' specifies the object being requested, e.g. '/index.html'.
`skip_host' if True, does not automatically add a 'Host:' header
`skip_accept_encoding' if True, does not automatically add an
'Accept-Encoding:' header
"""
# if a prior response has been completed, then forget about it.
if self.__response and self.__response.isclosed():
self.__response = None
# in certain cases, we cannot issue another request on this connection.
# this occurs when:
# 1) we are in the process of sending a request. (_CS_REQ_STARTED)
# 2) a response to a previous request has signalled that it is going
# to close the connection upon completion.
# 3) the headers for the previous response have not been read, thus
# we cannot determine whether point (2) is true. (_CS_REQ_SENT)
#
# if there is no prior response, then we can request at will.
#
# if point (2) is true, then we will have passed the socket to the
# response (effectively meaning, "there is no prior response"), and
# will open a new one when a new request is made.
#
# Note: if a prior response exists, then we *can* start a new request.
# We are not allowed to begin fetching the response to this new
# request, however, until that prior response is complete.
#
if self.__state == _CS_IDLE:
self.__state = _CS_REQ_STARTED
else:
raise CannotSendRequest()
# Save the method we use, we need it later in the response phase
self._method = method
if not url:
url = '/'
hdr = '%s %s %s' % (method, url, self._http_vsn_str)
self._output(hdr)
if self._http_vsn == 11:
# Issue some standard headers for better HTTP/1.1 compliance
if not skip_host:
# this header is issued *only* for HTTP/1.1
# connections. more specifically, this means it is
# only issued when the client uses the new
# HTTPConnection() class. backwards-compat clients
# will be using HTTP/1.0 and those clients may be
# issuing this header themselves. we should NOT issue
# it twice; some web servers (such as Apache) barf
# when they see two Host: headers
# If we need a non-standard port, include it in the
# header. If the request is going through a proxy,
# use the host of the actual URL, not the host of the
# proxy.
netloc = ''
if url.startswith('http'):
nil, netloc, nil, nil, nil = urlsplit(url)
if netloc:
try:
netloc_enc = netloc.encode("ascii")
except UnicodeEncodeError:
netloc_enc = netloc.encode("idna")
self.putheader('Host', netloc_enc)
else:
try:
host_enc = self.host.encode("ascii")
except UnicodeEncodeError:
host_enc = self.host.encode("idna")
# Wrap the IPv6 Host Header with [] (RFC 2732)
if host_enc.find(':') >= 0:
host_enc = "[" + host_enc + "]"
if self.port == self.default_port:
self.putheader('Host', host_enc)
else:
self.putheader('Host', "%s:%s" % (host_enc, self.port))
# note: we are assuming that clients will not attempt to set these
# headers since *this* library must deal with the
# consequences. this also means that when the supporting
# libraries are updated to recognize other forms, then this
# code should be changed (removed or updated).
# we only want a Content-Encoding of "identity" since we don't
# support encodings such as x-gzip or x-deflate.
if not skip_accept_encoding:
self.putheader('Accept-Encoding', 'identity')
# we can accept "chunked" Transfer-Encodings, but no others
# NOTE: no TE header implies *only* "chunked"
#self.putheader('TE', 'chunked')
# if TE is supplied in the header, then it must appear in a
# Connection header.
#self.putheader('Connection', 'TE')
else:
# For HTTP/1.0, the server will assume "not chunked"
pass
def putheader(self, header, *values):
"""Send a request header line to the server.
For example: h.putheader('Accept', 'text/html')
"""
if self.__state != _CS_REQ_STARTED:
raise CannotSendHeader()
hdr = '%s: %s' % (header, '\r\n\t'.join([str(v) for v in values]))
self._output(hdr)
def endheaders(self, message_body=None):
"""Indicate that the last header line has been sent to the server.
This method sends the request to the server. The optional
message_body argument can be used to pass message body
associated with the request. The message body will be sent in
the same packet as the message headers if possible. The
message_body should be a string.
"""
if self.__state == _CS_REQ_STARTED:
self.__state = _CS_REQ_SENT
else:
raise CannotSendHeader()
self._send_output(message_body)
def request(self, method, url, body=None, headers={}):
"""Send a complete request to the server."""
self._send_request(method, url, body, headers)
def _set_content_length(self, body):
# Set the content-length based on the body.
thelen = None
try:
thelen = str(len(body))
except TypeError, te:
# If this is a file-like object, try to
# fstat its file descriptor
try:
thelen = str(os.fstat(body.fileno()).st_size)
except (AttributeError, OSError):
# Don't send a length if this failed
if self.debuglevel > 0: print "Cannot stat!!"
if thelen is not None:
self.putheader('Content-Length', thelen)
def _send_request(self, method, url, body, headers):
# Honor explicitly requested Host: and Accept-Encoding: headers.
header_names = dict.fromkeys([k.lower() for k in headers])
skips = {}
if 'host' in header_names:
skips['skip_host'] = 1
if 'accept-encoding' in header_names:
skips['skip_accept_encoding'] = 1
self.putrequest(method, url, **skips)
if body and ('content-length' not in header_names):
self._set_content_length(body)
for hdr, value in headers.iteritems():
self.putheader(hdr, value)
self.endheaders(body)
def getresponse(self, buffering=False):
"Get the response from the server."
# if a prior response has been completed, then forget about it.
if self.__response and self.__response.isclosed():
self.__response = None
#
# if a prior response exists, then it must be completed (otherwise, we
# cannot read this response's header to determine the connection-close
# behavior)
#
# note: if a prior response existed, but was connection-close, then the
# socket and response were made independent of this HTTPConnection
# object since a new request requires that we open a whole new
# connection
#
# this means the prior response had one of two states:
# 1) will_close: this connection was reset and the prior socket and
# response operate independently
# 2) persistent: the response was retained and we await its
# isclosed() status to become true.
#
if self.__state != _CS_REQ_SENT or self.__response:
raise ResponseNotReady()
args = (self.sock,)
kwds = {"strict":self.strict, "method":self._method}
if self.debuglevel > 0:
args += (self.debuglevel,)
if buffering:
#only add this keyword if non-default, for compatibility with
#other response_classes.
kwds["buffering"] = True;
response = self.response_class(*args, **kwds)
response.begin()
assert response.will_close != _UNKNOWN
self.__state = _CS_IDLE
if response.will_close:
# this effectively passes the connection to the response
self.close()
else:
# remember this, so we can tell when it is complete
self.__response = response
return response
class HTTP:
"Compatibility class with httplib.py from 1.5."
_http_vsn = 10
_http_vsn_str = 'HTTP/1.0'
debuglevel = 0
_connection_class = HTTPConnection
def __init__(self, host='', port=None, strict=None):
"Provide a default host, since the superclass requires one."
# some joker passed 0 explicitly, meaning default port
if port == 0:
port = None
# Note that we may pass an empty string as the host; this will throw
# an error when we attempt to connect. Presumably, the client code
# will call connect before then, with a proper host.
self._setup(self._connection_class(host, port, strict))
def _setup(self, conn):
self._conn = conn
# set up delegation to flesh out interface
self.send = conn.send
self.putrequest = conn.putrequest
self.putheader = conn.putheader
self.endheaders = conn.endheaders
self.set_debuglevel = conn.set_debuglevel
conn._http_vsn = self._http_vsn
conn._http_vsn_str = self._http_vsn_str
self.file = None
def connect(self, host=None, port=None):
"Accept arguments to set the host/port, since the superclass doesn't."
if host is not None:
self._conn._set_hostport(host, port)
self._conn.connect()
def getfile(self):
"Provide a getfile, since the superclass' does not use this concept."
return self.file
def getreply(self, buffering=False):
"""Compat definition since superclass does not define it.
Returns a tuple consisting of:
- server status code (e.g. '200' if all goes well)
- server "reason" corresponding to status code
- any RFC822 headers in the response from the server
"""
try:
if not buffering:
response = self._conn.getresponse()
else:
#only add this keyword if non-default for compatibility
#with other connection classes
response = self._conn.getresponse(buffering)
except BadStatusLine, e:
### hmm. if getresponse() ever closes the socket on a bad request,
### then we are going to have problems with self.sock
### should we keep this behavior? do people use it?
# keep the socket open (as a file), and return it
self.file = self._conn.sock.makefile('rb', 0)
# close our socket -- we want to restart after any protocol error
self.close()
self.headers = None
return -1, e.line, None
self.headers = response.msg
self.file = response.fp
return response.status, response.reason, response.msg
def close(self):
self._conn.close()
# note that self.file == response.fp, which gets closed by the
# superclass. just clear the object ref here.
### hmm. messy. if status==-1, then self.file is owned by us.
### well... we aren't explicitly closing, but losing this ref will
### do it
self.file = None
try:
import ssl
except ImportError:
pass
else:
class HTTPSConnection(HTTPConnection):
"This class allows communication via SSL."
default_port = HTTPS_PORT
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
HTTPConnection.__init__(self, host, port, strict, timeout,
source_address)
self.key_file = key_file
self.cert_file = cert_file
def connect(self):
"Connect to a host on a given (SSL) port."
sock = socket.create_connection((self.host, self.port),
self.timeout, self.source_address)
if self._tunnel_host:
self.sock = sock
self._tunnel()
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file)
__all__.append("HTTPSConnection")
class HTTPS(HTTP):
"""Compatibility with 1.5 httplib interface
Python 1.5.2 did not have an HTTPS class, but it defined an
interface for sending http requests that is also useful for
https.
"""
_connection_class = HTTPSConnection
def __init__(self, host='', port=None, key_file=None, cert_file=None,
strict=None):
# provide a default host, pass the X509 cert info
# urf. compensate for bad input.
if port == 0:
port = None
self._setup(self._connection_class(host, port, key_file,
cert_file, strict))
# we never actually use these for anything, but we keep them
# here for compatibility with post-1.5.2 CVS.
self.key_file = key_file
self.cert_file = cert_file
def FakeSocket (sock, sslobj):
warnings.warn("FakeSocket is deprecated, and won't be in 3.x. " +
"Use the result of ssl.wrap_socket() directly instead.",
DeprecationWarning, stacklevel=2)
return sslobj
class HTTPException(Exception):
# Subclasses that define an __init__ must call Exception.__init__
# or define self.args. Otherwise, str() will fail.
pass
class NotConnected(HTTPException):
pass
class InvalidURL(HTTPException):
pass
class UnknownProtocol(HTTPException):
def __init__(self, version):
self.args = version,
self.version = version
class UnknownTransferEncoding(HTTPException):
pass
class UnimplementedFileMode(HTTPException):
pass
class IncompleteRead(HTTPException):
def __init__(self, partial, expected=None):
self.args = partial,
self.partial = partial
self.expected = expected
def __repr__(self):
if self.expected is not None:
e = ', %i more expected' % self.expected
else:
e = ''
return 'IncompleteRead(%i bytes read%s)' % (len(self.partial), e)
def __str__(self):
return repr(self)
class ImproperConnectionState(HTTPException):
pass
class CannotSendRequest(ImproperConnectionState):
pass
class CannotSendHeader(ImproperConnectionState):
pass
class ResponseNotReady(ImproperConnectionState):
pass
class BadStatusLine(HTTPException):
def __init__(self, line):
if not line:
line = repr(line)
self.args = line,
self.line = line
class LineTooLong(HTTPException):
def __init__(self, line_type):
HTTPException.__init__(self, "got more than %d bytes when reading %s"
% (_MAXLINE, line_type))
# for backwards compatibility
error = HTTPException
class LineAndFileWrapper:
"""A limited file-like object for HTTP/0.9 responses."""
# The status-line parsing code calls readline(), which normally
# gets the HTTP status line. For a 0.9 response, however, this is
# actually the first line of the body! Clients need to get a
# readable file object that contains that line.
def __init__(self, line, file):
self._line = line
self._file = file
self._line_consumed = 0
self._line_offset = 0
self._line_left = len(line)
def __getattr__(self, attr):
return getattr(self._file, attr)
def _done(self):
# called when the last byte is read from the line. After the
# call, all read methods are delegated to the underlying file
# object.
self._line_consumed = 1
self.read = self._file.read
self.readline = self._file.readline
self.readlines = self._file.readlines
def read(self, amt=None):
if self._line_consumed:
return self._file.read(amt)
assert self._line_left
if amt is None or amt > self._line_left:
s = self._line[self._line_offset:]
self._done()
if amt is None:
return s + self._file.read()
else:
return s + self._file.read(amt - len(s))
else:
assert amt <= self._line_left
i = self._line_offset
j = i + amt
s = self._line[i:j]
self._line_offset = j
self._line_left -= amt
if self._line_left == 0:
self._done()
return s
def readline(self):
if self._line_consumed:
return self._file.readline()
assert self._line_left
s = self._line[self._line_offset:]
self._done()
return s
def readlines(self, size=None):
if self._line_consumed:
return self._file.readlines(size)
assert self._line_left
L = [self._line[self._line_offset:]]
self._done()
if size is None:
return L + self._file.readlines()
else:
return L + self._file.readlines(size)
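# Behaviour sketch (illustrative, using StringIO as the wrapped file):
#
# from StringIO import StringIO
# w = LineAndFileWrapper('first line\n', StringIO('rest of body\n'))
# w.readline()   # -> 'first line\n' (the buffered status line)
# w.readline()   # -> 'rest of body\n' (delegated to the wrapped file)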
def test():
"""Test this module.
A hodge podge of tests collected here, because they have too many
external dependencies for the regular test suite.
"""
import sys
import getopt
opts, args = getopt.getopt(sys.argv[1:], 'd')
dl = 0
for o, a in opts:
if o == '-d': dl = dl + 1
host = 'www.python.org'
selector = '/'
if args[0:]: host = args[0]
if args[1:]: selector = args[1]
h = HTTP()
h.set_debuglevel(dl)
h.connect(host)
h.putrequest('GET', selector)
h.endheaders()
status, reason, headers = h.getreply()
print 'status =', status
print 'reason =', reason
print "read", len(h.getfile().read())
print
if headers:
for header in headers.headers: print header.strip()
print
# minimal test that code to extract host from url works
class HTTP11(HTTP):
_http_vsn = 11
_http_vsn_str = 'HTTP/1.1'
h = HTTP11('www.python.org')
h.putrequest('GET', 'http://www.python.org/~jeremy/')
h.endheaders()
h.getreply()
h.close()
try:
import ssl
except ImportError:
pass
else:
for host, selector in (('sourceforge.net', '/projects/python'),
):
print "https://%s%s" % (host, selector)
hs = HTTPS()
hs.set_debuglevel(dl)
hs.connect(host)
hs.putrequest('GET', selector)
hs.endheaders()
status, reason, headers = hs.getreply()
print 'status =', status
print 'reason =', reason
print "read", len(hs.getfile().read())
print
if headers:
for header in headers.headers: print header.strip()
print
if __name__ == '__main__':
test()
| bsd-3-clause |
bslatkin/8-bits | appengine-mapreduce/python/test/testlib/mox.py | 5 | 62680 | #!/usr/bin/env python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mox, an object-mocking framework for Python.
Mox works in the record-replay-verify paradigm. When you first create
a mock object, it is in record mode. You then programmatically set
the expected behavior of the mock object (what methods are to be
called on it, with what parameters, what they should return, and in
what order).
Once you have set up the expected mock behavior, you put it in replay
mode. Now the mock responds to method calls just as you told it to.
If an unexpected method (or an expected method with unexpected
parameters) is called, then an exception will be raised.
Once you are done interacting with the mock, you need to verify that
all the expected interactions occurred. (Maybe your code exited
prematurely without calling some cleanup method!) The verify phase
ensures that every expected method was called; otherwise, an exception
will be raised.
WARNING! Mock objects created by Mox are not thread-safe. If you
call a mock in multiple threads, it should be guarded by a mutex.
TODO(user): Add the option to make mocks thread-safe!
Suggested usage / workflow:
# Create Mox factory
my_mox = Mox()
# Create a mock data access object
mock_dao = my_mox.CreateMock(DAOClass)
# Set up expected behavior
mock_dao.RetrievePersonWithIdentifier('1').AndReturn(person)
mock_dao.DeletePerson(person)
# Put mocks in replay mode
my_mox.ReplayAll()
# Inject mock object and run test
controller.SetDao(mock_dao)
controller.DeletePersonById('1')
# Verify all methods were called as expected
my_mox.VerifyAll()
"""
from collections import deque
import difflib
import inspect
import re
import types
import unittest
import stubout
class Error(AssertionError):
"""Base exception for this module."""
pass
class ExpectedMethodCallsError(Error):
"""Raised when Verify() is called before all expected methods have been called
"""
def __init__(self, expected_methods):
"""Init exception.
Args:
# expected_methods: A sequence of MockMethod objects that should have been
# called.
expected_methods: [MockMethod]
Raises:
ValueError: if expected_methods contains no methods.
"""
if not expected_methods:
raise ValueError("There must be at least one expected method")
Error.__init__(self)
self._expected_methods = expected_methods
def __str__(self):
calls = "\n".join(["%3d. %s" % (i, m)
for i, m in enumerate(self._expected_methods)])
return "Verify: Expected methods never called:\n%s" % (calls,)
class UnexpectedMethodCallError(Error):
"""Raised when an unexpected method is called.
This can occur if a method is called with incorrect parameters, or out of the
specified order.
"""
def __init__(self, unexpected_method, expected):
"""Init exception.
Args:
# unexpected_method: MockMethod that was called but was not at the head of
# the expected_method queue.
# expected: MockMethod or UnorderedGroup the method should have
# been in.
unexpected_method: MockMethod
expected: MockMethod or UnorderedGroup
"""
Error.__init__(self)
if expected is None:
self._str = "Unexpected method call %s" % (unexpected_method,)
else:
differ = difflib.Differ()
diff = differ.compare(str(unexpected_method).splitlines(True),
str(expected).splitlines(True))
self._str = ("Unexpected method call. unexpected:- expected:+\n%s"
% ("\n".join(line.rstrip() for line in diff),))
def __str__(self):
return self._str
class UnknownMethodCallError(Error):
"""Raised if an unknown method is requested of the mock object."""
def __init__(self, unknown_method_name):
"""Init exception.
Args:
# unknown_method_name: Method call that is not part of the mocked class's
# public interface.
unknown_method_name: str
"""
Error.__init__(self)
self._unknown_method_name = unknown_method_name
def __str__(self):
return "Method called is not a member of the object: %s" % \
self._unknown_method_name
class PrivateAttributeError(Error):
"""
Raised if a MockObject is passed a private additional attribute name.
"""
def __init__(self, attr):
Error.__init__(self)
self._attr = attr
def __str__(self):
return ("Attribute '%s' is private and should not be available in a mock "
"object." % attr)
class ExpectedMockCreationError(Error):
"""Raised if mocks should have been created by StubOutClassWithMocks."""
def __init__(self, expected_mocks):
"""Init exception.
Args:
# expected_mocks: A sequence of MockObjects that should have been
# created
Raises:
ValueError: if expected_mocks contains no methods.
"""
if not expected_mocks:
raise ValueError("There must be at least one expected method")
Error.__init__(self)
self._expected_mocks = expected_mocks
def __str__(self):
mocks = "\n".join(["%3d. %s" % (i, m)
for i, m in enumerate(self._expected_mocks)])
return "Verify: Expected mocks never created:\n%s" % (mocks,)
class UnexpectedMockCreationError(Error):
"""Raised if too many mocks were created by StubOutClassWithMocks."""
def __init__(self, instance, *params, **named_params):
"""Init exception.
Args:
# instance: the type of object that was created
# params: parameters given during instantiation
# named_params: named parameters given during instantiation
"""
Error.__init__(self)
self._instance = instance
self._params = params
self._named_params = named_params
def __str__(self):
args = ", ".join(["%s" % v for i, v in enumerate(self._params)])
error = "Unexpected mock creation: %s(%s" % (self._instance, args)
if self._named_params:
error += ", " + ", ".join(["%s=%s" % (k, v) for k, v in
self._named_params.iteritems()])
error += ")"
return error
class SwallowedExceptionError(Error):
"""Raised when Verify() is called after something already threw an exception.
This means that the exception that was thrown was somehow swallowed, allowing
the test to continue when it should not have.
"""
def __init__(self, previous_exceptions):
"""Init exception.
Args:
# previous_exceptions: A sequence of Error objects that were raised.
previous_exceptions: [Error]
"""
Error.__init__(self)
self._previous_exceptions = previous_exceptions
def __str__(self):
exceptions = "\n".join(["%3d. %s: %s" % (i, e.__class__.__name__, e)
for i, e in enumerate(self._previous_exceptions)])
return "Previous exceptions thrown:\n%s" % (exceptions,)
class Mox(object):
"""Mox: a factory for creating mock objects."""
# A list of types that should be stubbed out with MockObjects (as
# opposed to MockAnythings).
_USE_MOCK_OBJECT = [types.ClassType, types.FunctionType, types.InstanceType,
types.ModuleType, types.ObjectType, types.TypeType,
types.MethodType, types.UnboundMethodType,
]
# A list of types that may be stubbed out with a MockObjectFactory.
_USE_MOCK_FACTORY = [types.ClassType, types.ObjectType, types.TypeType]
def __init__(self):
"""Initialize a new Mox."""
self._mock_objects = []
self.stubs = stubout.StubOutForTesting()
def CreateMock(self, class_to_mock, attrs=None):
"""Create a new mock object.
Args:
# class_to_mock: the class to be mocked
class_to_mock: class
attrs: dict of attribute names to values that will be set on the mock
object. Only public attributes may be set.
Returns:
MockObject that can be used as the class_to_mock would be.
"""
if attrs is None:
attrs = {}
new_mock = MockObject(class_to_mock, attrs=attrs)
self._mock_objects.append(new_mock)
return new_mock
def CreateMockAnything(self, description=None):
"""Create a mock that will accept any method calls.
This does not enforce an interface.
Args:
description: str. Optionally, a descriptive name for the mock object being
created, for debugging output purposes.
"""
new_mock = MockAnything(description=description)
self._mock_objects.append(new_mock)
return new_mock
def ReplayAll(self):
"""Set all mock objects to replay mode."""
for mock_obj in self._mock_objects:
mock_obj._Replay()
def VerifyAll(self):
"""Call verify on all mock objects created."""
for mock_obj in self._mock_objects:
mock_obj._Verify()
def ResetAll(self):
"""Call reset on all mock objects. This does not unset stubs."""
for mock_obj in self._mock_objects:
mock_obj._Reset()
def StubOutWithMock(self, obj, attr_name, use_mock_anything=False):
"""Replace a method, attribute, etc. with a Mock.
This will replace a class or module with a MockObject, and everything else
(method, function, etc) with a MockAnything. This can be overridden to
always use a MockAnything by setting use_mock_anything to True.
Args:
obj: A Python object (class, module, instance, callable).
attr_name: str. The name of the attribute to replace with a mock.
use_mock_anything: bool. True if a MockAnything should be used regardless
of the type of attribute.
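Example (illustrative sketch; os.path stands in for any module):
m = Mox()
m.StubOutWithMock(os.path, 'exists')
os.path.exists('/tmp').AndReturn(True)
m.ReplayAll()
# ... exercise code under test ...
m.VerifyAll()
m.UnsetStubs()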
"""
attr_to_replace = getattr(obj, attr_name)
attr_type = type(attr_to_replace)
if attr_type == MockAnything or attr_type == MockObject:
raise TypeError('Cannot mock a MockAnything! Did you remember to '
'call UnsetStubs in your previous test?')
if attr_type in self._USE_MOCK_OBJECT and not use_mock_anything:
stub = self.CreateMock(attr_to_replace)
else:
stub = self.CreateMockAnything(description='Stub for %s' % attr_to_replace)
stub.__name__ = attr_name
self.stubs.Set(obj, attr_name, stub)
def StubOutClassWithMocks(self, obj, attr_name):
"""Replace a class with a "mock factory" that will create mock objects.
This is useful if the code-under-test directly instantiates
dependencies. Previously some boilerplate was necessary to
create a mock that would act as a factory. Using
StubOutClassWithMocks, once you've stubbed out the class you may
use the stubbed class as you would any other mock created by mox:
during the record phase, new mock instances will be created, and
during replay, the recorded mocks will be returned.
# Example using StubOutWithMock (the old, clunky way):
mock1 = mox.CreateMock(my_import.FooClass)
mock2 = mox.CreateMock(my_import.FooClass)
foo_factory = mox.StubOutWithMock(my_import, 'FooClass',
use_mock_anything=True)
foo_factory(1, 2).AndReturn(mock1)
foo_factory(9, 10).AndReturn(mock2)
mox.ReplayAll()
my_import.FooClass(1, 2) # Returns mock1 again.
my_import.FooClass(9, 10) # Returns mock2 again.
mox.VerifyAll()
# Example using StubOutClassWithMocks:
mox.StubOutClassWithMocks(my_import, 'FooClass')
mock1 = my_import.FooClass(1, 2) # Returns a new mock of FooClass
mock2 = my_import.FooClass(9, 10) # Returns another mock instance
mox.ReplayAll()
my_import.FooClass(1, 2) # Returns mock1 again.
my_import.FooClass(9, 10) # Returns mock2 again.
mox.VerifyAll()
"""
attr_to_replace = getattr(obj, attr_name)
attr_type = type(attr_to_replace)
if attr_type == MockAnything or attr_type == MockObject:
raise TypeError('Cannot mock a MockAnything! Did you remember to '
'call UnsetStubs in your previous test?')
if attr_type not in self._USE_MOCK_FACTORY:
raise TypeError('Given attr is not a Class. Use StubOutWithMock.')
factory = _MockObjectFactory(attr_to_replace, self)
self._mock_objects.append(factory)
self.stubs.Set(obj, attr_name, factory)
def UnsetStubs(self):
"""Restore stubs to their original state."""
self.stubs.UnsetAll()
def Replay(*args):
"""Put mocks into Replay mode.
Args:
# args is any number of mocks to put into replay mode.
"""
for mock in args:
mock._Replay()
def Verify(*args):
"""Verify mocks.
Args:
# args is any number of mocks to be verified.
"""
for mock in args:
mock._Verify()
def Reset(*args):
"""Reset mocks.
Args:
# args is any number of mocks to be reset.
"""
for mock in args:
mock._Reset()
class MockAnything:
"""A mock that can be used to mock anything.
This is helpful for mocking classes that do not provide a public interface.
"""
def __init__(self, description=None):
"""Initialize a new MockAnything.
Args:
description: str. Optionally, a descriptive name for the mock object being
created, for debugging output purposes.
"""
self._description = description
self._exceptions_thrown = []
self._Reset()
def __bases__(self):
pass
def __members__(self):
pass
def __methods__(self):
pass
def __repr__(self):
if self._description:
return '<MockAnything instance of %s>' % self._description
else:
return '<MockAnything instance>'
def __getattr__(self, method_name):
"""Intercept method calls on this object.
A new MockMethod is returned that is aware of the MockAnything's
state (record or replay). The call will be recorded or replayed
by the MockMethod's __call__.
Args:
# method name: the name of the method being called.
method_name: str
Returns:
A new MockMethod aware of MockAnything's state (record or replay).
"""
if method_name == '__dir__':
return self.__class__.__dir__.__get__(self, self.__class__)
return self._CreateMockMethod(method_name)
def _CreateMockMethod(self, method_name, method_to_mock=None):
"""Create a new mock method call and return it.
Args:
# method_name: the name of the method being called.
# method_to_mock: The actual method being mocked, used for introspection.
method_name: str
method_to_mock: a method object
Returns:
A new MockMethod aware of MockAnything's state (record or replay).
"""
return MockMethod(method_name, self._expected_calls_queue,
self._exceptions_thrown, self._replay_mode,
method_to_mock=method_to_mock,
description=self._description)
def __nonzero__(self):
"""Return 1 for nonzero so the mock can be used as a conditional."""
return 1
def __eq__(self, rhs):
"""Provide custom logic to compare objects."""
return (isinstance(rhs, MockAnything) and
self._replay_mode == rhs._replay_mode and
self._expected_calls_queue == rhs._expected_calls_queue)
def __ne__(self, rhs):
"""Provide custom logic to compare objects."""
return not self == rhs
def _Replay(self):
"""Start replaying expected method calls."""
self._replay_mode = True
def _Verify(self):
"""Verify that all of the expected calls have been made.
Raises:
ExpectedMethodCallsError: if there are still more method calls in the
expected queue.
Any exception previously raised by this object: if _Verify was called
afterwards anyway. (This detects tests passing erroneously.)
"""
# If any exceptions were thrown, re-raise them. (This should only happen
# if the original exception was swallowed, in which case it's necessary to
# re-raise it so that the test will fail. See Issue #16.)
if self._exceptions_thrown:
raise SwallowedExceptionError(self._exceptions_thrown)
# If the list of expected calls is not empty, raise an exception
if self._expected_calls_queue:
# The last MultipleTimesGroup is not popped from the queue.
if (len(self._expected_calls_queue) == 1 and
isinstance(self._expected_calls_queue[0], MultipleTimesGroup) and
self._expected_calls_queue[0].IsSatisfied()):
pass
else:
raise ExpectedMethodCallsError(self._expected_calls_queue)
def _Reset(self):
"""Reset the state of this mock to record mode with an empty queue."""
# Maintain a list of method calls we are expecting
self._expected_calls_queue = deque()
# Make sure we are in setup mode, not replay mode
self._replay_mode = False
class MockObject(MockAnything, object):
"""A mock object that simulates the public/protected interface of a class."""
def __init__(self, class_to_mock, attrs=None):
"""Initialize a mock object.
This determines the methods and properties of the class and stores them.
Args:
# class_to_mock: class to be mocked
class_to_mock: class
attrs: dict of attribute names to values that will be set on the mock
object. Only public attributes may be set.
Raises:
PrivateAttributeError: if a supplied attribute is not public.
ValueError: if an attribute would mask an existing method.
"""
if attrs is None:
attrs = {}
# This is used to hack around the mixin/inheritance of MockAnything, which
# is not a proper object (it can be anything. :-)
MockAnything.__dict__['__init__'](self)
# Get a list of all the public and special methods we should mock.
self._known_methods = set()
self._known_vars = set()
self._class_to_mock = class_to_mock
try:
if inspect.isclass(self._class_to_mock):
self._description = class_to_mock.__name__
else:
self._description = type(class_to_mock).__name__
except Exception:
pass
for method in dir(class_to_mock):
attr = getattr(class_to_mock, method)
if callable(attr):
self._known_methods.add(method)
elif not (type(attr) is property):
# treating properties as class vars makes little sense.
self._known_vars.add(method)
# Set additional attributes at instantiation time; this is quicker
# than manually setting attributes that are normally created in
# __init__.
for attr, value in attrs.items():
if attr.startswith("_"):
raise PrivateAttributeError(attr)
elif attr in self._known_methods:
raise ValueError("'%s' is a method of '%s' objects." % (attr,
class_to_mock))
else:
setattr(self, attr, value)
def __getattr__(self, name):
"""Intercept attribute request on this object.
If the attribute is a public class variable, it will be returned and not
recorded as a call.
If the attribute is not a variable, it is handled like a method
call. The method name is checked against the set of mockable
methods, and a new MockMethod is returned that is aware of the
MockObject's state (record or replay). The call will be recorded
or replayed by the MockMethod's __call__.
Args:
# name: the name of the attribute being requested.
name: str
Returns:
Either a class variable or a new MockMethod that is aware of the state
of the mock (record or replay).
Raises:
UnknownMethodCallError if the MockObject does not mock the requested
method.
"""
if name in self._known_vars:
return getattr(self._class_to_mock, name)
if name in self._known_methods:
return self._CreateMockMethod(
name,
method_to_mock=getattr(self._class_to_mock, name))
exception = UnknownMethodCallError(name)
self._exceptions_thrown.append(exception)
raise exception
def __eq__(self, rhs):
"""Provide custom logic to compare objects."""
return (isinstance(rhs, MockObject) and
self._class_to_mock == rhs._class_to_mock and
self._replay_mode == rhs._replay_mode and
self._expected_calls_queue == rhs._expected_calls_queue)
def __setitem__(self, key, value):
"""Provide custom logic for mocking classes that support item assignment.
Args:
key: Key to set the value for.
value: Value to set.
Returns:
Expected return value in replay mode. A MockMethod object for the
__setitem__ method that has already been called if not in replay mode.
Raises:
TypeError if the underlying class does not support item assignment.
UnexpectedMethodCallError if the object does not expect the call to
__setitem__.
"""
# Verify the class supports item assignment.
if '__setitem__' not in dir(self._class_to_mock):
raise TypeError('object does not support item assignment')
# If we are in replay mode then simply call the mock __setitem__ method.
if self._replay_mode:
return MockMethod('__setitem__', self._expected_calls_queue,
self._exceptions_thrown, self._replay_mode)(key, value)
# Otherwise, create a mock method __setitem__.
return self._CreateMockMethod('__setitem__')(key, value)
def __getitem__(self, key):
"""Provide custom logic for mocking classes that are subscriptable.
Args:
key: Key to return the value for.
Returns:
Expected return value in replay mode. A MockMethod object for the
__getitem__ method that has already been called if not in replay mode.
Raises:
TypeError if the underlying class is not subscriptable.
UnexpectedMethodCallError if the object does not expect the call to
__getitem__.
"""
# Verify the class supports item assignment.
if '__getitem__' not in dir(self._class_to_mock):
raise TypeError('unsubscriptable object')
# If we are in replay mode then simply call the mock __getitem__ method.
if self._replay_mode:
return MockMethod('__getitem__', self._expected_calls_queue,
self._exceptions_thrown, self._replay_mode)(key)
# Otherwise, create a mock method __getitem__.
return self._CreateMockMethod('__getitem__')(key)
def __iter__(self):
"""Provide custom logic for mocking classes that are iterable.
Returns:
Expected return value in replay mode. A MockMethod object for the
__iter__ method that has already been called if not in replay mode.
Raises:
TypeError if the underlying class is not iterable.
UnexpectedMethodCallError if the object does not expect the call to
__iter__.
"""
methods = dir(self._class_to_mock)
# Verify the class supports iteration.
if '__iter__' not in methods:
# If it doesn't have an __iter__ method and we are in replay mode,
# then try to iterate using subscripts.
if '__getitem__' not in methods or not self._replay_mode:
raise TypeError('not iterable object')
else:
results = []
index = 0
try:
while True:
results.append(self[index])
index += 1
except IndexError:
return iter(results)
# If we are in replay mode then simply call the mock __iter__ method.
if self._replay_mode:
return MockMethod('__iter__', self._expected_calls_queue,
self._exceptions_thrown, self._replay_mode)()
# Otherwise, create a mock method __iter__.
return self._CreateMockMethod('__iter__')()
def __contains__(self, key):
"""Provide custom logic for mocking classes that contain items.
Args:
key: Key to look in container for.
Returns:
Expected return value in replay mode. A MockMethod object for the
__contains__ method that has already been called if not in replay mode.
Raises:
TypeError if the underlying class does not implement __contains__
UnexpectedMethodCaller if the object does not expect the call to
__contains__.
"""
contains = self._class_to_mock.__dict__.get('__contains__', None)
if contains is None:
raise TypeError('unsubscriptable object')
if self._replay_mode:
return MockMethod('__contains__', self._expected_calls_queue,
self._exceptions_thrown, self._replay_mode)(key)
return self._CreateMockMethod('__contains__')(key)
def __call__(self, *params, **named_params):
"""Provide custom logic for mocking classes that are callable."""
# Verify the class we are mocking is callable.
callable = hasattr(self._class_to_mock, '__call__')
if not callable:
raise TypeError('Not callable')
# Because the call is happening directly on this object instead of a method,
# the call on the mock method is made right here
# If we are mocking a Function, then use the function, and not the
# __call__ method
method = None
if type(self._class_to_mock) in (types.FunctionType, types.MethodType):
method = self._class_to_mock
else:
method = getattr(self._class_to_mock, '__call__')
mock_method = self._CreateMockMethod('__call__', method_to_mock=method)
return mock_method(*params, **named_params)
@property
def __class__(self):
"""Return the class that is being mocked."""
return self._class_to_mock
@property
def __name__(self):
"""Return the name that is being mocked."""
return self._description
class _MockObjectFactory(MockObject):
"""A MockObjectFactory creates mocks and verifies __init__ params.
A MockObjectFactory removes the boilerplate code that was previously
necessary to stub out direct instantiation of a class.
The MockObjectFactory creates new MockObjects when called and verifies the
__init__ params are correct when in record mode. When replaying, existing
mocks are returned, and the __init__ params are verified.
See StubOutWithMock vs StubOutClassWithMocks for more detail.
"""
def __init__(self, class_to_mock, mox_instance):
MockObject.__init__(self, class_to_mock)
self._mox = mox_instance
self._instance_queue = deque()
def __call__(self, *params, **named_params):
"""Instantiate and record that a new mock has been created."""
method = getattr(self._class_to_mock, '__init__')
mock_method = self._CreateMockMethod('__init__', method_to_mock=method)
# Note: calling mock_method() is deferred in order to catch the
# empty instance_queue first.
if self._replay_mode:
if not self._instance_queue:
exception = UnexpectedMockCreationError(self._class_to_mock, *params,
**named_params)
self._exceptions_thrown.append(exception)
raise exception
mock_method(*params, **named_params)
return self._instance_queue.pop()
else:
mock_method(*params, **named_params)
instance = self._mox.CreateMock(self._class_to_mock)
self._instance_queue.appendleft(instance)
return instance
def _Verify(self):
"""Verify that all mocks have been created."""
if self._instance_queue:
raise ExpectedMockCreationError(self._instance_queue)
super(_MockObjectFactory, self)._Verify()
class MethodSignatureChecker(object):
"""Ensures that methods are called correctly."""
_NEEDED, _DEFAULT, _GIVEN = range(3)
def __init__(self, method):
"""Creates a checker.
Args:
# method: A method to check.
method: function
Raises:
ValueError: method could not be inspected, so checks aren't possible.
Some methods and functions like built-ins can't be inspected.
"""
try:
self._args, varargs, varkw, defaults = inspect.getargspec(method)
except TypeError:
raise ValueError('Could not get argument specification for %r'
% (method,))
if inspect.ismethod(method):
self._args = self._args[1:] # Skip 'self'.
self._method = method
self._instance = None # May contain the instance this is bound to.
self._has_varargs = varargs is not None
self._has_varkw = varkw is not None
if defaults is None:
self._required_args = self._args
self._default_args = []
else:
self._required_args = self._args[:-len(defaults)]
self._default_args = self._args[-len(defaults):]
def _RecordArgumentGiven(self, arg_name, arg_status):
"""Mark an argument as being given.
Args:
# arg_name: The name of the argument to mark in arg_status.
# arg_status: Maps argument names to one of _NEEDED, _DEFAULT, _GIVEN.
arg_name: string
arg_status: dict
Raises:
AttributeError: arg_name is already marked as _GIVEN.
"""
if arg_status.get(arg_name, None) == MethodSignatureChecker._GIVEN:
raise AttributeError('%s provided more than once' % (arg_name,))
arg_status[arg_name] = MethodSignatureChecker._GIVEN
def Check(self, params, named_params):
"""Ensures that the parameters used while recording a call are valid.
Args:
# params: A list of positional parameters.
# named_params: A dict of named parameters.
params: list
named_params: dict
Raises:
AttributeError: the given parameters don't work with the given method.
"""
arg_status = dict((a, MethodSignatureChecker._NEEDED)
for a in self._required_args)
for arg in self._default_args:
arg_status[arg] = MethodSignatureChecker._DEFAULT
# WARNING: Suspect hack ahead.
#
# Check to see if this is an unbound method, where the instance
# should be bound as the first argument. We try to determine if
# the first argument (params[0]) is an instance of the class, or it
# is equivalent to the class (used to account for Comparators).
#
# NOTE: If a Func() comparator is used, and the signature is not
# correct, this will cause extra executions of the function.
if inspect.ismethod(self._method):
# The extra param accounts for the bound instance.
if len(params) > len(self._required_args):
expected = getattr(self._method, 'im_class', None)
# Check if the param is an instance of the expected class,
# or check equality (useful for checking Comparators).
# This is a hack to work around the fact that the first
# parameter can be a Comparator, and the comparison may raise
# an exception during this comparison, which is OK.
try:
param_equality = (params[0] == expected)
except:
param_equality = False
if isinstance(params[0], expected) or param_equality:
params = params[1:]
# If the IsA() comparator is being used, we need to check the
# inverse of the usual case - that the given instance is a subclass
# of the expected class. For example, the code under test does
# late binding to a subclass.
elif isinstance(params[0], IsA) and params[0]._IsSubClass(expected):
params = params[1:]
# Check that each positional param is valid.
for i in range(len(params)):
try:
arg_name = self._args[i]
except IndexError:
if not self._has_varargs:
raise AttributeError('%s does not take %d or more positional '
'arguments' % (self._method.__name__, i))
else:
self._RecordArgumentGiven(arg_name, arg_status)
# Check each keyword argument.
for arg_name in named_params:
if arg_name not in arg_status and not self._has_varkw:
raise AttributeError('%s is not expecting keyword argument %s'
% (self._method.__name__, arg_name))
self._RecordArgumentGiven(arg_name, arg_status)
# Ensure all the required arguments have been given.
still_needed = [k for k, v in arg_status.iteritems()
if v == MethodSignatureChecker._NEEDED]
if still_needed:
raise AttributeError('No values given for arguments: %s'
% (' '.join(sorted(still_needed))))
class MockMethod(object):
"""Callable mock method.
A MockMethod should act exactly like the method it mocks, accepting parameters
and returning a value, or throwing an exception (as specified). When this
method is called, it can optionally verify whether the called method (name and
signature) matches the expected method.
"""
def __init__(self, method_name, call_queue, exception_list, replay_mode,
method_to_mock=None, description=None):
"""Construct a new mock method.
Args:
# method_name: the name of the method
# call_queue: deque of calls, verify this call against the head, or add
# this call to the queue.
# exception_list: list of exceptions; any exceptions thrown by this
# instance are appended to this list.
# replay_mode: False if we are recording, True if we are verifying calls
# against the call queue.
# method_to_mock: The actual method being mocked, used for introspection.
# description: optionally, a descriptive name for this method. Typically
# this is equal to the descriptive name of the method's class.
method_name: str
call_queue: list or deque
exception_list: list
replay_mode: bool
method_to_mock: a method object
description: str or None
"""
self._name = method_name
self.__name__ = method_name
self._call_queue = call_queue
if not isinstance(call_queue, deque):
self._call_queue = deque(self._call_queue)
self._exception_list = exception_list
self._replay_mode = replay_mode
self._description = description
self._params = None
self._named_params = None
self._return_value = None
self._exception = None
self._side_effects = None
try:
self._checker = MethodSignatureChecker(method_to_mock)
except ValueError:
self._checker = None
def __call__(self, *params, **named_params):
"""Log parameters and return the specified return value.
If the Mock(Anything/Object) associated with this call is in record mode,
this MockMethod will be pushed onto the expected call queue. If the mock
is in replay mode, this will pop a MockMethod off the top of the queue and
verify this call is equal to the expected call.
Raises:
UnexpectedMethodCall if this call is supposed to match an expected method
call and it does not.
"""
self._params = params
self._named_params = named_params
if not self._replay_mode:
if self._checker is not None:
self._checker.Check(params, named_params)
self._call_queue.append(self)
return self
expected_method = self._VerifyMethodCall()
if expected_method._side_effects:
result = expected_method._side_effects(*params, **named_params)
if expected_method._return_value is None:
expected_method._return_value = result
if expected_method._exception:
raise expected_method._exception
return expected_method._return_value
def __getattr__(self, name):
"""Raise an AttributeError with a helpful message."""
raise AttributeError('MockMethod has no attribute "%s". '
'Did you remember to put your mocks in replay mode?' % name)
def __iter__(self):
"""Raise a TypeError with a helpful message."""
raise TypeError('MockMethod cannot be iterated. '
'Did you remember to put your mocks in replay mode?')
def next(self):
"""Raise a TypeError with a helpful message."""
raise TypeError('MockMethod cannot be iterated. '
'Did you remember to put your mocks in replay mode?')
def _PopNextMethod(self):
"""Pop the next method from our call queue."""
try:
return self._call_queue.popleft()
except IndexError:
exception = UnexpectedMethodCallError(self, None)
self._exception_list.append(exception)
raise exception
def _VerifyMethodCall(self):
"""Verify the called method is expected.
This can be an ordered method, or part of an unordered set.
Returns:
The expected mock method.
Raises:
UnexpectedMethodCall if the method called was not expected.
"""
expected = self._PopNextMethod()
# Loop here, because we might have a MethodGroup followed by another
# group.
while isinstance(expected, MethodGroup):
expected, method = expected.MethodCalled(self)
if method is not None:
return method
# This is a mock method, so just check equality.
if expected != self:
exception = UnexpectedMethodCallError(self, expected)
self._exception_list.append(exception)
raise exception
return expected
def __str__(self):
params = ', '.join(
[repr(p) for p in self._params or []] +
['%s=%r' % x for x in sorted((self._named_params or {}).items())])
full_desc = "%s(%s) -> %r" % (self._name, params, self._return_value)
if self._description:
full_desc = "%s.%s" % (self._description, full_desc)
return full_desc
def __eq__(self, rhs):
"""Test whether this MockMethod is equivalent to another MockMethod.
Args:
# rhs: the right hand side of the test
rhs: MockMethod
"""
return (isinstance(rhs, MockMethod) and
self._name == rhs._name and
self._params == rhs._params and
self._named_params == rhs._named_params)
def __ne__(self, rhs):
"""Test whether this MockMethod is not equivalent to another MockMethod.
Args:
# rhs: the right hand side of the test
rhs: MockMethod
"""
return not self == rhs
def GetPossibleGroup(self):
"""Returns a possible group from the end of the call queue or None if no
other methods are on the queue.
"""
# Remove this method from the tail of the queue so we can add it to a group.
this_method = self._call_queue.pop()
assert this_method == self
# Determine if the tail of the queue is a group, or just a regular ordered
# mock method.
group = None
try:
group = self._call_queue[-1]
except IndexError:
pass
return group
def _CheckAndCreateNewGroup(self, group_name, group_class):
"""Checks if the last method (a possible group) is an instance of our
group_class. Adds the current method to this group or creates a new one.
Args:
group_name: the name of the group.
group_class: the class used to create instance of this new group
"""
group = self.GetPossibleGroup()
# If this is a group, and it is the correct group, add the method.
if isinstance(group, group_class) and group.group_name() == group_name:
group.AddMethod(self)
return self
# Create a new group and add the method.
new_group = group_class(group_name, self._exception_list)
new_group.AddMethod(self)
self._call_queue.append(new_group)
return self
def InAnyOrder(self, group_name="default"):
"""Move this method into a group of unordered calls.
A group of unordered calls must be defined together, and must be executed
in full before the next expected method can be called. There can be
multiple groups that are expected serially, if they are given
different group names. The same group name can be reused if there is a
standard method call, or a group with a different name, spliced between
usages.
Args:
group_name: the name of the unordered group.
Returns:
self
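Example (sketch):
mock_dao.Insert('a').InAnyOrder()
mock_dao.Insert('b').InAnyOrder()
# During replay, Insert('b') may legally arrive before Insert('a').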
"""
return self._CheckAndCreateNewGroup(group_name, UnorderedGroup)
def MultipleTimes(self, group_name="default"):
"""Move this method into group of calls which may be called multiple times.
A group of repeating calls must be defined together, and must be executed in
full before the next expected method can be called.
Args:
group_name: the name of the unordered group.
Returns:
self
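Example (sketch):
mock_log.Write(IgnoreArg()).MultipleTimes()
# During replay, Write() must be called at least once and may be
# called any number of times.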
"""
return self._CheckAndCreateNewGroup(group_name, MultipleTimesGroup)
def AndReturn(self, return_value):
"""Set the value to return when this method is called.
Args:
# return_value can be anything.
"""
self._return_value = return_value
return return_value
def AndRaise(self, exception):
"""Set the exception to raise when this method is called.
Args:
# exception: the exception to raise when this method is called.
exception: Exception
"""
self._exception = exception
def WithSideEffects(self, side_effects):
"""Set the side effects that are simulated when this method is called.
Args:
side_effects: A callable which modifies the parameters or other relevant
state which a given test case depends on.
Returns:
Self for chaining with AndReturn and AndRaise.
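Example (sketch; _fill is a hypothetical helper that mutates its argument):
def _fill(buf):
  buf.append('data')
mock_reader.ReadInto(IsA(list)).WithSideEffects(_fill).AndReturn(4)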
"""
self._side_effects = side_effects
return self
class Comparator:
"""Base class for all Mox comparators.
A Comparator can be used as a parameter to a mocked method when the exact
value is not known. For example, the code you are testing might build up a
long SQL string that is passed to your mock DAO. You're only interested that
the IN clause contains the proper primary keys, so you can set your mock
up as follows:
mock_dao.RunQuery(StrContains('IN (1, 2, 4, 5)')).AndReturn(mock_result)
Now whatever query is passed in must contain the string 'IN (1, 2, 4, 5)'.
A Comparator may replace one or more parameters, for example:
# return at most 10 rows
mock_dao.RunQuery(StrContains('SELECT'), 10)
or
# Return some non-deterministic number of rows
mock_dao.RunQuery(StrContains('SELECT'), IsA(int))
"""
def equals(self, rhs):
"""Special equals method that all comparators must implement.
Args:
rhs: any python object
"""
raise NotImplementedError('method must be implemented by a subclass.')
def __eq__(self, rhs):
return self.equals(rhs)
def __ne__(self, rhs):
return not self.equals(rhs)
class Is(Comparator):
"""Comparison class used to check identity, instead of equality."""
def __init__(self, obj):
self._obj = obj
def equals(self, rhs):
return rhs is self._obj
def __repr__(self):
return "<is %r (%s)>" % (self._obj, id(self._obj))
class IsA(Comparator):
"""This class wraps a basic Python type or class. It is used to verify
that a parameter is of the given type or class.
Example:
mock_dao.Connect(IsA(DbConnectInfo))
"""
def __init__(self, class_name):
"""Initialize IsA
Args:
class_name: basic python type or a class
"""
self._class_name = class_name
def equals(self, rhs):
"""Check to see if the RHS is an instance of class_name.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool
"""
try:
return isinstance(rhs, self._class_name)
except TypeError:
# Check raw types if there was a type error. This is helpful for
# things like cStringIO.StringIO.
return type(rhs) == type(self._class_name)
def _IsSubClass(self, clazz):
"""Check to see if the IsA comparators class is a subclass of clazz.
Args:
# clazz: a class object
Returns:
bool
"""
try:
return issubclass(self._class_name, clazz)
except TypeError:
# Check raw types if there was a type error. This is helpful for
# things like cStringIO.StringIO.
return type(clazz) == type(self._class_name)
def __repr__(self):
return 'mox.IsA(%s) ' % str(self._class_name)
class IsAlmost(Comparator):
"""Comparison class used to check whether a parameter is nearly equal
to a given value. Generally useful for floating point numbers.
Example: mock_dao.SetTimeout(IsAlmost(3.9))
"""
def __init__(self, float_value, places=7):
"""Initialize IsAlmost.
Args:
float_value: The value for making the comparison.
places: The number of decimal places to round to.
"""
self._float_value = float_value
self._places = places
def equals(self, rhs):
"""Check to see if RHS is almost equal to float_value
Args:
rhs: the value to compare to float_value
Returns:
bool
"""
try:
return round(rhs-self._float_value, self._places) == 0
except Exception:
# This is probably because either float_value or rhs is not a number.
return False
def __repr__(self):
return str(self._float_value)
class StrContains(Comparator):
"""Comparison class used to check whether a substring exists in a
string parameter. This can be useful in mocking a database with SQL
passed in as a string parameter, for example.
Example:
mock_dao.RunQuery(StrContains('IN (1, 2, 4, 5)')).AndReturn(mock_result)
"""
def __init__(self, search_string):
"""Initialize.
Args:
# search_string: the string you are searching for
search_string: str
"""
self._search_string = search_string
def equals(self, rhs):
"""Check to see if the search_string is contained in the rhs string.
Args:
# rhs: the right hand side of the test
rhs: object
Returns:
bool
"""
try:
return rhs.find(self._search_string) > -1
except Exception:
return False
def __repr__(self):
return '<str containing \'%s\'>' % self._search_string
class Regex(Comparator):
"""Checks if a string matches a regular expression.
This uses a given regular expression to determine equality.
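Example (sketch):
mock_dao.RunQuery(Regex(r'^SELECT .* FROM users$')).AndReturn(rows)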
"""
def __init__(self, pattern, flags=0):
"""Initialize.
Args:
# pattern is the regular expression to search for
pattern: str
# flags passed to re.compile function as the second argument
flags: int
"""
self.regex = re.compile(pattern, flags=flags)
def equals(self, rhs):
"""Check to see if rhs matches regular expression pattern.
Returns:
bool
"""
try:
return self.regex.search(rhs) is not None
except Exception:
return False
def __repr__(self):
s = '<regular expression \'%s\'' % self.regex.pattern
if self.regex.flags:
s += ', flags=%d' % self.regex.flags
s += '>'
return s
class In(Comparator):
"""Checks whether an item (or key) is in a list (or dict) parameter.
Example:
mock_dao.GetUsersInfo(In('expectedUserName')).AndReturn(mock_result)
"""
def __init__(self, key):
"""Initialize.
Args:
# key: anything that could be an element of a list or a key in a dict
"""
self._key = key
def equals(self, rhs):
"""Check to see whether key is in rhs.
Args:
rhs: dict
Returns:
bool
"""
try:
return self._key in rhs
except Exception:
return False
def __repr__(self):
return '<sequence or map containing \'%s\'>' % str(self._key)
class Not(Comparator):
"""Checks whether a predicates is False.
Example:
mock_dao.UpdateUsers(Not(ContainsKeyValue('stevepm', stevepm_user_info)))
"""
def __init__(self, predicate):
"""Initialize.
Args:
# predicate: a Comparator instance.
"""
assert isinstance(predicate, Comparator), ("predicate %r must be a"
" Comparator." % predicate)
self._predicate = predicate
def equals(self, rhs):
"""Check to see whether the predicate is False.
Args:
rhs: A value that will be given in argument of the predicate.
Returns:
bool
"""
try:
return not self._predicate.equals(rhs)
except Exception:
return False
def __repr__(self):
return '<not \'%s\'>' % self._predicate
class ContainsKeyValue(Comparator):
"""Checks whether a key/value pair is in a dict parameter.
Example:
mock_dao.UpdateUsers(ContainsKeyValue('stevepm', stevepm_user_info))
"""
def __init__(self, key, value):
"""Initialize.
Args:
# key: a key in a dict
# value: the corresponding value
"""
self._key = key
self._value = value
def equals(self, rhs):
"""Check whether the given key/value pair is in the rhs dict.
Returns:
bool
"""
try:
return rhs[self._key] == self._value
except Exception:
return False
def __repr__(self):
return '<map containing the entry \'%s: %s\'>' % (str(self._key),
str(self._value))
class ContainsAttributeValue(Comparator):
"""Checks whether a passed parameter contains attributes with a given value.
Example:
mock_dao.UpdateSomething(ContainsAttributeValue('stevepm', stevepm_user_info))
"""
def __init__(self, key, value):
"""Initialize.
Args:
# key: an attribute name of an object
# value: the corresponding value
"""
self._key = key
self._value = value
def equals(self, rhs):
"""Check whether the given attribute has a matching value in the rhs object.
Returns:
bool
"""
try:
return getattr(rhs, self._key) == self._value
except Exception:
return False
class SameElementsAs(Comparator):
"""Checks whether sequences contain the same elements (ignoring order).
Example:
mock_dao.ProcessUsers(SameElementsAs(['stevepm', 'salomaki']))
"""
def __init__(self, expected_seq):
"""Initialize.
Args:
expected_seq: a sequence
"""
# Store in case expected_seq is an iterator.
self._expected_list = list(expected_seq)
def equals(self, actual_seq):
"""Check to see whether actual_seq has same elements as expected_seq.
Args:
actual_seq: sequence
Returns:
bool
"""
try:
# Store in case actual_seq is an iterator. We potentially iterate twice:
# once to make the dict, once in the list fallback.
actual_list = list(actual_seq)
except TypeError:
# actual_seq cannot be read as a sequence.
#
# This happens because Mox uses __eq__ both to check object equality (in
# MethodSignatureChecker) and to invoke Comparators.
return False
try:
expected = dict([(element, None) for element in self._expected_list])
actual = dict([(element, None) for element in actual_list])
except TypeError:
# Fall back to slower list-compare if any of the objects are unhashable.
expected = self._expected_list
actual = actual_list
expected.sort()
actual.sort()
return expected == actual
def __repr__(self):
return '<sequence with same elements as \'%s\'>' % self._expected_list
class And(Comparator):
"""Evaluates one or more Comparators on RHS and returns an AND of the results.
"""
def __init__(self, *args):
"""Initialize.
Args:
*args: One or more Comparator
"""
self._comparators = args
def equals(self, rhs):
"""Checks whether all Comparators are equal to rhs.
Args:
# rhs: can be anything
Returns:
bool
"""
for comparator in self._comparators:
if not comparator.equals(rhs):
return False
return True
def __repr__(self):
return '<AND %s>' % str(self._comparators)
class Or(Comparator):
"""Evaluates one or more Comparators on RHS and returns an OR of the results.
"""
def __init__(self, *args):
"""Initialize.
Args:
*args: One or more Mox comparators
"""
self._comparators = args
def equals(self, rhs):
"""Checks whether any Comparator is equal to rhs.
Args:
# rhs: can be anything
Returns:
bool
"""
for comparator in self._comparators:
if comparator.equals(rhs):
return True
return False
def __repr__(self):
return '<OR %s>' % str(self._comparators)
class Func(Comparator):
"""Call a function that should verify the parameter passed in is correct.
You may need the ability to perform more advanced operations on the parameter
in order to validate it. You can use this to have a callable validate any
parameter. The callable should return either True or False.
Example:
def myParamValidator(param):
# Advanced logic here
return True
mock_dao.DoSomething(Func(myParamValidator), true)
"""
def __init__(self, func):
"""Initialize.
Args:
func: callable that takes one parameter and returns a bool
"""
self._func = func
def equals(self, rhs):
"""Test whether rhs passes the function test.
rhs is passed into func.
Args:
rhs: any python object
Returns:
the result of func(rhs)
"""
return self._func(rhs)
def __repr__(self):
return str(self._func)
class IgnoreArg(Comparator):
"""Ignore an argument.
This can be used when we don't care about an argument of a method call.
Example:
# Check if CastMagic is called with 3 as first arg and 'disappear' as third.
mymock.CastMagic(3, IgnoreArg(), 'disappear')
"""
def equals(self, unused_rhs):
"""Ignores arguments and returns True.
Args:
unused_rhs: any python object
Returns:
always returns True
"""
return True
def __repr__(self):
return '<IgnoreArg>'
class Value(Comparator):
"""Compares argument against a remembered value.
To be used in conjunction with Remember comparator. See Remember()
for example.
"""
def __init__(self):
self._value = None
self._has_value = False
def store_value(self, rhs):
self._value = rhs
self._has_value = True
def equals(self, rhs):
if not self._has_value:
return False
else:
return rhs == self._value
def __repr__(self):
if self._has_value:
return "<Value %r>" % self._value
else:
return "<Value>"
class Remember(Comparator):
"""Remembers the argument to a value store.
To be used in conjunction with Value comparator.
Example:
# Remember the argument for one method call.
users_list = Value()
mock_dao.ProcessUsers(Remember(users_list))
# Check argument against remembered value.
mock_dao.ReportUsers(users_list)
"""
def __init__(self, value_store):
if not isinstance(value_store, Value):
raise TypeError("value_store is not an instance of the Value class")
self._value_store = value_store
def equals(self, rhs):
self._value_store.store_value(rhs)
return True
def __repr__(self):
return "<Remember %d>" % id(self._value_store)
class MethodGroup(object):
"""Base class containing common behaviour for MethodGroups."""
def __init__(self, group_name, exception_list):
"""Construct a new method group.
Args:
# group_name: the name of the method group
# exception_list: list of exceptions; any exceptions thrown by this
# instance are appended to this list.
group_name: str
exception_list: list
"""
self._group_name = group_name
self._exception_list = exception_list
def group_name(self):
return self._group_name
def __str__(self):
return '<%s "%s">' % (self.__class__.__name__, self._group_name)
def AddMethod(self, mock_method):
raise NotImplementedError
def MethodCalled(self, mock_method):
raise NotImplementedError
def IsSatisfied(self):
raise NotImplementedError
class UnorderedGroup(MethodGroup):
"""UnorderedGroup holds a set of method calls that may occur in any order.
This construct is helpful for non-deterministic events, such as iterating
over the keys of a dict.
"""
def __init__(self, group_name, exception_list):
super(UnorderedGroup, self).__init__(group_name, exception_list)
self._methods = []
def __str__(self):
return '%s "%s" pending calls:\n%s' % (
self.__class__.__name__,
self._group_name,
"\n".join(str(method) for method in self._methods))
def AddMethod(self, mock_method):
"""Add a method to this group.
Args:
mock_method: A mock method to be added to this group.
"""
self._methods.append(mock_method)
def MethodCalled(self, mock_method):
"""Remove a method call from the group.
If the method is not in the set, an UnexpectedMethodCallError will be
raised.
Args:
mock_method: a mock method that should be equal to a method in the group.
Returns:
The mock method from the group
Raises:
UnexpectedMethodCallError if the mock_method was not in the group.
"""
# Check to see if this method exists, and if so, remove it from the set
# and return it.
for method in self._methods:
if method == mock_method:
# Remove the called mock_method instead of the method in the group.
# The called method will match any comparators when equality is checked
# during removal. The method in the group could pass a comparator to
# another comparator during the equality check.
self._methods.remove(mock_method)
# If this group is not empty, put it back at the head of the queue.
if not self.IsSatisfied():
mock_method._call_queue.appendleft(self)
return self, method
exception = UnexpectedMethodCallError(mock_method, self)
self._exception_list.append(exception)
raise exception
def IsSatisfied(self):
"""Return True if there are not any methods in this group."""
return len(self._methods) == 0
class MultipleTimesGroup(MethodGroup):
"""MultipleTimesGroup holds methods that may be called any number of times.
Note: Each method must be called at least once.
This is helpful, if you don't know or care how many times a method is called.
"""
def __init__(self, group_name, exception_list):
super(MultipleTimesGroup, self).__init__(group_name, exception_list)
self._methods = set()
self._methods_left = set()
def AddMethod(self, mock_method):
"""Add a method to this group.
Args:
mock_method: A mock method to be added to this group.
"""
self._methods.add(mock_method)
self._methods_left.add(mock_method)
def MethodCalled(self, mock_method):
"""Remove a method call from the group.
If the method is not in the set, an UnexpectedMethodCallError will be
raised.
Args:
mock_method: a mock method that should be equal to a method in the group.
Returns:
The mock method from the group
Raises:
UnexpectedMethodCallError if the mock_method was not in the group.
"""
# Check to see if this method exists, and if so add it to the set of
# called methods.
for method in self._methods:
if method == mock_method:
self._methods_left.discard(method)
# Always put this group back on top of the queue, because we don't know
# when we are done.
mock_method._call_queue.appendleft(self)
return self, method
if self.IsSatisfied():
next_method = mock_method._PopNextMethod()
return next_method, None
else:
exception = UnexpectedMethodCallError(mock_method, self)
self._exception_list.append(exception)
raise exception
def IsSatisfied(self):
"""Return True if all methods in this group are called at least once."""
return len(self._methods_left) == 0
class MoxMetaTestBase(type):
"""Metaclass to add mox cleanup and verification to every test.
As the mox unit testing class is being constructed (MoxTestBase or a
subclass), this metaclass will modify all test functions to call the
CleanUpMox method of the test class after they finish. This means that
unstubbing and verifying will happen for every test with no additional code,
and any failures will result in test failures as opposed to errors.
"""
def __init__(cls, name, bases, d):
super(MoxMetaTestBase, cls).__init__(name, bases, d)
type.__init__(cls, name, bases, d)
# Also get all the attributes from the base classes, to account
# for the case when the test class is not an immediate child of MoxTestBase.
for base in bases:
for attr_name in dir(base):
if attr_name not in d:
d[attr_name] = getattr(base, attr_name)
for func_name, func in d.items():
if func_name.startswith('test') and callable(func):
setattr(cls, func_name, MoxMetaTestBase.CleanUpTest(cls, func))
@staticmethod
def CleanUpTest(cls, func):
"""Adds Mox cleanup code to any MoxTestBase method.
Always unsets stubs after a test. Will verify all mocks for tests that
otherwise pass.
Args:
cls: MoxTestBase or subclass; the class whose test method we are altering.
func: method; the method of the MoxTestBase test class we wish to alter.
Returns:
The modified method.
"""
def new_method(self, *args, **kwargs):
mox_obj = getattr(self, 'mox', None)
stubout_obj = getattr(self, 'stubs', None)
cleanup_mox = False
cleanup_stubout = False
if mox_obj and isinstance(mox_obj, Mox):
cleanup_mox = True
if stubout_obj and isinstance(stubout_obj, stubout.StubOutForTesting):
cleanup_stubout = True
try:
func(self, *args, **kwargs)
finally:
if cleanup_mox:
mox_obj.UnsetStubs()
if cleanup_stubout:
stubout_obj.UnsetAll()
stubout_obj.SmartUnsetAll()
if cleanup_mox:
mox_obj.VerifyAll()
new_method.__name__ = func.__name__
new_method.__doc__ = func.__doc__
new_method.__module__ = func.__module__
return new_method
class MoxTestBase(unittest.TestCase):
"""Convenience test class to make stubbing easier.
Sets up a "mox" attribute which is an instance of Mox (any mox tests will
want this), and a "stubs" attribute that is an instance of StubOutForTesting
(needed at times). Also automatically unsets any stubs and verifies that all
mock methods have been called at the end of each test, eliminating boilerplate
code.
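Example (sketch; my_module is a hypothetical module under test):
class MyTest(MoxTestBase):
  def testFetch(self):
    self.mox.StubOutWithMock(my_module, 'Fetch')
    my_module.Fetch('key').AndReturn('value')
    self.mox.ReplayAll()
    self.assertEqual('value', my_module.Fetch('key'))
    # UnsetStubs and VerifyAll run automatically after the test.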
"""
__metaclass__ = MoxMetaTestBase
def setUp(self):
super(MoxTestBase, self).setUp()
self.mox = Mox()
self.stubs = stubout.StubOutForTesting()
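# Hedged usage sketch (illustrative; assumes a module `my_module` with a
# `fetch` function - neither is part of this file):
#
#   class MyTest(MoxTestBase):
#       def testFetch(self):
#           self.mox.StubOutWithMock(my_module, 'fetch')
#           my_module.fetch('key').AndReturn('value')
#           self.mox.ReplayAll()
#           self.assertEqual('value', my_module.fetch('key'))
#           # UnsetStubs()/VerifyAll() run automatically via MoxMetaTestBase.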
| apache-2.0 |
Mj258/weiboapi | srapyDemo/envs/Lib/site-packages/twisted/news/nntp.py | 8 | 32091 | # -*- test-case-name: twisted.news.test.test_nntp -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
NNTP protocol support.
The following protocol commands are currently understood::
LIST LISTGROUP XOVER XHDR
POST GROUP ARTICLE STAT HEAD
BODY NEXT MODE STREAM MODE READER SLAVE
LAST QUIT HELP IHAVE XPATH
XINDEX XROVER TAKETHIS CHECK
The following protocol commands require implementation::
NEWNEWS
XGTITLE XPAT
XTHREAD AUTHINFO NEWGROUPS
Other desired features:
- A real backend
- More robust client input handling
- A control protocol
"""
import time
from twisted.protocols import basic
from twisted.python import log
def parseRange(text):
articles = text.split('-')
if len(articles) == 1:
try:
a = int(articles[0])
return a, a
except ValueError:
return None, None
elif len(articles) == 2:
try:
if len(articles[0]):
l = int(articles[0])
else:
l = None
if len(articles[1]):
h = int(articles[1])
else:
h = None
except ValueError:
return None, None
return l, h
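# Examples of parseRange behaviour (illustrative):
#   parseRange('5')    -> (5, 5)
#   parseRange('2-10') -> (2, 10)
#   parseRange('2-')   -> (2, None)
#   parseRange('-10')  -> (None, 10)
#   parseRange('x-y')  -> (None, None)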
def extractCode(line):
line = line.split(' ', 1)
if len(line) != 2:
return None
try:
return int(line[0]), line[1]
except ValueError:
return None
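# Examples of extractCode behaviour (illustrative):
#   extractCode('200 server ready') -> (200, 'server ready')
#   extractCode('malformed')        -> None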
class NNTPError(Exception):
def __init__(self, string):
self.string = string
def __str__(self):
return 'NNTPError: %s' % self.string
class NNTPClient(basic.LineReceiver):
MAX_COMMAND_LENGTH = 510
def __init__(self):
self.currentGroup = None
self._state = []
self._error = []
self._inputBuffers = []
self._responseCodes = []
self._responseHandlers = []
self._postText = []
self._newState(self._statePassive, None, self._headerInitial)
def gotAllGroups(self, groups):
"Override for notification when fetchGroups() action is completed"
def getAllGroupsFailed(self, error):
"Override for notification when fetchGroups() action fails"
def gotOverview(self, overview):
"Override for notification when fetchOverview() action is completed"
def getOverviewFailed(self, error):
"Override for notification when fetchOverview() action fails"
def gotSubscriptions(self, subscriptions):
"Override for notification when fetchSubscriptions() action is completed"
def getSubscriptionsFailed(self, error):
"Override for notification when fetchSubscriptions() action fails"
def gotGroup(self, group):
"Override for notification when fetchGroup() action is completed"
def getGroupFailed(self, error):
"Override for notification when fetchGroup() action fails"
def gotArticle(self, article):
"Override for notification when fetchArticle() action is completed"
def getArticleFailed(self, error):
"Override for notification when fetchArticle() action fails"
def gotHead(self, head):
"Override for notification when fetchHead() action is completed"
def getHeadFailed(self, error):
"Override for notification when fetchHead() action fails"
def gotBody(self, info):
"Override for notification when fetchBody() action is completed"
def getBodyFailed(self, body):
"Override for notification when fetchBody() action fails"
def postedOk(self):
"Override for notification when postArticle() action is successful"
def postFailed(self, error):
"Override for notification when postArticle() action fails"
def gotXHeader(self, headers):
"Override for notification when getXHeader() action is successful"
def getXHeaderFailed(self, error):
"Override for notification when getXHeader() action fails"
def gotNewNews(self, news):
"Override for notification when getNewNews() action is successful"
def getNewNewsFailed(self, error):
"Override for notification when getNewNews() action fails"
def gotNewGroups(self, groups):
"Override for notification when getNewGroups() action is successful"
def getNewGroupsFailed(self, error):
"Override for notification when getNewGroups() action fails"
def setStreamSuccess(self):
"Override for notification when setStream() action is successful"
def setStreamFailed(self, error):
"Override for notification when setStream() action fails"
def fetchGroups(self):
"""
Request a list of all news groups from the server. gotAllGroups()
is called on success, getAllGroupsFailed() on failure
"""
self.sendLine('LIST')
self._newState(self._stateList, self.getAllGroupsFailed)
def fetchOverview(self):
"""
Request the overview format from the server. gotOverview() is called
on success, getOverviewFailed() on failure
"""
self.sendLine('LIST OVERVIEW.FMT')
self._newState(self._stateOverview, self.getOverviewFailed)
def fetchSubscriptions(self):
"""
Request a list of the groups that new users are recommended to subscribe to.
gotSubscriptions() is called on success, getSubscriptionsFailed() on
failure
"""
self.sendLine('LIST SUBSCRIPTIONS')
self._newState(self._stateSubscriptions, self.getSubscriptionsFailed)
def fetchGroup(self, group):
"""
Get group information for the specified group from the server. gotGroup()
is called on success, getGroupFailed() on failure.
"""
self.sendLine('GROUP %s' % (group,))
self._newState(None, self.getGroupFailed, self._headerGroup)
def fetchHead(self, index = ''):
"""
Get the header for the specified article (or the currently selected
article if index is '') from the server. gotHead() is called on
success, getHeadFailed() on failure
"""
self.sendLine('HEAD %s' % (index,))
self._newState(self._stateHead, self.getHeadFailed)
def fetchBody(self, index = ''):
"""
Get the body for the specified article (or the currently selected
article if index is '') from the server. gotBody() is called on
success, getBodyFailed() on failure
"""
self.sendLine('BODY %s' % (index,))
self._newState(self._stateBody, self.getBodyFailed)
def fetchArticle(self, index = ''):
"""
Get the complete article with the specified index (or the currently
selected article if index is '') or Message-ID from the server.
gotArticle() is called on success, getArticleFailed() on failure.
"""
self.sendLine('ARTICLE %s' % (index,))
self._newState(self._stateArticle, self.getArticleFailed)
def postArticle(self, text):
"""
Attempt to post an article with the specified text to the server. 'text'
must consist of both head and body data, as specified by RFC 850. If the
article is posted successfully, postedOk() is called, otherwise postFailed()
is called.
"""
self.sendLine('POST')
self._newState(None, self.postFailed, self._headerPost)
self._postText.append(text)
def fetchNewNews(self, groups, date, distributions = ''):
"""
Get the Message-IDs for all new news posted to any of the given
groups since the specified date - in seconds since the epoch, GMT -
optionally restricted to the given distributions. gotNewNews() is
called on success, getNewNewsFailed() on failure.
One invocation of this function may result in multiple invocations
of gotNewNews()/getNewNewsFailed().
"""
date, timeStr = time.strftime('%y%m%d %H%M%S', time.gmtime(date)).split()
line = 'NEWNEWS %%s %s %s %s' % (date, timeStr, distributions)
groupPart = ''
while len(groups) and len(line) + len(groupPart) + len(groups[-1]) + 1 < NNTPClient.MAX_COMMAND_LENGTH:
group = groups.pop()
groupPart = groupPart + ',' + group
self.sendLine(line % (groupPart,))
self._newState(self._stateNewNews, self.getNewNewsFailed)
if len(groups):
self.fetchNewNews(groups, date, distributions)
def fetchNewGroups(self, date, distributions):
"""
Get the names of all new groups created/added to the server since
the specified date - in seconds since the epoch, GMT - optionally
restricted to the given distributions. gotNewGroups() is called
on success, getNewGroupsFailed() on failure.
"""
date, timeStr = time.strftime('%y%m%d %H%M%S', time.gmtime(date)).split()
self.sendLine('NEWGROUPS %s %s %s' % (date, timeStr, distributions))
self._newState(self._stateNewGroups, self.getNewGroupsFailed)
def fetchXHeader(self, header, low = None, high = None, id = None):
"""
Request a specific header from the server for an article or range
of articles. If 'id' is not None, a header for only the article
with that Message-ID will be requested. If both low and high are
None, a header for the currently selected article will be requested;
if both low and high are zero-length strings, headers for all articles
in the currently selected group will be requested; otherwise, high
and low will be used as bounds - if one is None, the first or last
article index will be substituted, as appropriate.
"""
if id is not None:
r = header + ' <%s>' % (id,)
elif low is high is None:
r = header
elif high is None:
r = header + ' %d-' % (low,)
elif low is None:
r = header + ' -%d' % (high,)
else:
r = header + ' %d-%d' % (low, high)
self.sendLine('XHDR ' + r)
self._newState(self._stateXHDR, self.getXHeaderFailed)
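# Illustrative: the command lines fetchXHeader() sends for a few inputs,
# assuming header='Subject' (values are hypothetical):
#   fetchXHeader('Subject', id='abc@host')   -> 'XHDR Subject <abc@host>'
#   fetchXHeader('Subject')                  -> 'XHDR Subject'
#   fetchXHeader('Subject', low=1, high=5)   -> 'XHDR Subject 1-5'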
def setStream(self):
"""
Set the mode to STREAM, suspending the normal "lock-step" mode of
communications. setStreamSuccess() is called on success,
setStreamFailed() on failure.
"""
self.sendLine('MODE STREAM')
self._newState(None, self.setStreamFailed, self._headerMode)
def quit(self):
self.sendLine('QUIT')
self.transport.loseConnection()
def _newState(self, method, error, responseHandler = None):
self._inputBuffers.append([])
self._responseCodes.append(None)
self._state.append(method)
self._error.append(error)
self._responseHandlers.append(responseHandler)
def _endState(self):
buf = self._inputBuffers[0]
del self._responseCodes[0]
del self._inputBuffers[0]
del self._state[0]
del self._error[0]
del self._responseHandlers[0]
return buf
def _newLine(self, line, check = 1):
if check and line and line[0] == '.':
line = line[1:]
self._inputBuffers[0].append(line)
def _setResponseCode(self, code):
self._responseCodes[0] = code
def _getResponseCode(self):
return self._responseCodes[0]
def lineReceived(self, line):
if not len(self._state):
self._statePassive(line)
elif self._getResponseCode() is None:
code = extractCode(line)
if code is None or not (200 <= code[0] < 400): # An error!
self._error[0](line)
self._endState()
else:
self._setResponseCode(code)
if self._responseHandlers[0]:
self._responseHandlers[0](code)
else:
self._state[0](line)
def _statePassive(self, line):
log.msg('Server said: %s' % line)
def _passiveError(self, error):
log.err('Passive Error: %s' % (error,))
def _headerInitial(self, (code, message)):
if code == 200:
self.canPost = 1
else:
self.canPost = 0
self._endState()
def _stateList(self, line):
if line != '.':
data = filter(None, line.strip().split())
self._newLine((data[0], int(data[1]), int(data[2]), data[3]), 0)
else:
self.gotAllGroups(self._endState())
def _stateOverview(self, line):
if line != '.':
self._newLine(filter(None, line.strip().split()), 0)
else:
self.gotOverview(self._endState())
def _stateSubscriptions(self, line):
if line != '.':
self._newLine(line.strip(), 0)
else:
self.gotSubscriptions(self._endState())
def _headerGroup(self, (code, line)):
self.gotGroup(tuple(line.split()))
self._endState()
def _stateArticle(self, line):
if line != '.':
if line.startswith('.'):
line = line[1:]
self._newLine(line, 0)
else:
self.gotArticle('\n'.join(self._endState())+'\n')
def _stateHead(self, line):
if line != '.':
self._newLine(line, 0)
else:
self.gotHead('\n'.join(self._endState()))
def _stateBody(self, line):
if line != '.':
if line.startswith('.'):
line = line[1:]
self._newLine(line, 0)
else:
self.gotBody('\n'.join(self._endState())+'\n')
def _headerPost(self, (code, message)):
if code == 340:
self.transport.write(self._postText[0].replace('\n', '\r\n').replace('\r\n.', '\r\n..'))
if self._postText[0][-1:] != '\n':
self.sendLine('')
self.sendLine('.')
del self._postText[0]
self._newState(None, self.postFailed, self._headerPosted)
else:
self.postFailed('%d %s' % (code, message))
self._endState()
def _headerPosted(self, (code, message)):
if code == 240:
self.postedOk()
else:
self.postFailed('%d %s' % (code, message))
self._endState()
def _stateXHDR(self, line):
if line != '.':
self._newLine(line.split(), 0)
else:
self._gotXHeader(self._endState())
def _stateNewNews(self, line):
if line != '.':
self._newLine(line, 0)
else:
self.gotNewNews(self._endState())
def _stateNewGroups(self, line):
if line != '.':
self._newLine(line, 0)
else:
self.gotNewGroups(self._endState())
def _headerMode(self, (code, message)):
if code == 203:
self.setStreamSuccess()
else:
self.setStreamFailed((code, message))
self._endState()
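# Hedged client sketch (illustrative only; the subclass name is hypothetical
# and the usual Twisted reactor/factory wiring is omitted):
#
#   class GroupLister(NNTPClient):
#       def connectionMade(self):
#           NNTPClient.connectionMade(self)
#           self.fetchGroups()
#       def gotAllGroups(self, groups):
#           for name, high, low, flags in groups:
#               log.msg('%s %s %s %s' % (name, high, low, flags))
#           self.quit()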
class NNTPServer(basic.LineReceiver):
COMMANDS = [
'LIST', 'GROUP', 'ARTICLE', 'STAT', 'MODE', 'LISTGROUP', 'XOVER',
'XHDR', 'HEAD', 'BODY', 'NEXT', 'LAST', 'POST', 'QUIT', 'IHAVE',
'HELP', 'SLAVE', 'XPATH', 'XINDEX', 'XROVER', 'TAKETHIS', 'CHECK'
]
def __init__(self):
self.servingSlave = 0
def connectionMade(self):
self.inputHandler = None
self.currentGroup = None
self.currentIndex = None
self.sendLine('200 server ready - posting allowed')
def lineReceived(self, line):
if self.inputHandler is not None:
self.inputHandler(line)
else:
parts = line.strip().split()
if len(parts):
cmd, parts = parts[0].upper(), parts[1:]
if cmd in NNTPServer.COMMANDS:
func = getattr(self, 'do_%s' % cmd)
try:
func(*parts)
except TypeError:
self.sendLine('501 command syntax error')
log.msg("501 command syntax error")
log.msg("command was", line)
log.deferr()
except:
self.sendLine('503 program fault - command not performed')
log.msg("503 program fault")
log.msg("command was", line)
log.deferr()
else:
self.sendLine('500 command not recognized')
def do_LIST(self, subcmd = '', *dummy):
subcmd = subcmd.strip().lower()
if subcmd == 'newsgroups':
# XXX - this could use a real implementation, eh?
self.sendLine('215 Descriptions in form "group description"')
self.sendLine('.')
elif subcmd == 'overview.fmt':
defer = self.factory.backend.overviewRequest()
defer.addCallbacks(self._gotOverview, self._errOverview)
log.msg('overview')
elif subcmd == 'subscriptions':
defer = self.factory.backend.subscriptionRequest()
defer.addCallbacks(self._gotSubscription, self._errSubscription)
log.msg('subscriptions')
elif subcmd == '':
defer = self.factory.backend.listRequest()
defer.addCallbacks(self._gotList, self._errList)
else:
self.sendLine('500 command not recognized')
def _gotList(self, list):
self.sendLine('215 newsgroups in form "group high low flags"')
for i in list:
self.sendLine('%s %d %d %s' % tuple(i))
self.sendLine('.')
def _errList(self, failure):
print 'LIST failed: ', failure
self.sendLine('503 program fault - command not performed')
def _gotSubscription(self, parts):
self.sendLine('215 information follows')
for i in parts:
self.sendLine(i)
self.sendLine('.')
def _errSubscription(self, failure):
print 'SUBSCRIPTIONS failed: ', failure
self.sendLine('503 program fault - command not performed')
def _gotOverview(self, parts):
self.sendLine('215 Order of fields in overview database.')
for i in parts:
self.sendLine(i + ':')
self.sendLine('.')
def _errOverview(self, failure):
print 'LIST OVERVIEW.FMT failed: ', failure
self.sendLine('503 program fault - command not performed')
def do_LISTGROUP(self, group = None):
group = group or self.currentGroup
if group is None:
self.sendLine('412 Not currently in newsgroup')
else:
defer = self.factory.backend.listGroupRequest(group)
defer.addCallbacks(self._gotListGroup, self._errListGroup)
def _gotListGroup(self, (group, articles)):
self.currentGroup = group
if len(articles):
self.currentIndex = int(articles[0])
else:
self.currentIndex = None
self.sendLine('211 list of article numbers follow')
for i in articles:
self.sendLine(str(i))
self.sendLine('.')
def _errListGroup(self, failure):
print 'LISTGROUP failed: ', failure
self.sendLine('502 no permission')
def do_XOVER(self, range):
if self.currentGroup is None:
self.sendLine('412 No news group currently selected')
else:
l, h = parseRange(range)
defer = self.factory.backend.xoverRequest(self.currentGroup, l, h)
defer.addCallbacks(self._gotXOver, self._errXOver)
def _gotXOver(self, parts):
self.sendLine('224 Overview information follows')
for i in parts:
self.sendLine('\t'.join(map(str, i)))
self.sendLine('.')
def _errXOver(self, failure):
print 'XOVER failed: ', failure
self.sendLine('420 No article(s) selected')
def xhdrWork(self, header, range):
if self.currentGroup is None:
self.sendLine('412 No news group currently selected')
else:
if range is None:
if self.currentIndex is None:
self.sendLine('420 No current article selected')
return
else:
l = h = self.currentIndex
else:
# FIXME: articles may be a message-id
l, h = parseRange(range)
if l is h is None:
self.sendLine('430 no such article')
else:
return self.factory.backend.xhdrRequest(self.currentGroup, l, h, header)
def do_XHDR(self, header, range = None):
d = self.xhdrWork(header, range)
if d:
d.addCallbacks(self._gotXHDR, self._errXHDR)
def _gotXHDR(self, parts):
self.sendLine('221 Header follows')
for i in parts:
self.sendLine('%d %s' % i)
self.sendLine('.')
def _errXHDR(self, failure):
print 'XHDR failed: ', failure
self.sendLine('502 no permission')
def do_POST(self):
self.inputHandler = self._doingPost
self.message = ''
self.sendLine('340 send article to be posted. End with <CR-LF>.<CR-LF>')
def _doingPost(self, line):
if line == '.':
self.inputHandler = None
group, article = self.currentGroup, self.message
self.message = ''
defer = self.factory.backend.postRequest(article)
defer.addCallbacks(self._gotPost, self._errPost)
else:
self.message = self.message + line + '\r\n'
def _gotPost(self, parts):
self.sendLine('240 article posted ok')
def _errPost(self, failure):
print 'POST failed: ', failure
self.sendLine('441 posting failed')
def do_CHECK(self, id):
d = self.factory.backend.articleExistsRequest(id)
d.addCallbacks(self._gotCheck, self._errCheck)
def _gotCheck(self, result):
if result:
self.sendLine("438 already have it, please don't send it to me")
else:
self.sendLine('238 no such article found, please send it to me')
def _errCheck(self, failure):
print 'CHECK failed: ', failure
self.sendLine('431 try sending it again later')
def do_TAKETHIS(self, id):
self.inputHandler = self._doingTakeThis
self.message = ''
def _doingTakeThis(self, line):
if line == '.':
self.inputHandler = None
article = self.message
self.message = ''
d = self.factory.backend.postRequest(article)
d.addCallbacks(self._didTakeThis, self._errTakeThis)
else:
self.message = self.message + line + '\r\n'
def _didTakeThis(self, result):
self.sendLine('239 article transferred ok')
def _errTakeThis(self, failure):
print 'TAKETHIS failed: ', failure
self.sendLine('439 article transfer failed')
def do_GROUP(self, group):
defer = self.factory.backend.groupRequest(group)
defer.addCallbacks(self._gotGroup, self._errGroup)
def _gotGroup(self, (name, num, high, low, flags)):
self.currentGroup = name
self.currentIndex = low
self.sendLine('211 %d %d %d %s group selected' % (num, low, high, name))
def _errGroup(self, failure):
print 'GROUP failed: ', failure
self.sendLine('411 no such group')
def articleWork(self, article, cmd, func):
if self.currentGroup is None:
self.sendLine('412 no newsgroup has been selected')
else:
if not article:
if self.currentIndex is None:
self.sendLine('420 no current article has been selected')
else:
article = self.currentIndex
else:
if article[0] == '<':
return func(self.currentGroup, index = None, id = article)
else:
try:
article = int(article)
return func(self.currentGroup, article)
except ValueError:
self.sendLine('501 command syntax error')
def do_ARTICLE(self, article = None):
defer = self.articleWork(article, 'ARTICLE', self.factory.backend.articleRequest)
if defer:
defer.addCallbacks(self._gotArticle, self._errArticle)
def _gotArticle(self, (index, id, article)):
self.currentIndex = index
self.sendLine('220 %d %s article' % (index, id))
s = basic.FileSender()
d = s.beginFileTransfer(article, self.transport)
d.addCallback(self.finishedFileTransfer)
##
## Helper for FileSender
##
def finishedFileTransfer(self, lastsent):
if lastsent != '\n':
line = '\r\n.'
else:
line = '.'
self.sendLine(line)
##
def _errArticle(self, failure):
print 'ARTICLE failed: ', failure
self.sendLine('423 bad article number')
def do_STAT(self, article = None):
defer = self.articleWork(article, 'STAT', self.factory.backend.articleRequest)
if defer:
defer.addCallbacks(self._gotStat, self._errStat)
def _gotStat(self, (index, id, article)):
self.currentIndex = index
self.sendLine('223 %d %s article retrieved - request text separately' % (index, id))
def _errStat(self, failure):
print 'STAT failed: ', failure
self.sendLine('423 bad article number')
def do_HEAD(self, article = None):
defer = self.articleWork(article, 'HEAD', self.factory.backend.headRequest)
if defer:
defer.addCallbacks(self._gotHead, self._errHead)
def _gotHead(self, (index, id, head)):
self.currentIndex = index
self.sendLine('221 %d %s article retrieved' % (index, id))
self.transport.write(head + '\r\n')
self.sendLine('.')
def _errHead(self, failure):
print 'HEAD failed: ', failure
self.sendLine('423 no such article number in this group')
def do_BODY(self, article):
defer = self.articleWork(article, 'BODY', self.factory.backend.bodyRequest)
if defer:
defer.addCallbacks(self._gotBody, self._errBody)
def _gotBody(self, (index, id, body)):
self.currentIndex = index
self.sendLine('222 %d %s article retrieved' % (index, id))
self.lastsent = ''
s = basic.FileSender()
d = s.beginFileTransfer(body, self.transport)
d.addCallback(self.finishedFileTransfer)
def _errBody(self, failure):
print 'BODY failed: ', failure
self.sendLine('423 no such article number in this group')
# NEXT and LAST are just STATs that increment currentIndex first.
# Accordingly, use the STAT callbacks.
def do_NEXT(self):
i = self.currentIndex + 1
defer = self.factory.backend.articleRequest(self.currentGroup, i)
defer.addCallbacks(self._gotStat, self._errStat)
def do_LAST(self):
i = self.currentIndex - 1
defer = self.factory.backend.articleRequest(self.currentGroup, i)
defer.addCallbacks(self._gotStat, self._errStat)
def do_MODE(self, cmd):
cmd = cmd.strip().upper()
if cmd == 'READER':
self.servingSlave = 0
self.sendLine('200 Hello, you can post')
elif cmd == 'STREAM':
self.sendLine('500 Command not understood')
else:
# This is not a mistake
self.sendLine('500 Command not understood')
def do_QUIT(self):
self.sendLine('205 goodbye')
self.transport.loseConnection()
def do_HELP(self):
self.sendLine('100 help text follows')
self.sendLine('Read the RFC.')
self.sendLine('.')
def do_SLAVE(self):
self.sendLine('202 slave status noted')
self.servingSlave = 1
def do_XPATH(self, article):
# XPATH is a silly thing to have. No client has the right to ask
# for this piece of information from me, and so that is what I'll
# tell them.
self.sendLine('502 access restriction or permission denied')
def do_XINDEX(self, article):
# XINDEX is another silly command. The RFC suggests it be relegated
# to the history books, and who am I to disagree?
self.sendLine('502 access restriction or permission denied')
def do_XROVER(self, range=None):
"""
Handle a request for references of all messages in the currently
selected group.
This generates the same response as an I{XHDR References} request would
generate.
"""
self.do_XHDR('References', range)
def do_IHAVE(self, id):
self.factory.backend.articleExistsRequest(id).addCallback(self._foundArticle)
def _foundArticle(self, result):
if result:
self.sendLine('437 article rejected - do not try again')
else:
self.sendLine('335 send article to be transferred. End with <CR-LF>.<CR-LF>')
self.inputHandler = self._handleIHAVE
self.message = ''
def _handleIHAVE(self, line):
if line == '.':
self.inputHandler = None
self.factory.backend.postRequest(
self.message
).addCallbacks(self._gotIHAVE, self._errIHAVE)
self.message = ''
else:
self.message = self.message + line + '\r\n'
def _gotIHAVE(self, result):
self.sendLine('235 article transferred ok')
def _errIHAVE(self, failure):
print 'IHAVE failed: ', failure
self.sendLine('436 transfer failed - try again later')
class UsenetClientProtocol(NNTPClient):
"""
A client that connects to an NNTP server and asks for articles new
since a certain time.
"""
def __init__(self, groups, date, storage):
"""
Fetch all new articles from the given groups since the
given date and dump them into the given storage. groups
is a list of group names. date is an integer or floating
point representing seconds since the epoch (GMT). storage is
any object that implements the NewsStorage interface.
"""
NNTPClient.__init__(self)
self.groups, self.date, self.storage = groups, date, storage
def connectionMade(self):
NNTPClient.connectionMade(self)
log.msg("Initiating update with remote host: " + str(self.transport.getPeer()))
self.setStream()
self.fetchNewNews(self.groups, self.date, '')
def articleExists(self, exists, article):
if exists:
self.fetchArticle(article)
else:
self.count = self.count - 1
self.disregard = self.disregard + 1
def gotNewNews(self, news):
self.disregard = 0
self.count = len(news)
log.msg("Transferring " + str(self.count) +
" articles from remote host: " + str(self.transport.getPeer()))
for i in news:
self.storage.articleExistsRequest(i).addCallback(self.articleExists, i)
def getNewNewsFailed(self, reason):
log.msg("Updated failed (" + reason + ") with remote host: " + str(self.transport.getPeer()))
self.quit()
def gotArticle(self, article):
self.storage.postRequest(article)
self.count = self.count - 1
if not self.count:
log.msg("Completed update with remote host: " + str(self.transport.getPeer()))
if self.disregard:
log.msg("Disregarded %d articles." % (self.disregard,))
self.factory.updateChecks(self.transport.getPeer())
self.quit()
| mit |
pferreir/indico-backup | indico/ext/statistics/piwik/implementation.py | 2 | 7990 | # -*- coding: utf-8 -*-
##
##
## This file is part of Indico
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN)
##
## Indico is free software: you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico. If not, see <http://www.gnu.org/licenses/>.
import os
import indico.ext.statistics.piwik
from flask import request
from indico.ext.statistics.base.implementation import BaseStatisticsImplementation, JSHookBase
from MaKaC.plugins.base import PluginsHolder
from MaKaC.i18n import _
from MaKaC.conference import LocalFile
class PiwikStatisticsImplementation(BaseStatisticsImplementation):
QUERY_SCRIPT = 'piwik.php'
QUERY_KEY_NAME = 'token_auth'
_name = 'Piwik'
def __init__(self):
BaseStatisticsImplementation.__init__(self)
self._implementationPackage = indico.ext.statistics.piwik
self._APISegmentation = []
self._setHasJSHook(True)
self._setHasDownloadListener(True)
self.setAPIToken(self._getSavedAPIToken())
self.setAPISiteID(self._getSavedAPISiteID())
def _buildPluginPath(self):
"""
Local, absolute location of plugin.
"""
self._FSPath = os.path.join(indico.ext.statistics.piwik.__path__)[0]
@staticmethod
def getVarFromPluginStorage(varName):
"""
Retrieves varName from the options of the plugin.
"""
piwik = PluginsHolder().getPluginType('statistics').getPlugin('piwik')
return piwik.getOptions()[varName].getValue()
def _getSavedAPIPath(self, server='primary'):
"""
Returns the String saved in the plugin configuration for the
Piwik server URL.
"""
if not self._getUsesOnlyGeneralServer() and server == 'secondary':
return PiwikStatisticsImplementation.getVarFromPluginStorage('serverAPIUrl')
else:
return PiwikStatisticsImplementation.getVarFromPluginStorage('serverUrl')
def _getSavedAPIToken(self):
"""
Returns the String saved in the plugin configuration for the
Piwik token auth.
"""
return PiwikStatisticsImplementation.getVarFromPluginStorage('serverTok')
def _getUsesOnlyGeneralServer(self):
"""
Returns the boolean saved for whether we should only use the primary server
for all requests.
"""
return PiwikStatisticsImplementation.getVarFromPluginStorage('useOnlyServerURL')
def _getSavedAPISiteID(self):
"""
Returns the String saved in the plugin configuration for the
Piwik ID Site
"""
return PiwikStatisticsImplementation.getVarFromPluginStorage('serverSiteID')
def hasJSHook(self):
"""
This implementation permits the JSHook & Download listener to be
enabled/disabled separately, checks for this option first then falls
back to default plugin activity if not disabled locally. By doing this,
the components are not appended to the list of subsribers when listeners
are iterating.
"""
enabled = PiwikStatisticsImplementation.getVarFromPluginStorage('jsHookEnabled')
return BaseStatisticsImplementation.hasJSHook(self) if enabled else False
def hasDownloadListener(self):
"""
Overridden method, see self.hasJSHook for explanation of logic.
"""
enabled = PiwikStatisticsImplementation.getVarFromPluginStorage('downloadTrackingEnabled')
return BaseStatisticsImplementation.hasDownloadListener(self) if enabled else False
@staticmethod
@BaseStatisticsImplementation.memoizeReport
def getConferenceReport(startDate, endDate, confId, contribId=None):
"""
Returns the report object which satisfies the confId given.
"""
from indico.ext.statistics.piwik.reports import PiwikReport
return PiwikReport(startDate, endDate, confId, contribId).fossilize()
@staticmethod
def getContributionReport(startDate, endDate, confId, contribId):
"""
Returns the report object for the contribId given.
"""
return PiwikStatisticsImplementation.getConferenceReport(startDate, endDate,
confId, contribId)
def getJSHookObject(self, instantiate=False):
"""
Returns a reference to or an instance of the JSHook class.
"""
reference = indico.ext.statistics.piwik.implementation.JSHook
return reference() if instantiate else reference
def setAPISiteID(self, id):
"""
Piwik identifies sites by their 'idSite' attribute.
"""
self.setAPIParams({'idSite': id})
def setAPIAction(self, action):
self.setAPIParams({'action': action})
def setAPIInnerAction(self, action):
self.setAPIParams({'apiAction': action})
def setAPIMethod(self, method):
self.setAPIParams({'method': method})
def setAPIModule(self, module):
self.setAPIParams({'module': module})
def setAPIInnerModule(self, module):
self.setAPIParams({'apiModule': module})
def setAPIFormat(self, format='JSON'):
self.setAPIParams({'format': format})
def setAPIPeriod(self, period='day'):
self.setAPIParams({'period': period})
def setAPIDate(self, date=['last7']):
newDate = date[0] if len(date) == 1 else ','.join(date)
self.setAPIParams({'date': newDate})
def setAPISegmentation(self, segmentation):
"""
segmentation = {'key': ('equality', 'value')}
"""
for segmentName, (equality, segmentValue) in segmentation.iteritems():
if isinstance(segmentValue, list):
value = ','.join(segmentValue)
else:
value = str(segmentValue)
segmentBuild = segmentName + equality + value
if segmentBuild not in self._APISegmentation:
self._APISegmentation.append(segmentBuild)
segmentation = self.QUERY_BREAK.join(self._APISegmentation)
self.setAPIParams({'segment': segmentation})
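# Illustrative example (hypothetical instance name, segment name and value):
#
#   impl.setAPISegmentation({'customVariablePageName1': ('==', 'Conference')})
#   # yields the Piwik query parameter:
#   #   segment=customVariablePageName1==Conference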
def trackDownload(self, material):
"""
Wraps around the Piwik query object for tracking downloads; constructs
the name by which we want to log the download, and the link.
"""
from indico.ext.statistics.piwik.queries import PiwikQueryTrackDownload
tracker = PiwikQueryTrackDownload()
downloadLink = request.url if isinstance(material, LocalFile) else material.getURL()
downloadTitle = _('Download') + ' - ' + (material.getFileName() if isinstance(material, LocalFile) else material.getURL())
tracker.trackDownload(downloadLink, downloadTitle)
class JSHook(JSHookBase):
varConference = 'Conference'
varContribution = 'Contribution'
def __init__(self, instance, extra):
super(JSHook, self).__init__(instance)
self.hasConfId = self.hasContribId = False
self._buildVars(extra)
def _buildVars(self, item):
"""
Builds the references to Conferences & Contributions.
"""
self.siteId = PiwikStatisticsImplementation.getVarFromPluginStorage('serverSiteID')
if hasattr(item, '_conf'):
self.hasConfId = True
self.confId = item._conf.getId()
if hasattr(item, '_contrib'):
self.hasContribId = True
self.contribId = item._contrib.getUniqueId()
| gpl-3.0 |
kholidfu/django | django/utils/synch.py | 586 | 2558 | """
Synchronization primitives:
- reader-writer lock (preference to writers)
(Contributed to Django by [email protected])
"""
import contextlib
import threading
class RWLock(object):
"""
Classic implementation of reader-writer lock with preference to writers.
Readers can access a resource simultaneously.
Writers get an exclusive access.
API is self-descriptive:
reader_enters()
reader_leaves()
writer_enters()
writer_leaves()
"""
def __init__(self):
self.mutex = threading.RLock()
self.can_read = threading.Semaphore(0)
self.can_write = threading.Semaphore(0)
self.active_readers = 0
self.active_writers = 0
self.waiting_readers = 0
self.waiting_writers = 0
def reader_enters(self):
with self.mutex:
if self.active_writers == 0 and self.waiting_writers == 0:
self.active_readers += 1
self.can_read.release()
else:
self.waiting_readers += 1
self.can_read.acquire()
def reader_leaves(self):
with self.mutex:
self.active_readers -= 1
if self.active_readers == 0 and self.waiting_writers != 0:
self.active_writers += 1
self.waiting_writers -= 1
self.can_write.release()
@contextlib.contextmanager
def reader(self):
self.reader_enters()
try:
yield
finally:
self.reader_leaves()
def writer_enters(self):
with self.mutex:
if self.active_writers == 0 and self.waiting_writers == 0 and self.active_readers == 0:
self.active_writers += 1
self.can_write.release()
else:
self.waiting_writers += 1
self.can_write.acquire()
def writer_leaves(self):
with self.mutex:
self.active_writers -= 1
if self.waiting_writers != 0:
self.active_writers += 1
self.waiting_writers -= 1
self.can_write.release()
elif self.waiting_readers != 0:
t = self.waiting_readers
self.waiting_readers = 0
self.active_readers += t
while t > 0:
self.can_read.release()
t -= 1
@contextlib.contextmanager
def writer(self):
self.writer_enters()
try:
yield
finally:
self.writer_leaves()
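# Hedged usage sketch (illustrative):
#
#   lock = RWLock()
#   with lock.reader():
#       ...  # many threads may hold the read side concurrently
#   with lock.writer():
#       ...  # exclusive access; waiting writers get preference over readers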
| bsd-3-clause |
odoo-brazil/odoo-brazil-addons | l10n_br_search_zip/models/l10n_br_zip.py | 1 | 5338 | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2015 TrustCode - www.trustcode.com.br #
# Danimar Ribeiro <[email protected]> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
import re
import logging
import requests
from openerp import models
from openerp.exceptions import Warning
_logger = logging.getLogger(__name__)
class L10nbrZip(models.Model):
_inherit = 'l10n_br.zip'
def _search_by_cep(self, zip_code):
try:
url_viacep = 'http://viacep.com.br/ws/' + \
zip_code + '/json/unicode/'
obj_viacep = requests.get(url_viacep)
res = obj_viacep.json()
if res:
city = self.env['l10n_br_base.city'].search(
[('ibge_code', '=', res['ibge'][2:]),
('state_id.code', '=', res['uf'])])
self.env['l10n_br.zip'].create(
{'zip': re.sub('[^0-9]', '', res['cep']),
'street': res['logradouro'],
'district': res['bairro'],
'country_id': city.state_id.country_id.id,
'state_id': city.state_id.id,
'l10n_br_city_id': city.id})
except Exception as e:
_logger.error(e.message, exc_info=True)
def _search_by_address(self, state_id, city_id, street):
try:
city = self.env['l10n_br_base.city'].browse(city_id)
url_viacep = 'http://viacep.com.br/ws/' + city.state_id.code + \
'/' + city.name + '/' + street + '/json/unicode/'
obj_viacep = requests.get(url_viacep)
results = obj_viacep.json()
if results:
for res in results:
city = self.env['l10n_br_base.city'].search(
[('ibge_code', '=', res['ibge'][2:]),
('state_id.code', '=', res['uf'])])
self.env['l10n_br.zip'].create(
{'zip': re.sub('[^0-9]', '', res['cep']),
'street': res['logradouro'],
'district': res['bairro'],
'country_id': city.state_id.country_id.id,
'state_id': city.state_id.id,
'l10n_br_city_id': city.id})
except Exception as e:
_logger.error(e.message, exc_info=True)
def zip_search_multi(self, country_id=False,
state_id=False, l10n_br_city_id=False,
district=False, street=False, zip_code=False):
zip_ids = super(L10nbrZip, self).zip_search_multi(
country_id=country_id,
state_id=state_id, l10n_br_city_id=l10n_br_city_id,
district=district, street=street, zip_code=zip_code)
if len(zip_ids) == 0:
if zip_code and len(zip_code) == 9:
self._search_by_cep(zip_code)
elif zip_code:
raise Warning('Digite o cep corretamente')
else:
self._search_by_address(state_id, l10n_br_city_id, street)
return super(L10nbrZip, self).zip_search_multi(
country_id=country_id,
state_id=state_id, l10n_br_city_id=l10n_br_city_id,
district=district, street=street, zip_code=zip_code)
else:
return zip_ids
def set_result(self, zip_obj=None):
if zip_obj:
zip_code = zip_obj.zip
if len(zip_code) == 8:
zip_code = '%s-%s' % (zip_code[0:5], zip_code[5:8])
result = {
'country_id': zip_obj.country_id.id,
'state_id': zip_obj.state_id.id,
'l10n_br_city_id': zip_obj.l10n_br_city_id.id,
'district': zip_obj.district,
'street': zip_obj.street or '',
'zip': zip_code,
}
else:
result = {}
return result
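# Illustrative shape of the dictionary set_result() returns (all values
# hypothetical):
#
#   {'country_id': 32, 'state_id': 13, 'l10n_br_city_id': 4242,
#    'district': 'Centro', 'street': 'Rua XV de Novembro',
#    'zip': '80020-310'}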
| agpl-3.0 |
myerpengine/odoo | addons/hr/res_users.py | 44 | 3136 | from openerp.osv import fields, osv
from openerp.tools.translate import _
class res_users(osv.Model):
""" Update of res.users class
- if adding groups to a user, check if base.group_user is among them
(member of 'Employee'); if so, create an employee form linked to the user.
"""
_name = 'res.users'
_inherit = ['res.users']
_columns = {
'display_employees_suggestions': fields.boolean("Display Employees Suggestions"),
}
_defaults = {
'display_employees_suggestions': True,
}
def __init__(self, pool, cr):
""" Override of __init__ to add access rights on
display_employees_suggestions fields. Access rights are disabled by
default, but allowed on some specific fields defined in
self.SELF_{READ/WRITE}ABLE_FIELDS.
"""
init_res = super(res_users, self).__init__(pool, cr)
# duplicate list to avoid modifying the original reference
self.SELF_WRITEABLE_FIELDS = list(self.SELF_WRITEABLE_FIELDS)
self.SELF_WRITEABLE_FIELDS.append('display_employees_suggestions')
# duplicate list to avoid modifying the original reference
self.SELF_READABLE_FIELDS = list(self.SELF_READABLE_FIELDS)
self.SELF_READABLE_FIELDS.append('display_employees_suggestions')
return init_res
def stop_showing_employees_suggestions(self, cr, uid, user_id, context=None):
"""Update display_employees_suggestions value to False"""
if context is None:
context = {}
self.write(cr, uid, user_id, {"display_employees_suggestions": False}, context)
def _create_welcome_message(self, cr, uid, user, context=None):
"""Do not welcome new users anymore, welcome new employees instead"""
return True
def _message_post_get_eid(self, cr, uid, thread_id, context=None):
assert thread_id, "res.users does not support posting global messages"
if context and 'thread_model' in context:
context['thread_model'] = 'hr.employee'
if isinstance(thread_id, (list, tuple)):
thread_id = thread_id[0]
return self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', thread_id)], context=context)
def message_post(self, cr, uid, thread_id, context=None, **kwargs):
""" Redirect the posting of message on res.users to the related employee.
This is done because when giving the context of Chatter on the
various mailboxes, we do not have access to the current partner_id. """
if kwargs.get('type') == 'email':
return super(res_users, self).message_post(cr, uid, thread_id, context=context, **kwargs)
res = None
employee_ids = self._message_post_get_eid(cr, uid, thread_id, context=context)
if not employee_ids: # no employee: fall back on previous behavior
return super(res_users, self).message_post(cr, uid, thread_id, context=context, **kwargs)
for employee_id in employee_ids:
res = self.pool.get('hr.employee').message_post(cr, uid, employee_id, context=context, **kwargs)
return res
| agpl-3.0 |
heeraj123/oh-mainline | vendor/packages/gdata/src/gdata/contacts/__init__.py | 119 | 28208 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains extensions to ElementWrapper objects used with Google Contacts."""
__author__ = 'dbrattli (Dag Brattli)'
import atom
import gdata
## Constants from http://code.google.com/apis/gdata/elements.html ##
REL_HOME = 'http://schemas.google.com/g/2005#home'
REL_WORK = 'http://schemas.google.com/g/2005#work'
REL_OTHER = 'http://schemas.google.com/g/2005#other'
# AOL Instant Messenger protocol
IM_AIM = 'http://schemas.google.com/g/2005#AIM'
IM_MSN = 'http://schemas.google.com/g/2005#MSN' # MSN Messenger protocol
IM_YAHOO = 'http://schemas.google.com/g/2005#YAHOO' # Yahoo Messenger protocol
IM_SKYPE = 'http://schemas.google.com/g/2005#SKYPE' # Skype protocol
IM_QQ = 'http://schemas.google.com/g/2005#QQ' # QQ protocol
# Google Talk protocol
IM_GOOGLE_TALK = 'http://schemas.google.com/g/2005#GOOGLE_TALK'
IM_ICQ = 'http://schemas.google.com/g/2005#ICQ' # ICQ protocol
IM_JABBER = 'http://schemas.google.com/g/2005#JABBER' # Jabber protocol
IM_NETMEETING = 'http://schemas.google.com/g/2005#netmeeting' # NetMeeting
PHOTO_LINK_REL = 'http://schemas.google.com/contacts/2008/rel#photo'
PHOTO_EDIT_LINK_REL = 'http://schemas.google.com/contacts/2008/rel#edit-photo'
# Different phone types, for more info see:
# http://code.google.com/apis/gdata/docs/2.0/elements.html#gdPhoneNumber
PHONE_CAR = 'http://schemas.google.com/g/2005#car'
PHONE_FAX = 'http://schemas.google.com/g/2005#fax'
PHONE_GENERAL = 'http://schemas.google.com/g/2005#general'
PHONE_HOME = REL_HOME
PHONE_HOME_FAX = 'http://schemas.google.com/g/2005#home_fax'
PHONE_INTERNAL = 'http://schemas.google.com/g/2005#internal-extension'
PHONE_MOBILE = 'http://schemas.google.com/g/2005#mobile'
PHONE_OTHER = REL_OTHER
PHONE_PAGER = 'http://schemas.google.com/g/2005#pager'
PHONE_SATELLITE = 'http://schemas.google.com/g/2005#satellite'
PHONE_VOIP = 'http://schemas.google.com/g/2005#voip'
PHONE_WORK = REL_WORK
PHONE_WORK_FAX = 'http://schemas.google.com/g/2005#work_fax'
PHONE_WORK_MOBILE = 'http://schemas.google.com/g/2005#work_mobile'
PHONE_WORK_PAGER = 'http://schemas.google.com/g/2005#work_pager'
PHONE_MAIN = 'http://schemas.google.com/g/2005#main'
PHONE_ASSISTANT = 'http://schemas.google.com/g/2005#assistant'
PHONE_CALLBACK = 'http://schemas.google.com/g/2005#callback'
PHONE_COMPANY_MAIN = 'http://schemas.google.com/g/2005#company_main'
PHONE_ISDN = 'http://schemas.google.com/g/2005#isdn'
PHONE_OTHER_FAX = 'http://schemas.google.com/g/2005#other_fax'
PHONE_RADIO = 'http://schemas.google.com/g/2005#radio'
PHONE_TELEX = 'http://schemas.google.com/g/2005#telex'
PHONE_TTY_TDD = 'http://schemas.google.com/g/2005#tty_tdd'
EXTERNAL_ID_ORGANIZATION = 'organization'
RELATION_MANAGER = 'manager'
CONTACTS_NAMESPACE = 'http://schemas.google.com/contact/2008'
class GDataBase(atom.AtomBase):
"""The Google Contacts intermediate class from atom.AtomBase."""
_namespace = gdata.GDATA_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
def __init__(self, text=None,
extension_elements=None, extension_attributes=None):
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class ContactsBase(GDataBase):
"""The Google Contacts intermediate class for Contacts namespace."""
_namespace = CONTACTS_NAMESPACE
class OrgName(GDataBase):
"""The Google Contacts OrgName element."""
_tag = 'orgName'
class OrgTitle(GDataBase):
"""The Google Contacts OrgTitle element."""
_tag = 'orgTitle'
class OrgDepartment(GDataBase):
"""The Google Contacts OrgDepartment element."""
_tag = 'orgDepartment'
class OrgJobDescription(GDataBase):
"""The Google Contacts OrgJobDescription element."""
_tag = 'orgJobDescription'
class Where(GDataBase):
"""The Google Contacts Where element."""
_tag = 'where'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['rel'] = 'rel'
_attributes['label'] = 'label'
_attributes['valueString'] = 'value_string'
def __init__(self, value_string=None, rel=None, label=None,
text=None, extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.rel = rel
self.label = label
self.value_string = value_string
class When(GDataBase):
"""The Google Contacts When element."""
_tag = 'when'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['startTime'] = 'start_time'
_attributes['endTime'] = 'end_time'
_attributes['label'] = 'label'
def __init__(self, start_time=None, end_time=None, label=None,
text=None, extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.start_time = start_time
self.end_time = end_time
self.label = label
class Organization(GDataBase):
"""The Google Contacts Organization element."""
_tag = 'organization'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['label'] = 'label'
_attributes['rel'] = 'rel'
_attributes['primary'] = 'primary'
_children['{%s}orgName' % GDataBase._namespace] = (
'org_name', OrgName)
_children['{%s}orgTitle' % GDataBase._namespace] = (
'org_title', OrgTitle)
_children['{%s}orgDepartment' % GDataBase._namespace] = (
'org_department', OrgDepartment)
_children['{%s}orgJobDescription' % GDataBase._namespace] = (
'org_job_description', OrgJobDescription)
#_children['{%s}where' % GDataBase._namespace] = ('where', Where)
def __init__(self, label=None, rel=None, primary='false', org_name=None,
org_title=None, org_department=None, org_job_description=None,
where=None, text=None,
extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.label = label
self.rel = rel or REL_OTHER
self.primary = primary
self.org_name = org_name
self.org_title = org_title
self.org_department = org_department
self.org_job_description = org_job_description
self.where = where
class PostalAddress(GDataBase):
"""The Google Contacts PostalAddress element."""
_tag = 'postalAddress'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['rel'] = 'rel'
_attributes['primary'] = 'primary'
def __init__(self, primary=None, rel=None, text=None,
extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.rel = rel or REL_OTHER
self.primary = primary
class FormattedAddress(GDataBase):
"""The Google Contacts FormattedAddress element."""
_tag = 'formattedAddress'
class StructuredPostalAddress(GDataBase):
"""The Google Contacts StructuredPostalAddress element."""
_tag = 'structuredPostalAddress'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['rel'] = 'rel'
_attributes['primary'] = 'primary'
_children['{%s}formattedAddress' % GDataBase._namespace] = (
'formatted_address', FormattedAddress)
def __init__(self, rel=None, primary=None,
formatted_address=None, text=None,
extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.rel = rel or REL_OTHER
self.primary = primary
self.formatted_address = formatted_address
class IM(GDataBase):
"""The Google Contacts IM element."""
_tag = 'im'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['address'] = 'address'
_attributes['primary'] = 'primary'
_attributes['protocol'] = 'protocol'
_attributes['label'] = 'label'
_attributes['rel'] = 'rel'
def __init__(self, primary='false', rel=None, address=None, protocol=None,
label=None, text=None,
extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.protocol = protocol
self.address = address
self.primary = primary
self.rel = rel or REL_OTHER
self.label = label
class Email(GDataBase):
"""The Google Contacts Email element."""
_tag = 'email'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['address'] = 'address'
_attributes['primary'] = 'primary'
_attributes['rel'] = 'rel'
_attributes['label'] = 'label'
def __init__(self, label=None, rel=None, address=None, primary='false',
text=None, extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.label = label
self.rel = rel or REL_OTHER
self.address = address
self.primary = primary
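# Illustrative: building a primary work e-mail element (the address is
# hypothetical):
#
#   email = Email(address='user@example.com', primary='true', rel=REL_WORK)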
class PhoneNumber(GDataBase):
"""The Google Contacts PhoneNumber element."""
_tag = 'phoneNumber'
_children = GDataBase._children.copy()
_attributes = GDataBase._attributes.copy()
_attributes['label'] = 'label'
_attributes['rel'] = 'rel'
_attributes['uri'] = 'uri'
_attributes['primary'] = 'primary'
def __init__(self, label=None, rel=None, uri=None, primary='false',
text=None, extension_elements=None, extension_attributes=None):
GDataBase.__init__(self, text=text, extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.label = label
self.rel = rel or REL_OTHER
self.uri = uri
self.primary = primary
class Nickname(ContactsBase):
"""The Google Contacts Nickname element."""
_tag = 'nickname'
class Occupation(ContactsBase):
"""The Google Contacts Occupation element."""
_tag = 'occupation'
class Gender(ContactsBase):
"""The Google Contacts Gender element."""
_tag = 'gender'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['value'] = 'value'
def __init__(self, value=None,
text=None, extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.value = value
class Birthday(ContactsBase):
"""The Google Contacts Birthday element."""
_tag = 'birthday'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['when'] = 'when'
def __init__(self, when=None,
text=None, extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.when = when
class Relation(ContactsBase):
"""The Google Contacts Relation element."""
_tag = 'relation'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['label'] = 'label'
_attributes['rel'] = 'rel'
def __init__(self, label=None, rel=None,
text=None, extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.label = label
self.rel = rel
def RelationFromString(xml_string):
return atom.CreateClassFromXMLString(Relation, xml_string)
class UserDefinedField(ContactsBase):
"""The Google Contacts UserDefinedField element."""
_tag = 'userDefinedField'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['key'] = 'key'
_attributes['value'] = 'value'
def __init__(self, key=None, value=None,
text=None, extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.key = key
self.value = value
def UserDefinedFieldFromString(xml_string):
return atom.CreateClassFromXMLString(UserDefinedField, xml_string)
class Website(ContactsBase):
"""The Google Contacts Website element."""
_tag = 'website'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['href'] = 'href'
_attributes['label'] = 'label'
_attributes['primary'] = 'primary'
_attributes['rel'] = 'rel'
def __init__(self, href=None, label=None, primary='false', rel=None,
text=None, extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.href = href
self.label = label
self.primary = primary
self.rel = rel
def WebsiteFromString(xml_string):
return atom.CreateClassFromXMLString(Website, xml_string)
class ExternalId(ContactsBase):
"""The Google Contacts ExternalId element."""
_tag = 'externalId'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['label'] = 'label'
_attributes['rel'] = 'rel'
_attributes['value'] = 'value'
def __init__(self, label=None, rel=None, value=None,
text=None, extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.label = label
self.rel = rel
self.value = value
def ExternalIdFromString(xml_string):
return atom.CreateClassFromXMLString(ExternalId, xml_string)
class Event(ContactsBase):
"""The Google Contacts Event element."""
_tag = 'event'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['label'] = 'label'
_attributes['rel'] = 'rel'
_children['{%s}when' % ContactsBase._namespace] = ('when', When)
def __init__(self, label=None, rel=None, when=None,
text=None, extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.label = label
self.rel = rel
self.when = when
def EventFromString(xml_string):
return atom.CreateClassFromXMLString(Event, xml_string)
class Deleted(GDataBase):
"""The Google Contacts Deleted element."""
_tag = 'deleted'
class GroupMembershipInfo(ContactsBase):
"""The Google Contacts GroupMembershipInfo element."""
_tag = 'groupMembershipInfo'
_children = ContactsBase._children.copy()
_attributes = ContactsBase._attributes.copy()
_attributes['deleted'] = 'deleted'
_attributes['href'] = 'href'
def __init__(self, deleted=None, href=None, text=None,
extension_elements=None, extension_attributes=None):
ContactsBase.__init__(self, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes)
self.deleted = deleted
self.href = href
class PersonEntry(gdata.BatchEntry):
"""Base class for ContactEntry and ProfileEntry."""
_children = gdata.BatchEntry._children.copy()
_children['{%s}organization' % gdata.GDATA_NAMESPACE] = (
'organization', [Organization])
_children['{%s}phoneNumber' % gdata.GDATA_NAMESPACE] = (
'phone_number', [PhoneNumber])
_children['{%s}nickname' % CONTACTS_NAMESPACE] = ('nickname', Nickname)
_children['{%s}occupation' % CONTACTS_NAMESPACE] = ('occupation', Occupation)
_children['{%s}gender' % CONTACTS_NAMESPACE] = ('gender', Gender)
_children['{%s}birthday' % CONTACTS_NAMESPACE] = ('birthday', Birthday)
_children['{%s}postalAddress' % gdata.GDATA_NAMESPACE] = ('postal_address',
[PostalAddress])
_children['{%s}structuredPostalAddress' % gdata.GDATA_NAMESPACE] = (
'structured_postal_address', [StructuredPostalAddress])
_children['{%s}email' % gdata.GDATA_NAMESPACE] = ('email', [Email])
_children['{%s}im' % gdata.GDATA_NAMESPACE] = ('im', [IM])
_children['{%s}relation' % CONTACTS_NAMESPACE] = ('relation', [Relation])
_children['{%s}userDefinedField' % CONTACTS_NAMESPACE] = (
'user_defined_field', [UserDefinedField])
_children['{%s}website' % CONTACTS_NAMESPACE] = ('website', [Website])
_children['{%s}externalId' % CONTACTS_NAMESPACE] = (
'external_id', [ExternalId])
_children['{%s}event' % CONTACTS_NAMESPACE] = ('event', [Event])
# The following line should be removed once the Python support
# for GData 2.0 is mature.
_attributes = gdata.BatchEntry._attributes.copy()
_attributes['{%s}etag' % gdata.GDATA_NAMESPACE] = 'etag'
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None, organization=None, phone_number=None,
nickname=None, occupation=None, gender=None, birthday=None,
postal_address=None, structured_postal_address=None, email=None,
im=None, relation=None, user_defined_field=None, website=None,
external_id=None, event=None, batch_operation=None,
batch_id=None, batch_status=None, text=None,
extension_elements=None, extension_attributes=None, etag=None):
gdata.BatchEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id, link=link,
published=published,
batch_operation=batch_operation,
batch_id=batch_id, batch_status=batch_status,
title=title, updated=updated)
self.organization = organization or []
self.phone_number = phone_number or []
self.nickname = nickname
self.occupation = occupation
self.gender = gender
self.birthday = birthday
self.postal_address = postal_address or []
self.structured_postal_address = structured_postal_address or []
self.email = email or []
self.im = im or []
self.relation = relation or []
self.user_defined_field = user_defined_field or []
self.website = website or []
self.external_id = external_id or []
self.event = event or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
# The following line should be removed once the Python support
# for GData 2.0 is mature.
self.etag = etag
class ContactEntry(PersonEntry):
"""A Google Contact flavor of an Atom Entry."""
_children = PersonEntry._children.copy()
_children['{%s}deleted' % gdata.GDATA_NAMESPACE] = ('deleted', Deleted)
_children['{%s}groupMembershipInfo' % CONTACTS_NAMESPACE] = (
'group_membership_info', [GroupMembershipInfo])
_children['{%s}extendedProperty' % gdata.GDATA_NAMESPACE] = (
'extended_property', [gdata.ExtendedProperty])
# Overwrite the organization rule in PersonEntry so that a ContactEntry
# may only contain one <gd:organization> element.
_children['{%s}organization' % gdata.GDATA_NAMESPACE] = (
'organization', Organization)
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None, organization=None, phone_number=None,
nickname=None, occupation=None, gender=None, birthday=None,
postal_address=None, structured_postal_address=None, email=None,
im=None, relation=None, user_defined_field=None, website=None,
external_id=None, event=None, batch_operation=None,
batch_id=None, batch_status=None, text=None,
extension_elements=None, extension_attributes=None, etag=None,
deleted=None, extended_property=None,
group_membership_info=None):
PersonEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id, link=link,
published=published, title=title, updated=updated,
organization=organization, phone_number=phone_number,
nickname=nickname, occupation=occupation,
gender=gender, birthday=birthday,
postal_address=postal_address,
structured_postal_address=structured_postal_address,
email=email, im=im, relation=relation,
user_defined_field=user_defined_field,
website=website, external_id=external_id, event=event,
batch_operation=batch_operation, batch_id=batch_id,
batch_status=batch_status, text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes, etag=etag)
self.deleted = deleted
self.extended_property = extended_property or []
self.group_membership_info = group_membership_info or []
def GetPhotoLink(self):
for a_link in self.link:
if a_link.rel == PHOTO_LINK_REL:
return a_link
return None
def GetPhotoEditLink(self):
for a_link in self.link:
if a_link.rel == PHOTO_EDIT_LINK_REL:
return a_link
return None
def ContactEntryFromString(xml_string):
return atom.CreateClassFromXMLString(ContactEntry, xml_string)
class ContactsFeed(gdata.BatchFeed, gdata.LinkFinder):
"""A Google Contacts feed flavor of an Atom Feed."""
_children = gdata.BatchFeed._children.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ContactEntry])
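  # Override the entry rule so each <atom:entry> is parsed into a
  # ContactEntry rather than a plain atom.Entry.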
def __init__(self, author=None, category=None, contributor=None,
generator=None, icon=None, atom_id=None, link=None, logo=None,
rights=None, subtitle=None, title=None, updated=None,
entry=None, total_results=None, start_index=None,
items_per_page=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.BatchFeed.__init__(self, author=author, category=category,
contributor=contributor, generator=generator,
icon=icon, atom_id=atom_id, link=link,
logo=logo, rights=rights, subtitle=subtitle,
title=title, updated=updated, entry=entry,
total_results=total_results,
start_index=start_index,
items_per_page=items_per_page,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
def ContactsFeedFromString(xml_string):
return atom.CreateClassFromXMLString(ContactsFeed, xml_string)
class GroupEntry(gdata.BatchEntry):
"""Represents a contact group."""
_children = gdata.BatchEntry._children.copy()
_children['{%s}extendedProperty' % gdata.GDATA_NAMESPACE] = (
'extended_property', [gdata.ExtendedProperty])
def __init__(self, author=None, category=None, content=None,
contributor=None, atom_id=None, link=None, published=None,
rights=None, source=None, summary=None, control=None,
title=None, updated=None,
extended_property=None, batch_operation=None, batch_id=None,
batch_status=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.BatchEntry.__init__(self, author=author, category=category,
content=content,
atom_id=atom_id, link=link, published=published,
batch_operation=batch_operation,
batch_id=batch_id, batch_status=batch_status,
title=title, updated=updated)
self.extended_property = extended_property or []
def GroupEntryFromString(xml_string):
return atom.CreateClassFromXMLString(GroupEntry, xml_string)
class GroupsFeed(gdata.BatchFeed):
"""A Google contact groups feed flavor of an Atom Feed."""
_children = gdata.BatchFeed._children.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [GroupEntry])
def GroupsFeedFromString(xml_string):
return atom.CreateClassFromXMLString(GroupsFeed, xml_string)
class ProfileEntry(PersonEntry):
"""A Google Profiles flavor of an Atom Entry."""
def ProfileEntryFromString(xml_string):
"""Converts an XML string into a ProfileEntry object.
Args:
xml_string: string The XML describing a Profile entry.
Returns:
A ProfileEntry object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(ProfileEntry, xml_string)
class ProfilesFeed(gdata.BatchFeed, gdata.LinkFinder):
"""A Google Profiles feed flavor of an Atom Feed."""
_children = gdata.BatchFeed._children.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ProfileEntry])
def __init__(self, author=None, category=None, contributor=None,
generator=None, icon=None, atom_id=None, link=None, logo=None,
rights=None, subtitle=None, title=None, updated=None,
entry=None, total_results=None, start_index=None,
items_per_page=None, extension_elements=None,
extension_attributes=None, text=None):
gdata.BatchFeed.__init__(self, author=author, category=category,
contributor=contributor, generator=generator,
icon=icon, atom_id=atom_id, link=link,
logo=logo, rights=rights, subtitle=subtitle,
title=title, updated=updated, entry=entry,
total_results=total_results,
start_index=start_index,
items_per_page=items_per_page,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
text=text)
def ProfilesFeedFromString(xml_string):
"""Converts an XML string into a ProfilesFeed object.
Args:
xml_string: string The XML describing a Profiles feed.
Returns:
A ProfilesFeed object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(ProfilesFeed, xml_string)
| agpl-3.0 |
bitifirefly/edx-platform | lms/djangoapps/notification_prefs/tests.py | 137 | 10272 | import json
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from mock import Mock, patch
from notification_prefs import NOTIFICATION_PREF_KEY
from notification_prefs.views import ajax_enable, ajax_disable, ajax_status, set_subscription, UsernameCipher
from student.tests.factories import UserFactory
from edxmako.tests import mako_middleware_process_request
from openedx.core.djangoapps.user_api.models import UserPreference
from util.testing import UrlResetMixin
@override_settings(SECRET_KEY="test secret key")
class NotificationPrefViewTest(UrlResetMixin, TestCase):
INITIALIZATION_VECTOR = "\x00" * 16
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
super(NotificationPrefViewTest, self).setUp()
self.user = UserFactory.create(username="testuser")
# Tokens are intentionally hard-coded instead of computed to help us
# avoid breaking existing links.
self.tokens = {
self.user: "AAAAAAAAAAAAAAAAAAAAAA8mMQo96FZfb1YKv1R5X6s=",
# Username with length equal to AES block length to test padding
UserFactory.create(username="sixteencharsuser"):
"AAAAAAAAAAAAAAAAAAAAAPxPWCuI2Ay9TATBVnfw7eIj-hUh6erQ_-VkbDqHqm8D",
# Even longer username
UserFactory.create(username="thisusernameissoveryverylong"):
"AAAAAAAAAAAAAAAAAAAAAPECbYqPI7_W4mRF8LbTaHuHt3tNXPggZ1Bke-zDyEiZ",
# Non-ASCII username
UserFactory.create(username=u"\u4e2d\u56fd"):
"AAAAAAAAAAAAAAAAAAAAAMjfGAhZKIZsI3L-Z7nflTA="
}
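        # Token layout, as exercised by the invalid-token tests below: each
        # token is base64url(IV + AES-CBC ciphertext), where the IV is a
        # single 16-byte AES block (all zeros here, hence the leading run of
        # "A"s) and the plaintext is the username plus block padding.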
self.request_factory = RequestFactory()
def create_prefs(self):
"""Create all test preferences in the database"""
for (user, token) in self.tokens.items():
UserPreference.objects.create(user=user, key=NOTIFICATION_PREF_KEY, value=token)
def assertPrefValid(self, user):
"""Ensure that the correct preference for the user is persisted"""
pref = UserPreference.objects.get(user=user, key=NOTIFICATION_PREF_KEY)
self.assertTrue(pref) # check exists and only 1 (.get)
        # Coerce the username to a utf-8 encoded str, since we test with
        # non-ascii unicode above and the unittest framework has a hard time
        # coercing to unicode. decrypt also can't take unicode input, so
        # coerce its input to str as well.
self.assertEqual(str(user.username.encode('utf-8')), UsernameCipher().decrypt(str(pref.value)))
def assertNotPrefExists(self, user):
"""Ensure that the user does not have a persisted preference"""
self.assertFalse(
UserPreference.objects.filter(user=user, key=NOTIFICATION_PREF_KEY).exists()
)
# AJAX status view
def test_ajax_status_get_0(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), {"status": 0})
def test_ajax_status_get_1(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content), {"status": 1})
def test_ajax_status_post(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_status(request)
self.assertEqual(response.status_code, 405)
def test_ajax_status_anon_user(self):
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_status, request)
# AJAX enable view
def test_ajax_enable_get(self):
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_enable(request)
self.assertEqual(response.status_code, 405)
self.assertNotPrefExists(self.user)
def test_ajax_enable_anon_user(self):
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_enable, request)
self.assertNotPrefExists(self.user)
@patch("Crypto.Random.new")
def test_ajax_enable_success(self, mock_random_new):
mock_stream = Mock()
mock_stream.read.return_value = self.INITIALIZATION_VECTOR
mock_random_new.return_value = mock_stream
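        # Pin the IV stream returned by Crypto.Random so token generation is
        # deterministic and matches the hard-coded values in self.tokens.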
def test_user(user):
request = self.request_factory.post("dummy")
request.user = user
response = ajax_enable(request)
self.assertEqual(response.status_code, 204)
self.assertPrefValid(user)
for user in self.tokens.keys():
test_user(user)
def test_ajax_enable_already_enabled(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_enable(request)
self.assertEqual(response.status_code, 204)
self.assertPrefValid(self.user)
def test_ajax_enable_distinct_values(self):
request = self.request_factory.post("dummy")
request.user = self.user
ajax_enable(request)
other_user = UserFactory.create()
request.user = other_user
ajax_enable(request)
self.assertNotEqual(
UserPreference.objects.get(user=self.user, key=NOTIFICATION_PREF_KEY).value,
UserPreference.objects.get(user=other_user, key=NOTIFICATION_PREF_KEY).value
)
# AJAX disable view
def test_ajax_disable_get(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 405)
self.assertPrefValid(self.user)
def test_ajax_disable_anon_user(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, ajax_disable, request)
self.assertPrefValid(self.user)
def test_ajax_disable_success(self):
self.create_prefs()
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 204)
self.assertNotPrefExists(self.user)
def test_ajax_disable_already_disabled(self):
request = self.request_factory.post("dummy")
request.user = self.user
response = ajax_disable(request)
self.assertEqual(response.status_code, 204)
self.assertNotPrefExists(self.user)
# Unsubscribe view
def test_unsubscribe_post(self):
request = self.request_factory.post("dummy")
response = set_subscription(request, "dummy", subscribe=False)
self.assertEqual(response.status_code, 405)
def test_unsubscribe_invalid_token(self):
def test_invalid_token(token, message):
request = self.request_factory.get("dummy")
self.assertRaisesRegexp(Http404, "^{}$".format(message), set_subscription, request, token, False)
# Invalid base64 encoding
test_invalid_token("ZOMG INVALID BASE64 CHARS!!!", "base64url")
test_invalid_token("Non-ASCII\xff", "base64url")
test_invalid_token(self.tokens[self.user][:-1], "base64url")
# Token not long enough to contain initialization vector
test_invalid_token("AAAAAAAAAAA=", "initialization_vector")
# Token length not a multiple of AES block length
test_invalid_token(self.tokens[self.user][:-4], "aes")
# Invalid padding (ends in 0 byte)
# Encrypted value: "testuser" + "\x00" * 8
test_invalid_token("AAAAAAAAAAAAAAAAAAAAAMoazRI7ePLjEWXN1N7keLw=", "padding")
# Invalid padding (ends in byte > 16)
# Encrypted value: "testusertestuser"
test_invalid_token("AAAAAAAAAAAAAAAAAAAAAC6iLXGhjkFytJoJSBJZzJ4=", "padding")
# Invalid padding (entire string is padding)
# Encrypted value: "\x10" * 16
test_invalid_token("AAAAAAAAAAAAAAAAAAAAANRGw8HDEmlcLVFawgY9wI8=", "padding")
# Nonexistent user
# Encrypted value: "nonexistentuser\x01"
test_invalid_token("AAAAAAAAAAAAAAAAAAAAACpyUxTGIrUjnpuUsNi7mAY=", "username")
def test_unsubscribe_success(self):
self.create_prefs()
def test_user(user):
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
response = set_subscription(request, self.tokens[user], subscribe=False)
self.assertEqual(response.status_code, 200)
self.assertNotPrefExists(user)
for user in self.tokens.keys():
test_user(user)
def test_unsubscribe_twice(self):
self.create_prefs()
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
set_subscription(request, self.tokens[self.user], False)
response = set_subscription(request, self.tokens[self.user], subscribe=False)
self.assertEqual(response.status_code, 200)
self.assertNotPrefExists(self.user)
def test_resubscribe_success(self):
def test_user(user):
# start without a pref key
self.assertFalse(UserPreference.objects.filter(user=user, key=NOTIFICATION_PREF_KEY))
request = self.request_factory.get("dummy")
request.user = AnonymousUser()
mako_middleware_process_request(request)
response = set_subscription(request, self.tokens[user], subscribe=True)
self.assertEqual(response.status_code, 200)
self.assertPrefValid(user)
for user in self.tokens.keys():
test_user(user)
| agpl-3.0 |
ben-hopps/nupic | tests/unit/nupic/research/spatial_pooler_cpp_api_test.py | 35 | 1320 | #! /usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import unittest2 as unittest
from nupic.bindings.algorithms import SpatialPooler as CPPSpatialPooler
import spatial_pooler_py_api_test
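# Reuse the pure-Python SpatialPooler API test suite against the C++
# implementation: swap in the C++ class for the one the test module
# references, then alias the TestCase under a new name so unittest
# discovery runs it separately.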
spatial_pooler_py_api_test.SpatialPooler = CPPSpatialPooler
SpatialPoolerCPPAPITest = spatial_pooler_py_api_test.SpatialPoolerAPITest
if __name__ == "__main__":
unittest.main()
| agpl-3.0 |
0x0all/nupic | py/nupic/encoders/multi.py | 2 | 5878 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from nupic.encoders.base import Encoder
from nupic.encoders.scalar import ScalarEncoder
from nupic.encoders.adaptivescalar import AdaptiveScalarEncoder
from nupic.encoders.date import DateEncoder
from nupic.encoders.logenc import LogEncoder
from nupic.encoders.category import CategoryEncoder
from nupic.encoders.sdrcategory import SDRCategoryEncoder
from nupic.encoders.sdrrandom import SDRRandomEncoder
from nupic.encoders.nonuniformscalar import NonUniformScalarEncoder
from nupic.encoders.delta import DeltaEncoder
from nupic.encoders.scalarspace import ScalarSpaceEncoder
from nupic.encoders.vector import VectorEncoder, SimpleVectorEncoder, VectorEncoderOPF
from nupic.encoders.passthru import PassThruEncoder
# multiencoder must be imported last because it imports * from this module!
from nupic.encoders.utils import bitsToString
from nupic.encoders.random_distributed_scalar import RandomDistributedScalarEncoder
class MultiEncoder(Encoder):
"""A MultiEncoder encodes a dictionary or object with
multiple components. A MultiEncode contains a number
of sub-encoders, each of which encodes a separate component."""
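  # A minimal usage sketch (hypothetical field names and encoder parameters;
  # assumes numpy and a datetime value `now`):
  #
  #   encoder = MultiEncoder()
  #   encoder.addEncoder('consumption',
  #                      ScalarEncoder(w=21, minval=0.0, maxval=100.0, n=50,
  #                                    name='consumption'))
  #   encoder.addEncoder('timestamp', DateEncoder(timeOfDay=(21, 1)))
  #   output = numpy.zeros(encoder.getWidth(), dtype='uint8')
  #   encoder.encodeIntoArray({'consumption': 42.0, 'timestamp': now}, output)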
def __init__(self, encoderDescriptions=None):
self.width = 0
self.encoders = []
self.description = []
self.name = ''
if encoderDescriptions is not None:
self.addMultipleEncoders(encoderDescriptions)
############################################################################
def setFieldStats(self, fieldName, fieldStatistics ):
for (name, encoder, offset) in self.encoders:
encoder.setFieldStats(name, fieldStatistics)
############################################################################
def addEncoder(self, name, encoder):
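    # Record the sub-encoder together with its bit offset into the
    # concatenated output, shift its description entries by the running
    # width, and invalidate the cached flattened lists.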
self.encoders.append((name, encoder, self.width))
for d in encoder.getDescription():
self.description.append((d[0], d[1] + self.width))
self.width += encoder.getWidth()
self._flattenedEncoderList = None
self._flattenedFieldTypeList = None
############################################################################
def encodeIntoArray(self, obj, output):
for name, encoder, offset in self.encoders:
encoder.encodeIntoArray(self._getInputValue(obj, name), output[offset:])
############################################################################
def getDescription(self):
return self.description
############################################################################
def getWidth(self):
return self.width
def setLearning(self,learningEnabled):
encoders = self.getEncoderList()
for encoder in encoders:
encoder.setLearning(learningEnabled)
return
############################################################################
def encodeField(self, fieldName, value):
for name, encoder, offset in self.encoders:
if name == fieldName:
return encoder.encode(value)
############################################################################
def encodeEachField(self, inputRecord):
encodings = []
for name, encoder, offset in self.encoders:
encodings.append(encoder.encode(getattr(inputRecord, name)))
return encodings
############################################################################
def addMultipleEncoders(self, fieldEncodings):
"""
fieldEncodings -- a dict of dicts, mapping field names to the field params
dict.
Each field params dict has the following keys
1) data fieldname that matches the key ('fieldname')
2) an encoder type ('type')
3) and the encoder params (all other keys)
For example,
fieldEncodings={
'dateTime': dict(fieldname='dateTime', type='DateEncoder',
timeOfDay=(5,5)),
'attendeeCount': dict(fieldname='attendeeCount', type='ScalarEncoder',
name='attendeeCount', minval=0, maxval=250,
clipInput=True, w=5, resolution=10),
'consumption': dict(fieldname='consumption',type='ScalarEncoder',
name='consumption', minval=0,maxval=110,
clipInput=True, w=5, resolution=5),
}
"""
# Sort the encoders so that they end up in a controlled order
encoderList = sorted(fieldEncodings.items())
for key, fieldParams in encoderList:
if ':' not in key and fieldParams is not None:
fieldParams = fieldParams.copy()
fieldName = fieldParams.pop('fieldname')
encoderName = fieldParams.pop('type')
try:
self.addEncoder(fieldName, eval(encoderName)(**fieldParams))
except TypeError, e:
print ("#### Error in constructing %s encoder. Possibly missing "
"some required constructor parameters. Parameters "
"that were provided are: %s" % (encoderName, fieldParams))
raise
| gpl-3.0 |
J861449197/edx-platform | common/djangoapps/django_comment_common/utils.py | 104 | 2909 | from django_comment_common.models import Role
class ThreadContext(object):
""" An enumeration that represents the context of a thread. Used primarily by the comments service. """
STANDALONE = 'standalone'
COURSE = 'course'
_STUDENT_ROLE_PERMISSIONS = ["vote", "update_thread", "follow_thread", "unfollow_thread",
"update_comment", "create_sub_comment", "unvote", "create_thread",
"follow_commentable", "unfollow_commentable", "create_comment", ]
_MODERATOR_ROLE_PERMISSIONS = ["edit_content", "delete_thread", "openclose_thread",
"endorse_comment", "delete_comment", "see_all_cohorts"]
_ADMINISTRATOR_ROLE_PERMISSIONS = ["manage_moderator"]
def _save_forum_role(course_key, name):
"""
    Get or create the named role. If the role already exists, re-save its
    course_id so it matches the course key that was actually passed in.
"""
role, created = Role.objects.get_or_create(name=name, course_id=course_key)
if created is False:
role.course_id = course_key
role.save()
return role
def seed_permissions_roles(course_key):
"""
Create and assign permissions for forum roles
"""
administrator_role = _save_forum_role(course_key, "Administrator")
moderator_role = _save_forum_role(course_key, "Moderator")
community_ta_role = _save_forum_role(course_key, "Community TA")
student_role = _save_forum_role(course_key, "Student")
for per in _STUDENT_ROLE_PERMISSIONS:
student_role.add_permission(per)
for per in _MODERATOR_ROLE_PERMISSIONS:
moderator_role.add_permission(per)
for per in _ADMINISTRATOR_ROLE_PERMISSIONS:
administrator_role.add_permission(per)
moderator_role.inherit_permissions(student_role)
# For now, Community TA == Moderator, except for the styling.
community_ta_role.inherit_permissions(moderator_role)
administrator_role.inherit_permissions(moderator_role)
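# Usage sketch (hypothetical course id; assumes opaque_keys is importable
# here):
#
#   from opaque_keys.edx.locator import CourseLocator
#   seed_permissions_roles(CourseLocator('edX', 'DemoX', '2015_T1'))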
def are_permissions_roles_seeded(course_id):
"""
Returns whether the forums permissions for a course have been provisioned in
the database
"""
try:
administrator_role = Role.objects.get(name="Administrator", course_id=course_id)
moderator_role = Role.objects.get(name="Moderator", course_id=course_id)
student_role = Role.objects.get(name="Student", course_id=course_id)
    except Role.DoesNotExist:
return False
for per in _STUDENT_ROLE_PERMISSIONS:
if not student_role.has_permission(per):
return False
for per in _MODERATOR_ROLE_PERMISSIONS + _STUDENT_ROLE_PERMISSIONS:
if not moderator_role.has_permission(per):
return False
for per in _ADMINISTRATOR_ROLE_PERMISSIONS + _MODERATOR_ROLE_PERMISSIONS + _STUDENT_ROLE_PERMISSIONS:
if not administrator_role.has_permission(per):
return False
return True
| agpl-3.0 |
armab/st2 | st2common/tests/unit/test_rbac_resolvers_action.py | 3 | 24521 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.rbac.types import PermissionType
from st2common.rbac.types import ResourceType
from st2common.persistence.auth import User
from st2common.persistence.rbac import Role
from st2common.persistence.rbac import UserRoleAssignment
from st2common.persistence.rbac import PermissionGrant
from st2common.persistence.action import Action
from st2common.models.db.auth import UserDB
from st2common.models.db.rbac import RoleDB
from st2common.models.db.rbac import UserRoleAssignmentDB
from st2common.models.db.rbac import PermissionGrantDB
from st2common.models.db.action import ActionDB
from st2common.models.api.action import ActionAPI
from st2common.rbac.resolvers import ActionPermissionsResolver
from tests.unit.test_rbac_resolvers import BasePermissionsResolverTestCase
__all__ = [
'ActionPermissionsResolverTestCase'
]
class ActionPermissionsResolverTestCase(BasePermissionsResolverTestCase):
def setUp(self):
super(ActionPermissionsResolverTestCase, self).setUp()
# Create some mock users
user_1_db = UserDB(name='1_role_action_pack_grant')
user_1_db = User.add_or_update(user_1_db)
self.users['custom_role_action_pack_grant'] = user_1_db
user_2_db = UserDB(name='1_role_action_grant')
user_2_db = User.add_or_update(user_2_db)
self.users['custom_role_action_grant'] = user_2_db
user_3_db = UserDB(name='custom_role_pack_action_all_grant')
user_3_db = User.add_or_update(user_3_db)
self.users['custom_role_pack_action_all_grant'] = user_3_db
user_4_db = UserDB(name='custom_role_action_all_grant')
user_4_db = User.add_or_update(user_4_db)
self.users['custom_role_action_all_grant'] = user_4_db
user_5_db = UserDB(name='custom_role_action_execute_grant')
user_5_db = User.add_or_update(user_5_db)
self.users['custom_role_action_execute_grant'] = user_5_db
user_6_db = UserDB(name='action_pack_action_create_grant')
user_6_db = User.add_or_update(user_6_db)
self.users['action_pack_action_create_grant'] = user_6_db
user_7_db = UserDB(name='action_pack_action_all_grant')
user_7_db = User.add_or_update(user_7_db)
self.users['action_pack_action_all_grant'] = user_7_db
user_8_db = UserDB(name='action_action_create_grant')
user_8_db = User.add_or_update(user_8_db)
self.users['action_action_create_grant'] = user_8_db
user_9_db = UserDB(name='action_action_all_grant')
user_9_db = User.add_or_update(user_9_db)
self.users['action_action_all_grant'] = user_9_db
user_10_db = UserDB(name='custom_role_action_list_grant')
user_10_db = User.add_or_update(user_10_db)
self.users['custom_role_action_list_grant'] = user_10_db
# Create some mock resources on which permissions can be granted
action_1_db = ActionDB(pack='test_pack_1', name='action1', entry_point='',
runner_type={'name': 'run-local'})
action_1_db = Action.add_or_update(action_1_db)
self.resources['action_1'] = action_1_db
action_2_db = ActionDB(pack='test_pack_1', name='action2', entry_point='',
runner_type={'name': 'run-local'})
        action_2_db = Action.add_or_update(action_2_db)
self.resources['action_2'] = action_2_db
action_3_db = ActionDB(pack='test_pack_2', name='action3', entry_point='',
runner_type={'name': 'run-local'})
action_3_db = Action.add_or_update(action_3_db)
self.resources['action_3'] = action_3_db
# Create some mock roles with associated permission grants
# Custom role 2 - one grant on parent pack
# "action_view" on pack_1
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_VIEW])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_3_db = RoleDB(name='custom_role_action_pack_grant',
permission_grants=permission_grants)
role_3_db = Role.add_or_update(role_3_db)
self.roles['custom_role_action_pack_grant'] = role_3_db
# Custom role 4 - one grant on action
# "action_view" on action_3
grant_db = PermissionGrantDB(resource_uid=self.resources['action_3'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_VIEW])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_4_db = RoleDB(name='custom_role_action_grant', permission_grants=permission_grants)
role_4_db = Role.add_or_update(role_4_db)
self.roles['custom_role_action_grant'] = role_4_db
# Custom role - "action_all" grant on a parent action pack
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_4_db = RoleDB(name='custom_role_pack_action_all_grant',
permission_grants=permission_grants)
role_4_db = Role.add_or_update(role_4_db)
self.roles['custom_role_pack_action_all_grant'] = role_4_db
# Custom role - "action_all" grant on action
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_4_db = RoleDB(name='custom_role_action_all_grant', permission_grants=permission_grants)
role_4_db = Role.add_or_update(role_4_db)
self.roles['custom_role_action_all_grant'] = role_4_db
# Custom role - "action_execute" on action_1
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_EXECUTE])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_5_db = RoleDB(name='custom_role_action_execute_grant',
permission_grants=permission_grants)
role_5_db = Role.add_or_update(role_5_db)
self.roles['custom_role_action_execute_grant'] = role_5_db
# Custom role - "action_create" grant on pack_1
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_CREATE])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_6_db = RoleDB(name='action_pack_action_create_grant',
permission_grants=permission_grants)
role_6_db = Role.add_or_update(role_6_db)
self.roles['action_pack_action_create_grant'] = role_6_db
# Custom role - "action_all" grant on pack_1
grant_db = PermissionGrantDB(resource_uid=self.resources['pack_1'].get_uid(),
resource_type=ResourceType.PACK,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_7_db = RoleDB(name='action_pack_action_all_grant',
permission_grants=permission_grants)
role_7_db = Role.add_or_update(role_7_db)
self.roles['action_pack_action_all_grant'] = role_7_db
# Custom role - "action_create" grant on action_1
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_CREATE])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_8_db = RoleDB(name='action_action_create_grant',
permission_grants=permission_grants)
role_8_db = Role.add_or_update(role_8_db)
self.roles['action_action_create_grant'] = role_8_db
# Custom role - "action_all" grant on action_1
grant_db = PermissionGrantDB(resource_uid=self.resources['action_1'].get_uid(),
resource_type=ResourceType.ACTION,
permission_types=[PermissionType.ACTION_ALL])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_9_db = RoleDB(name='action_action_all_grant',
permission_grants=permission_grants)
role_9_db = Role.add_or_update(role_9_db)
self.roles['action_action_all_grant'] = role_9_db
# Custom role - "action_list" grant
grant_db = PermissionGrantDB(resource_uid=None,
resource_type=None,
permission_types=[PermissionType.ACTION_LIST])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_10_db = RoleDB(name='custom_role_action_list_grant',
permission_grants=permission_grants)
role_10_db = Role.add_or_update(role_10_db)
self.roles['custom_role_action_list_grant'] = role_10_db
# Create some mock role assignments
user_db = self.users['custom_role_action_pack_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_pack_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_grant']
role_assignment_db = UserRoleAssignmentDB(user=user_db.name,
role=self.roles['custom_role_action_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_pack_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_pack_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_execute_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_execute_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_pack_action_create_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_pack_action_create_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_pack_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_pack_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_action_create_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_action_create_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['action_action_all_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['action_action_all_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
user_db = self.users['custom_role_action_list_grant']
role_assignment_db = UserRoleAssignmentDB(
user=user_db.name,
role=self.roles['custom_role_action_list_grant'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
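        # The tests below exercise two resolver behaviours: permission grants
        # made on a pack cascade to the resources inside that pack, and the
        # broader permission types imply narrower ones (e.g. "action_all"
        # covers every action permission type and "action_execute" also
        # grants "action_view").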
def test_user_has_permission(self):
resolver = ActionPermissionsResolver()
# Admin user, should always return true
user_db = self.users['admin']
self.assertTrue(resolver.user_has_permission(user_db=user_db,
permission_type=PermissionType.ACTION_LIST))
# Observer, should always return true for VIEW permissions
user_db = self.users['observer']
self.assertTrue(resolver.user_has_permission(user_db=user_db,
permission_type=PermissionType.ACTION_LIST))
# No roles, should return false for everything
user_db = self.users['no_roles']
self.assertFalse(resolver.user_has_permission(user_db=user_db,
permission_type=PermissionType.ACTION_LIST))
# Custom role with no permission grants, should return false for everything
user_db = self.users['1_custom_role_no_permissions']
self.assertFalse(resolver.user_has_permission(user_db=user_db,
permission_type=PermissionType.ACTION_LIST))
# Custom role with "action_list" grant
user_db = self.users['custom_role_action_list_grant']
self.assertTrue(resolver.user_has_permission(user_db=user_db,
permission_type=PermissionType.ACTION_LIST))
def test_user_has_resource_api_permission(self):
resolver = ActionPermissionsResolver()
# Admin user, should always return true
user_db = self.users['admin']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertTrue(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
# Observer, should return false
user_db = self.users['observer']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertFalse(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
# No roles, should return false
user_db = self.users['no_roles']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertFalse(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
# Custom role with no permission grants, should return false
user_db = self.users['1_custom_role_no_permissions']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertFalse(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
# Custom role with "action_create" grant on parent pack
user_db = self.users['action_pack_action_create_grant']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertTrue(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
# Custom role with "action_all" grant on the parent pack
user_db = self.users['action_pack_action_all_grant']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertTrue(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
# Custom role with "action_create" grant directly on the resource
user_db = self.users['action_action_create_grant']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertTrue(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
# Custom role with "action_all" grant directly on the resource
user_db = self.users['action_action_all_grant']
resource_db = self.resources['action_1']
resource_api = ActionAPI.from_model(resource_db)
self.assertTrue(resolver.user_has_resource_api_permission(
user_db=user_db,
resource_api=resource_api,
permission_type=PermissionType.ACTION_CREATE))
def test_user_has_resource_db_permission(self):
resolver = ActionPermissionsResolver()
all_permission_types = PermissionType.get_valid_permissions_for_resource_type(
ResourceType.ACTION)
# Admin user, should always return true
resource_db = self.resources['action_1']
user_db = self.users['admin']
self.assertTrue(self._user_has_resource_db_permissions(
resolver=resolver,
user_db=user_db,
resource_db=resource_db,
permission_types=all_permission_types))
# Observer, should always return true for VIEW permission
user_db = self.users['observer']
self.assertTrue(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_1'],
permission_type=PermissionType.ACTION_VIEW))
self.assertTrue(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_2'],
permission_type=PermissionType.ACTION_VIEW))
self.assertFalse(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_1'],
permission_type=PermissionType.ACTION_MODIFY))
self.assertFalse(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_2'],
permission_type=PermissionType.ACTION_DELETE))
# No roles, should return false for everything
user_db = self.users['no_roles']
self.assertFalse(self._user_has_resource_db_permissions(
resolver=resolver,
user_db=user_db,
resource_db=resource_db,
permission_types=all_permission_types))
# Custom role with no permission grants, should return false for everything
user_db = self.users['1_custom_role_no_permissions']
self.assertFalse(self._user_has_resource_db_permissions(
resolver=resolver,
user_db=user_db,
resource_db=resource_db,
permission_types=all_permission_types))
# Custom role with unrelated permission grant to parent pack
user_db = self.users['custom_role_pack_grant']
self.assertFalse(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_1'],
permission_type=PermissionType.ACTION_VIEW))
self.assertFalse(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_1'],
permission_type=PermissionType.ACTION_EXECUTE))
# Custom role with with grant on the parent pack
user_db = self.users['custom_role_action_pack_grant']
self.assertTrue(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_1'],
permission_type=PermissionType.ACTION_VIEW))
self.assertTrue(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_2'],
permission_type=PermissionType.ACTION_VIEW))
self.assertFalse(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_2'],
permission_type=PermissionType.ACTION_EXECUTE))
# Custom role with a direct grant on action
user_db = self.users['custom_role_action_grant']
self.assertTrue(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_3'],
permission_type=PermissionType.ACTION_VIEW))
self.assertFalse(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_2'],
permission_type=PermissionType.ACTION_EXECUTE))
self.assertFalse(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=self.resources['action_3'],
permission_type=PermissionType.ACTION_EXECUTE))
# Custom role - "action_all" grant on the action parent pack
user_db = self.users['custom_role_pack_action_all_grant']
resource_db = self.resources['action_1']
self.assertTrue(self._user_has_resource_db_permissions(
resolver=resolver,
user_db=user_db,
resource_db=resource_db,
permission_types=all_permission_types))
# Custom role - "action_all" grant on the action
user_db = self.users['custom_role_action_all_grant']
resource_db = self.resources['action_1']
self.assertTrue(self._user_has_resource_db_permissions(
resolver=resolver,
user_db=user_db,
resource_db=resource_db,
permission_types=all_permission_types))
# Custom role - "action_execute" grant on action_1
user_db = self.users['custom_role_action_execute_grant']
resource_db = self.resources['action_1']
self.assertTrue(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=resource_db,
permission_type=PermissionType.ACTION_EXECUTE))
# "execute" also grants "view"
self.assertTrue(resolver.user_has_resource_db_permission(
user_db=user_db,
resource_db=resource_db,
permission_type=PermissionType.ACTION_VIEW))
permission_types = [
PermissionType.ACTION_CREATE,
PermissionType.ACTION_MODIFY,
PermissionType.ACTION_DELETE
]
self.assertFalse(self._user_has_resource_db_permissions(
resolver=resolver,
user_db=user_db,
resource_db=resource_db,
permission_types=permission_types))
| apache-2.0 |
yawnosnorous/python-for-android | python3-alpha/python3-src/Lib/test/test_re.py | 49 | 42150 | from test.support import verbose, run_unittest
import re
from re import Scanner
import sys
import string
import traceback
from weakref import proxy
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefully modeled to
# cover most of the code.
import unittest
class ReTests(unittest.TestCase):
def test_weakref(self):
s = 'QabbbcR'
x = re.compile('ab+c')
y = proxy(x)
self.assertEqual(x.findall('QabbbcR'), y.findall('QabbbcR'))
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertEqual(re.search('x', 'aaa'), None)
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertEqual(re.match('a+', 'xxx'), None)
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
# fails for group followed by other escape
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
# Test for sub() on escaped characters
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_bug_1661(self):
# Verify that flags do not get silently ignored with compiled patterns
pattern = re.compile('.')
self.assertRaises(ValueError, re.match, pattern, 'A', re.I)
self.assertRaises(ValueError, re.search, pattern, 'A', re.I)
self.assertRaises(ValueError, re.findall, pattern, 'A', re.I)
self.assertRaises(ValueError, re.compile, pattern, re.I)
def test_bug_3629(self):
# A regex that triggered a bug in the sre-code validator
re.compile("(?P<quote>)(?(quote))")
def test_sub_template_numeric_escape(self):
# bug 776311 and friends
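        # Template rules exercised here: \0 followed by up to two octal
        # digits is an octal escape, a leading digit 1-9 starts a group
        # reference (an error for this group-less pattern), and three-digit
        # octal values are taken modulo 256 (\400 -> \000, \777 -> \377).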
self.assertEqual(re.sub('x', r'\0', 'x'), '\0')
self.assertEqual(re.sub('x', r'\000', 'x'), '\000')
self.assertEqual(re.sub('x', r'\001', 'x'), '\001')
self.assertEqual(re.sub('x', r'\008', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\009', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\111', 'x'), '\111')
self.assertEqual(re.sub('x', r'\117', 'x'), '\117')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\1111')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\111' + '1')
self.assertEqual(re.sub('x', r'\00', 'x'), '\x00')
self.assertEqual(re.sub('x', r'\07', 'x'), '\x07')
self.assertEqual(re.sub('x', r'\08', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\09', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\0a', 'x'), '\0' + 'a')
self.assertEqual(re.sub('x', r'\400', 'x'), '\0')
self.assertEqual(re.sub('x', r'\777', 'x'), '\377')
self.assertRaises(re.error, re.sub, 'x', r'\1', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\8', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\9', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\11', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\18', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\1a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\90', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\99', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\118', 'x') # r'\11' + '8'
self.assertRaises(re.error, re.sub, 'x', r'\11a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\181', 'x') # r'\18' + '1'
self.assertRaises(re.error, re.sub, 'x', r'\800', 'x') # r'\80' + '0'
# in python2.3 (etc), these loop endlessly in sre_parser.py
self.assertEqual(re.sub('(((((((((((x)))))))))))', r'\11', 'x'), 'x')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'),
'xz8')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'),
'xza')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
# Test for empty sub() behaviour, see SF bug #462270
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<-1>', 'xx')
def test_re_subn(self):
self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))
def test_re_split(self):
self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c'])
self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:*)", ":a:b::c"),
['', ':', 'a', ':', 'b', '::', 'c'])
self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:)*", ":a:b::c"),
['', ':', 'a', ':', 'b', ':', 'c'])
self.assertEqual(re.split("([b:]+)", ":a:b::c"),
['', ':', 'a', ':b::', 'c'])
self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
['', None, ':', 'a', None, ':', '', 'b', None, '',
None, '::', 'c'])
self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
['', 'a', '', '', 'c'])
def test_qualified_re_split(self):
self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
self.assertEqual(re.split("(:)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
self.assertEqual(re.split("(:*)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
def test_re_findall(self):
self.assertEqual(re.findall(":+", "abc"), [])
self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""),
(":", ":"),
(":", "::")])
def test_bug_117612(self):
self.assertEqual(re.findall(r"(a|(b))", "aba"),
[("a", ""),("b", "b"),("a", "")])
def test_re_match(self):
self.assertEqual(re.match('a', 'a').groups(), ())
self.assertEqual(re.match('(a)', 'a').groups(), ('a',))
self.assertEqual(re.match(r'(a)', 'a').group(0), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a'))
pat = re.compile('((a)|(b))(c)?')
self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))
# A single group
m = re.match('(a)', 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(1), 'a')
self.assertEqual(m.group(1, 1), ('a', 'a'))
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
(None, 'b', None))
self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))
def test_re_groupref_exists(self):
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
('(', 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None)
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None)
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
('a', 'b'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
('a', ''))
# Tests for bug #1177831: exercise groups other than the first group
p = re.compile('(?P<g1>a)(?P<g2>b)?((?(g2)c|d))')
self.assertEqual(p.match('abc').groups(),
('a', 'b', 'c'))
self.assertEqual(p.match('ad').groups(),
('a', None, 'd'))
self.assertEqual(p.match('abd'), None)
self.assertEqual(p.match('ac'), None)
def test_re_groupref(self):
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
('|', 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None)
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None)
self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
('a', 'a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
(None, None))
def test_groupdict(self):
self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
'first second').groupdict(),
{'first':'first', 'second':'second'})
def test_expand(self):
self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
"first second")
.expand(r"\2 \1 \g<second> \g<first>"),
"second first second first")
def test_repeat_minmax(self):
self.assertEqual(re.match("^(\w){1}$", "abc"), None)
self.assertEqual(re.match("^(\w){1}?$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None)
self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^x{1}$", "xxx"), None)
self.assertEqual(re.match("^x{1}?$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertEqual(re.match("^x{}$", "xxx"), None)
self.assertNotEqual(re.match("^x{}$", "x{}"), None)
def test_getattr(self):
self.assertEqual(re.compile("(?i)(a)(b)").pattern, "(?i)(a)(b)")
self.assertEqual(re.compile("(?i)(a)(b)").flags, re.I | re.U)
self.assertEqual(re.compile("(?i)(a)(b)").groups, 2)
self.assertEqual(re.compile("(?i)(a)(b)").groupindex, {})
self.assertEqual(re.compile("(?i)(?P<first>a)(?P<other>b)").groupindex,
{'first': 1, 'other': 2})
self.assertEqual(re.match("(a)", "a").pos, 0)
self.assertEqual(re.match("(a)", "a").endpos, 1)
self.assertEqual(re.match("(a)", "a").string, "a")
self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
self.assertNotEqual(re.match("(a)", "a").re, None)
def test_special_escapes(self):
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a").group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.LOCALE).group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.UNICODE).group(0), "1aa! a")
def test_bigcharset(self):
self.assertEqual(re.match("([\u2222\u2223])",
"\u2222").group(1), "\u2222")
self.assertEqual(re.match("([\u2222\u2223])",
"\u2222", re.UNICODE).group(1), "\u2222")
def test_anyall(self):
self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
"a\nb")
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
def test_non_consuming(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
def test_ignore_case(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")
def test_category(self):
self.assertEqual(re.match(r"(\s)", " ").group(1), " ")
def test_getlower(self):
import _sre
self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
def test_not_literal(self):
self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")
def test_search_coverage(self):
self.assertEqual(re.search("\s(b)", " b").group(1), "b")
self.assertEqual(re.search("a\s", "a ").group(0), "a ")
def assertMatch(self, pattern, text, match=None, span=None,
matcher=re.match):
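        # Helper for the escape tests below: assert that matcher(pattern, text)
        # succeeds with the expected matched string and span.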
if match is None and span is None:
# the pattern matches the whole text
match = text
span = (0, len(text))
elif match is None or span is None:
raise ValueError('If match is not None, span should be specified '
'(and vice versa).')
m = matcher(pattern, text)
self.assertTrue(m)
self.assertEqual(m.group(), match)
self.assertEqual(m.span(), span)
def test_re_escape(self):
alnum_chars = string.ascii_letters + string.digits
p = ''.join(chr(i) for i in range(256))
for c in p:
if c in alnum_chars:
self.assertEqual(re.escape(c), c)
elif c == '\x00':
self.assertEqual(re.escape(c), '\\000')
else:
self.assertEqual(re.escape(c), '\\' + c)
self.assertMatch(re.escape(c), c)
self.assertMatch(re.escape(p), p)
def test_re_escape_byte(self):
alnum_chars = (string.ascii_letters + string.digits).encode('ascii')
p = bytes(range(256))
for i in p:
b = bytes([i])
if b in alnum_chars:
self.assertEqual(re.escape(b), b)
elif i == 0:
self.assertEqual(re.escape(b), b'\\000')
else:
self.assertEqual(re.escape(b), b'\\' + b)
self.assertMatch(re.escape(b), b)
self.assertMatch(re.escape(p), p)
def test_re_escape_non_ascii(self):
s = 'xxx\u2620\u2620\u2620xxx'
s_escaped = re.escape(s)
self.assertEqual(s_escaped, 'xxx\\\u2620\\\u2620\\\u2620xxx')
self.assertMatch(s_escaped, s)
self.assertMatch('.%s+.' % re.escape('\u2620'), s,
'x\u2620\u2620\u2620x', (2, 7), re.search)
def test_re_escape_non_ascii_bytes(self):
b = 'y\u2620y\u2620y'.encode('utf-8')
b_escaped = re.escape(b)
self.assertEqual(b_escaped, b'y\\\xe2\\\x98\\\xa0y\\\xe2\\\x98\\\xa0y')
self.assertMatch(b_escaped, b)
res = re.findall(re.escape('\u2620'.encode('utf-8')), b)
self.assertEqual(len(res), 2)
def pickle_test(self, pickle):
oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)')
s = pickle.dumps(oldpat)
newpat = pickle.loads(s)
self.assertEqual(oldpat, newpat)
def test_constants(self):
self.assertEqual(re.I, re.IGNORECASE)
self.assertEqual(re.L, re.LOCALE)
self.assertEqual(re.M, re.MULTILINE)
self.assertEqual(re.S, re.DOTALL)
self.assertEqual(re.X, re.VERBOSE)
def test_flags(self):
for flag in [re.I, re.M, re.X, re.S, re.L]:
self.assertNotEqual(re.compile('^pattern$', flag), None)
def test_sre_character_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None)
self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None)
self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None)
self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None)
self.assertRaises(re.error, re.match, "\911", "")
def test_sre_character_class_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertNotEqual(re.match(r"[\%03o]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\%03o0]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\%03o8]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\x%02x]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\x%02x0]" % i, chr(i)), None)
self.assertNotEqual(re.match(r"[\x%02xz]" % i, chr(i)), None)
self.assertRaises(re.error, re.match, "[\911]", "")
def test_bug_113254(self):
self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))
def test_bug_527371(self):
# bug described in patches 527371/672491
self.assertEqual(re.match(r'(a)?a','a').lastindex, None)
self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
self.assertEqual(re.match("((a))", "a").lastindex, 1)
def test_bug_545855(self):
# bug 545855 -- This pattern failed to cause a compile error as it
# should, instead provoking a TypeError.
self.assertRaises(re.error, re.compile, 'foo[a-')
def test_bug_418626(self):
        # bugs 418626 et al. -- Testing Greg Chapman's addition of op code
# SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of
# pattern '*?' on a long string.
self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
20003)
self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
# non-simple '*?' still used to hit the recursion limit, before the
# non-recursive scheme was implemented.
self.assertEqual(re.search('(a|b)*?c', 10000*'ab'+'cd').end(0), 20001)
def test_bug_612074(self):
pat="["+re.escape("\u2039")+"]"
self.assertEqual(re.compile(pat) and 1, 1)
def test_stack_overflow(self):
# nasty cases that used to overflow the straightforward recursive
# implementation of repeated groups.
self.assertEqual(re.match('(x)*', 50000*'x').group(1), 'x')
self.assertEqual(re.match('(x)*y', 50000*'x'+'y').group(1), 'x')
self.assertEqual(re.match('(x)*?y', 50000*'x'+'y').group(1), 'x')
def test_scanner(self):
def s_ident(scanner, token): return token
def s_operator(scanner, token): return "op%s" % token
def s_float(scanner, token): return float(token)
def s_int(scanner, token): return int(token)
scanner = Scanner([
(r"[a-zA-Z_]\w*", s_ident),
(r"\d+\.\d*", s_float),
(r"\d+", s_int),
(r"=|\+|-|\*|/", s_operator),
(r"\s+", None),
])
self.assertNotEqual(scanner.scanner.scanner("").pattern, None)
self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
(['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
'op+', 'bar'], ''))
def test_bug_448951(self):
# bug 448951 (similar to 429357, but with single char match)
# (Also test greedy matches.)
for op in '','?','*':
self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
(None, None))
self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
('a:', 'a'))
def test_bug_725106(self):
# capturing groups in alternatives in repeats
self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
('b', None))
def test_bug_725149(self):
# mark_stack_base restoring before restoring marks
self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
('a', None))
self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
('a', None, None))
def test_bug_764548(self):
# bug 764548, re.compile() barfs on str/unicode subclasses
class my_unicode(str): pass
pat = re.compile(my_unicode("abc"))
self.assertEqual(pat.match("xyz"), None)
def test_finditer(self):
iter = re.finditer(r":+", "a:b::c:::d")
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
def test_bug_926075(self):
self.assertTrue(re.compile('bug_926075') is not
re.compile(b'bug_926075'))
def test_bug_931848(self):
pattern = eval('"[\u002E\u3002\uFF0E\uFF61]"')
self.assertEqual(re.compile(pattern).split("a.b.c"),
['a','b','c'])
def test_bug_581080(self):
iter = re.finditer(r"\s", "a b")
self.assertEqual(next(iter).span(), (1,2))
self.assertRaises(StopIteration, next, iter)
scanner = re.compile(r"\s").scanner("a b")
self.assertEqual(scanner.search().span(), (1, 2))
self.assertEqual(scanner.search(), None)
def test_bug_817234(self):
iter = re.finditer(r".*", "asdf")
self.assertEqual(next(iter).span(), (0, 4))
self.assertEqual(next(iter).span(), (4, 4))
self.assertRaises(StopIteration, next, iter)
def test_bug_6561(self):
# '\d' should match characters in Unicode category 'Nd'
# (Number, Decimal Digit), but not those in 'Nl' (Number,
# Letter) or 'No' (Number, Other).
decimal_digits = [
'\u0037', # '\N{DIGIT SEVEN}', category 'Nd'
'\u0e58', # '\N{THAI DIGIT SIX}', category 'Nd'
'\uff10', # '\N{FULLWIDTH DIGIT ZERO}', category 'Nd'
]
for x in decimal_digits:
self.assertEqual(re.match('^\d$', x).group(0), x)
not_decimal_digits = [
'\u2165', # '\N{ROMAN NUMERAL SIX}', category 'Nl'
'\u3039', # '\N{HANGZHOU NUMERAL TWENTY}', category 'Nl'
'\u2082', # '\N{SUBSCRIPT TWO}', category 'No'
'\u32b4', # '\N{CIRCLED NUMBER THIRTY NINE}', category 'No'
]
for x in not_decimal_digits:
self.assertIsNone(re.match('^\d$', x))
def test_empty_array(self):
        # SF bug 1647541
import array
for typecode in 'bBuhHiIlLfd':
a = array.array(typecode)
self.assertEqual(re.compile(b"bla").match(a), None)
self.assertEqual(re.compile(b"").match(a).groups(), ())
def test_inline_flags(self):
# Bug #1700
        upper_char = chr(0x1ea0) # Latin Capital Letter A with Dot Below
        lower_char = chr(0x1ea1) # Latin Small Letter A with Dot Below
p = re.compile(upper_char, re.I | re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile(lower_char, re.I | re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
p = re.compile('(?i)' + upper_char, re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?i)' + lower_char, re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
p = re.compile('(?iu)' + upper_char)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?iu)' + lower_char)
q = p.match(upper_char)
self.assertNotEqual(q, None)
def test_dollar_matches_twice(self):
"$ matches the end of string, and just before the terminating \n"
pattern = re.compile('$')
self.assertEqual(pattern.sub('#', 'a\nb\n'), 'a\nb#\n#')
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a\nb\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
pattern = re.compile('$', re.MULTILINE)
self.assertEqual(pattern.sub('#', 'a\nb\n' ), 'a#\nb#\n#' )
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a#\nb#\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
def test_bytes_str_mixing(self):
# Mixing str and bytes is disallowed
pat = re.compile('.')
bpat = re.compile(b'.')
self.assertRaises(TypeError, pat.match, b'b')
self.assertRaises(TypeError, bpat.match, 'b')
self.assertRaises(TypeError, pat.sub, b'b', 'c')
self.assertRaises(TypeError, pat.sub, 'b', b'c')
self.assertRaises(TypeError, pat.sub, b'b', b'c')
self.assertRaises(TypeError, bpat.sub, b'b', 'c')
self.assertRaises(TypeError, bpat.sub, 'b', b'c')
self.assertRaises(TypeError, bpat.sub, 'b', 'c')
def test_ascii_and_unicode_flag(self):
# String patterns
for flags in (0, re.UNICODE):
pat = re.compile('\xc0', flags | re.IGNORECASE)
self.assertNotEqual(pat.match('\xe0'), None)
pat = re.compile('\w', flags)
self.assertNotEqual(pat.match('\xe0'), None)
pat = re.compile('\xc0', re.ASCII | re.IGNORECASE)
self.assertEqual(pat.match('\xe0'), None)
pat = re.compile('(?a)\xc0', re.IGNORECASE)
self.assertEqual(pat.match('\xe0'), None)
pat = re.compile('\w', re.ASCII)
self.assertEqual(pat.match('\xe0'), None)
pat = re.compile('(?a)\w')
self.assertEqual(pat.match('\xe0'), None)
# Bytes patterns
for flags in (0, re.ASCII):
pat = re.compile(b'\xc0', re.IGNORECASE)
self.assertEqual(pat.match(b'\xe0'), None)
pat = re.compile(b'\w')
self.assertEqual(pat.match(b'\xe0'), None)
# Incompatibilities
self.assertRaises(ValueError, re.compile, b'\w', re.UNICODE)
self.assertRaises(ValueError, re.compile, b'(?u)\w')
self.assertRaises(ValueError, re.compile, '\w', re.UNICODE | re.ASCII)
self.assertRaises(ValueError, re.compile, '(?u)\w', re.ASCII)
self.assertRaises(ValueError, re.compile, '(?a)\w', re.UNICODE)
self.assertRaises(ValueError, re.compile, '(?au)\w')
def test_bug_6509(self):
# Replacement strings of both types must parse properly.
# all strings
pat = re.compile('a(\w)')
self.assertEqual(pat.sub('b\\1', 'ac'), 'bc')
pat = re.compile('a(.)')
self.assertEqual(pat.sub('b\\1', 'a\u1234'), 'b\u1234')
pat = re.compile('..')
self.assertEqual(pat.sub(lambda m: 'str', 'a5'), 'str')
# all bytes
pat = re.compile(b'a(\w)')
self.assertEqual(pat.sub(b'b\\1', b'ac'), b'bc')
pat = re.compile(b'a(.)')
self.assertEqual(pat.sub(b'b\\1', b'a\xCD'), b'b\xCD')
pat = re.compile(b'..')
self.assertEqual(pat.sub(lambda m: b'bytes', b'a5'), b'bytes')
def test_dealloc(self):
# issue 3299: check for segfault in debug build
import _sre
# the overflow limit is different on wide and narrow builds and it
# depends on the definition of SRE_CODE (see sre.h).
# 2**128 should be big enough to overflow on both. For smaller values
# a RuntimeError is raised instead of OverflowError.
long_overflow = 2**128
self.assertRaises(TypeError, re.finditer, "a", {})
self.assertRaises(OverflowError, _sre.compile, "abc", 0, [long_overflow])
self.assertRaises(TypeError, _sre.compile, {}, 0, [])
def run_re_tests():
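    # Drive the shared vectors from test.re_tests through re.compile/search,
    # then re-check each successful match with a bytes pattern and string,
    # a range-limited search, and IGNORECASE/LOCALE/UNICODE recompiles.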
from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR
if verbose:
print('Running re_tests test suite')
else:
# To save time, only run the first and last 10 tests
#tests = tests[:10] + tests[-10:]
pass
for t in tests:
sys.stdout.flush()
pattern = s = outcome = repl = expected = None
if len(t) == 5:
pattern, s, outcome, repl, expected = t
elif len(t) == 3:
pattern, s, outcome = t
else:
raise ValueError('Test tuples should have 3 or 5 fields', t)
try:
obj = re.compile(pattern)
except re.error:
if outcome == SYNTAX_ERROR: pass # Expected a syntax error
else:
print('=== Syntax error:', t)
except KeyboardInterrupt: raise KeyboardInterrupt
except:
print('*** Unexpected error ***', t)
if verbose:
traceback.print_exc(file=sys.stdout)
else:
try:
result = obj.search(s)
except re.error as msg:
print('=== Unexpected exception', t, repr(msg))
if outcome == SYNTAX_ERROR:
# This should have been a syntax error; forget it.
pass
elif outcome == FAIL:
if result is None: pass # No match, as expected
else: print('=== Succeeded incorrectly', t)
elif outcome == SUCCEED:
if result is not None:
# Matched, as expected, so now we compute the
# result string and compare it to our expected result.
start, end = result.span(0)
vardict={'found': result.group(0),
'groups': result.group(),
'flags': result.re.flags}
for i in range(1, 100):
try:
gi = result.group(i)
                            # Special hack: otherwise the string concatenation would fail:
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict['g%d' % i] = gi
for i in result.re.groupindex.keys():
try:
gi = result.group(i)
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict[i] = gi
repl = eval(repl, vardict)
if repl != expected:
print('=== grouping error', t, end=' ')
print(repr(repl) + ' should be ' + repr(expected))
else:
print('=== Failed incorrectly', t)
# Try the match with both pattern and string converted to
# bytes, and check that it still succeeds.
try:
bpat = bytes(pattern, "ascii")
bs = bytes(s, "ascii")
except UnicodeEncodeError:
# skip non-ascii tests
pass
else:
try:
bpat = re.compile(bpat)
except Exception:
print('=== Fails on bytes pattern compile', t)
if verbose:
traceback.print_exc(file=sys.stdout)
else:
bytes_result = bpat.search(bs)
if bytes_result is None:
print('=== Fails on bytes pattern match', t)
# Try the match with the search area limited to the extent
# of the match and see if it still succeeds. \B will
# break (because it won't match at the end or start of a
# string), so we'll ignore patterns that feature it.
if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
and result is not None:
obj = re.compile(pattern)
result = obj.search(s, result.start(0), result.end(0) + 1)
if result is None:
print('=== Failed on range-limited match', t)
# Try the match with IGNORECASE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.IGNORECASE)
result = obj.search(s)
if result is None:
print('=== Fails on case-insensitive match', t)
# Try the match with LOCALE enabled, and check that it
# still succeeds.
if '(?u)' not in pattern:
obj = re.compile(pattern, re.LOCALE)
result = obj.search(s)
if result is None:
print('=== Fails on locale-sensitive match', t)
# Try the match with UNICODE locale enabled, and check
# that it still succeeds.
obj = re.compile(pattern, re.UNICODE)
result = obj.search(s)
if result is None:
print('=== Fails on unicode-sensitive match', t)
def test_main():
run_unittest(ReTests)
run_re_tests()
if __name__ == "__main__":
test_main()
| apache-2.0 |
bcoca/ansible | test/support/network-integration/collections/ansible_collections/ansible/netcommon/plugins/modules/cli_config.py | 47 | 15653 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "network",
}
DOCUMENTATION = """module: cli_config
author: Trishna Guha (@trishnaguha)
notes:
- The commands will be returned only for platforms that do not support onbox diff.
The C(--diff) option with the playbook will return the difference in configuration
  for devices that have support for onbox diff
short_description: Push text based configuration to network devices over network_cli
description:
- This module provides a platform-agnostic way of pushing text-based configuration to
  network devices over the network_cli connection plugin.
extends_documentation_fragment:
- ansible.netcommon.network_agnostic
options:
config:
description:
- The config to be pushed to the network device. This argument is mutually exclusive
      with C(rollback) and either one of the options should be given as input. The
config should have indentation that the device uses.
type: str
commit:
description:
- The C(commit) argument instructs the module to push the configuration to the
device. This is mapped to module check mode.
type: bool
replace:
description:
- If the C(replace) argument is set to C(yes), it will replace the entire running-config
of the device with the C(config) argument value. For devices that support replacing
      the running configuration from a file on the device, like NXOS/JUNOS, the C(replace)
      argument takes the path to the file on the device that will be used for replacing the
      entire running-config. The value of the C(config) option should be I(None) for such
      devices. Nexus 9K devices only support replace. Use I(net_put) or I(nxos_file_copy) in
      case of the NXOS module to copy the flat file to the remote device and then set
      the full path to this argument.
type: str
backup:
description:
- This argument will cause the module to create a full backup of the current running
config from the remote device before any changes are made. If the C(backup_options)
value is not given, the backup file is written to the C(backup) folder in the
playbook root directory or role root directory, if playbook is part of an ansible
role. If the directory does not exist, it is created.
type: bool
default: 'no'
rollback:
description:
- The C(rollback) argument instructs the module to rollback the current configuration
to the identifier specified in the argument. If the specified rollback identifier
does not exist on the remote device, the module will fail. To rollback to the
most recent commit, set the C(rollback) argument to 0. This option is mutually
exclusive with C(config).
commit_comment:
description:
- The C(commit_comment) argument specifies a text string to be used when committing
the configuration. If the C(commit) argument is set to False, this argument
is silently ignored. This argument is only valid for the platforms that support
commit operation with comment.
type: str
defaults:
description:
- The I(defaults) argument will influence how the running-config is collected
from the device. When the value is set to true, the command used to collect
      the running-config is appended with the all keyword. When the value is set to
false, the command is issued without the all keyword.
default: 'no'
type: bool
multiline_delimiter:
description:
- This argument is used when pushing a multiline configuration element to the
device. It specifies the character to use as the delimiting character. This
only applies to the configuration action.
type: str
diff_replace:
description:
- Instructs the module on the way to perform the configuration on the device.
If the C(diff_replace) argument is set to I(line) then the modified lines are
pushed to the device in configuration mode. If the argument is set to I(block)
then the entire command block is pushed to the device in configuration mode
if any line is not correct. Note that this parameter will be ignored if the
platform has onbox diff support.
choices:
- line
- block
- config
diff_match:
description:
- Instructs the module on the way to perform the matching of the set of commands
against the current device config. If C(diff_match) is set to I(line), commands
are matched line by line. If C(diff_match) is set to I(strict), command lines
are matched with respect to position. If C(diff_match) is set to I(exact), command
lines must be an equal match. Finally, if C(diff_match) is set to I(none), the
module will not attempt to compare the source configuration with the running
configuration on the remote device. Note that this parameter will be ignored
if the platform has onbox diff support.
choices:
- line
- strict
- exact
- none
diff_ignore_lines:
description:
- Use this argument to specify one or more lines that should be ignored during
the diff. This is used for lines in the configuration that are automatically
updated by the system. This argument takes a list of regular expressions or
exact line matches. Note that this parameter will be ignored if the platform
has onbox diff support.
backup_options:
description:
- This is a dict object containing configurable options related to backup file
      path. The value of this option is read only when C(backup) is set to I(yes);
      if C(backup) is set to I(no) this option will be silently ignored.
suboptions:
filename:
description:
- The filename to be used to store the backup configuration. If the filename
is not given it will be generated based on the hostname, current time and
          date in the format <hostname>_config.<current-date>@<current-time>
dir_path:
description:
- This option provides the path ending with directory name in which the backup
configuration file will be stored. If the directory does not exist it will
          be first created and the filename is either the value of C(filename) or the
          default filename as described in the C(filename) option's description. If the
          path value is not given, a I(backup) directory will be created in the current
          working directory and the backup configuration will be copied to C(filename)
          within the I(backup) directory.
type: path
type: dict
"""
EXAMPLES = """
- name: configure device with config
cli_config:
config: "{{ lookup('template', 'basic/config.j2') }}"
- name: multiline config
cli_config:
config: |
hostname foo
feature nxapi
- name: configure device with config with defaults enabled
cli_config:
config: "{{ lookup('template', 'basic/config.j2') }}"
defaults: yes
- name: Use diff_match
cli_config:
config: "{{ lookup('file', 'interface_config') }}"
diff_match: none
- name: nxos replace config
cli_config:
replace: 'bootflash:nxoscfg'
- name: junos replace config
cli_config:
replace: '/var/home/ansible/junos01.cfg'
- name: commit with comment
cli_config:
config: set system host-name foo
commit_comment: this is a test
- name: configurable backup path
cli_config:
config: "{{ lookup('template', 'basic/config.j2') }}"
backup: yes
backup_options:
filename: backup.cfg
dir_path: /home/user
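# A hedged sketch: platforms whose cliconf plugin supports rollback can
# restore the most recent commit by setting rollback to 0.
- name: rollback to the most recent commit
  cli_config:
    rollback: 0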
"""
RETURN = """
commands:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['interface Loopback999', 'no shutdown']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: str
sample: /playbooks/ansible/backup/hostname_config.2016-07-16@22:28:34
"""
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils._text import to_text
def validate_args(module, device_operations):
"""validate param if it is supported on the platform
"""
feature_list = [
"replace",
"rollback",
"commit_comment",
"defaults",
"multiline_delimiter",
"diff_replace",
"diff_match",
"diff_ignore_lines",
]
for feature in feature_list:
if module.params[feature]:
supports_feature = device_operations.get("supports_%s" % feature)
if supports_feature is None:
                module.fail_json(
                    msg="This platform does not specify whether %s is supported or not. "
                    "Please report an issue against this platform's cliconf plugin."
                    % feature
                )
elif not supports_feature:
module.fail_json(
msg="Option %s is not supported on this platform" % feature
)
def run(
module, device_operations, connection, candidate, running, rollback_id
):
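    # Three mutually exclusive paths follow: an explicit rollback request,
    # platforms that compute the diff on the device (onbox diff), and
    # platforms where the module generates the diff locally via get_diff().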
result = {}
resp = {}
config_diff = []
banner_diff = {}
replace = module.params["replace"]
commit_comment = module.params["commit_comment"]
multiline_delimiter = module.params["multiline_delimiter"]
diff_replace = module.params["diff_replace"]
diff_match = module.params["diff_match"]
diff_ignore_lines = module.params["diff_ignore_lines"]
commit = not module.check_mode
if replace in ("yes", "true", "True"):
replace = True
elif replace in ("no", "false", "False"):
replace = False
if (
replace is not None
and replace not in [True, False]
and candidate is not None
):
module.fail_json(
msg="Replace value '%s' is a configuration file path already"
" present on the device. Hence 'replace' and 'config' options"
" are mutually exclusive" % replace
)
if rollback_id is not None:
resp = connection.rollback(rollback_id, commit)
if "diff" in resp:
result["changed"] = True
elif device_operations.get("supports_onbox_diff"):
if diff_replace:
module.warn(
"diff_replace is ignored as the device supports onbox diff"
)
if diff_match:
module.warn(
"diff_mattch is ignored as the device supports onbox diff"
)
if diff_ignore_lines:
module.warn(
"diff_ignore_lines is ignored as the device supports onbox diff"
)
if candidate and not isinstance(candidate, list):
candidate = candidate.strip("\n").splitlines()
kwargs = {
"candidate": candidate,
"commit": commit,
"replace": replace,
"comment": commit_comment,
}
resp = connection.edit_config(**kwargs)
if "diff" in resp:
result["changed"] = True
elif device_operations.get("supports_generate_diff"):
kwargs = {"candidate": candidate, "running": running}
if diff_match:
kwargs.update({"diff_match": diff_match})
if diff_replace:
kwargs.update({"diff_replace": diff_replace})
if diff_ignore_lines:
kwargs.update({"diff_ignore_lines": diff_ignore_lines})
diff_response = connection.get_diff(**kwargs)
config_diff = diff_response.get("config_diff")
banner_diff = diff_response.get("banner_diff")
if config_diff:
if isinstance(config_diff, list):
candidate = config_diff
else:
candidate = config_diff.splitlines()
kwargs = {
"candidate": candidate,
"commit": commit,
"replace": replace,
"comment": commit_comment,
}
if commit:
connection.edit_config(**kwargs)
result["changed"] = True
result["commands"] = config_diff.split("\n")
if banner_diff:
candidate = json.dumps(banner_diff)
kwargs = {"candidate": candidate, "commit": commit}
if multiline_delimiter:
kwargs.update({"multiline_delimiter": multiline_delimiter})
if commit:
connection.edit_banner(**kwargs)
result["changed"] = True
if module._diff:
if "diff" in resp:
result["diff"] = {"prepared": resp["diff"]}
else:
diff = ""
if config_diff:
if isinstance(config_diff, list):
diff += "\n".join(config_diff)
else:
diff += config_diff
if banner_diff:
diff += json.dumps(banner_diff)
result["diff"] = {"prepared": diff}
return result
def main():
"""main entry point for execution
"""
backup_spec = dict(filename=dict(), dir_path=dict(type="path"))
argument_spec = dict(
backup=dict(default=False, type="bool"),
backup_options=dict(type="dict", options=backup_spec),
config=dict(type="str"),
commit=dict(type="bool"),
replace=dict(type="str"),
rollback=dict(type="int"),
commit_comment=dict(type="str"),
defaults=dict(default=False, type="bool"),
multiline_delimiter=dict(type="str"),
diff_replace=dict(choices=["line", "block", "config"]),
diff_match=dict(choices=["line", "strict", "exact", "none"]),
diff_ignore_lines=dict(type="list"),
)
mutually_exclusive = [("config", "rollback")]
required_one_of = [["backup", "config", "rollback"]]
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
required_one_of=required_one_of,
supports_check_mode=True,
)
result = {"changed": False}
connection = Connection(module._socket_path)
capabilities = module.from_json(connection.get_capabilities())
if capabilities:
device_operations = capabilities.get("device_operations", dict())
validate_args(module, device_operations)
else:
device_operations = dict()
if module.params["defaults"]:
if "get_default_flag" in capabilities.get("rpc"):
flags = connection.get_default_flag()
else:
flags = "all"
else:
flags = []
candidate = module.params["config"]
candidate = (
to_text(candidate, errors="surrogate_then_replace")
if candidate
else None
)
running = connection.get_config(flags=flags)
rollback_id = module.params["rollback"]
if module.params["backup"]:
result["__backup__"] = running
if candidate or rollback_id or module.params["replace"]:
try:
result.update(
run(
module,
device_operations,
connection,
candidate,
running,
rollback_id,
)
)
except Exception as exc:
module.fail_json(msg=to_text(exc))
module.exit_json(**result)
if __name__ == "__main__":
main()
| gpl-3.0 |
jessstrap/servotk | tests/wpt/web-platform-tests/tools/py/conftest.py | 161 | 2347 | import py
import sys
pytest_plugins = 'doctest pytester'.split()
collect_ignore = ['build', 'doc/_build']
import os
pid = os.getpid()
def pytest_addoption(parser):
group = parser.getgroup("pylib", "py lib testing options")
group.addoption('--runslowtests',
action="store_true", dest="runslowtests", default=False,
help=("run slow tests"))
def pytest_funcarg__sshhost(request):
val = request.config.getvalue("sshhost")
if val:
return val
py.test.skip("need --sshhost option")
def pytest_generate_tests(metafunc):
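    # Expand tests marked with 'multi' into one call per listed value, and
    # parametrize the 'anypython' funcarg over the known interpreter names.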
multi = getattr(metafunc.function, 'multi', None)
if multi is not None:
assert len(multi.kwargs) == 1
for name, l in multi.kwargs.items():
for val in l:
metafunc.addcall(funcargs={name: val})
elif 'anypython' in metafunc.funcargnames:
for name in ('python2.4', 'python2.5', 'python2.6',
'python2.7', 'python3.1', 'pypy-c', 'jython'):
metafunc.addcall(id=name, param=name)
# XXX copied from execnet's conftest.py - needs to be merged
winpymap = {
'python2.7': r'C:\Python27\python.exe',
'python2.6': r'C:\Python26\python.exe',
'python2.5': r'C:\Python25\python.exe',
'python2.4': r'C:\Python24\python.exe',
'python3.1': r'C:\Python31\python.exe',
}
def getexecutable(name, cache={}):
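    # The mutable default dict deliberately memoizes lookups per process;
    # jython only qualifies when its --version output reports 2.5.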
try:
return cache[name]
except KeyError:
executable = py.path.local.sysfind(name)
if executable:
if name == "jython":
import subprocess
popen = subprocess.Popen([str(executable), "--version"],
universal_newlines=True, stderr=subprocess.PIPE)
out, err = popen.communicate()
if not err or "2.5" not in err:
executable = None
cache[name] = executable
return executable
def pytest_funcarg__anypython(request):
name = request.param
executable = getexecutable(name)
if executable is None:
if sys.platform == "win32":
executable = winpymap.get(name, None)
if executable:
executable = py.path.local(executable)
if executable.check():
return executable
py.test.skip("no %s found" % (name,))
return executable
| mpl-2.0 |
jernsthausen/datesplitter | lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/charsetgroupprober.py | 2929 | 3791 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
from .charsetprober import CharSetProber
class CharSetGroupProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mActiveNum = 0
self._mProbers = []
self._mBestGuessProber = None
def reset(self):
CharSetProber.reset(self)
self._mActiveNum = 0
for prober in self._mProbers:
if prober:
prober.reset()
prober.active = True
self._mActiveNum += 1
self._mBestGuessProber = None
def get_charset_name(self):
if not self._mBestGuessProber:
self.get_confidence()
if not self._mBestGuessProber:
return None
# self._mBestGuessProber = self._mProbers[0]
return self._mBestGuessProber.get_charset_name()
def feed(self, aBuf):
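        # Feed the buffer to every still-active prober: return early on a
        # definite hit, and deactivate probers that rule themselves out.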
for prober in self._mProbers:
if not prober:
continue
if not prober.active:
continue
st = prober.feed(aBuf)
if not st:
continue
if st == constants.eFoundIt:
self._mBestGuessProber = prober
return self.get_state()
elif st == constants.eNotMe:
prober.active = False
self._mActiveNum -= 1
if self._mActiveNum <= 0:
self._mState = constants.eNotMe
return self.get_state()
return self.get_state()
def get_confidence(self):
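        # Report the best confidence among the active probers, remembering
        # which prober produced it for get_charset_name().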
st = self.get_state()
if st == constants.eFoundIt:
return 0.99
elif st == constants.eNotMe:
return 0.01
bestConf = 0.0
self._mBestGuessProber = None
for prober in self._mProbers:
if not prober:
continue
if not prober.active:
if constants._debug:
sys.stderr.write(prober.get_charset_name()
+ ' not active\n')
continue
cf = prober.get_confidence()
if constants._debug:
sys.stderr.write('%s confidence = %s\n' %
(prober.get_charset_name(), cf))
if bestConf < cf:
bestConf = cf
self._mBestGuessProber = prober
if not self._mBestGuessProber:
return 0.0
return bestConf
# else:
# self._mBestGuessProber = self._mProbers[0]
# return self._mBestGuessProber.get_confidence()
| mit |
hbiyik/tribler | src/tribler-core/tribler_core/modules/metadata_store/tests/test_tracker_state.py | 1 | 1420 | from ipv8.keyvault.crypto import default_eccrypto
from pony.orm import db_session
from tribler_core.modules.metadata_store.store import MetadataStore
from tribler_core.tests.tools.base_test import TriblerCoreTest
from tribler_core.utilities.tracker_utils import MalformedTrackerURLException
class TestTrackerState(TriblerCoreTest):
"""
Contains various tests for the TrackerState class.
"""
async def setUp(self):
await super(TestTrackerState, self).setUp()
self.my_key = default_eccrypto.generate_key(u"curve25519")
self.mds = MetadataStore(":memory:", self.session_base_dir, self.my_key)
async def tearDown(self):
self.mds.shutdown()
await super(TestTrackerState, self).tearDown()
@db_session
def test_create_tracker_state(self):
ts = self.mds.TrackerState(url='http://tracker.tribler.org:80/announce')
self.assertEqual(list(self.mds.TrackerState.select())[0], ts)
@db_session
def test_canonicalize_tracker_state(self):
ts = self.mds.TrackerState(url='http://tracker.tribler.org:80/announce/')
self.assertEqual(self.mds.TrackerState.get(url='http://tracker.tribler.org/announce'), ts)
@db_session
def test_canonicalize_raise_on_malformed_url(self):
self.assertRaises(
MalformedTrackerURLException, self.mds.TrackerState, url='udp://tracker.tribler.org/announce/'
)
| lgpl-3.0 |
cslzchen/osf.io | osf_tests/test_draft_registration.py | 3 | 30690 | import mock
import pytest
import datetime
from framework.auth.core import Auth
from framework.exceptions import PermissionsError
from osf.exceptions import UserNotAffiliatedError, DraftRegistrationStateError, NodeStateError
from osf.models import RegistrationSchema, DraftRegistration, DraftRegistrationContributor, NodeLicense, Node, NodeLog
from osf.utils.permissions import ADMIN, READ, WRITE
from osf_tests.test_node import TestNodeEditableFieldsMixin, TestTagging, TestNodeSubjects
from osf_tests.test_node_license import TestNodeLicenses
from website import settings
from . import factories
pytestmark = pytest.mark.django_db
@pytest.fixture()
def user():
return factories.UserFactory()
@pytest.fixture()
def project(user, auth, fake):
ret = factories.ProjectFactory(creator=user)
ret.add_tag(fake.word(), auth=auth)
return ret
@pytest.fixture()
def auth(user):
return Auth(user)
@pytest.fixture()
def draft_registration(project):
return factories.DraftRegistrationFactory(branched_from=project)
class TestDraftRegistrations:
# copied from tests/test_registrations/test_models.py
def test_factory(self):
draft = factories.DraftRegistrationFactory()
assert draft.branched_from is not None
assert draft.initiator is not None
assert draft.registration_schema is not None
user = factories.UserFactory()
draft = factories.DraftRegistrationFactory(initiator=user)
assert draft.initiator == user
node = factories.ProjectFactory()
draft = factories.DraftRegistrationFactory(branched_from=node)
assert draft.branched_from == node
assert draft.initiator == node.creator
# Pick an arbitrary v2 schema
schema = RegistrationSchema.objects.filter(schema_version=2).first()
data = {'some': 'data'}
draft = factories.DraftRegistrationFactory(registration_schema=schema, registration_metadata=data)
assert draft.registration_schema == schema
assert draft.registration_metadata == data
@mock.patch('website.settings.ENABLE_ARCHIVER', False)
def test_register(self):
user = factories.UserFactory()
auth = Auth(user)
project = factories.ProjectFactory(creator=user)
draft = factories.DraftRegistrationFactory(branched_from=project)
assert not draft.registered_node
draft.register(auth)
assert draft.registered_node
# group member with admin access cannot register
member = factories.AuthUserFactory()
osf_group = factories.OSFGroupFactory(creator=user)
osf_group.make_member(member, auth=auth)
project.add_osf_group(osf_group, ADMIN)
draft_2 = factories.DraftRegistrationFactory(branched_from=project)
assert project.has_permission(member, ADMIN)
with pytest.raises(PermissionsError):
draft_2.register(Auth(member))
assert not draft_2.registered_node
@mock.patch('website.settings.ENABLE_ARCHIVER', False)
def test_register_no_title_fails(self):
user = factories.UserFactory()
auth = Auth(user)
project = factories.ProjectFactory(creator=user)
draft = factories.DraftRegistrationFactory(branched_from=project)
draft.title = ''
draft.save()
with pytest.raises(NodeStateError) as e:
draft.register(auth)
assert str(e.value) == 'Draft Registration must have title to be registered'
def test_update_metadata_updates_registration_responses(self, project):
schema = RegistrationSchema.objects.get(
name='OSF-Standard Pre-Data Collection Registration',
schema_version=2
)
draft = factories.DraftRegistrationFactory(registration_schema=schema, branched_from=project)
new_metadata = {
'looked': {
'comments': [],
'value': 'Yes',
'extra': []
},
'datacompletion': {
'comments': [],
'value': 'No, data collection has not begun',
'extra': []
},
'comments': {
'comments': [],
'value': '',
'extra': []
}
}
draft.update_metadata(new_metadata)
draft.save()
# To preserve both workflows, if update_metadata is called,
# a flattened version of that metadata is stored in
# registration_responses
assert draft.registration_responses == {
'looked': 'Yes',
'datacompletion': 'No, data collection has not begun',
'comments': ''
}
def test_update_metadata_tracks_changes(self, project):
draft = factories.DraftRegistrationFactory(branched_from=project)
draft.registration_metadata = {
'foo': {
'value': 'bar',
},
'a': {
'value': 1,
},
'b': {
'value': True
},
}
changes = draft.update_metadata({
'foo': {
'value': 'foobar',
},
'a': {
'value': 1,
},
'b': {
'value': True,
},
'c': {
'value': 2,
},
})
draft.save()
for key in ['foo', 'c']:
assert key in changes
def test_update_registration_responses(self, project):
schema = RegistrationSchema.objects.get(
name='OSF-Standard Pre-Data Collection Registration',
schema_version=2
)
draft = factories.DraftRegistrationFactory(registration_schema=schema, branched_from=project)
registration_responses = {
'looked': 'Yes',
'datacompletion': 'No, data collection has not begun',
'comments': ''
}
draft.update_registration_responses(registration_responses)
draft.save()
# To preserve both workflows, if update_metadata is called,
# a flattened version of that metadata is stored in
# registration_responses
assert draft.registration_metadata == {
'looked': {
'comments': [],
'value': 'Yes',
'extra': []
},
'datacompletion': {
'comments': [],
'value': 'No, data collection has not begun',
'extra': []
},
'comments': {
'comments': [],
'value': '',
'extra': []
}
}
def test_has_active_draft_registrations(self):
project, project2 = factories.ProjectFactory(), factories.ProjectFactory()
factories.DraftRegistrationFactory(branched_from=project)
assert project.has_active_draft_registrations is True
assert project2.has_active_draft_registrations is False
def test_draft_registrations_active(self):
project = factories.ProjectFactory()
registration = factories.RegistrationFactory(project=project)
deleted_registration = factories.RegistrationFactory(project=project)
deleted_registration.is_deleted = True
deleted_registration.save()
draft = factories.DraftRegistrationFactory(branched_from=project, user=project.creator)
draft2 = factories.DraftRegistrationFactory(branched_from=project, user=project.creator)
draft2.registered_node = deleted_registration
draft2.save()
finished_draft = factories.DraftRegistrationFactory(branched_from=project, user=project.creator)
finished_draft.registered_node = registration
finished_draft.save()
assert draft in project.draft_registrations_active.all()
assert draft2 in project.draft_registrations_active.all()
assert finished_draft not in project.draft_registrations_active.all()
def test_update_metadata_interleaves_comments_by_created_timestamp(self, project):
draft = factories.DraftRegistrationFactory(branched_from=project)
now = datetime.datetime.today()
comments = []
times = (now + datetime.timedelta(minutes=i) for i in range(6))
for time in times:
comments.append({
'created': time.isoformat(),
'value': 'Foo'
})
orig_data = {
'foo': {
'value': 'bar',
'comments': [comments[i] for i in range(0, 6, 2)]
}
}
draft.update_metadata(orig_data)
draft.save()
assert draft.registration_metadata['foo']['comments'] == [comments[i] for i in range(0, 6, 2)]
new_data = {
'foo': {
'value': 'bar',
'comments': [comments[i] for i in range(1, 6, 2)]
}
}
draft.update_metadata(new_data)
draft.save()
assert draft.registration_metadata['foo']['comments'] == comments
def test_draft_registration_url(self):
project = factories.ProjectFactory()
draft = factories.DraftRegistrationFactory(branched_from=project)
assert draft.url == settings.DOMAIN + 'registries/drafts/{}'.format(draft._id)
def test_create_from_node_existing(self, user):
node = factories.ProjectFactory(creator=user)
member = factories.AuthUserFactory()
osf_group = factories.OSFGroupFactory(creator=user)
osf_group.make_member(member, auth=Auth(user))
node.add_osf_group(osf_group, ADMIN)
write_contrib = factories.AuthUserFactory()
subject = factories.SubjectFactory()
institution = factories.InstitutionFactory()
user.affiliated_institutions.add(institution)
title = 'A Study of Elephants'
description = 'Loxodonta africana'
category = 'Methods and Materials'
node.set_title(title, Auth(user))
node.set_description(description, Auth(user))
node.category = category
node.add_contributor(write_contrib, permissions=WRITE)
GPL3 = NodeLicense.objects.get(license_id='GPL3')
NEW_YEAR = '2014'
COPYLEFT_HOLDERS = ['Richard Stallman']
node.set_node_license(
{
'id': GPL3.license_id,
'year': NEW_YEAR,
'copyrightHolders': COPYLEFT_HOLDERS
},
auth=Auth(user),
save=True
)
node.add_tag('savanna', Auth(user))
node.add_tag('taxonomy', Auth(user))
node.set_subjects([[subject._id]], auth=Auth(node.creator))
node.affiliated_institutions.add(institution)
node.save()
draft = DraftRegistration.create_from_node(
node=node,
user=user,
schema=factories.get_default_metaschema(),
)
# Assert existing metadata-like node attributes are copied to the draft
assert draft.title == title
assert draft.description == description
assert draft.category == category
assert user in draft.contributors.all()
assert write_contrib in draft.contributors.all()
assert member not in draft.contributors.all()
assert not draft.has_permission(member, 'read')
assert draft.get_permissions(user) == [READ, WRITE, ADMIN]
assert draft.get_permissions(write_contrib) == [READ, WRITE]
assert draft.node_license.license_id == GPL3.license_id
assert draft.node_license.name == GPL3.name
assert draft.node_license.copyright_holders == COPYLEFT_HOLDERS
draft_tags = draft.tags.values_list('name', flat=True)
assert 'savanna' in draft_tags
assert 'taxonomy' in draft_tags
assert subject in draft.subjects.all()
assert institution in draft.affiliated_institutions.all()
assert draft.branched_from == node
def test_create_from_node_draft_node(self, user):
draft = DraftRegistration.create_from_node(
user=user,
schema=factories.get_default_metaschema(),
)
assert draft.title == 'Untitled'
assert draft.description == ''
assert draft.category == ''
assert user in draft.contributors.all()
assert len(draft.contributors.all()) == 1
assert draft.get_permissions(user) == [READ, WRITE, ADMIN]
assert draft.node_license is None
draft_tags = draft.tags.values_list('name', flat=True)
assert len(draft_tags) == 0
assert draft.subjects.count() == 0
assert draft.affiliated_institutions.count() == 0
    def test_branched_from_must_be_a_node_or_draft_node(self, user):
with pytest.raises(DraftRegistrationStateError):
DraftRegistration.create_from_node(
user=user,
node=factories.RegistrationFactory(),
schema=factories.get_default_metaschema()
)
with pytest.raises(DraftRegistrationStateError):
DraftRegistration.create_from_node(
user=user,
node=factories.CollectionFactory(),
schema=factories.get_default_metaschema()
)
def test_can_view_property(self, user):
project = factories.ProjectFactory(creator=user)
write_contrib = factories.UserFactory()
read_contrib = factories.UserFactory()
non_contrib = factories.UserFactory()
draft = DraftRegistration.create_from_node(
user=user,
node=project,
schema=factories.get_default_metaschema()
)
project.add_contributor(non_contrib, ADMIN, save=True)
draft.add_contributor(write_contrib, WRITE, save=True)
draft.add_contributor(read_contrib, READ, save=True)
assert draft.get_permissions(user) == [READ, WRITE, ADMIN]
assert draft.get_permissions(write_contrib) == [READ, WRITE]
assert draft.get_permissions(read_contrib) == [READ]
assert draft.can_view(Auth(user)) is True
assert draft.can_view(Auth(write_contrib)) is True
assert draft.can_view(Auth(read_contrib)) is True
assert draft.can_view(Auth(non_contrib)) is False
class TestSetDraftRegistrationEditableFields(TestNodeEditableFieldsMixin):
@pytest.fixture()
def resource(self, project):
return factories.DraftRegistrationFactory(branched_from=project, title='That Was Then', description='A description')
@pytest.fixture()
def model(self):
return DraftRegistration
class TestDraftRegistrationContributorMethods():
def test_add_contributor(self, draft_registration, user, auth):
# A user is added as a contributor
user = factories.UserFactory()
draft_registration.add_contributor(contributor=user, auth=auth)
draft_registration.save()
assert draft_registration.is_contributor(user) is True
assert draft_registration.has_permission(user, ADMIN) is False
assert draft_registration.has_permission(user, WRITE) is True
assert draft_registration.has_permission(user, READ) is True
last_log = draft_registration.logs.all().order_by('-created')[0]
assert last_log.action == 'contributor_added'
assert last_log.params['contributors'] == [user._id]
def test_add_contributors(self, draft_registration, auth):
user1 = factories.UserFactory()
user2 = factories.UserFactory()
draft_registration.add_contributors(
[
{'user': user1, 'permissions': ADMIN, 'visible': True},
{'user': user2, 'permissions': WRITE, 'visible': False}
],
auth=auth
)
last_log = draft_registration.logs.all().order_by('-created')[0]
assert (
last_log.params['contributors'] ==
[user1._id, user2._id]
)
assert draft_registration.is_contributor(user1)
assert draft_registration.is_contributor(user2)
assert user1._id in draft_registration.visible_contributor_ids
assert user2._id not in draft_registration.visible_contributor_ids
assert draft_registration.get_permissions(user1) == [READ, WRITE, ADMIN]
assert draft_registration.get_permissions(user2) == [READ, WRITE]
last_log = draft_registration.logs.all().order_by('-created')[0]
assert (
last_log.params['contributors'] ==
[user1._id, user2._id]
)
def test_cant_add_creator_as_contributor_twice(self, draft_registration, user):
draft_registration.add_contributor(contributor=user)
draft_registration.save()
assert len(draft_registration.contributors) == 1
def test_cant_add_same_contributor_twice(self, draft_registration):
contrib = factories.UserFactory()
draft_registration.add_contributor(contributor=contrib)
draft_registration.save()
draft_registration.add_contributor(contributor=contrib)
draft_registration.save()
assert len(draft_registration.contributors) == 2
    def test_remove_unregistered_contributor_removes_unclaimed_record(self, draft_registration, auth):
new_user = draft_registration.add_unregistered_contributor(fullname='David Davidson',
email='[email protected]', auth=auth)
draft_registration.save()
assert draft_registration.is_contributor(new_user) # sanity check
assert draft_registration._primary_key in new_user.unclaimed_records
draft_registration.remove_contributor(
auth=auth,
contributor=new_user
)
draft_registration.save()
new_user.refresh_from_db()
assert draft_registration.is_contributor(new_user) is False
assert draft_registration._primary_key not in new_user.unclaimed_records
def test_is_contributor(self, draft_registration):
contrib, noncontrib = factories.UserFactory(), factories.UserFactory()
DraftRegistrationContributor.objects.create(user=contrib, draft_registration=draft_registration)
assert draft_registration.is_contributor(contrib) is True
assert draft_registration.is_contributor(noncontrib) is False
assert draft_registration.is_contributor(None) is False
def test_visible_initiator(self, project, user):
project_contributor = project.contributor_set.get(user=user)
assert project_contributor.visible is True
draft_reg = factories.DraftRegistrationFactory(branched_from=project, initiator=user)
draft_reg_contributor = draft_reg.contributor_set.get(user=user)
assert draft_reg_contributor.visible is True
def test_non_visible_initiator(self, project, user):
invisible_user = factories.UserFactory()
project.add_contributor(contributor=invisible_user, permissions=ADMIN, visible=False)
invisible_project_contributor = project.contributor_set.get(user=invisible_user)
assert invisible_project_contributor.visible is False
draft_reg = factories.DraftRegistrationFactory(branched_from=project, initiator=invisible_user)
invisible_draft_reg_contributor = draft_reg.contributor_set.get(user=invisible_user)
assert invisible_draft_reg_contributor.visible is False
def test_visible_contributor_ids(self, draft_registration, user):
visible_contrib = factories.UserFactory()
invisible_contrib = factories.UserFactory()
DraftRegistrationContributor.objects.create(user=visible_contrib, draft_registration=draft_registration, visible=True)
DraftRegistrationContributor.objects.create(user=invisible_contrib, draft_registration=draft_registration, visible=False)
assert visible_contrib._id in draft_registration.visible_contributor_ids
assert invisible_contrib._id not in draft_registration.visible_contributor_ids
def test_visible_contributors(self, draft_registration, user):
visible_contrib = factories.UserFactory()
invisible_contrib = factories.UserFactory()
DraftRegistrationContributor.objects.create(user=visible_contrib, draft_registration=draft_registration, visible=True)
DraftRegistrationContributor.objects.create(user=invisible_contrib, draft_registration=draft_registration, visible=False)
assert visible_contrib in draft_registration.visible_contributors
assert invisible_contrib not in draft_registration.visible_contributors
def test_set_visible_false(self, draft_registration, auth):
contrib = factories.UserFactory()
DraftRegistrationContributor.objects.create(user=contrib, draft_registration=draft_registration, visible=True)
draft_registration.set_visible(contrib, visible=False, auth=auth)
draft_registration.save()
assert DraftRegistrationContributor.objects.filter(user=contrib, draft_registration=draft_registration, visible=False).exists() is True
last_log = draft_registration.logs.all().order_by('-created')[0]
assert last_log.user == auth.user
assert last_log.action == NodeLog.MADE_CONTRIBUTOR_INVISIBLE
def test_set_visible_true(self, draft_registration, auth):
contrib = factories.UserFactory()
DraftRegistrationContributor.objects.create(user=contrib, draft_registration=draft_registration, visible=False)
draft_registration.set_visible(contrib, visible=True, auth=auth)
draft_registration.save()
assert DraftRegistrationContributor.objects.filter(user=contrib, draft_registration=draft_registration, visible=True).exists() is True
last_log = draft_registration.logs.all().order_by('-created')[0]
assert last_log.user == auth.user
assert last_log.action == NodeLog.MADE_CONTRIBUTOR_VISIBLE
def test_set_visible_is_noop_if_visibility_is_unchanged(self, draft_registration, auth):
visible, invisible = factories.UserFactory(), factories.UserFactory()
DraftRegistrationContributor.objects.create(user=visible, draft_registration=draft_registration, visible=True)
DraftRegistrationContributor.objects.create(user=invisible, draft_registration=draft_registration, visible=False)
original_log_count = draft_registration.logs.count()
draft_registration.set_visible(invisible, visible=False, auth=auth)
draft_registration.set_visible(visible, visible=True, auth=auth)
draft_registration.save()
assert draft_registration.logs.count() == original_log_count
def test_set_visible_contributor_with_only_one_contributor(self, draft_registration, user):
with pytest.raises(ValueError) as excinfo:
draft_registration.set_visible(user=user, visible=False, auth=None)
assert str(excinfo.value) == 'Must have at least one visible contributor'
def test_set_visible_missing(self, draft_registration):
with pytest.raises(ValueError):
draft_registration.set_visible(factories.UserFactory(), True)
def test_remove_contributor(self, draft_registration, auth):
# A user is added as a contributor
user2 = factories.UserFactory()
draft_registration.add_contributor(contributor=user2, auth=auth, save=True)
assert user2 in draft_registration.contributors
assert draft_registration.has_permission(user2, WRITE)
# The user is removed
draft_registration.remove_contributor(auth=auth, contributor=user2)
draft_registration.reload()
assert user2 not in draft_registration.contributors
assert draft_registration.get_permissions(user2) == []
assert draft_registration.logs.latest().action == 'contributor_removed'
assert draft_registration.logs.latest().params['contributors'] == [user2._id]
def test_remove_contributors(self, draft_registration, auth):
user1 = factories.UserFactory()
user2 = factories.UserFactory()
draft_registration.add_contributors(
[
{'user': user1, 'permissions': WRITE, 'visible': True},
{'user': user2, 'permissions': WRITE, 'visible': True}
],
auth=auth
)
assert user1 in draft_registration.contributors
assert user2 in draft_registration.contributors
assert draft_registration.has_permission(user1, WRITE)
assert draft_registration.has_permission(user2, WRITE)
draft_registration.remove_contributors(auth=auth, contributors=[user1, user2], save=True)
draft_registration.reload()
assert user1 not in draft_registration.contributors
assert user2 not in draft_registration.contributors
assert draft_registration.get_permissions(user1) == []
assert draft_registration.get_permissions(user2) == []
assert draft_registration.logs.latest().action == 'contributor_removed'
def test_replace_contributor(self, draft_registration):
contrib = factories.UserFactory()
draft_registration.add_contributor(contrib, auth=Auth(draft_registration.creator))
draft_registration.save()
assert contrib in draft_registration.contributors.all() # sanity check
replacer = factories.UserFactory()
old_length = draft_registration.contributors.count()
draft_registration.replace_contributor(contrib, replacer)
draft_registration.save()
new_length = draft_registration.contributors.count()
assert contrib not in draft_registration.contributors.all()
assert replacer in draft_registration.contributors.all()
assert old_length == new_length
# test unclaimed_records is removed
assert (
draft_registration._id not in
contrib.unclaimed_records.keys()
)
def test_permission_override_fails_if_no_admins(self, draft_registration, user):
# User has admin permissions because they are the creator
# Cannot lower permissions
with pytest.raises(DraftRegistrationStateError):
draft_registration.add_contributor(user, permissions=WRITE)
def test_update_contributor(self, draft_registration, auth):
new_contrib = factories.AuthUserFactory()
draft_registration.add_contributor(new_contrib, permissions=WRITE, auth=auth)
assert draft_registration.get_permissions(new_contrib) == [READ, WRITE]
assert draft_registration.get_visible(new_contrib) is True
draft_registration.update_contributor(
new_contrib,
READ,
False,
auth=auth
)
assert draft_registration.get_permissions(new_contrib) == [READ]
assert draft_registration.get_visible(new_contrib) is False
def test_update_contributor_non_admin_raises_error(self, draft_registration, auth):
non_admin = factories.AuthUserFactory()
draft_registration.add_contributor(
non_admin,
permissions=WRITE,
auth=auth
)
with pytest.raises(PermissionsError):
draft_registration.update_contributor(
non_admin,
None,
False,
auth=Auth(non_admin)
)
def test_update_contributor_only_admin_raises_error(self, draft_registration, auth):
with pytest.raises(DraftRegistrationStateError):
draft_registration.update_contributor(
auth.user,
WRITE,
True,
auth=auth
)
def test_update_contributor_non_contrib_raises_error(self, draft_registration, auth):
non_contrib = factories.AuthUserFactory()
with pytest.raises(ValueError):
draft_registration.update_contributor(
non_contrib,
ADMIN,
True,
auth=auth
)
class TestDraftRegistrationAffiliatedInstitutions:
def test_affiliated_institutions(self, draft_registration):
inst1, inst2 = factories.InstitutionFactory(), factories.InstitutionFactory()
user = draft_registration.initiator
user.affiliated_institutions.add(inst1, inst2)
draft_registration.add_affiliated_institution(inst1, user=user)
assert inst1 in draft_registration.affiliated_institutions.all()
assert inst2 not in draft_registration.affiliated_institutions.all()
draft_registration.remove_affiliated_institution(inst1, user=user)
assert inst1 not in draft_registration.affiliated_institutions.all()
assert inst2 not in draft_registration.affiliated_institutions.all()
user.affiliated_institutions.remove(inst1)
with pytest.raises(UserNotAffiliatedError):
draft_registration.add_affiliated_institution(inst1, user=user)
class TestDraftRegistrationTagging(TestTagging):
@pytest.fixture()
def node(self, user):
# Overrides "node" resource on tag test, to make it a draft registration instead
project = Node.objects.create(title='Project title', creator_id=user.id)
return factories.DraftRegistrationFactory(branched_from=project)
class TestDraftRegistrationLicenses(TestNodeLicenses):
@pytest.fixture()
def node(self, draft_registration, node_license, user):
# Overrides "node" resource to make it a draft registration instead
draft_registration.node_license = factories.NodeLicenseRecordFactory(
node_license=node_license,
year=self.YEAR,
copyright_holders=self.COPYRIGHT_HOLDERS
)
draft_registration.save()
return draft_registration
class TestDraftRegistrationSubjects(TestNodeSubjects):
@pytest.fixture()
def project(self, draft_registration):
# Overrides "project" resource to make it a draft registration instead
return draft_registration
@pytest.fixture()
def subject(self):
return factories.SubjectFactory()
@pytest.fixture()
def read_contrib(self, project):
read_contrib = factories.AuthUserFactory()
project.add_contributor(read_contrib, auth=Auth(project.creator), permissions=READ)
project.save()
return read_contrib
def test_cannot_set_subjects(self, project, subject, read_contrib):
initial_subjects = list(project.subjects.all())
with pytest.raises(PermissionsError):
project.set_subjects([[subject._id]], auth=Auth(read_contrib))
project.reload()
assert initial_subjects == list(project.subjects.all())
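

# Illustrative sketch (not part of the original suite): the assertions in
# TestDraftRegistrationContributorMethods imply a cumulative permission
# hierarchy, where granting WRITE also grants READ and granting ADMIN grants
# all three. A minimal model of that rule, using hypothetical lowercase
# constants in place of the READ/WRITE/ADMIN values imported above:

_PERMISSION_ORDER = ['read', 'write', 'admin']


def _expand_permission(highest):
    """Return every permission implied by ``highest``, lowest first."""
    return _PERMISSION_ORDER[:_PERMISSION_ORDER.index(highest) + 1]

# _expand_permission('write') -> ['read', 'write'], mirroring
# get_permissions(user2) == [READ, WRITE] after add_contributor above.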
| apache-2.0 |
kingvuplus/xrd-alliance | lib/python/Components/Renderer/Canvas.py | 49 | 1478 | from Renderer import Renderer
from enigma import eCanvas, eRect, ePoint, gRGB
class Canvas(Renderer):
GUI_WIDGET = eCanvas
def __init__(self):
Renderer.__init__(self)
self.sequence = None
self.draw_count = 0
	def pull_updates(self):
		if self.instance is None:
			return
		# do an incremental update
		entries = self.source.drawlist
		if entries is None:
			return
		# if the list's sequence count changed, restart from the beginning
		if entries[0] != self.sequence:
			self.sequence = entries[0]
			self.draw_count = 0
		self.draw(entries[1][self.draw_count:])
		self.draw_count = len(entries[1])
	def draw(self, entries):
		for entry in entries:
			if entry[0] == 1:
				self.instance.fillRect(eRect(entry[1], entry[2], entry[3], entry[4]), gRGB(entry[5]))
			elif entry[0] == 2:
				self.instance.writeText(eRect(entry[1], entry[2], entry[3], entry[4]), gRGB(entry[5]), gRGB(entry[6]), entry[7], entry[8], entry[9])
			elif entry[0] == 3:
				self.instance.drawLine(entry[1], entry[2], entry[3], entry[4], gRGB(entry[5]))
			elif entry[0] == 4:
				self.instance.drawRotatedLine(entry[1], entry[2], entry[3], entry[4], entry[5], entry[6], entry[7], entry[8], gRGB(entry[9]))
			else:
				print "drawlist entry:", entry
				raise RuntimeError("invalid drawlist entry")
def changed(self, what):
self.pull_updates()
def postWidgetCreate(self, instance):
self.sequence = None
from enigma import eSize
		def parseSize(text):
			x, y = text.split(',')
			return eSize(int(x), int(y))
for (attrib, value) in self.skinAttributes:
if attrib == "size":
self.instance.setSize(parseSize(value))
self.pull_updates()
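
# Illustrative sketch (not part of the original renderer): the drawlist
# consumed by pull_updates()/draw() is a (sequence, entries) pair, where the
# first element of each entry selects the drawing primitive. The field names
# below are assumptions for readability.
_example_drawlist = (1, [
	(1, 10, 10, 100, 20, 0x00FF0000),  # fillRect: x, y, w, h, colour
	(3, 0, 0, 50, 50, 0x0000FF00),     # drawLine: x0, y0, x1, y1, colour
])
# When the sequence number (element 0) changes between updates, pull_updates()
# resets draw_count and redraws the whole list instead of only new entries.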
| gpl-2.0 |
deepmind/grid-cells | utils.py | 1 | 5720 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper functions for creating the training graph and plotting.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import ensembles # pylint: disable=g-bad-import-order
np.seterr(invalid="ignore")
def get_place_cell_ensembles(
env_size, neurons_seed, targets_type, lstm_init_type, n_pc, pc_scale):
"""Create the ensembles for the Place cells."""
place_cell_ensembles = [
ensembles.PlaceCellEnsemble(
n,
stdev=s,
pos_min=-env_size / 2.0,
pos_max=env_size / 2.0,
seed=neurons_seed,
soft_targets=targets_type,
soft_init=lstm_init_type)
for n, s in zip(n_pc, pc_scale)
]
return place_cell_ensembles
def get_head_direction_ensembles(
neurons_seed, targets_type, lstm_init_type, n_hdc, hdc_concentration):
"""Create the ensembles for the Head direction cells."""
head_direction_ensembles = [
ensembles.HeadDirectionCellEnsemble(
n,
concentration=con,
seed=neurons_seed,
soft_targets=targets_type,
soft_init=lstm_init_type)
for n, con in zip(n_hdc, hdc_concentration)
]
return head_direction_ensembles
def encode_initial_conditions(init_pos, init_hd, place_cell_ensembles,
head_direction_ensembles):
initial_conds = []
for ens in place_cell_ensembles:
initial_conds.append(
tf.squeeze(ens.get_init(init_pos[:, tf.newaxis, :]), axis=1))
for ens in head_direction_ensembles:
initial_conds.append(
tf.squeeze(ens.get_init(init_hd[:, tf.newaxis, :]), axis=1))
return initial_conds
def encode_targets(target_pos, target_hd, place_cell_ensembles,
head_direction_ensembles):
ensembles_targets = []
for ens in place_cell_ensembles:
ensembles_targets.append(ens.get_targets(target_pos))
for ens in head_direction_ensembles:
ensembles_targets.append(ens.get_targets(target_hd))
return ensembles_targets
def clip_all_gradients(g, var, limit):
# print(var.name)
return (tf.clip_by_value(g, -limit, limit), var)
def clip_bottleneck_gradient(g, var, limit):
if ("bottleneck" in var.name or "pc_logits" in var.name):
return (tf.clip_by_value(g, -limit, limit), var)
else:
return (g, var)
def no_clipping(g, var):
return (g, var)
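
# Illustrative sketch (not part of the original module): the clipping helpers
# above are meant to be mapped over the (gradient, variable) pairs of a TF1
# optimizer before apply_gradients; the names below are assumptions.
#
#   grads_and_vars = optimizer.compute_gradients(loss)
#   clipped = [clip_all_gradients(g, v, 1e-5) for g, v in grads_and_vars]
#   train_op = optimizer.apply_gradients(clipped)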
def concat_dict(acc, new_data):
"""Dictionary concatenation function."""
def to_array(kk):
if isinstance(kk, np.ndarray):
return kk
else:
return np.asarray([kk])
for k, v in new_data.iteritems():
if isinstance(v, dict):
if k in acc:
acc[k] = concat_dict(acc[k], v)
else:
acc[k] = concat_dict(dict(), v)
else:
v = to_array(v)
if k in acc:
acc[k] = np.concatenate([acc[k], v])
else:
acc[k] = np.copy(v)
return acc
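
# Illustrative usage sketch (not part of the original module): accumulating
# per-step result dicts with concat_dict, which promotes scalars to
# one-element arrays and merges nested dicts recursively.
#
#   acc = {}
#   for step in ({"loss": 0.5}, {"loss": 0.25}):
#     acc = concat_dict(acc, step)
#   # acc["loss"] -> array([0.5, 0.25])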
def get_scores_and_plot(scorer,
data_abs_xy,
activations,
directory,
filename,
plot_graphs=True, # pylint: disable=unused-argument
nbins=20, # pylint: disable=unused-argument
cm="jet",
sort_by_score_60=True):
"""Plotting function."""
# Concatenate all trajectories
xy = data_abs_xy.reshape(-1, data_abs_xy.shape[-1])
act = activations.reshape(-1, activations.shape[-1])
n_units = act.shape[1]
# Get the rate-map for each unit
s = [
scorer.calculate_ratemap(xy[:, 0], xy[:, 1], act[:, i])
for i in xrange(n_units)
]
# Get the scores
score_60, score_90, max_60_mask, max_90_mask, sac = zip(
*[scorer.get_scores(rate_map) for rate_map in s])
# Separations
# separations = map(np.mean, max_60_mask)
# Sort by score if desired
if sort_by_score_60:
ordering = np.argsort(-np.array(score_60))
else:
ordering = range(n_units)
# Plot
cols = 16
rows = int(np.ceil(n_units / cols))
fig = plt.figure(figsize=(24, rows * 4))
for i in xrange(n_units):
rf = plt.subplot(rows * 2, cols, i + 1)
acr = plt.subplot(rows * 2, cols, n_units + i + 1)
if i < n_units:
index = ordering[i]
title = "%d (%.2f)" % (index, score_60[index])
# Plot the activation maps
scorer.plot_ratemap(s[index], ax=rf, title=title, cmap=cm)
# Plot the autocorrelation of the activation maps
scorer.plot_sac(
sac[index],
mask_params=max_60_mask[index],
ax=acr,
title=title,
cmap=cm)
# Save
if not os.path.exists(directory):
os.makedirs(directory)
  with PdfPages(os.path.join(directory, filename)) as f:
plt.savefig(f, format="pdf")
plt.close(fig)
return (np.asarray(score_60), np.asarray(score_90),
np.asarray(map(np.mean, max_60_mask)),
np.asarray(map(np.mean, max_90_mask)))
| apache-2.0 |
haridsv/pip | tests/functional/test_install_user.py | 27 | 11282 | """
tests specific to "pip install --user"
"""
import os
import textwrap
import pytest
from os.path import curdir, isdir, isfile
from pip.compat import uses_pycache, cache_from_source
from tests.lib.local_repos import local_checkout
from tests.lib import pyversion
def _patch_dist_in_site_packages(script):
sitecustomize_path = script.lib_path.join("sitecustomize.py")
sitecustomize_path.write(textwrap.dedent("""
def dist_in_site_packages(dist):
return False
from pip.req import req_install
req_install.dist_in_site_packages = dist_in_site_packages
"""))
    # Python 3.2 was caught using an outdated __pycache__ file after a
    # sitecustomize update (when Python should already have refreshed it),
    # so delete the cache file to be sure
# See: https://github.com/pypa/pip/pull/893#issuecomment-16426701
if uses_pycache:
cache_path = cache_from_source(sitecustomize_path)
if os.path.isfile(cache_path):
os.remove(cache_path)
class Tests_UserSite:
@pytest.mark.network
def test_reset_env_system_site_packages_usersite(self, script, virtualenv):
"""
reset_env(system_site_packages=True) produces env where a --user
install can be found using pkg_resources
"""
virtualenv.system_site_packages = True
script.pip('install', '--user', 'INITools==0.2')
result = script.run(
'python', '-c',
"import pkg_resources; print(pkg_resources.get_distribution"
"('initools').project_name)",
)
project_name = result.stdout.strip()
        assert 'INITools' == project_name, (
            "'%s' should be 'INITools'" % project_name
        )
@pytest.mark.network
def test_install_subversion_usersite_editable_with_distribute(
self, script, virtualenv, tmpdir):
"""
Test installing current directory ('.') into usersite after installing
distribute
"""
virtualenv.system_site_packages = True
result = script.pip(
'install', '--user', '-e',
'%s#egg=initools-dev' %
local_checkout(
'svn+http://svn.colorstudy.com/INITools/trunk',
tmpdir.join("cache"),
)
)
result.assert_installed('INITools', use_user_site=True)
def test_install_curdir_usersite(self, script, virtualenv, data):
"""
Test installing current directory ('.') into usersite
"""
virtualenv.system_site_packages = True
run_from = data.packages.join("FSPkg")
result = script.pip(
'install', '-vvv', '--user', curdir,
cwd=run_from,
expect_error=False,
)
fspkg_folder = script.user_site / 'fspkg'
egg_info_folder = (
script.user_site / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, result.stdout
assert egg_info_folder in result.files_created
def test_install_user_venv_nositepkgs_fails(self, script, data):
"""
user install in virtualenv (with no system packages) fails with message
"""
run_from = data.packages.join("FSPkg")
result = script.pip(
'install', '--user', curdir,
cwd=run_from,
expect_error=True,
)
assert (
"Can not perform a '--user' install. User site-packages are not "
"visible in this virtualenv." in result.stderr
)
@pytest.mark.network
def test_install_user_conflict_in_usersite(self, script, virtualenv):
"""
Test user install with conflict in usersite updates usersite.
"""
virtualenv.system_site_packages = True
script.pip('install', '--user', 'INITools==0.3')
result2 = script.pip('install', '--user', 'INITools==0.1')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_v3_file = (
# file only in 0.3
script.base_path / script.user_site / 'initools' /
'configparser.py'
)
assert egg_info_folder in result2.files_created, str(result2)
assert not isfile(initools_v3_file), initools_v3_file
@pytest.mark.network
def test_install_user_conflict_in_globalsite(self, script, virtualenv):
"""
Test user install with conflict in global site ignores site and
installs to usersite
"""
# the test framework only supports testing using virtualenvs
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
        #    the virtualenv site; if we don't patch this, pip will return an
# installation error: "Will not install to the usersite because it
# will lack sys.path precedence..."
        # 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2')
result2 = script.pip('install', '--user', 'INITools==0.1')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_folder = script.user_site / 'initools'
assert egg_info_folder in result2.files_created, str(result2)
assert initools_folder in result2.files_created, str(result2)
# site still has 0.2 (can't look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder)
assert isdir(initools_folder)
@pytest.mark.network
def test_upgrade_user_conflict_in_globalsite(self, script, virtualenv):
"""
Test user install/upgrade with conflict in global site ignores site and
installs to usersite
"""
# the test framework only supports testing using virtualenvs
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
        #    the virtualenv site; if we don't patch this, pip will return an
# installation error: "Will not install to the usersite because it
# will lack sys.path precedence..."
        # 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2')
result2 = script.pip('install', '--user', '--upgrade', 'INITools')
# usersite has 0.3.1
egg_info_folder = (
script.user_site / 'INITools-0.3.1-py%s.egg-info' % pyversion
)
initools_folder = script.user_site / 'initools'
assert egg_info_folder in result2.files_created, str(result2)
assert initools_folder in result2.files_created, str(result2)
# site still has 0.2 (can't look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder), result2.stdout
assert isdir(initools_folder)
@pytest.mark.network
def test_install_user_conflict_in_globalsite_and_usersite(
self, script, virtualenv):
"""
Test user install with conflict in globalsite and usersite ignores
global site and updates usersite.
"""
# the test framework only supports testing using virtualenvs.
# the sys.path ordering for virtualenvs with --system-site-packages is
# this: virtualenv-site, user-site, global-site.
# this test will use 2 modifications to simulate the
# user-site/global-site relationship
# 1) a monkey patch which will make it appear INITools==0.2 is not in
        #    the virtualenv site; if we don't patch this, pip will return an
# installation error: "Will not install to the usersite because it
# will lack sys.path precedence..."
        # 2) adding usersite to PYTHONPATH, so usersite has sys.path precedence
# over the virtualenv site
virtualenv.system_site_packages = True
script.environ["PYTHONPATH"] = script.base_path / script.user_site
_patch_dist_in_site_packages(script)
script.pip('install', 'INITools==0.2')
script.pip('install', '--user', 'INITools==0.3')
result3 = script.pip('install', '--user', 'INITools==0.1')
# usersite has 0.1
egg_info_folder = (
script.user_site / 'INITools-0.1-py%s.egg-info' % pyversion
)
initools_v3_file = (
# file only in 0.3
script.base_path / script.user_site / 'initools' /
'configparser.py'
)
assert egg_info_folder in result3.files_created, str(result3)
assert not isfile(initools_v3_file), initools_v3_file
# site still has 0.2 (can't just look in result1; have to check)
egg_info_folder = (
script.base_path / script.site_packages /
'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.base_path / script.site_packages / 'initools'
assert isdir(egg_info_folder)
assert isdir(initools_folder)
@pytest.mark.network
def test_install_user_in_global_virtualenv_with_conflict_fails(
self, script, virtualenv):
"""
Test user install in --system-site-packages virtualenv with conflict in
site fails.
"""
virtualenv.system_site_packages = True
script.pip('install', 'INITools==0.2')
result2 = script.pip(
'install', '--user', 'INITools==0.1',
expect_error=True,
)
resultp = script.run(
'python', '-c',
"import pkg_resources; print(pkg_resources.get_distribution"
"('initools').location)",
)
dist_location = resultp.stdout.strip()
assert (
"Will not install to the user site because it will lack sys.path "
"precedence to %s in %s" %
('INITools', dist_location) in result2.stderr
)
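

def _example_syspath_precedence():
    """Illustrative sketch, not part of the original suite.

    The comments above describe the sys.path ordering these tests simulate
    (virtualenv site, then user site, then global site). In a live
    interpreter the ordering can be inspected directly.
    """
    import site
    import sys
    user_site = site.getusersitepackages()
    for index, entry in enumerate(sys.path):
        marker = ' (user site)' if entry == user_site else ''
        print('%d: %s%s' % (index, entry, marker))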
| mit |
zedr/django | tests/get_earliest_or_latest/tests.py | 41 | 6119 | from __future__ import unicode_literals
from datetime import datetime
from django.test import TestCase
from .models import Article, Person
class EarliestOrLatestTests(TestCase):
"""Tests for the earliest() and latest() objects methods"""
def tearDown(self):
"""Makes sure Article has a get_latest_by"""
if not Article._meta.get_latest_by:
Article._meta.get_latest_by = 'pub_date'
def test_earliest(self):
# Because no Articles exist yet, earliest() raises ArticleDoesNotExist.
self.assertRaises(Article.DoesNotExist, Article.objects.earliest)
a1 = Article.objects.create(
headline="Article 1", pub_date=datetime(2005, 7, 26),
expire_date=datetime(2005, 9, 1)
)
a2 = Article.objects.create(
headline="Article 2", pub_date=datetime(2005, 7, 27),
expire_date=datetime(2005, 7, 28)
)
Article.objects.create(
headline="Article 3", pub_date=datetime(2005, 7, 28),
expire_date=datetime(2005, 8, 27)
)
Article.objects.create(
headline="Article 4", pub_date=datetime(2005, 7, 28),
expire_date=datetime(2005, 7, 30)
)
# Get the earliest Article.
self.assertEqual(Article.objects.earliest(), a1)
# Get the earliest Article that matches certain filters.
self.assertEqual(
Article.objects.filter(pub_date__gt=datetime(2005, 7, 26)).earliest(),
a2
)
# Pass a custom field name to earliest() to change the field that's used
# to determine the earliest object.
self.assertEqual(Article.objects.earliest('expire_date'), a2)
self.assertEqual(Article.objects.filter(
pub_date__gt=datetime(2005, 7, 26)).earliest('expire_date'), a2)
# Ensure that earliest() overrides any other ordering specified on the
# query. Refs #11283.
self.assertEqual(Article.objects.order_by('id').earliest(), a1)
# Ensure that error is raised if the user forgot to add a get_latest_by
# in the Model.Meta
Article.objects.model._meta.get_latest_by = None
self.assertRaisesMessage(
AssertionError,
"earliest() and latest() require either a field_name parameter or "
"'get_latest_by' in the model",
lambda: Article.objects.earliest(),
)
def test_latest(self):
# Because no Articles exist yet, latest() raises ArticleDoesNotExist.
self.assertRaises(Article.DoesNotExist, Article.objects.latest)
a1 = Article.objects.create(
headline="Article 1", pub_date=datetime(2005, 7, 26),
expire_date=datetime(2005, 9, 1)
)
Article.objects.create(
headline="Article 2", pub_date=datetime(2005, 7, 27),
expire_date=datetime(2005, 7, 28)
)
a3 = Article.objects.create(
headline="Article 3", pub_date=datetime(2005, 7, 27),
expire_date=datetime(2005, 8, 27)
)
a4 = Article.objects.create(
headline="Article 4", pub_date=datetime(2005, 7, 28),
expire_date=datetime(2005, 7, 30)
)
# Get the latest Article.
self.assertEqual(Article.objects.latest(), a4)
# Get the latest Article that matches certain filters.
self.assertEqual(
Article.objects.filter(pub_date__lt=datetime(2005, 7, 27)).latest(),
a1
)
# Pass a custom field name to latest() to change the field that's used
# to determine the latest object.
self.assertEqual(Article.objects.latest('expire_date'), a1)
self.assertEqual(
Article.objects.filter(pub_date__gt=datetime(2005, 7, 26)).latest('expire_date'),
a3,
)
# Ensure that latest() overrides any other ordering specified on the query. Refs #11283.
self.assertEqual(Article.objects.order_by('id').latest(), a4)
# Ensure that error is raised if the user forgot to add a get_latest_by
# in the Model.Meta
Article.objects.model._meta.get_latest_by = None
self.assertRaisesMessage(
AssertionError,
"earliest() and latest() require either a field_name parameter or "
"'get_latest_by' in the model",
lambda: Article.objects.latest(),
)
def test_latest_manual(self):
# You can still use latest() with a model that doesn't have
# "get_latest_by" set -- just pass in the field name manually.
Person.objects.create(name="Ralph", birthday=datetime(1950, 1, 1))
p2 = Person.objects.create(name="Stephanie", birthday=datetime(1960, 2, 3))
self.assertRaises(AssertionError, Person.objects.latest)
self.assertEqual(Person.objects.latest("birthday"), p2)
def test_first(self):
p1 = Person.objects.create(name="Bob", birthday=datetime(1950, 1, 1))
p2 = Person.objects.create(name="Alice", birthday=datetime(1961, 2, 3))
self.assertEqual(
Person.objects.first(), p1)
self.assertEqual(
Person.objects.order_by('name').first(), p2)
self.assertEqual(
Person.objects.filter(birthday__lte=datetime(1955, 1, 1)).first(),
p1)
self.assertIs(
Person.objects.filter(birthday__lte=datetime(1940, 1, 1)).first(),
None)
def test_last(self):
p1 = Person.objects.create(
name="Alice", birthday=datetime(1950, 1, 1))
p2 = Person.objects.create(
name="Bob", birthday=datetime(1960, 2, 3))
# Note: by default PK ordering.
self.assertEqual(
Person.objects.last(), p2)
self.assertEqual(
Person.objects.order_by('-name').last(), p1)
self.assertEqual(
Person.objects.filter(birthday__lte=datetime(1955, 1, 1)).last(),
p1)
self.assertIs(
Person.objects.filter(birthday__lte=datetime(1940, 1, 1)).last(),
None)
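
# Illustrative sketch (not part of the original tests): the AssertionError
# branches above fire when a model omits get_latest_by. A model supporting
# latest()/earliest() without a field_name argument declares it in Meta,
# roughly like this hypothetical definition:
#
#   class Article(models.Model):
#       headline = models.CharField(max_length=100)
#       pub_date = models.DateTimeField()
#
#       class Meta:
#           get_latest_by = 'pub_date'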
| bsd-3-clause |
dabiboo/youtube-dl | youtube_dl/extractor/pinkbike.py | 136 | 3411 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
remove_end,
remove_start,
str_to_int,
unified_strdate,
)
class PinkbikeIE(InfoExtractor):
_VALID_URL = r'https?://(?:(?:www\.)?pinkbike\.com/video/|es\.pinkbike\.org/i/kvid/kvid-y5\.swf\?id=)(?P<id>[0-9]+)'
_TESTS = [{
'url': 'http://www.pinkbike.com/video/402811/',
'md5': '4814b8ca7651034cd87e3361d5c2155a',
'info_dict': {
'id': '402811',
'ext': 'mp4',
'title': 'Brandon Semenuk - RAW 100',
'description': 'Official release: www.redbull.ca/rupertwalker',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 100,
'upload_date': '20150406',
'uploader': 'revelco',
'location': 'Victoria, British Columbia, Canada',
'view_count': int,
'comment_count': int,
}
}, {
'url': 'http://es.pinkbike.org/i/kvid/kvid-y5.swf?id=406629',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
'http://www.pinkbike.com/video/%s' % video_id, video_id)
formats = []
for _, format_id, src in re.findall(
r'data-quality=((?:\\)?["\'])(.+?)\1[^>]+src=\1(.+?)\1', webpage):
height = int_or_none(self._search_regex(
r'^(\d+)[pP]$', format_id, 'height', default=None))
formats.append({
'url': src,
'format_id': format_id,
'height': height,
})
self._sort_formats(formats)
title = remove_end(self._og_search_title(webpage), ' Video - Pinkbike')
description = self._html_search_regex(
r'(?s)id="media-description"[^>]*>(.+?)<',
webpage, 'description', default=None) or remove_start(
self._og_search_description(webpage), title + '. ')
thumbnail = self._og_search_thumbnail(webpage)
duration = int_or_none(self._html_search_meta(
'video:duration', webpage, 'duration'))
uploader = self._search_regex(
r'un:\s*"([^"]+)"', webpage, 'uploader', fatal=False)
upload_date = unified_strdate(self._search_regex(
r'class="fullTime"[^>]+title="([^"]+)"',
webpage, 'upload date', fatal=False))
location = self._html_search_regex(
r'(?s)<dt>Location</dt>\s*<dd>(.+?)<',
webpage, 'location', fatal=False)
def extract_count(webpage, label):
return str_to_int(self._search_regex(
r'<span[^>]+class="stat-num"[^>]*>([\d,.]+)</span>\s*<span[^>]+class="stat-label"[^>]*>%s' % label,
webpage, label, fatal=False))
view_count = extract_count(webpage, 'Views')
comment_count = extract_count(webpage, 'Comments')
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'upload_date': upload_date,
'uploader': uploader,
'location': location,
'view_count': view_count,
'comment_count': comment_count,
'formats': formats
}
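

def _example_quality_regex():
    """Illustrative sketch (not part of the extractor): how the data-quality
    regular expression used in _real_extract() pairs a format label with its
    source URL. The sample markup below is hypothetical."""
    sample = '<source data-quality="720p" type="video/mp4" src="http://example.com/v.mp4">'
    for _, format_id, src in re.findall(
            r'data-quality=((?:\\)?["\'])(.+?)\1[^>]+src=\1(.+?)\1', sample):
        print('%s %s' % (format_id, src))  # -> 720p http://example.com/v.mp4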
| unlicense |
KDB2/OpenReliability | veusz/plugins/datasetplugin.py | 2 | 74727 | # -*- coding: utf-8 -*-
# Copyright (C) 2010 Jeremy S. Sanders
# Email: Jeremy Sanders <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
"""Plugins for creating datasets."""
from __future__ import division, print_function
import numpy as N
from . import field
from ..compat import czip, citems, cstr
from .. import utils
from .. import datasets
try:
from ..helpers import qtloops
except ImportError:
pass
from .. import qtall as qt4
def _(text, disambiguation=None, context='DatasetPlugin'):
"""Translate text."""
return qt4.QCoreApplication.translate(context, text, disambiguation)
# add an instance of your class to this list to be registered
datasetpluginregistry = []
class DatasetPluginException(RuntimeError):
"""Raise this to report an error.
"""
pass
def numpyCopyOrNone(data):
"""If data is None return None
Otherwise return a numpy array corresponding to data."""
if data is None:
return None
return N.array(data, dtype=N.float64)
# these classes are returned from dataset plugins
class Dataset1D(object):
"""1D dataset for ImportPlugin or DatasetPlugin."""
def __init__(self, name, data=[], serr=None, perr=None, nerr=None):
"""1D dataset
name: name of dataset
data: data in dataset: list of floats or numpy 1D array
serr: (optional) symmetric errors on data: list or numpy array
perr: (optional) positive errors on data: list or numpy array
nerr: (optional) negative errors on data: list or numpy array
        If errors are given for the data, provide serr, or nerr and perr.
nerr should be negative values if used.
perr should be positive values if used.
"""
self.name = name
self.update(data=data, serr=serr, perr=perr, nerr=nerr)
def update(self, data=[], serr=None, perr=None, nerr=None):
"""Update values to those given."""
self.data = numpyCopyOrNone(data)
self.serr = numpyCopyOrNone(serr)
self.perr = numpyCopyOrNone(perr)
self.nerr = numpyCopyOrNone(nerr)
def _null(self):
"""Empty data contents."""
self.data = N.array([])
self.serr = self.perr = self.nerr = None
def _makeVeuszDataset(self, manager):
"""Make a Veusz dataset from the plugin dataset."""
return datasets.Dataset1DPlugin(manager, self)
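
# Illustrative sketch (not part of the original module): constructing a
# Dataset1D with asymmetric error bars; per the constructor docstring, nerr
# holds negative values and perr positive ones.
def _example_dataset1d():
    return Dataset1D(
        'measurements',
        data=[1.0, 2.0, 3.0],
        perr=[0.1, 0.2, 0.1],
        nerr=[-0.2, -0.1, -0.1])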
class Dataset2D(object):
"""2D dataset for ImportPlugin or DatasetPlugin."""
def __init__(self, name, data=[[]], rangex=None, rangey=None,
xedge=None, yedge=None,
xcent=None, ycent=None):
"""2D dataset.
name: name of dataset
data: 2D numpy array of values or list of lists of floats
rangex: optional tuple with X range of data (min, max)
rangey: optional tuple with Y range of data (min, max)
xedge: x values for grid (instead of rangex)
yedge: y values for grid (instead of rangey)
xcent: x values for pixel centres (instead of rangex)
ycent: y values for pixel centres (instead of rangey)
"""
self.name = name
self.update(data=data, rangex=rangex, rangey=rangey,
xedge=xedge, yedge=yedge,
xcent=xcent, ycent=ycent)
def update(self, data=[[]], rangex=None, rangey=None,
xedge=None, yedge=None,
xcent=None, ycent=None):
self.data = N.array(data, dtype=N.float64)
self.rangex = rangex
self.rangey = rangey
self.xedge = xedge
self.yedge = yedge
self.xcent = xcent
self.ycent = ycent
def _null(self):
"""Empty data contents."""
self.data = N.array([[]])
self.rangex = self.rangey = (0, 1)
self.xedge = self.yedge = self.xcent = self.ycent = None
def _makeVeuszDataset(self, manager):
"""Make a Veusz dataset from the plugin dataset."""
return datasets.Dataset2DPlugin(manager, self)
class DatasetDateTime(object):
"""Date-time dataset for ImportPlugin or DatasetPlugin."""
def __init__(self, name, data=[]):
"""A date dataset
name: name of dataset
data: list of datetime objects
"""
self.name = name
self.update(data=data)
def update(self, data=[]):
self.data = N.array(data)
@staticmethod
def datetimeToFloat(datetimeval):
"""Return a python datetime object to the required float type."""
return utils.datetimeToFloat(datetimeval)
@staticmethod
def dateStringToFloat(text):
"""Try to convert an iso or local date time to the float type."""
return utils.dateStringToDate(text)
@staticmethod
def floatToDateTime(val):
"""Convert float format datetime to Python datetime."""
return utils.floatToDateTime(val)
def _null(self):
"""Empty data contents."""
self.data = N.array([])
def _makeVeuszDataset(self, manager):
"""Make a Veusz dataset from the plugin dataset."""
return datasets.DatasetDateTimePlugin(manager, self)
class DatasetText(object):
"""Text dataset for ImportPlugin or DatasetPlugin."""
def __init__(self, name, data=[]):
"""A text dataset
name: name of dataset
data: data in dataset: list of strings
"""
self.name = name
self.update(data=data)
def update(self, data=[]):
self.data = list(data)
def _null(self):
"""Empty data contents."""
self.data = []
def _makeVeuszDataset(self, manager):
"""Make a Veusz dataset from the plugin dataset."""
return datasets.DatasetTextPlugin(manager, self)
class Constant(object):
"""Dataset to return to set a Veusz constant after import.
This is only useful in an ImportPlugin, not a DatasetPlugin
"""
def __init__(self, name, val):
"""Map string value val to name.
Convert float vals to strings first!"""
self.name = name
self.val = val
class Function(object):
"""Dataset to return to set a Veusz function after import."""
def __init__(self, name, val):
"""Map string value val to name.
name is "funcname(param,...)", val is a text expression of param.
This is only useful in an ImportPlugin, not a DatasetPlugin
"""
self.name = name
self.val = val
# class to pass to plugin to give parameters
class DatasetPluginHelper(object):
"""Helpers to get existing datasets for plugins."""
def __init__(self, doc):
"""Construct helper object to pass to DatasetPlugins."""
self._doc = doc
@property
def datasets1d(self):
"""Return list of existing 1D numeric datasets"""
return [name for name, ds in citems(self._doc.data) if
(ds.dimensions == 1 and ds.datatype == 'numeric')]
@property
def datasets2d(self):
"""Return list of existing 2D numeric datasets"""
return [name for name, ds in citems(self._doc.data) if
(ds.dimensions == 2 and ds.datatype == 'numeric')]
@property
def datasetstext(self):
"""Return list of existing 1D text datasets"""
return [name for name, ds in citems(self._doc.data) if
(ds.dimensions == 1 and ds.datatype == 'text')]
@property
def datasetsdatetime(self):
"""Return list of existing date-time datesets"""
return [name for name, ds in citems(self._doc.data) if
isinstance(ds, datasets.DatasetDateTime)]
@property
def locale(self):
"""Return Qt locale."""
return self._doc.locale
def evaluateExpression(self, expr, part='data'):
"""Return results of evaluating a 1D dataset expression.
part is 'data', 'serr', 'perr' or 'nerr' - these are the
dataset parts which are evaluated by the expression
Returns None if expression could not be evaluated.
"""
ds = self._doc.evalDatasetExpression(expr, part=part)
return None if ds is None else ds.data
def getDataset(self, name, dimensions=1):
"""Return numerical dataset object for name given.
Please make sure that dataset data are not modified.
name: name of dataset
dimensions: number of dimensions dataset requires
name not found: raise a DatasetPluginException
dimensions not right: raise a DatasetPluginException
"""
try:
ds = self._doc.data[name]
except KeyError:
raise DatasetPluginException(_("Unknown dataset '%s'") % name)
if ds.dimensions != dimensions:
raise DatasetPluginException(
_("Dataset '%s' does not have %i dimensions") % (
name, dimensions))
if ds.datatype != 'numeric':
raise DatasetPluginException(
_("Dataset '%s' is not a numerical dataset") % name)
if isinstance(ds, datasets.DatasetDateTime):
return DatasetDateTime(name, data=ds.data)
elif ds.dimensions == 1:
return Dataset1D(name, data=ds.data, serr=ds.serr,
perr=ds.perr, nerr=ds.nerr)
elif ds.dimensions == 2:
return Dataset2D(name, ds.data,
rangex=ds.xrange, rangey=ds.yrange,
xedge=ds.xedge, yedge=ds.yedge,
xcent=ds.xcent, ycent=ds.ycent)
else:
raise RuntimeError("Invalid number of dimensions in dataset")
def getDatasets(self, names, dimensions=1):
"""Get a list of numerical datasets (of the dimension given)."""
return [ self.getDataset(n, dimensions=dimensions) for n in names ]
def getTextDataset(self, name):
"""Return a text dataset with name given.
Do not modify this dataset.
name not found: raise a DatasetPluginException
"""
try:
ds = self._doc.data[name]
except KeyError:
raise DatasetPluginException(_("Unknown dataset '%s'") % name)
if ds.datatype == 'text':
return DatasetText(name, ds.data)
raise DatasetPluginException(_("Dataset '%s' is not a text datset") % name)
# internal object to synchronise datasets created by a plugin
class DatasetPluginManager(object):
"""Manage datasets generated by plugin."""
def __init__(self, plugin, doc, fields):
"""Construct manager object.
plugin - instance of plugin class
doc - document instance
fields - fields to pass to plugin
"""
self.plugin = plugin
self.document = doc
self.helper = DatasetPluginHelper(doc)
self.fields = dict(fields)
self.changeset = -1
self.fixMissingFields()
self.setupDatasets()
def fixMissingFields(self):
"""If fields are missing, use defaults."""
for pluginfield in self.plugin.fields:
if pluginfield.name not in self.fields:
self.fields[pluginfield.name] = pluginfield.default
def setupDatasets(self):
"""Do initial construction of datasets."""
self.datasetnames = []
self.datasets = []
self.veuszdatasets = []
self.datasets = self.plugin.getDatasets(self.fields)
for ds in self.datasets:
self.datasetnames.append(ds.name)
veuszds = ds._makeVeuszDataset(self)
veuszds.document = self.document
self.veuszdatasets.append(veuszds)
def nullDatasets(self):
"""Clear out contents of datasets."""
for ds in self.datasets:
ds._null()
def saveToFile(self, fileobj):
"""Save command to load in plugin and parameters."""
args = [ repr(self.plugin.name), repr(self.fields) ]
# look for renamed or deleted datasets
names = {}
for ds, dsname in czip( self.veuszdatasets, self.datasetnames ):
try:
currentname = self.document.datasetName(ds)
except ValueError:
# deleted
currentname = None
if currentname != dsname:
names[dsname] = currentname
if names:
args.append( "datasetnames="+repr(names) )
fileobj.write( 'DatasetPlugin(%s)\n' % (', '.join(args)) )
def update(self, raiseerrors=False):
"""Update created datasets.
        If raiseerrors is True, raise an exception if an exception occurs
        when updating the dataset.
"""
if self.document.changeset == self.changeset:
return
self.changeset = self.document.changeset
# run the plugin with its parameters
try:
self.plugin.updateDatasets(self.fields, self.helper)
except DatasetPluginException as ex:
# this is for immediate notification
if raiseerrors:
raise
# otherwise if there's an error, then log and null outputs
self.document.log( cstr(ex) )
self.nullDatasets()
class DatasetPlugin(object):
"""Base class for defining dataset plugins."""
# the plugin will get inserted into the menu in a hierarchy based on
# the elements of this tuple
menu = ('Base plugin',)
name = 'Base plugin'
author = ''
description_short = ''
description_full = ''
# if the plugin takes no parameters, set this to False
has_parameters = True
def __init__(self):
"""Override this to declare a list of input fields if required."""
self.fields = []
def getDatasets(self, fields):
"""Override this to return a list of (empty) Dataset1D,
Dataset2D and DatasetText objects to provide the initial names
and type of datasets.
These should be saved for updating in updateDatasets.
fields: dict of results to the field objects given in self.fields
raise a DatasetPluginException if there is a problem with fields
"""
return []
def updateDatasets(self, fields, helper):
"""Override this to update the dataset objects provided by this plugin.
fields: dict of field results (also provided to setup)
helper: DatasetPluginHelper object, to get other datasets in document
raise a DatasetPluginException if there is a problem
"""
class _OneOutputDatasetPlugin(DatasetPlugin):
"""Simplify plugins which create one output with field ds_out."""
def getDatasets(self, fields):
"""Returns single output dataset (self.dsout)."""
if fields['ds_out'] == '':
raise DatasetPluginException(_('Invalid output dataset name'))
self.dsout = Dataset1D(fields['ds_out'])
return [self.dsout]
def errorBarType(ds):
"""Return type of error bars in list of datasets.
'none', 'symmetric', 'asymmetric'
"""
symerr = False
for d in ds:
if d.serr is not None:
symerr = True
elif d.perr is not None or d.nerr is not None:
return 'asymmetric'
if symerr:
return 'symmetric'
return 'none'
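
# For example, errorBarType([Dataset1D('a', [1.0], serr=[0.1])]) returns
# 'symmetric', while including any dataset carrying perr or nerr promotes the
# result to 'asymmetric'.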
def combineAddedErrors(inds, length):
"""Combine error bars from list of input dataset, adding
errors squared (suitable for adding/subtracting)."""
errortype = errorBarType(inds)
serr = perr = nerr = None
if errortype == 'symmetric':
serr = N.zeros(length, dtype=N.float64)
elif errortype == 'asymmetric':
perr = N.zeros(length, dtype=N.float64)
nerr = N.zeros(length, dtype=N.float64)
for d in inds:
f = N.isfinite(d.data)
if errortype == 'symmetric' and d.serr is not None:
serr[f] += d.serr[f]**2
elif errortype == 'asymmetric':
if d.serr is not None:
v = (d.serr[f])**2
perr[f] += v
nerr[f] += v
if d.perr is not None:
perr[f] += (d.perr[f])**2
if d.nerr is not None:
nerr[f] += (d.nerr[f])**2
if serr is not None: serr = N.sqrt(serr)
if perr is not None: perr = N.sqrt(perr)
if nerr is not None: nerr = -N.sqrt(nerr)
return serr, perr, nerr
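
# Worked example of the quadrature rule above: adding two values with
# symmetric errors 3 and 4 gives a combined error of sqrt(3**2 + 4**2) = 5.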
def combineMultipliedErrors(inds, length, data):
"""Combine error bars from list of input dataset, adding
fractional errors squared (suitable for multipling/dividing)."""
errortype = errorBarType(inds)
serr = perr = nerr = None
if errortype == 'symmetric':
serr = N.zeros(length, dtype=N.float64)
elif errortype == 'asymmetric':
perr = N.zeros(length, dtype=N.float64)
nerr = N.zeros(length, dtype=N.float64)
for d in inds:
f = N.isfinite(d.data)
if len(f) > length:
f = f[:length]
if errortype == 'symmetric' and d.serr is not None:
serr[f] += (d.serr[f]/d.data[f])**2
elif errortype == 'asymmetric':
if d.serr is not None:
v = (d.serr[f]/d.data[f])**2
perr[f] += v
nerr[f] += v
if d.perr is not None:
perr[f] += (d.perr[f]/d.data[f])**2
if d.nerr is not None:
nerr[f] += (d.nerr[f]/d.data[f])**2
if serr is not None: serr = N.abs(N.sqrt(serr) * data)
if perr is not None: perr = N.abs(N.sqrt(perr) * data)
if nerr is not None: nerr = -N.abs(N.sqrt(nerr) * data)
return serr, perr, nerr
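
# Here fractional errors add in quadrature: multiplying values carrying 10%
# and 20% relative errors gives sqrt(0.1**2 + 0.2**2), about 22.4%, on the
# result.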
###########################################################################
## Real plugins are below
class MultiplyDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to scale a dataset."""
menu = (_('Multiply'), _('By constant'),)
name = 'Multiply'
description_short = _('Multiply dataset by a constant')
description_full = _('Multiply a dataset by a factor. '
'Error bars are also scaled.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldFloat('factor', _('Factor'), default=1.),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do scaling of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
f = fields['factor']
data, serr, perr, nerr = ds_in.data, ds_in.serr, ds_in.perr, ds_in.nerr
data = data * f
if serr is not None: serr = serr * f
if perr is not None: perr = perr * f
if nerr is not None: nerr = nerr * f
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class AddDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to add a constant to a dataset."""
menu = (_('Add'), _('Constant'),)
name = 'Add'
description_short = _('Add a constant to a dataset')
description_full = _('Add a dataset by adding a value. '
'Error bars remain the same.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldFloat('value', _('Add value'), default=0.),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do shifting of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
self.dsout.update(data = ds_in.data + fields['value'],
serr=ds_in.serr, perr=ds_in.perr, nerr=ds_in.nerr)
class ConcatenateDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to concatenate datasets."""
menu = (_('Join'), _('Concatenate'),)
name = 'Concatenate'
description_short = _('Concatenate datasets')
description_full = _('Concatenate datasets into single dataset.\n'
'Error bars are merged.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDatasetMulti('ds_in', _('Input datasets')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do concatenation of dataset."""
dsin = helper.getDatasets(fields['ds_in'])
if len(dsin) == 0:
raise DatasetPluginException(_('Requires one or more input datasets'))
# concatenate main data
dstack = N.hstack([d.data for d in dsin])
sstack = pstack = nstack = None
# what sort of error bars do we need?
errortype = errorBarType(dsin)
if errortype == 'symmetric':
# symmetric and not asymmetric error bars
sstack = []
for d in dsin:
if d.serr is not None:
sstack.append(d.serr)
else:
sstack.append(N.zeros(d.data.shape, dtype=N.float64))
sstack = N.hstack(sstack)
elif errortype == 'asymmetric':
# asymmetric error bars
pstack = []
nstack = []
for d in dsin:
p = n = N.zeros(d.data.shape, dtype=N.float64)
if d.serr is not None:
p, n = d.serr, -d.serr
else:
if d.perr is not None: p = d.perr
if d.nerr is not None: n = d.nerr
pstack.append(p)
nstack.append(n)
pstack = N.hstack(pstack)
nstack = N.hstack(nstack)
self.dsout.update(data=dstack, serr=sstack, perr=pstack, nerr=nstack)
class InterleaveDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to interleave datasets."""
menu = (_('Join'), _('Element by element'),)
name = 'Interleave'
description_short = _('Join datasets, interleaving element by element')
description_full = _('Join datasets, interleaving element by element.\n'
'Error bars are merged.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDatasetMulti('ds_in', _('Input datasets')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do concatenation of dataset."""
dsin = helper.getDatasets(fields['ds_in'])
if len(dsin) == 0:
raise DatasetPluginException(_('Requires one or more input datasets'))
maxlength = max( [len(d.data) for d in dsin] )
def interleave(datasets):
"""This is complex to account for different length datasets."""
# stick in columns
ds = [ N.hstack( (d, N.zeros(maxlength-len(d))) )
for d in datasets ]
# which elements are valid
good = [ N.hstack( (N.ones(len(d), dtype=N.bool),
N.zeros(maxlength-len(d), dtype=N.bool)) )
for d in datasets ]
intl = N.column_stack(ds).reshape(maxlength*len(datasets))
goodintl = N.column_stack(good).reshape(maxlength*len(datasets))
return intl[goodintl]
# do interleaving
data = interleave([d.data for d in dsin])
# interleave error bars
errortype = errorBarType(dsin)
serr = perr = nerr = None
if errortype == 'symmetric':
slist = []
for ds in dsin:
if ds.serr is None:
slist.append(N.zeros_like(ds.data))
else:
slist.append(ds.serr)
serr = interleave(slist)
elif errortype == 'asymmetric':
plist = []
nlist = []
for ds in dsin:
if ds.serr is not None:
plist.append(ds.serr)
nlist.append(-ds.serr)
else:
if ds.perr is not None:
plist.append(ds.perr)
else:
plist.append(N.zeros_like(ds.data))
if ds.nerr is not None:
nlist.append(ds.nerr)
else:
nlist.append(N.zeros_like(ds.data))
perr = interleave(plist)
nerr = interleave(nlist)
# finally update
self.dsout.update(data=data, serr=serr, nerr=nerr, perr=perr)
class ChopDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to chop datasets."""
menu = (_('Split'), _('Chop'),)
name = 'Chop'
description_short = _('Chop dataset part into new dataset')
description_full = _('Chop out a section of a dataset. Give starting '
'index of data and number of datapoints to take.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldInt('start', _('Starting index (from 1)'), default=1),
field.FieldInt('num', _('Maximum number of datapoints'), default=1),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do chopping of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
start = fields['start']
num = fields['num']
data, serr, perr, nerr = ds_in.data, ds_in.serr, ds_in.perr, ds_in.nerr
# chop the data
data = data[start-1:start-1+num]
if serr is not None: serr = serr[start-1:start-1+num]
if perr is not None: perr = perr[start-1:start-1+num]
if nerr is not None: nerr = nerr[start-1:start-1+num]
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class PartsDatasetPlugin(DatasetPlugin):
"""Dataset plugin to split datasets into parts."""
menu = (_('Split'), _('Parts'),)
name = 'Parts'
description_short = _('Split dataset into equal-size parts')
description_full = _('Split dataset into equal-size parts. '
'The parts will differ in size if the dataset '
'cannot be split equally.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldDatasetMulti('ds_out', _('Output datasets')),
]
def getDatasets(self, fields):
"""Get output datasets."""
self.dsout = []
for d in fields['ds_out']:
if d.strip() != '':
self.dsout.append( Dataset1D(d.strip()) )
if len(self.dsout) == 0:
raise DatasetPluginException(_('Needs at least one output dataset'))
return self.dsout
def updateDatasets(self, fields, helper):
"""Do chopping of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
data, serr, perr, nerr = ds_in.data, ds_in.serr, ds_in.perr, ds_in.nerr
plen = len(data) / len(self.dsout)
for i, ds in enumerate(self.dsout):
minv, maxv = int(plen*i), int(plen*(i+1))
pserr = pperr = pnerr = None
pdata = data[minv:maxv]
if serr is not None: pserr = serr[minv:maxv]
if perr is not None: pperr = perr[minv:maxv]
if nerr is not None: pnerr = nerr[minv:maxv]
ds.update(data=pdata, serr=pserr, perr=pperr, nerr=pnerr)
class ThinDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to thin datasets."""
menu = (_('Split'), _('Thin'),)
name = 'Thin'
description_short = _('Select data points at intervals from dataset')
description_full = _('Select data points at intervals from dataset '
'to create new dataset')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldInt('start', _('Starting index (from 1)'), default=1),
field.FieldInt('interval', _('Interval between data points'), default=1),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do thinning of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
start = fields['start']
interval = fields['interval']
data, serr, perr, nerr = ds_in.data, ds_in.serr, ds_in.perr, ds_in.nerr
data = data[start-1::interval]
if serr is not None: serr = serr[start-1::interval]
if perr is not None: perr = perr[start-1::interval]
if nerr is not None: nerr = nerr[start-1::interval]
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class MeanDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to mean datasets together."""
menu = (_('Compute'), _('Mean of datasets'),)
name = 'Mean'
description_short = _('Compute mean of datasets')
description_full = _('Compute mean of multiple datasets to create '
'a single dataset.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDatasetMulti('ds_in', _('Input datasets')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Compute means of dataset."""
inds = helper.getDatasets(fields['ds_in'])
if len(inds) == 0:
raise DatasetPluginException(_('Requires one or more input datasets'))
maxlength = max( [len(d.data) for d in inds] )
# mean data (only use finite values)
tot = N.zeros(maxlength, dtype=N.float64)
        num = N.zeros(maxlength, dtype=int)
for d in inds:
f = N.isfinite(d.data)
tot[f] += d.data[f]
num[f] += 1
data = tot / num
def averageError(errtype, fallback=None):
"""Get average for an error value."""
tot = N.zeros(maxlength, dtype=N.float64)
            num = N.zeros(maxlength, dtype=int)
for d in inds:
vals = getattr(d, errtype)
if vals is None and fallback:
vals = getattr(d, fallback)
# add values if not missing
if vals is not None:
f = N.isfinite(vals)
tot[f] += (vals[f]) ** 2
num[f] += 1
else:
# treat as zero errors if missing errors
num[:len(d.data)] += 1
return N.sqrt(tot) / num
# do error bar handling
serr = perr = nerr = None
errortype = errorBarType(inds)
if errortype == 'symmetric':
serr = averageError('serr')
elif errortype == 'asymmetric':
perr = averageError('perr', fallback='serr')
nerr = -averageError('nerr', fallback='serr')
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
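# Illustrative sketch (not part of the original plugin set): averageError above
# combines the per-dataset errors in quadrature and divides by the per-point
# count, i.e. the standard error of a mean of independent values.
def _demo_mean_error_propagation():
    errs = N.array([3., 4.])
    combined = N.sqrt(N.sum(errs**2)) / len(errs)
    assert N.isclose(combined, 2.5)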
class AddDatasetsPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to mean datasets together."""
menu = (_('Add'), _('Datasets'),)
name = 'Add Datasets'
description_short = _('Add two or more datasets together')
description_full = _('Add datasets together to make a single dataset. '
'Error bars are combined.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDatasetMulti('ds_in', _('Input datasets')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Compute means of dataset."""
inds = helper.getDatasets(fields['ds_in'])
if len(inds) == 0:
raise DatasetPluginException(_('Requires one or more input datasets'))
maxlength = max( [len(d.data) for d in inds] )
# add data where finite
data = N.zeros(maxlength, dtype=N.float64)
        anyfinite = N.zeros(maxlength, dtype=bool)
for d in inds:
f = N.isfinite(d.data)
data[f] += d.data[f]
anyfinite[f] = True
data[N.logical_not(anyfinite)] = N.nan
# handle error bars
serr, perr, nerr = combineAddedErrors(inds, maxlength)
# update output dataset
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class SubtractDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to subtract two datasets."""
menu = (_('Subtract'), _('Datasets'),)
name = 'Subtract Datasets'
description_short = _('Subtract two datasets')
description_full = _('Subtract two datasets. '
'Combined error bars are also calculated.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in1', _('Input dataset 1')),
field.FieldDataset('ds_in2', _('Input dataset 2')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do scaling of dataset."""
dsin1 = helper.getDataset(fields['ds_in1'])
dsin2 = helper.getDataset(fields['ds_in2'])
minlength = min( len(dsin1.data), len(dsin2.data) )
data = dsin1.data[:minlength] - dsin2.data[:minlength]
        # computing error bars is non-trivial!
serr = perr = nerr = None
errortype = errorBarType([dsin1, dsin2])
if errortype == 'symmetric':
serr1 = serr2 = 0
if dsin1.serr is not None:
serr1 = dsin1.serr[:minlength]
if dsin2.serr is not None:
serr2 = dsin2.serr[:minlength]
serr = N.sqrt(serr1**2 + serr2**2)
elif errortype == 'asymmetric':
perr1 = perr2 = nerr1 = nerr2 = 0
if dsin1.serr is not None:
perr1 = nerr1 = dsin1.serr[:minlength]
else:
if dsin1.perr is not None: perr1 = dsin1.perr[:minlength]
if dsin1.nerr is not None: nerr1 = dsin1.nerr[:minlength]
if dsin2.serr is not None:
perr2 = nerr2 = dsin2.serr[:minlength]
else:
if dsin2.perr is not None: perr2 = dsin2.perr[:minlength]
if dsin2.nerr is not None: nerr2 = dsin2.nerr[:minlength]
perr = N.sqrt(perr1**2 + nerr2**2)
nerr = -N.sqrt(nerr1**2 + perr2**2)
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
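# Illustrative sketch (not part of the original plugin set): for a difference
# a-b, the positive error of the result combines a's positive error with b's
# negative error in quadrature, which is what the asymmetric branch above does.
def _demo_subtract_error_combination():
    perr1, nerr2 = 0.3, 0.4   # +ve error on a, magnitude of -ve error on b
    assert N.isclose(N.sqrt(perr1**2 + nerr2**2), 0.5)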
class SubtractMeanDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to subtract mean from dataset."""
menu = (_('Subtract'), _('Mean'),)
name = 'Subtract Mean'
description_short = _('Subtract mean from dataset')
description_full = _('Subtract mean from dataset,'
' optionally dividing by standard deviation.')
def __init__(self):
"""Define fields."""
self.fields = [
            field.FieldDataset('ds_in', _('Input dataset')),
field.FieldBool('divstddev', _('Divide by standard deviation')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do scaling of dataset."""
dsin = helper.getDataset(fields['ds_in'])
vals = dsin.data
if len(vals) > 0:
mean = vals[N.isfinite(vals)].mean()
vals = vals - mean
if fields['divstddev']:
if len(vals) > 0:
vals /= vals[N.isfinite(vals)].std()
self.dsout.update(
data=vals, serr=dsin.serr, perr=dsin.perr, nerr=dsin.nerr)
class SubtractMinimumDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to subtract minimum from dataset."""
menu = (_('Subtract'), _('Minimum'),)
name = 'Subtract Minimum'
description_short = _('Subtract minimum from dataset')
description_full = _('Subtract the minimum value from a dataset')
def __init__(self):
"""Define fields."""
self.fields = [
            field.FieldDataset('ds_in', _('Input dataset')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Do subtraction of dataset."""
dsin = helper.getDataset(fields['ds_in'])
data = dsin.data
filtered = data[N.isfinite(data)]
if len(filtered) != 0:
data = data - filtered.min()
self.dsout.update(
data=data, serr=dsin.serr, perr=dsin.perr, nerr=dsin.nerr)
class MultiplyDatasetsPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to multiply two or more datasets."""
menu = (_('Multiply'), _('Datasets'),)
name = 'Multiply Datasets'
description_short = _('Multiply two or more datasets')
description_full = _('Multiply two or more datasets. '
'Combined error bars are also calculated.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDatasetMulti('ds_in', _('Input datasets')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Multiply the datasets."""
names = fields['ds_in']
inds = [ helper.getDataset(d) for d in names ]
maxlength = max( [d.data.shape[0] for d in inds] )
# output data and where data is finite
data = N.ones(maxlength, dtype=N.float64)
        anyfinite = N.zeros(maxlength, dtype=bool)
for d in inds:
f = N.isfinite(d.data)
anyfinite[f] = True
data[f] *= d.data[f]
# where always NaN, make NaN
data[N.logical_not(anyfinite)] = N.nan
# get error bars
serr, perr, nerr = combineMultipliedErrors(inds, maxlength, data)
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class DivideDatasetsPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to divide two datasets."""
menu = (_('Divide'), _('Datasets'),)
name = 'Divide Datasets'
description_short = _('Compute ratio or fractional difference'
' between two datasets')
description_full = _('Divide or compute fractional difference'
' between two datasets')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in1', _('Input dataset 1')),
field.FieldDataset('ds_in2', _('Input dataset 2')),
field.FieldBool('frac', _('Compute fractional difference'),
default=False),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
"""Compute ratio."""
inds1 = helper.getDataset( fields['ds_in1'] )
inds2 = helper.getDataset( fields['ds_in2'] )
length = min( len(inds1.data), len(inds2.data) )
# compute ratio
data = inds1.data[:length] / inds2.data[:length]
# get error bars
serr, perr, nerr = combineMultipliedErrors([inds1, inds2], length, data)
# convert to fractional difference (if reqd)
if fields['frac']:
data -= 1
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class DivideMaxPlugin(_OneOutputDatasetPlugin):
"""Plugin to divide by maximum of dataset."""
menu = (_('Divide'), _('By maximum'),)
name = 'Divide Maximum'
description_short = description_full = _('Divide dataset by its maximum')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
inds = helper.getDataset( fields['ds_in'] )
data = inds.data
filtered = data[N.isfinite(data)]
if len(filtered) == 0:
maxval = N.nan
else:
maxval = filtered.max()
# divide data
data = data / maxval
# divide error bars
serr = perr = nerr = None
if inds.serr is not None: serr = inds.serr / maxval
if inds.perr is not None: perr = inds.perr / maxval
if inds.nerr is not None: nerr = inds.nerr / maxval
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class DivideNormalizePlugin(_OneOutputDatasetPlugin):
"""Plugin to normalize dataset."""
menu = (_('Divide'), _('Normalize'),)
name = 'Normalize'
description_short = description_full = _(
'Divide dataset by its sum of values')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def updateDatasets(self, fields, helper):
inds = helper.getDataset( fields['ds_in'] )
data = inds.data
filtered = data[N.isfinite(data)]
if len(filtered) == 0:
tot = 0
else:
tot = N.sum(filtered)
# divide data
data = data / tot
# divide error bars
serr = perr = nerr = None
if inds.serr is not None: serr = inds.serr / tot
if inds.perr is not None: perr = inds.perr / tot
if inds.nerr is not None: nerr = inds.nerr / tot
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class ExtremesDatasetPlugin(DatasetPlugin):
"""Dataset plugin to get extremes of dataset."""
menu = (_('Compute'), _('Dataset extremes'),)
name = 'Extremes'
description_short = _('Compute extreme values of input datasets')
description_full = _('Compute extreme values of input datasets. Creates '
'minimum and maximum datasets.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDatasetMulti('ds_in', _('Input datasets')),
field.FieldBool('errorbars', _('Include error bars')),
field.FieldDataset('ds_min', _('Output minimum dataset (optional)')),
field.FieldDataset('ds_max', _('Output maximum dataset (optional)')),
field.FieldDataset('ds_errorbar', _('Output range as error bars '
'in dataset (optional)')),
]
def getDatasets(self, fields):
"""Returns output dataset."""
dsout = []
self.dsmin = self.dsmax = self.dserror = None
if fields['ds_min'] != '':
self.dsmin = Dataset1D(fields['ds_min'])
dsout.append(self.dsmin)
if fields['ds_max'] != '':
self.dsmax = Dataset1D(fields['ds_max'])
dsout.append(self.dsmax)
if fields['ds_errorbar'] != '':
self.dserror = Dataset1D(fields['ds_errorbar'])
dsout.append(self.dserror)
if not dsout:
raise DatasetPluginException(_('Provide at least one output dataset'))
return dsout
def updateDatasets(self, fields, helper):
"""Compute extremes of datasets."""
names = fields['ds_in']
inds = [ helper.getDataset(d) for d in names ]
maxlength = max( [d.data.shape[0] for d in inds] )
minvals = N.zeros(maxlength, dtype=N.float64) + 1e100
maxvals = N.zeros(maxlength, dtype=N.float64) - 1e100
        anyfinite = N.zeros(maxlength, dtype=bool)
for d in inds:
f = N.isfinite(d.data)
anyfinite[f] = True
v = d.data
if fields['errorbars']:
if d.serr is not None:
v = v - d.serr
elif d.nerr is not None:
v = v + d.nerr
minvals[f] = N.min( (minvals[f], v[f]), axis=0 )
v = d.data
if fields['errorbars']:
if d.serr is not None:
v = v + d.serr
elif d.perr is not None:
v = v + d.perr
maxvals[f] = N.max( (maxvals[f], v[f]), axis=0 )
minvals[N.logical_not(anyfinite)] = N.nan
maxvals[N.logical_not(anyfinite)] = N.nan
if self.dsmin is not None:
self.dsmin.update(data=minvals)
if self.dsmax is not None:
self.dsmax.update(data=maxvals)
if self.dserror is not None:
# compute mean and look at differences from it
tot = N.zeros(maxlength, dtype=N.float64)
            num = N.zeros(maxlength, dtype=int)
for d in inds:
f = N.isfinite(d.data)
tot[f] += d.data[f]
num[f] += 1
mean = tot / num
self.dserror.update(data=mean, nerr=minvals-mean, perr=maxvals-mean)
class CumulativePlugin(_OneOutputDatasetPlugin):
"""Compute cumulative values."""
menu = (_('Compute'), _('Cumulative value'),)
name = 'Cumulative'
description_short = _('Compute the cumulative value of a dataset')
description_full = _('Compute the cumulative value of a dataset. '
' Error bars are combined.\n'
'Default behaviour is to accumulate from start.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldBool('fromend', _('Compute cumulative value from end')),
field.FieldDataset('ds_out', _('Output dataset')),
]
def updateDatasets(self, fields, helper):
"""Do accumulation."""
ds_in = helper.getDataset(fields['ds_in'])
fromend = fields['fromend']
def cumsum(v):
"""Compute cumulative, handing nans and reverse."""
v = N.array(v)
if fromend: v = v[::-1]
v[ N.logical_not(N.isfinite(v)) ] = 0.
c = N.cumsum(v)
if fromend: c = c[::-1]
return c
# compute cumulative values
data, serr, perr, nerr = ds_in.data, ds_in.serr, ds_in.perr, ds_in.nerr
data = cumsum(data)
if serr is not None: serr = N.sqrt( cumsum(serr**2) )
if perr is not None: perr = N.sqrt( cumsum(perr**2) )
if nerr is not None: nerr = -N.sqrt( cumsum(nerr**2) )
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
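# Illustrative sketch (not part of the original plugin set): the cumsum helper
# above zeros non-finite entries so NaNs do not poison the running total, and
# reverses twice to accumulate from the end of the dataset.
def _demo_cumsum_with_nans():
    v = N.array([1., N.nan, 2.])
    v[N.logical_not(N.isfinite(v))] = 0.
    assert list(N.cumsum(v)) == [1., 1., 3.]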
class DemultiplexPlugin(DatasetPlugin):
"""Dataset plugin to split a dataset into multiple datasets, element-by-element."""
menu = (_('Split'), _('Element by element'),)
name = 'Demultiplex'
description_short = _('Split dataset into multiple datasets element-by-element')
description_full = _('Split dataset into multiple datasets on an '
'element-by-element basis.\n'
'e.g. 1, 2, 3, 4, 5, 6 could be converted to '
'1, 3, 5 and 2, 4, 6.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldDatasetMulti('ds_out', _('Output datasets')),
]
def getDatasets(self, fields):
"""Returns demuxed output datasets."""
names = [n.strip() for n in fields['ds_out'] if n.strip() != '']
if len(names) == 0:
raise DatasetPluginException(_('Requires at least one output dataset'))
self.ds_out = [ Dataset1D(n) for n in names ]
return self.ds_out
def updateDatasets(self, fields, helper):
"""Compute means of dataset."""
ds_in = helper.getDataset( fields['ds_in'] )
num = len(self.ds_out)
for i, ds in enumerate(self.ds_out):
data = ds_in.data[i::num]
serr = nerr = perr = None
if ds_in.serr is not None:
serr = ds_in.serr[i::num]
if ds_in.perr is not None:
perr = ds_in.perr[i::num]
if ds_in.nerr is not None:
nerr = ds_in.nerr[i::num]
ds.update(data=data, serr=serr, perr=perr, nerr=nerr)
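# Illustrative sketch (not part of the original plugin set): demultiplexing is
# plain strided slicing, data[i::num] for the i-th of num output datasets.
def _demo_demultiplex_slicing():
    data = N.array([1, 2, 3, 4, 5, 6])
    assert list(data[0::2]) == [1, 3, 5]
    assert list(data[1::2]) == [2, 4, 6]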
class PolarToCartesianPlugin(DatasetPlugin):
"""Convert from r,theta to x,y coordinates."""
menu = (_('Convert'), _('Polar to Cartesian'),)
name = 'PolarToCartesian'
description_short = _('Convert r,theta coordinates to x,y coordinates')
description_full = _('Convert r,theta coordinates to x,y coordinates.\n'
'Error bars are ignored.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('r_in', _('Input dataset (r)')),
field.FieldDataset('theta_in', _('Input dataset (theta)')),
field.FieldCombo('units', _('Angular units'),
items=('radians', 'degrees'),
editable=False),
field.FieldDataset('x_out', _('Output dataset (x)')),
field.FieldDataset('y_out', _('Output dataset (y)')),
]
def getDatasets(self, fields):
"""Returns x and y output datasets."""
if fields['x_out'] == '':
raise DatasetPluginException(_('Invalid output x dataset name'))
if fields['y_out'] == '':
raise DatasetPluginException(_('Invalid output y dataset name'))
self.x_out = Dataset1D(fields['x_out'])
self.y_out = Dataset1D(fields['y_out'])
return [self.x_out, self.y_out]
def updateDatasets(self, fields, helper):
"""Compute means of dataset."""
ds_r = helper.getDataset( fields['r_in'] ).data
ds_theta = helper.getDataset( fields['theta_in'] ).data
if fields['units'] == 'degrees':
# convert to radians
ds_theta = ds_theta * (N.pi / 180.)
x = ds_r * N.cos(ds_theta)
y = ds_r * N.sin(ds_theta)
self.x_out.update(data=x)
self.y_out.update(data=y)
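# Illustrative sketch (not part of the original plugin set): the conversion is
# x = r*cos(theta), y = r*sin(theta), with degrees first scaled by pi/180.
def _demo_polar_to_cartesian():
    r, theta = N.array([2.]), N.array([90.]) * (N.pi / 180.)
    x, y = r * N.cos(theta), r * N.sin(theta)
    assert N.isclose(x[0], 0.) and N.isclose(y[0], 2.)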
class FilterDatasetPlugin(_OneOutputDatasetPlugin):
"""Dataset plugin to filter a dataset using an expression."""
menu = (_('Filter'), _('Expression'),)
name = 'FilterExpression'
description_short = _('Filter a dataset using an expression')
description_full = _('Filter a dataset using an expression, '
'e.g. "x>10" or "(x>1) & (y<2)"')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldText('filter', _('Filter expression')),
field.FieldBool('replacenan', _('Replace excluded points by NaN\n'
'(indicate missing points)'),
default=False),
field.FieldDataset('ds_out', _('Output dataset')),
]
def updateDatasets(self, fields, helper):
"""Do filtering of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
filt = helper.evaluateExpression(fields['filter'])
data = ds_in.data
serr = getattr(ds_in, 'serr', None)
perr = getattr(ds_in, 'perr', None)
nerr = getattr(ds_in, 'nerr', None)
if filt is None:
# select nothing
            filt = N.zeros(data.shape, dtype=bool)
        else:
            # filter must have int/bool type
            filt = N.array(filt, dtype=bool)
try:
if fields['replacenan']:
# replace bad points with nan
data = data.copy()
data[N.logical_not(filt)] = N.nan
else:
# just select good points
data = data[filt]
if serr is not None: serr = serr[filt]
if perr is not None: perr = perr[filt]
if nerr is not None: nerr = nerr[filt]
except (ValueError, IndexError) as e:
            raise DatasetPluginException(_("Error filtering dataset: '%s'") %
                                         cstr(e))
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
class MovingAveragePlugin(_OneOutputDatasetPlugin):
"""Compute moving average for dataset."""
menu = (_('Filtering'), _('Moving Average'),)
name = 'MovingAverage'
description_short = _('Compute moving average for regularly spaced data')
    description_full = _('Compute moving average for regularly spaced data. '
                         'Average is computed either\nside of each data point '
                         'by the number of points given.')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldInt('width', _('Points either side of point to average'),
default=1, minval=0),
field.FieldBool('weighterrors', _('Weight by error bars'),
default=True),
field.FieldDataset('ds_out', _('Output dataset')),
]
def updateDatasets(self, fields, helper):
"""Compute moving average of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
weights = None
if fields['weighterrors']:
if ds_in.serr is not None:
weights = 1. / ds_in.serr**2
elif ds_in.perr is not None and ds_in.nerr is not None:
weights = 1. / ( (ds_in.perr**2+ds_in.nerr**2)/2. )
width = fields['width']
data = qtloops.rollingAverage(ds_in.data, weights, width)
self.dsout.update(data=data)
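# Illustrative sketch (not part of the original plugin set): the plugin above
# delegates to the compiled helper qtloops.rollingAverage; an unweighted window
# of half-width `width` can be approximated in pure numpy with a convolution.
def _demo_rolling_average_numpy(data, width):
    """Rough pure-numpy stand-in for an unweighted rolling average."""
    kernel = N.ones(2*width + 1)
    return (N.convolve(data, kernel, mode='same') /
            N.convolve(N.ones(len(data)), kernel, mode='same'))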
class LinearInterpolatePlugin(_OneOutputDatasetPlugin):
"""Do linear interpolation of data."""
menu = (_('Filtering'), _('Linear interpolation'),)
name = 'LinearInterpolation'
description_short = _('Linear interpolation of x,y data')
description_full = _("Compute linear interpolation of x,y data.\n"
"Given datasets for y = f(x), compute y' = f(x'), "
"using linear interpolation.\n"
"Assumes x dataset increases in value.")
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_x', _('Input dataset x')),
field.FieldDataset('ds_y', _('Input dataset y')),
field.FieldDataset('ds_xprime', _("Input dataset x'")),
field.FieldBool('edgenan', _('Use nan for values outside x range')),
field.FieldDataset('ds_out', _("Output dataset y'")),
]
def updateDatasets(self, fields, helper):
"""Compute linear interpolation of dataset."""
ds_x = helper.getDataset(fields['ds_x']).data
ds_y = helper.getDataset(fields['ds_y']).data
ds_xprime = helper.getDataset(fields['ds_xprime']).data
minlenin = min( len(ds_x), len(ds_y) )
pad = None
if fields['edgenan']:
pad = N.nan
interpol = N.interp(ds_xprime,
ds_x[:minlenin], ds_y[:minlenin],
left=pad, right=pad)
self.dsout.update(data=interpol)
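# Illustrative sketch (not part of the original plugin set): N.interp pads with
# the edge y values by default; passing left=right=NaN instead marks points
# outside the x range as missing, which is what the edgenan field enables.
def _demo_interp_edge_nan():
    y = N.interp([0.5, 2.5], [1., 2.], [10., 20.], left=N.nan, right=N.nan)
    assert N.isnan(y[0]) and N.isnan(y[1])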
class ReBinXYPlugin(DatasetPlugin):
"""Bin-up data by factor given."""
menu = (_('Filtering'), _('Bin X,Y'))
name = 'RebinXY'
description_short = 'Bin every N datapoints'
description_full = ('Given dataset Y (and optionally X), for every N '
'datapoints calculate the binned value. For '
'dataset Y this is the sum or mean of every N '
'datapoints. For X this is the midpoint of the '
                        'datapoints (using error bars to give the range).')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_y', _('Input dataset Y')),
field.FieldDataset('ds_x', _('Input dataset X (optional)')),
field.FieldInt('binsize', _('Bin size (N)'),
minval=1, default=2),
field.FieldCombo('mode', _('Mode of binning'),
items=('sum', 'average'),
editable=False),
field.FieldDataset('ds_yout', _("Output Y'")),
field.FieldDataset('ds_xout', _("Output X' (optional)")),
]
def getDatasets(self, fields):
"""Return output datasets"""
if fields['ds_yout'] == '':
raise DatasetPluginException(_('Invalid output Y dataset name'))
if fields['ds_x'] != '' and fields['ds_xout'] == '':
raise DatasetPluginException(_('Invalid output X dataset name'))
self.dssout = out = [ Dataset1D(fields['ds_yout']) ]
if fields['ds_xout'] != '':
out.append(Dataset1D(fields['ds_xout']))
return out
def updateDatasets(self, fields, helper):
"""Do binning."""
binsize = fields['binsize']
average = fields['mode'] == 'average'
def binerr(err):
"""Compute binned error."""
if err is None:
return None
err2 = qtloops.binData(err**2, binsize, False)
cts = qtloops.binData(N.ones(err.shape), binsize, False)
return N.sqrt(err2) / cts
# bin up data and calculate errors (if any)
dsy = helper.getDataset(fields['ds_y'])
binydata = qtloops.binData(dsy.data, binsize, average)
binyserr = binerr(dsy.serr)
binyperr = binerr(dsy.perr)
binynerr = binerr(dsy.nerr)
self.dssout[0].update(data=binydata, serr=binyserr, perr=binyperr, nerr=binynerr)
if len(self.dssout) == 2:
# x datasets
dsx = helper.getDataset(fields['ds_x'])
# Calculate ranges between adjacent binned points. This
# is horribly messy - we have to account for the fact
# there might not be error bars and calculate the midpoint
# to the previous/next point.
minvals = N.array(dsx.data)
if dsx.serr is not None:
minvals -= dsx.serr
elif dsx.nerr is not None:
minvals += dsx.nerr
else:
minvals = 0.5*(dsx.data[1:] + dsx.data[:-1])
if len(dsx.data) > 2:
                    # estimate the first point's lower edge from its gap to the next point
minvals = N.insert(minvals, 0, dsx.data[0] - 0.5*(
dsx.data[1] - dsx.data[0]))
elif len(dsx.data) != 0:
# no previous point so we assume 0 error
minvals = N.insert(minvals, 0, dsx.data[0])
maxvals = N.array(dsx.data)
if dsx.serr is not None:
maxvals += dsx.serr
elif dsx.perr is not None:
maxvals += dsx.perr
else:
maxvals = 0.5*(dsx.data[1:] + dsx.data[:-1])
if len(dsx.data) > 2:
maxvals = N.append(maxvals, dsx.data[-1] + 0.5*(
dsx.data[-1] - dsx.data[-2]))
elif len(dsx.data) != 0:
maxvals = N.append(maxvals, dsx.data[-1])
minbin = minvals[::binsize]
maxbin = maxvals[binsize-1::binsize]
if len(minbin) > len(maxbin):
                # data length is not an even multiple of the bin size
maxbin = N.append(maxbin, maxvals[-1])
self.dssout[1].update(data=0.5*(minbin+maxbin),
serr=0.5*(maxbin-minbin))
class SortPlugin(_OneOutputDatasetPlugin):
"""Sort a dataset."""
menu = (_('Compute'), _('Sorted'),)
name = 'Sort'
description_short = description_full = _('Sort a dataset')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldDataset('ds_sort', _('Sort by (optional)')),
field.FieldBool('reverse', _('Reverse')),
field.FieldDataset('ds_out', _('Output dataset')),
]
def updateDatasets(self, fields, helper):
"""Do sorting of dataset."""
ds_sort = ds = helper.getDataset(fields['ds_in'])
if fields['ds_sort'].strip():
ds_sort = helper.getDataset(fields['ds_sort'])
minlen = min(len(ds_sort.data), len(ds.data))
idxs = N.argsort(ds_sort.data[:minlen])
if fields['reverse']:
idxs = idxs[::-1]
out = { 'data': ds.data[:minlen][idxs] }
if ds.serr is not None: out['serr'] = ds.serr[:minlen][idxs]
if ds.perr is not None: out['perr'] = ds.perr[:minlen][idxs]
if ds.nerr is not None: out['nerr'] = ds.nerr[:minlen][idxs]
self.dsout.update(**out)
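# Illustrative sketch (not part of the original plugin set): sorting by another
# dataset uses N.argsort on the key and applies the index array to each column.
def _demo_sort_by_key():
    key, data = N.array([3., 1., 2.]), N.array([30., 10., 20.])
    idxs = N.argsort(key)
    assert list(data[idxs]) == [10., 20., 30.]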
class SortTextPlugin(_OneOutputDatasetPlugin):
"""Sort a text dataset."""
menu = (_('Compute'), _('Sorted Text'),)
name = 'Sort Text'
description_short = description_full = _('Sort a text dataset')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset'), datatype='text'),
field.FieldDataset('ds_sort', _('Sort by (optional)')),
field.FieldBool('reverse', _('Reverse')),
field.FieldDataset('ds_out', _('Output dataset'), datatype='text'),
]
def getDatasets(self, fields):
"""Returns single output dataset (self.dsout)."""
if fields['ds_out'] == '':
raise DatasetPluginException(_('Invalid output dataset name'))
self.dsout = DatasetText(fields['ds_out'])
return [self.dsout]
def updateDatasets(self, fields, helper):
"""Do sorting of dataset."""
ds = helper.getTextDataset(fields['ds_in']).data
if fields['ds_sort'].strip():
ds_sort = helper.getDataset(fields['ds_sort'])
length = min(len(ds), len(ds_sort.data))
ds = ds[:length]
idxs = N.argsort(ds_sort.data[:length])
dout = []
for i in idxs:
dout.append(ds[i])
else:
dout = list(ds)
dout.sort()
if fields['reverse']:
            dout = dout[::-1]
self.dsout.update(dout)
class Histogram2D(DatasetPlugin):
"""Compute 2D histogram for two 1D dataset.
Algorithm: Count up values in boxes. Sort. Compute probability
working downwards.
"""
menu = (_('Compute'), _('2D histogram'),)
name = 'Histogram 2D'
description_short = _('Compute 2D histogram.')
description_full = _('Given two 1D datasets, compute a 2D histogram. '
'Can optionally compute a probability distribution.')
def __init__(self):
"""Input fields."""
self.fields = [
field.FieldDataset('ds_inx', _('Input dataset x')),
field.FieldFloatOrAuto('minx', _('Minimum value for dataset x')),
field.FieldFloatOrAuto('maxx', _('Maximum value for dataset x')),
field.FieldInt('binsx', _('Number of bins for dataset x'),
default=10, minval=2),
field.FieldDataset('ds_iny', _('Input dataset y')),
field.FieldFloatOrAuto('miny', _('Minimum value for dataset y')),
field.FieldFloatOrAuto('maxy', _('Maximum value for dataset y')),
field.FieldInt('binsy', _('Number of bins for dataset y'),
default=10, minval=2),
field.FieldCombo('mode', _('Mode'),
items=('Count',
'Fraction',
'CumulativeProbability',
'CumulativeProbabilityInverse'),
default='Count', editable=False),
field.FieldDataset('ds_out', _('Output 2D dataset'), dims=2),
]
def probabilityCalculator(self, histo):
"""Convert an image of counts to a cumulative probability
distribution.
"""
# get sorted pixel values
pixvals = N.ravel(histo)
sortpix = N.sort(pixvals)
# cumulative sum of values
probs = N.cumsum(sortpix)
probs = probs * (1./probs[-1])
# values in pixvals which are unique
unique = N.concatenate( (sortpix[:-1] != sortpix[1:], [True]) )
# now we have the pixel values and probabilities
pixaxis = sortpix[unique]
probaxis = probs[unique]
# use linear interpolation to map the pixvals -> cumulative probability
probvals = N.interp(pixvals, pixaxis, probaxis)
probvals = probvals.reshape(histo.shape)
return probvals
def updateDatasets(self, fields, helper):
"""Calculate values of output dataset."""
dsy = helper.getDataset(fields['ds_iny']).data
dsx = helper.getDataset(fields['ds_inx']).data
# use range of data or specified parameters
miny = fields['miny']
if miny == 'Auto': miny = N.nanmin(dsy)
maxy = fields['maxy']
if maxy == 'Auto': maxy = N.nanmax(dsy)
minx = fields['minx']
if minx == 'Auto': minx = N.nanmin(dsx)
maxx = fields['maxx']
if maxx == 'Auto': maxx = N.nanmax(dsx)
# compute counts in each bin
histo, xedge, yedge = N.histogram2d(
dsy, dsx, bins=[fields['binsy'], fields['binsx']],
            range=[[miny,maxy], [minx,maxx]])
m = fields['mode']
if m == 'Count':
out = histo
elif m == 'Fraction':
out = histo * (1./N.sum(histo))
elif m == 'CumulativeProbability':
out = self.probabilityCalculator(histo)
elif m == 'CumulativeProbabilityInverse':
out = 1. - self.probabilityCalculator(histo)
# update output dataset
self.dsout.update(out, rangey=(miny, maxy), rangex=(minx, maxx))
def getDatasets(self, fields):
"""Returns single output dataset (self.dsout)."""
if fields['ds_out'] == '':
raise DatasetPluginException(_('Invalid output dataset name'))
self.dsout = Dataset2D(fields['ds_out'])
return [self.dsout]
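# Illustrative sketch (not part of the original plugin set): the cumulative
# probability map above sorts the pixel counts, forms a normalised cumulative
# sum and interpolates each pixel's count back onto that curve.
def _demo_cumulative_probability():
    histo = N.array([[1., 3.], [0., 6.]])
    sortpix = N.sort(N.ravel(histo))
    probs = N.cumsum(sortpix) / N.sum(sortpix)
    # the largest count maps to cumulative probability 1
    assert N.isclose(N.interp(6., sortpix, probs), 1.)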
class ConvertNumbersToText(DatasetPlugin):
"""Convert a set of numbers to text."""
menu = (_('Convert'), _('Numbers to Text'),)
name = 'NumbersToText'
description_short = _('Convert numeric dataset to text')
description_full = _('Given a 1D numeric dataset, create a text dataset '
'by applying formatting. Format string is in standard '
'Veusz-extended C formatting, e.g.\n'
' "%Vg" - general,'
' "%Ve" - scientific,'
' "%VE" - engineering suffix,'
' "%.2f" - two decimal places and'
' "%e" - C-style scientific')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldText('format', _('Format'), default='%Vg'),
field.FieldDataset('ds_out', _('Output dataset name')),
]
def getDatasets(self, fields):
if fields['ds_out'] == '':
raise DatasetPluginException(_('Invalid output dataset name'))
self.dsout = DatasetText(fields['ds_out'])
return [self.dsout]
def updateDatasets(self, fields, helper):
"""Convert dataset."""
ds_in = helper.getDataset(fields['ds_in'])
f = fields['format']
data = [ utils.formatNumber(n, f, locale=helper.locale)
for n in ds_in.data ]
self.dsout.update(data=data)
class ClipPlugin(_OneOutputDatasetPlugin):
"""Compute moving average for dataset."""
menu = (_('Compute'), _('Clipped dataset'),)
name = 'Clip'
description_short = _('Clip data between a minimum and maximum')
description_full = _('Clip data points to minimum and/or maximum values')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldFloat('minimum', _('Minimum'), default=0.),
field.FieldBool('disablemin', _('Disable minimum')),
field.FieldFloat('maximum', _('Maximum'), default=1.),
field.FieldBool('disablemax', _('Disable maximum')),
field.FieldBool('cliperrs', _('Clip error bars'), default=True),
field.FieldDataset('ds_out', _('Output dataset')),
]
def updateDatasets(self, fields, helper):
"""Do clipping of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
data = N.array(ds_in.data)
        perr = ds_in.perr
        nerr = ds_in.nerr
        serr = ds_in.serr
cliperrs = fields['cliperrs']
# force asymmetric errors if clipping error bars
if cliperrs and serr is not None and (nerr is None or perr is None):
perr = serr
nerr = -serr
serr = None
# we have to clip the ranges, so calculate these first
upper = (data+perr) if (cliperrs and perr is not None) else None
lower = (data+nerr) if (cliperrs and nerr is not None) else None
# note: this preserves nan values
if not fields['disablemin']:
minv = fields['minimum']
data[data<minv] = minv
if upper is not None:
upper[upper<minv] = minv
if lower is not None:
lower[lower<minv] = minv
if not fields['disablemax']:
maxv = fields['maximum']
data[data>maxv] = maxv
if upper is not None:
upper[upper>maxv] = maxv
if lower is not None:
lower[lower>maxv] = maxv
if upper is not None:
perr = upper-data
if lower is not None:
nerr = lower-data
self.dsout.update(data=data, serr=serr, perr=perr, nerr=nerr)
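# Illustrative sketch (not part of the original plugin set): error bars are
# clipped by clipping the absolute upper/lower ranges, then re-deriving the
# asymmetric errors from the clipped central values, as ClipPlugin does above.
def _demo_clip_error_bars():
    data, perr = N.array([0.9]), N.array([0.3])
    upper = N.minimum(data + perr, 1.0)   # clip the upper range at the maximum
    assert N.isclose((upper - data)[0], 0.1)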
class LogPlugin(_OneOutputDatasetPlugin):
"""Compute logarithm of data."""
menu = (_('Compute'), _('Log'),)
name = 'Logarithm'
description_short = _('Compute log of data')
description_full = _('Compute logarithm of data with arbitrary base')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldFloat('base', _('Base'), default=10., minval=1e-10),
field.FieldDataset('ds_out', _('Output dataset')),
]
def updateDatasets(self, fields, helper):
"""Compute log of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
data = N.array(ds_in.data)
        perr = ds_in.perr
        nerr = ds_in.nerr
        serr = ds_in.serr
uppers = lowers = None
if perr is not None:
uppers = data + perr
elif serr is not None:
uppers = data + serr
if nerr is not None:
lowers = data + nerr
elif serr is not None:
lowers = data - serr
# convert base e to base given
invlogbase = 1. / N.log(fields['base'])
logdata = N.log(data) * invlogbase
logperr = None if uppers is None else N.log(uppers)*invlogbase - logdata
lognerr = None if lowers is None else N.log(lowers)*invlogbase - logdata
self.dsout.update(data=logdata, perr=logperr, nerr=lognerr)
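# Illustrative sketch (not part of the original plugin set): an arbitrary base
# is handled via the change-of-base identity log_b(x) = ln(x) / ln(b).
def _demo_log_change_of_base():
    base, x = 10., 100.
    assert N.isclose(N.log(x) / N.log(base), 2.)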
class ExpPlugin(_OneOutputDatasetPlugin):
"""Compute exponential of data."""
menu = (_('Compute'), _('Exponential'),)
name = 'Exponential'
description_short = _('Compute exponential of data')
description_full = _('Compute exponential of data')
def __init__(self):
"""Define fields."""
self.fields = [
field.FieldDataset('ds_in', _('Input dataset')),
field.FieldFloat('base', _('Base'), default=10., minval=1e-10),
field.FieldDataset('ds_out', _('Output dataset')),
]
def updateDatasets(self, fields, helper):
"""Compute exponential of dataset."""
ds_in = helper.getDataset(fields['ds_in'])
data = N.array(ds_in.data)
        perr = ds_in.perr
        nerr = ds_in.nerr
        serr = ds_in.serr
uppers = lowers = None
if perr is not None:
uppers = data + perr
elif serr is not None:
uppers = data + serr
if nerr is not None:
lowers = data + nerr
elif serr is not None:
lowers = data - serr
base = fields['base']
expdata = base**data
expperr = None if uppers is None else base**uppers - expdata
expnerr = None if lowers is None else base**lowers - expdata
self.dsout.update(data=expdata, perr=expperr, nerr=expnerr)
datasetpluginregistry += [
AddDatasetPlugin,
AddDatasetsPlugin,
SubtractDatasetPlugin,
SubtractMeanDatasetPlugin,
SubtractMinimumDatasetPlugin,
MultiplyDatasetPlugin,
MultiplyDatasetsPlugin,
DivideDatasetsPlugin,
DivideMaxPlugin,
DivideNormalizePlugin,
MeanDatasetPlugin,
ExtremesDatasetPlugin,
CumulativePlugin,
ClipPlugin,
LogPlugin,
ExpPlugin,
ConcatenateDatasetPlugin,
InterleaveDatasetPlugin,
ChopDatasetPlugin,
PartsDatasetPlugin,
DemultiplexPlugin,
ThinDatasetPlugin,
PolarToCartesianPlugin,
ConvertNumbersToText,
FilterDatasetPlugin,
MovingAveragePlugin,
LinearInterpolatePlugin,
ReBinXYPlugin,
SortPlugin,
SortTextPlugin,
Histogram2D,
]
| gpl-2.0 |
ovnicraft/openerp-restaurant | auth_openid/controllers/main.py | 382 | 10399 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import os
import tempfile
import getpass
import werkzeug.urls
import werkzeug.exceptions
from openid import oidutil
from openid.store import filestore
from openid.consumer import consumer
from openid.cryptutil import randomString
from openid.extensions import ax, sreg
import openerp
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager
from openerp.addons.web.controllers.main import login_and_redirect, set_cookie_and_redirect
import openerp.http as http
from openerp.http import request
from .. import utils
_logger = logging.getLogger(__name__)
oidutil.log = _logger.debug
def get_system_user():
"""Return system user info string, such as USERNAME-EUID"""
try:
info = getpass.getuser()
except ImportError:
if os.name == 'nt':
# when there is no 'USERNAME' in environment, getpass.getuser()
# fail when trying to import 'pwd' module - which is unix only.
# In that case we have to fallback to real win32 API.
import win32api
info = win32api.GetUserName()
else:
raise
euid = getattr(os, 'geteuid', None) # Non available on some platforms
if euid is not None:
info = '%s-%d' % (info, euid())
return info
_storedir = os.path.join(tempfile.gettempdir(),
'openerp-auth_openid-%s-store' % get_system_user())
class GoogleAppsAwareConsumer(consumer.GenericConsumer):
def complete(self, message, endpoint, return_to):
if message.getOpenIDNamespace() == consumer.OPENID2_NS:
server_url = message.getArg(consumer.OPENID2_NS, 'op_endpoint', '')
if server_url.startswith('https://www.google.com/a/'):
assoc_handle = message.getArg(consumer.OPENID_NS, 'assoc_handle')
assoc = self.store.getAssociation(server_url, assoc_handle)
if assoc:
# update fields
for attr in ['claimed_id', 'identity']:
value = message.getArg(consumer.OPENID2_NS, attr, '')
value = 'https://www.google.com/accounts/o8/user-xrds?uri=%s' % werkzeug.url_quote_plus(value)
message.setArg(consumer.OPENID2_NS, attr, value)
# now, resign the message
message.delArg(consumer.OPENID2_NS, 'sig')
message.delArg(consumer.OPENID2_NS, 'signed')
message = assoc.signMessage(message)
return super(GoogleAppsAwareConsumer, self).complete(message, endpoint, return_to)
class OpenIDController(http.Controller):
_store = filestore.FileOpenIDStore(_storedir)
_REQUIRED_ATTRIBUTES = ['email']
_OPTIONAL_ATTRIBUTES = 'nickname fullname postcode country language timezone'.split()
def _add_extensions(self, oidrequest):
"""Add extensions to the oidrequest"""
sreg_request = sreg.SRegRequest(required=self._REQUIRED_ATTRIBUTES,
optional=self._OPTIONAL_ATTRIBUTES)
oidrequest.addExtension(sreg_request)
ax_request = ax.FetchRequest()
for alias in self._REQUIRED_ATTRIBUTES:
uri = utils.SREG2AX[alias]
ax_request.add(ax.AttrInfo(uri, required=True, alias=alias))
for alias in self._OPTIONAL_ATTRIBUTES:
uri = utils.SREG2AX[alias]
ax_request.add(ax.AttrInfo(uri, required=False, alias=alias))
oidrequest.addExtension(ax_request)
def _get_attributes_from_success_response(self, success_response):
attrs = {}
all_attrs = self._REQUIRED_ATTRIBUTES + self._OPTIONAL_ATTRIBUTES
sreg_resp = sreg.SRegResponse.fromSuccessResponse(success_response)
if sreg_resp:
for attr in all_attrs:
value = sreg_resp.get(attr)
if value is not None:
attrs[attr] = value
ax_resp = ax.FetchResponse.fromSuccessResponse(success_response)
if ax_resp:
for attr in all_attrs:
value = ax_resp.getSingle(utils.SREG2AX[attr])
if value is not None:
attrs[attr] = value
return attrs
def _get_realm(self):
return request.httprequest.host_url
@http.route('/auth_openid/login/verify_direct', type='http', auth='none')
def verify_direct(self, db, url):
result = self._verify(db, url)
if 'error' in result:
return werkzeug.exceptions.BadRequest(result['error'])
if result['action'] == 'redirect':
return werkzeug.utils.redirect(result['value'])
return result['value']
@http.route('/auth_openid/login/verify', type='json', auth='none')
def verify(self, db, url):
return self._verify(db, url)
def _verify(self, db, url):
redirect_to = werkzeug.urls.Href(request.httprequest.host_url + 'auth_openid/login/process')(session_id=request.session_id)
realm = self._get_realm()
session = dict(dbname=db, openid_url=url) # TODO add origin page ?
oidconsumer = consumer.Consumer(session, self._store)
try:
oidrequest = oidconsumer.begin(url)
except consumer.DiscoveryFailure, exc:
fetch_error_string = 'Error in discovery: %s' % (str(exc[0]),)
return {'error': fetch_error_string, 'title': 'OpenID Error'}
if oidrequest is None:
return {'error': 'No OpenID services found', 'title': 'OpenID Error'}
request.session.openid_session = session
self._add_extensions(oidrequest)
if oidrequest.shouldSendRedirect():
redirect_url = oidrequest.redirectURL(realm, redirect_to)
return {'action': 'redirect', 'value': redirect_url, 'session_id': request.session_id}
else:
form_html = oidrequest.htmlMarkup(realm, redirect_to)
return {'action': 'post', 'value': form_html, 'session_id': request.session_id}
@http.route('/auth_openid/login/process', type='http', auth='none')
def process(self, **kw):
session = getattr(request.session, 'openid_session', None)
if not session:
return set_cookie_and_redirect('/')
oidconsumer = consumer.Consumer(session, self._store, consumer_class=GoogleAppsAwareConsumer)
query = request.httprequest.args
info = oidconsumer.complete(query, request.httprequest.base_url)
display_identifier = info.getDisplayIdentifier()
session['status'] = info.status
if info.status == consumer.SUCCESS:
dbname = session['dbname']
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
Modules = registry.get('ir.module.module')
installed = Modules.search_count(cr, SUPERUSER_ID, ['&', ('name', '=', 'auth_openid'), ('state', '=', 'installed')]) == 1
if installed:
Users = registry.get('res.users')
#openid_url = info.endpoint.canonicalID or display_identifier
openid_url = session['openid_url']
attrs = self._get_attributes_from_success_response(info)
attrs['openid_url'] = openid_url
session['attributes'] = attrs
openid_email = attrs.get('email', False)
domain = []
if openid_email:
domain += ['|', ('openid_email', '=', False)]
domain += [('openid_email', '=', openid_email)]
domain += [('openid_url', '=', openid_url), ('active', '=', True)]
ids = Users.search(cr, SUPERUSER_ID, domain)
assert len(ids) < 2
if ids:
user_id = ids[0]
login = Users.browse(cr, SUPERUSER_ID, user_id).login
key = randomString(utils.KEY_LENGTH, '0123456789abcdef')
Users.write(cr, SUPERUSER_ID, [user_id], {'openid_key': key})
# TODO fill empty fields with the ones from sreg/ax
cr.commit()
return login_and_redirect(dbname, login, key)
            session['message'] = 'This OpenID identifier is not associated with any active user'
elif info.status == consumer.SETUP_NEEDED:
session['message'] = info.setup_url
elif info.status == consumer.FAILURE and display_identifier:
fmt = "Verification of %s failed: %s"
session['message'] = fmt % (display_identifier, info.message)
else: # FAILURE
# Either we don't understand the code or there is no
# openid_url included with the error. Give a generic
# failure message. The library should supply debug
# information in a log.
session['message'] = 'Verification failed.'
return set_cookie_and_redirect('/#action=login&loginerror=1')
@http.route('/auth_openid/login/status', type='json', auth='none')
def status(self):
session = getattr(request.session, 'openid_session', {})
return {'status': session.get('status'), 'message': session.get('message')}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
GarySparrow/mFlaskWeb | venv/Lib/site-packages/sqlalchemy/orm/evaluator.py | 21 | 5032 | # orm/evaluator.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import operator
from ..sql import operators
from .. import util
class UnevaluatableError(Exception):
pass
_straight_ops = set(getattr(operators, op)
for op in ('add', 'mul', 'sub',
'div',
'mod', 'truediv',
'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
_notimplemented_ops = set(getattr(operators, op)
for op in ('like_op', 'notlike_op', 'ilike_op',
'notilike_op', 'between_op', 'in_op',
'notin_op', 'endswith_op', 'concat_op'))
class EvaluatorCompiler(object):
def __init__(self, target_cls=None):
self.target_cls = target_cls
def process(self, clause):
meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
if not meth:
raise UnevaluatableError(
"Cannot evaluate %s" % type(clause).__name__)
return meth(clause)
def visit_grouping(self, clause):
return self.process(clause.element)
def visit_null(self, clause):
return lambda obj: None
def visit_false(self, clause):
return lambda obj: False
def visit_true(self, clause):
return lambda obj: True
def visit_column(self, clause):
if 'parentmapper' in clause._annotations:
parentmapper = clause._annotations['parentmapper']
if self.target_cls and not issubclass(
self.target_cls, parentmapper.class_):
util.warn(
"Can't do in-Python evaluation of criteria against "
"alternate class %s; "
"expiration of objects will not be accurate "
"and/or may fail. synchronize_session should be set to "
"False or 'fetch'. "
"This warning will be an exception "
"in 1.0." % parentmapper.class_
)
key = parentmapper._columntoproperty[clause].key
else:
key = clause.key
get_corresponding_attr = operator.attrgetter(key)
return lambda obj: get_corresponding_attr(obj)
def visit_clauselist(self, clause):
evaluators = list(map(self.process, clause.clauses))
if clause.operator is operators.or_:
def evaluate(obj):
has_null = False
for sub_evaluate in evaluators:
value = sub_evaluate(obj)
if value:
return True
has_null = has_null or value is None
if has_null:
return None
return False
elif clause.operator is operators.and_:
def evaluate(obj):
for sub_evaluate in evaluators:
value = sub_evaluate(obj)
if not value:
if value is None:
return None
return False
return True
else:
raise UnevaluatableError(
"Cannot evaluate clauselist with operator %s" %
clause.operator)
return evaluate
def visit_binary(self, clause):
eval_left, eval_right = list(map(self.process,
[clause.left, clause.right]))
operator = clause.operator
if operator is operators.is_:
def evaluate(obj):
return eval_left(obj) == eval_right(obj)
elif operator is operators.isnot:
def evaluate(obj):
return eval_left(obj) != eval_right(obj)
elif operator in _straight_ops:
def evaluate(obj):
left_val = eval_left(obj)
right_val = eval_right(obj)
if left_val is None or right_val is None:
return None
                return operator(left_val, right_val)
else:
raise UnevaluatableError(
"Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
return evaluate
def visit_unary(self, clause):
eval_inner = self.process(clause.element)
if clause.operator is operators.inv:
def evaluate(obj):
value = eval_inner(obj)
if value is None:
return None
return not value
return evaluate
raise UnevaluatableError(
"Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
def visit_bindparam(self, clause):
val = clause.value
return lambda obj: val
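# Illustrative sketch (not part of SQLAlchemy): the clauselist evaluators above
# implement SQL-style three-valued logic, with Python None standing in for NULL.
def _demo_three_valued_or(values):
    """OR over values where None means unknown, mirroring visit_clauselist."""
    has_null = False
    for value in values:
        if value:
            return True
        has_null = has_null or value is None
    return None if has_null else False

# e.g. _demo_three_valued_or([False, None]) is None, like SQL's FALSE OR NULL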
| mit |
HydrelioxGitHub/home-assistant | homeassistant/components/homekit_controller/binary_sensor.py | 2 | 1445 | """Support for Homekit motion sensors."""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.components.homekit_controller import (
KNOWN_ACCESSORIES, HomeKitEntity)
DEPENDENCIES = ['homekit_controller']
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Homekit motion sensor support."""
if discovery_info is not None:
accessory = hass.data[KNOWN_ACCESSORIES][discovery_info['serial']]
add_entities([HomeKitMotionSensor(accessory, discovery_info)], True)
class HomeKitMotionSensor(HomeKitEntity, BinarySensorDevice):
"""Representation of a Homekit sensor."""
def __init__(self, *args):
"""Initialise the entity."""
super().__init__(*args)
self._on = False
def get_characteristic_types(self):
"""Define the homekit characteristics the entity is tracking."""
# pylint: disable=import-error
from homekit.model.characteristics import CharacteristicsTypes
return [
CharacteristicsTypes.MOTION_DETECTED,
]
def _update_motion_detected(self, value):
self._on = value
@property
def device_class(self):
"""Define this binary_sensor as a motion sensor."""
return 'motion'
@property
def is_on(self):
"""Has motion been detected."""
return self._on
| apache-2.0 |
jonashaag/django-nonrel-nohistory | django/contrib/gis/db/backends/oracle/models.py | 310 | 2184 | """
The GeometryColumns and SpatialRefSys models for the Oracle spatial
backend.
It should be noted that Oracle Spatial does not have database tables
named according to the OGC standard, so the closest analogs are used.
For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns
model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model.
"""
from django.contrib.gis.db import models
from django.contrib.gis.db.models.fields import GeometryField
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
class GeometryColumns(models.Model):
"Maps to the Oracle USER_SDO_GEOM_METADATA table."
table_name = models.CharField(max_length=32)
column_name = models.CharField(max_length=1024)
srid = models.IntegerField(primary_key=True)
# TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY).
class Meta:
db_table = 'USER_SDO_GEOM_METADATA'
managed = False
@classmethod
def table_name_col(cls):
"""
        Returns the name of the metadata column used to store the
        feature table name.
"""
return 'table_name'
@classmethod
def geom_col_name(cls):
"""
        Returns the name of the metadata column used to store the
        feature geometry column.
"""
return 'column_name'
def __unicode__(self):
return '%s - %s (SRID: %s)' % (self.table_name, self.column_name, self.srid)
class SpatialRefSys(models.Model, SpatialRefSysMixin):
"Maps to the Oracle MDSYS.CS_SRS table."
cs_name = models.CharField(max_length=68)
srid = models.IntegerField(primary_key=True)
auth_srid = models.IntegerField()
auth_name = models.CharField(max_length=256)
wktext = models.CharField(max_length=2046)
# Optional geometry representing the bounds of this coordinate
# system. By default, all are NULL in the table.
cs_bounds = models.PolygonField(null=True)
objects = models.GeoManager()
class Meta:
db_table = 'CS_SRS'
managed = False
@property
def wkt(self):
return self.wktext
@classmethod
def wkt_col(cls):
return 'wktext'
| bsd-3-clause |
Orange-OpenSource/tinypyki | setup.py | 1 | 1758 | # Copyright (C) 2014 Orange
# This software is distributed under the terms and conditions of the 'BSD
# 3-Clause' license which can be found in the 'LICENSE.txt' file in this package
# distribution or at 'http://opensource.org/licenses/BSD-3-Clause'.
#!/usr/bin/env python
"""Setup script for tinypyki."""
from distutils.core import setup
setup(name="tinypyki",
description="A tiny openssl command line wrapper.",
long_description="""Focus on your PKI and what you want of it, not
spending half an hour figuring out the basics
of openssl certificate generation.\n
\n""" + open("README.md", "r").read(),
author="botview",
author_email="[email protected]",
url="https://github.com/Orange-OpenSource",
license="BSD 3-Clause",
version="0.1",
packages=["tinypyki",],
py_modules=["tinypyki"],
classifiers=["License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
"Environment :: Console",
"Development Status :: 4 - Beta",
"Topic :: Security",
"Topic :: Internet",
"Topic :: Education :: Testing",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Education",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Intended Audience :: Telecommunications Industry"]
)
| bsd-3-clause |
chensuchun/fas | plugins/fas-plugin-yubikey/burn-key.py | 11 | 2393 | #!/usr/bin/python
# ykpersonalize -ofixed=ccccccccccci -afcaa0c5bf2e83ec040e4aeb7f8565293 -ouid=1e7f1da7d6d1
from fedora.client import AccountSystem, AuthError
from getpass import getpass, getuser
import subprocess, sys, gettext
from optparse import OptionParser
t = gettext.translation('fas', '/usr/share/locale', fallback = True)
_ = t.gettext
parser = OptionParser(version = "0.1")
parser.add_option('-u', '--username',
dest = 'username',
default = None,
metavar = 'username',
help = _('Fedora Account System username'))
parser.add_option('-U', '--url',
dest = 'url',
default = 'https://admin.fedoraproject.org/accounts/',
metavar = 'url',
                  help = _('FAS URL (Default: https://admin.fedoraproject.org/accounts/)'))
(opts, args) = parser.parse_args()
if not opts.username:
print _('Please provide a username.')
parser.print_help()
sys.exit(0)
if not getuser() == 'root':
print _('''Please run this program as root as it will need to write
directly to the YubiKey over USB''')
sys.exit(5)
print _(
'''
Attention: You are about to reprogram your YubiKey! Please ensure it is
plugged into a USB slot before continuing. The secret key currently on
your YubiKey will be destroyed as part of this operation!
''')
print 'Contacting %s' % opts.url
password = getpass('Password for %s: ' % opts.username)
fas = AccountSystem(username=opts.username, password=password, base_url=opts.url)
try:
new_key = fas.send_request('yubikey/genkey', auth=True)
except AuthError, e:
print e
sys.exit(1)
print
print _('New key generated in FAS, attempting to burn to YubiKey')
print
# keep the parsed key fields separate from the option-parser results above
key_parts = new_key['key'].split()
try:
retcode = subprocess.call(['/usr/bin/ykpersonalize',
                               '-ofixed=%s' % key_parts[0],
                               '-a%s' % key_parts[2],
                               '-ouid=%s' % key_parts[1]])
except KeyboardInterrupt:
print _('''
Burn attempt cancelled by user! Note: Even though the key did not get burned
onto your key, FAS did generate a new one. This just means that if you did
previously burn a different key, it will no longer work.
''')
retcode=1
if retcode:
print "There was an error writing to your yubi key"
else:
print "Success! Your Yubikey ID is %s" % opts[0]
| gpl-2.0 |
40223123/raven | static/Brython3.1.0-20150301-090019/Lib/test/test_re.py | 718 | 56009 | # FIXME: brython: implement test.support
#from test.support import verbose, run_unittest, gc_collect, bigmemtest, _2G, \
# cpython_only
verbose = True
# FIXME: brython: Not used in this module ?
#import io
import re
# FIXME: brython: implement re.Scanner
#from re import Scanner
import sre_constants
import sys
import string
import traceback
# FIXME: brython: implement _weakref
#from weakref import proxy
# Misc tests from Tim Peters' re.doc
# WARNING: Don't change details in these tests if you don't know
# what you're doing. Some of these tests were carefully modeled to
# cover most of the code.
import unittest
class ReTests(unittest.TestCase):
# FIXME: brython: implement test.support
# def test_keep_buffer(self):
# # See bug 14212
# b = bytearray(b'x')
# it = re.finditer(b'a', b)
# with self.assertRaises(BufferError):
# b.extend(b'x'*400)
# list(it)
# del it
# gc_collect()
# b.extend(b'x'*400)
# FIXME: brython: implement _weakref
# def test_weakref(self):
# s = 'QabbbcR'
# x = re.compile('ab+c')
# y = proxy(x)
# self.assertEqual(x.findall('QabbbcR'), y.findall('QabbbcR'))
def test_search_star_plus(self):
self.assertEqual(re.search('x*', 'axx').span(0), (0, 0))
self.assertEqual(re.search('x*', 'axx').span(), (0, 0))
self.assertEqual(re.search('x+', 'axx').span(0), (1, 3))
self.assertEqual(re.search('x+', 'axx').span(), (1, 3))
self.assertEqual(re.search('x', 'aaa'), None)
self.assertEqual(re.match('a*', 'xxx').span(0), (0, 0))
self.assertEqual(re.match('a*', 'xxx').span(), (0, 0))
self.assertEqual(re.match('x*', 'xxxa').span(0), (0, 3))
self.assertEqual(re.match('x*', 'xxxa').span(), (0, 3))
self.assertEqual(re.match('a+', 'xxx'), None)
def bump_num(self, matchobj):
int_value = int(matchobj.group(0))
return str(int_value + 1)
def test_basic_re_sub(self):
self.assertEqual(re.sub("(?i)b+", "x", "bbbb BBBB"), 'x x')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y'),
'9.3 -3 24x100y')
self.assertEqual(re.sub(r'\d+', self.bump_num, '08.2 -2 23x99y', 3),
'9.3 -3 23x99y')
self.assertEqual(re.sub('.', lambda m: r"\n", 'x'), '\\n')
self.assertEqual(re.sub('.', r"\n", 'x'), '\n')
s = r"\1\1"
self.assertEqual(re.sub('(.)', s, 'x'), 'xx')
self.assertEqual(re.sub('(.)', re.escape(s), 'x'), s)
self.assertEqual(re.sub('(.)', lambda m: s, 'x'), s)
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<a>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<a>x)', '\g<a>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<unk>\g<unk>', 'xx'), 'xxxx')
self.assertEqual(re.sub('(?P<unk>x)', '\g<1>\g<1>', 'xx'), 'xxxx')
self.assertEqual(re.sub('a',r'\t\n\v\r\f\a\b\B\Z\a\A\w\W\s\S\d\D','a'),
'\t\n\v\r\f\a\b\\B\\Z\a\\A\\w\\W\\s\\S\\d\\D')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'), '\t\n\v\r\f\a')
self.assertEqual(re.sub('a', '\t\n\v\r\f\a', 'a'),
(chr(9)+chr(10)+chr(11)+chr(13)+chr(12)+chr(7)))
self.assertEqual(re.sub('^\s*', 'X', 'test'), 'Xtest')
def test_bug_449964(self):
# fails for group followed by other escape
self.assertEqual(re.sub(r'(?P<unk>x)', '\g<1>\g<1>\\b', 'xx'),
'xx\bxx\b')
def test_bug_449000(self):
# Test for sub() on escaped characters
self.assertEqual(re.sub(r'\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', r'\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub(r'\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
self.assertEqual(re.sub('\r\n', '\n', 'abc\r\ndef\r\n'),
'abc\ndef\n')
def test_bug_1661(self):
# Verify that flags do not get silently ignored with compiled patterns
pattern = re.compile('.')
self.assertRaises(ValueError, re.match, pattern, 'A', re.I)
self.assertRaises(ValueError, re.search, pattern, 'A', re.I)
self.assertRaises(ValueError, re.findall, pattern, 'A', re.I)
self.assertRaises(ValueError, re.compile, pattern, re.I)
def test_bug_3629(self):
# A regex that triggered a bug in the sre-code validator
re.compile("(?P<quote>)(?(quote))")
def test_sub_template_numeric_escape(self):
# bug 776311 and friends
self.assertEqual(re.sub('x', r'\0', 'x'), '\0')
self.assertEqual(re.sub('x', r'\000', 'x'), '\000')
self.assertEqual(re.sub('x', r'\001', 'x'), '\001')
self.assertEqual(re.sub('x', r'\008', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\009', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\111', 'x'), '\111')
self.assertEqual(re.sub('x', r'\117', 'x'), '\117')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\1111')
self.assertEqual(re.sub('x', r'\1111', 'x'), '\111' + '1')
self.assertEqual(re.sub('x', r'\00', 'x'), '\x00')
self.assertEqual(re.sub('x', r'\07', 'x'), '\x07')
self.assertEqual(re.sub('x', r'\08', 'x'), '\0' + '8')
self.assertEqual(re.sub('x', r'\09', 'x'), '\0' + '9')
self.assertEqual(re.sub('x', r'\0a', 'x'), '\0' + 'a')
self.assertEqual(re.sub('x', r'\400', 'x'), '\0')
self.assertEqual(re.sub('x', r'\777', 'x'), '\377')
self.assertRaises(re.error, re.sub, 'x', r'\1', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\8', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\9', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\11', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\18', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\1a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\90', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\99', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\118', 'x') # r'\11' + '8'
self.assertRaises(re.error, re.sub, 'x', r'\11a', 'x')
self.assertRaises(re.error, re.sub, 'x', r'\181', 'x') # r'\18' + '1'
self.assertRaises(re.error, re.sub, 'x', r'\800', 'x') # r'\80' + '0'
# in python2.3 (etc), these loop endlessly in sre_parser.py
self.assertEqual(re.sub('(((((((((((x)))))))))))', r'\11', 'x'), 'x')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\118', 'xyz'),
'xz8')
self.assertEqual(re.sub('((((((((((y))))))))))(.)', r'\11a', 'xyz'),
'xza')
def test_qualified_re_sub(self):
self.assertEqual(re.sub('a', 'b', 'aaaaa'), 'bbbbb')
self.assertEqual(re.sub('a', 'b', 'aaaaa', 1), 'baaaa')
def test_bug_114660(self):
self.assertEqual(re.sub(r'(\S)\s+(\S)', r'\1 \2', 'hello there'),
'hello there')
def test_bug_462270(self):
# Test for empty sub() behaviour, see SF bug #462270
self.assertEqual(re.sub('x*', '-', 'abxd'), '-a-b-d-')
self.assertEqual(re.sub('x+', '-', 'abxd'), 'ab-d')
def test_symbolic_groups(self):
re.compile('(?P<a>x)(?P=a)(?(a)y)')
re.compile('(?P<a1>x)(?P=a1)(?(a1)y)')
self.assertRaises(re.error, re.compile, '(?P<a>)(?P<a>)')
self.assertRaises(re.error, re.compile, '(?Px)')
self.assertRaises(re.error, re.compile, '(?P=)')
self.assertRaises(re.error, re.compile, '(?P=1)')
self.assertRaises(re.error, re.compile, '(?P=a)')
self.assertRaises(re.error, re.compile, '(?P=a1)')
self.assertRaises(re.error, re.compile, '(?P=a.)')
self.assertRaises(re.error, re.compile, '(?P<)')
self.assertRaises(re.error, re.compile, '(?P<>)')
self.assertRaises(re.error, re.compile, '(?P<1>)')
self.assertRaises(re.error, re.compile, '(?P<a.>)')
self.assertRaises(re.error, re.compile, '(?())')
self.assertRaises(re.error, re.compile, '(?(a))')
self.assertRaises(re.error, re.compile, '(?(1a))')
self.assertRaises(re.error, re.compile, '(?(a.))')
# New valid/invalid identifiers in Python 3
re.compile('(?P<µ>x)(?P=µ)(?(µ)y)')
re.compile('(?P<𝔘𝔫𝔦𝔠𝔬𝔡𝔢>x)(?P=𝔘𝔫𝔦𝔠𝔬𝔡𝔢)(?(𝔘𝔫𝔦𝔠𝔬𝔡𝔢)y)')
self.assertRaises(re.error, re.compile, '(?P<©>x)')
def test_symbolic_refs(self):
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<a a>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<1a1>', 'xx')
self.assertRaises(IndexError, re.sub, '(?P<a>x)', '\g<ab>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\g<b>', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)|(?P<b>y)', '\\2', 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', '\g<-1>', 'xx')
# New valid/invalid identifiers in Python 3
self.assertEqual(re.sub('(?P<µ>x)', r'\g<µ>', 'xx'), 'xx')
self.assertEqual(re.sub('(?P<𝔘𝔫𝔦𝔠𝔬𝔡𝔢>x)', r'\g<𝔘𝔫𝔦𝔠𝔬𝔡𝔢>', 'xx'), 'xx')
self.assertRaises(re.error, re.sub, '(?P<a>x)', r'\g<©>', 'xx')
def test_re_subn(self):
self.assertEqual(re.subn("(?i)b+", "x", "bbbb BBBB"), ('x x', 2))
self.assertEqual(re.subn("b+", "x", "bbbb BBBB"), ('x BBBB', 1))
self.assertEqual(re.subn("b+", "x", "xyz"), ('xyz', 0))
self.assertEqual(re.subn("b*", "x", "xyz"), ('xxxyxzx', 4))
self.assertEqual(re.subn("b*", "x", "xyz", 2), ('xxxyz', 2))
def test_re_split(self):
self.assertEqual(re.split(":", ":a:b::c"), ['', 'a', 'b', '', 'c'])
self.assertEqual(re.split(":*", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:*)", ":a:b::c"),
['', ':', 'a', ':', 'b', '::', 'c'])
self.assertEqual(re.split("(?::*)", ":a:b::c"), ['', 'a', 'b', 'c'])
self.assertEqual(re.split("(:)*", ":a:b::c"),
['', ':', 'a', ':', 'b', ':', 'c'])
self.assertEqual(re.split("([b:]+)", ":a:b::c"),
['', ':', 'a', ':b::', 'c'])
self.assertEqual(re.split("(b)|(:+)", ":a:b::c"),
['', None, ':', 'a', None, ':', '', 'b', None, '',
None, '::', 'c'])
self.assertEqual(re.split("(?:b)|(?::+)", ":a:b::c"),
['', 'a', '', '', 'c'])
def test_qualified_re_split(self):
self.assertEqual(re.split(":", ":a:b::c", 2), ['', 'a', 'b::c'])
self.assertEqual(re.split(':', 'a:b:c:d', 2), ['a', 'b', 'c:d'])
self.assertEqual(re.split("(:)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
self.assertEqual(re.split("(:*)", ":a:b::c", 2),
['', ':', 'a', ':', 'b::c'])
def test_re_findall(self):
self.assertEqual(re.findall(":+", "abc"), [])
self.assertEqual(re.findall(":+", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:+)", "a:b::c:::d"), [":", "::", ":::"])
self.assertEqual(re.findall("(:)(:*)", "a:b::c:::d"), [(":", ""),
(":", ":"),
(":", "::")])
def test_bug_117612(self):
self.assertEqual(re.findall(r"(a|(b))", "aba"),
[("a", ""),("b", "b"),("a", "")])
def test_re_match(self):
self.assertEqual(re.match('a', 'a').groups(), ())
self.assertEqual(re.match('(a)', 'a').groups(), ('a',))
self.assertEqual(re.match(r'(a)', 'a').group(0), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1), 'a')
self.assertEqual(re.match(r'(a)', 'a').group(1, 1), ('a', 'a'))
pat = re.compile('((a)|(b))(c)?')
self.assertEqual(pat.match('a').groups(), ('a', 'a', None, None))
self.assertEqual(pat.match('b').groups(), ('b', None, 'b', None))
self.assertEqual(pat.match('ac').groups(), ('a', 'a', None, 'c'))
self.assertEqual(pat.match('bc').groups(), ('b', None, 'b', 'c'))
self.assertEqual(pat.match('bc').groups(""), ('b', "", 'b', 'c'))
# A single group
m = re.match('(a)', 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(0), 'a')
self.assertEqual(m.group(1), 'a')
self.assertEqual(m.group(1, 1), ('a', 'a'))
pat = re.compile('(?:(?P<a1>a)|(?P<b2>b))(?P<c3>c)?')
self.assertEqual(pat.match('a').group(1, 2, 3), ('a', None, None))
self.assertEqual(pat.match('b').group('a1', 'b2', 'c3'),
(None, 'b', None))
self.assertEqual(pat.match('ac').group(1, 'b2', 3), ('a', None, 'c'))
def test_re_groupref_exists(self):
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a)').groups(),
('(', 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', 'a)'), None)
self.assertEqual(re.match('^(\()?([^()]+)(?(1)\))$', '(a'), None)
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'ab').groups(),
('a', 'b'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)b|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'cd').groups(),
(None, 'd'))
self.assertEqual(re.match('^(?:(a)|c)((?(1)|d))$', 'a').groups(),
('a', ''))
# Tests for bug #1177831: exercise groups other than the first group
p = re.compile('(?P<g1>a)(?P<g2>b)?((?(g2)c|d))')
self.assertEqual(p.match('abc').groups(),
('a', 'b', 'c'))
self.assertEqual(p.match('ad').groups(),
('a', None, 'd'))
self.assertEqual(p.match('abd'), None)
self.assertEqual(p.match('ac'), None)
def test_re_groupref(self):
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a|').groups(),
('|', 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1?$', 'a').groups(),
(None, 'a'))
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', 'a|'), None)
self.assertEqual(re.match(r'^(\|)?([^()]+)\1$', '|a'), None)
self.assertEqual(re.match(r'^(?:(a)|c)(\1)$', 'aa').groups(),
('a', 'a'))
self.assertEqual(re.match(r'^(?:(a)|c)(\1)?$', 'c').groups(),
(None, None))
def test_groupdict(self):
self.assertEqual(re.match('(?P<first>first) (?P<second>second)',
'first second').groupdict(),
{'first':'first', 'second':'second'})
def test_expand(self):
self.assertEqual(re.match("(?P<first>first) (?P<second>second)",
"first second")
.expand(r"\2 \1 \g<second> \g<first>"),
"second first second first")
def test_repeat_minmax(self):
self.assertEqual(re.match("^(\w){1}$", "abc"), None)
self.assertEqual(re.match("^(\w){1}?$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}$", "abc"), None)
self.assertEqual(re.match("^(\w){1,2}?$", "abc"), None)
self.assertEqual(re.match("^(\w){3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,3}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){1,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^(\w){3,4}?$", "abc").group(1), "c")
self.assertEqual(re.match("^x{1}$", "xxx"), None)
self.assertEqual(re.match("^x{1}?$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}$", "xxx"), None)
self.assertEqual(re.match("^x{1,2}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,3}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{1,4}?$", "xxx"), None)
self.assertNotEqual(re.match("^x{3,4}?$", "xxx"), None)
self.assertEqual(re.match("^x{}$", "xxx"), None)
self.assertNotEqual(re.match("^x{}$", "x{}"), None)
def test_getattr(self):
self.assertEqual(re.compile("(?i)(a)(b)").pattern, "(?i)(a)(b)")
self.assertEqual(re.compile("(?i)(a)(b)").flags, re.I | re.U)
self.assertEqual(re.compile("(?i)(a)(b)").groups, 2)
self.assertEqual(re.compile("(?i)(a)(b)").groupindex, {})
self.assertEqual(re.compile("(?i)(?P<first>a)(?P<other>b)").groupindex,
{'first': 1, 'other': 2})
self.assertEqual(re.match("(a)", "a").pos, 0)
self.assertEqual(re.match("(a)", "a").endpos, 1)
self.assertEqual(re.match("(a)", "a").string, "a")
self.assertEqual(re.match("(a)", "a").regs, ((0, 1), (0, 1)))
self.assertNotEqual(re.match("(a)", "a").re, None)
def test_special_escapes(self):
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.LOCALE).group(1), "bx")
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd", re.UNICODE).group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\b(b.)\b",
"abcd abc bcd bx").group(1), "bx")
self.assertEqual(re.search(r"\B(b.)\B",
"abc bcd bc abxd").group(1), "bx")
self.assertEqual(re.search(r"^abc$", "\nabc\n", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "abc", re.M).group(0), "abc")
self.assertEqual(re.search(r"^\Aabc\Z$", "\nabc\n", re.M), None)
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a").group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.LOCALE).group(0), "1aa! a")
self.assertEqual(re.search(r"\d\D\w\W\s\S",
"1aa! a", re.UNICODE).group(0), "1aa! a")
def test_string_boundaries(self):
# See http://bugs.python.org/issue10713
self.assertEqual(re.search(r"\b(abc)\b", "abc").group(1),
"abc")
# There's a word boundary at the start of a string.
self.assertTrue(re.match(r"\b", "abc"))
# A non-empty string includes a non-boundary zero-length match.
self.assertTrue(re.search(r"\B", "abc"))
# There is no non-boundary match at the start of a string.
self.assertFalse(re.match(r"\B", "abc"))
# However, an empty string contains no word boundaries, and also no
# non-boundaries.
self.assertEqual(re.search(r"\B", ""), None)
# This one is questionable and different from the perlre behaviour,
# but describes current behavior.
self.assertEqual(re.search(r"\b", ""), None)
# A single word-character string has two boundaries, but no
# non-boundary gaps.
self.assertEqual(len(re.findall(r"\b", "a")), 2)
self.assertEqual(len(re.findall(r"\B", "a")), 0)
# If there are no words, there are no boundaries
self.assertEqual(len(re.findall(r"\b", " ")), 0)
self.assertEqual(len(re.findall(r"\b", " ")), 0)
# Can match around the whitespace.
self.assertEqual(len(re.findall(r"\B", " ")), 2)
def test_bigcharset(self):
self.assertEqual(re.match("([\u2222\u2223])",
"\u2222").group(1), "\u2222")
self.assertEqual(re.match("([\u2222\u2223])",
"\u2222", re.UNICODE).group(1), "\u2222")
def test_big_codesize(self):
# Issue #1160
r = re.compile('|'.join(('%d'%x for x in range(10000))))
self.assertIsNotNone(r.match('1000'))
self.assertIsNotNone(r.match('9999'))
def test_anyall(self):
self.assertEqual(re.match("a.b", "a\nb", re.DOTALL).group(0),
"a\nb")
self.assertEqual(re.match("a.*b", "a\n\nb", re.DOTALL).group(0),
"a\n\nb")
def test_non_consuming(self):
self.assertEqual(re.match("(a(?=\s[^a]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[^a]*))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]))", "a b").group(1), "a")
self.assertEqual(re.match("(a(?=\s[abc]*))", "a bc").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1)", "a a").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s\1*)", "a aa").group(1), "a")
self.assertEqual(re.match(r"(a)(?=\s(abc|a))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[^a]))", "a a").group(1), "a")
self.assertEqual(re.match(r"(a(?!\s[abc]))", "a d").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s\1)", "a b").group(1), "a")
self.assertEqual(re.match(r"(a)(?!\s(abc|a))", "a b").group(1), "a")
def test_ignore_case(self):
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match(r"(a\s[^a])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[^a]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"(a\s[abc])", "a b", re.I).group(1), "a b")
self.assertEqual(re.match(r"(a\s[abc]*)", "a bb", re.I).group(1), "a bb")
self.assertEqual(re.match(r"((a)\s\2)", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s\2*)", "a aa", re.I).group(1), "a aa")
self.assertEqual(re.match(r"((a)\s(abc|a))", "a a", re.I).group(1), "a a")
self.assertEqual(re.match(r"((a)\s(abc|a)*)", "a aa", re.I).group(1), "a aa")
def test_category(self):
self.assertEqual(re.match(r"(\s)", " ").group(1), " ")
def test_getlower(self):
import _sre
self.assertEqual(_sre.getlower(ord('A'), 0), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.LOCALE), ord('a'))
self.assertEqual(_sre.getlower(ord('A'), re.UNICODE), ord('a'))
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
self.assertEqual(re.match("abc", "ABC", re.I).group(0), "ABC")
def test_not_literal(self):
self.assertEqual(re.search("\s([^a])", " b").group(1), "b")
self.assertEqual(re.search("\s([^a]*)", " bb").group(1), "bb")
def test_search_coverage(self):
self.assertEqual(re.search("\s(b)", " b").group(1), "b")
self.assertEqual(re.search("a\s", "a ").group(0), "a ")
def assertMatch(self, pattern, text, match=None, span=None,
matcher=re.match):
if match is None and span is None:
# the pattern matches the whole text
match = text
span = (0, len(text))
elif match is None or span is None:
raise ValueError('If match is not None, span should be specified '
'(and vice versa).')
m = matcher(pattern, text)
self.assertTrue(m)
self.assertEqual(m.group(), match)
self.assertEqual(m.span(), span)
def test_re_escape(self):
alnum_chars = string.ascii_letters + string.digits + '_'
p = ''.join(chr(i) for i in range(256))
for c in p:
if c in alnum_chars:
self.assertEqual(re.escape(c), c)
elif c == '\x00':
self.assertEqual(re.escape(c), '\\000')
else:
self.assertEqual(re.escape(c), '\\' + c)
self.assertMatch(re.escape(c), c)
self.assertMatch(re.escape(p), p)
def test_re_escape_byte(self):
alnum_chars = (string.ascii_letters + string.digits + '_').encode('ascii')
p = bytes(range(256))
for i in p:
b = bytes([i])
if b in alnum_chars:
self.assertEqual(re.escape(b), b)
elif i == 0:
self.assertEqual(re.escape(b), b'\\000')
else:
self.assertEqual(re.escape(b), b'\\' + b)
self.assertMatch(re.escape(b), b)
self.assertMatch(re.escape(p), p)
def test_re_escape_non_ascii(self):
s = 'xxx\u2620\u2620\u2620xxx'
s_escaped = re.escape(s)
self.assertEqual(s_escaped, 'xxx\\\u2620\\\u2620\\\u2620xxx')
self.assertMatch(s_escaped, s)
self.assertMatch('.%s+.' % re.escape('\u2620'), s,
'x\u2620\u2620\u2620x', (2, 7), re.search)
def test_re_escape_non_ascii_bytes(self):
b = 'y\u2620y\u2620y'.encode('utf-8')
b_escaped = re.escape(b)
self.assertEqual(b_escaped, b'y\\\xe2\\\x98\\\xa0y\\\xe2\\\x98\\\xa0y')
self.assertMatch(b_escaped, b)
res = re.findall(re.escape('\u2620'.encode('utf-8')), b)
self.assertEqual(len(res), 2)
def pickle_test(self, pickle):
oldpat = re.compile('a(?:b|(c|e){1,2}?|d)+?(.)')
s = pickle.dumps(oldpat)
newpat = pickle.loads(s)
self.assertEqual(oldpat, newpat)
def test_constants(self):
self.assertEqual(re.I, re.IGNORECASE)
self.assertEqual(re.L, re.LOCALE)
self.assertEqual(re.M, re.MULTILINE)
self.assertEqual(re.S, re.DOTALL)
self.assertEqual(re.X, re.VERBOSE)
def test_flags(self):
for flag in [re.I, re.M, re.X, re.S, re.L]:
self.assertNotEqual(re.compile('^pattern$', flag), None)
def test_sre_character_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255, 256, 0xFFFF, 0x10000, 0x10FFFF]:
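            # values past 0xFF exercise the \u/\U escapes; values past
            # 0xFFFF additionally exercise non-BMP handling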
if i < 256:
self.assertIsNotNone(re.match(r"\%03o" % i, chr(i)))
self.assertIsNotNone(re.match(r"\%03o0" % i, chr(i)+"0"))
self.assertIsNotNone(re.match(r"\%03o8" % i, chr(i)+"8"))
self.assertIsNotNone(re.match(r"\x%02x" % i, chr(i)))
self.assertIsNotNone(re.match(r"\x%02x0" % i, chr(i)+"0"))
self.assertIsNotNone(re.match(r"\x%02xz" % i, chr(i)+"z"))
if i < 0x10000:
self.assertIsNotNone(re.match(r"\u%04x" % i, chr(i)))
self.assertIsNotNone(re.match(r"\u%04x0" % i, chr(i)+"0"))
self.assertIsNotNone(re.match(r"\u%04xz" % i, chr(i)+"z"))
self.assertIsNotNone(re.match(r"\U%08x" % i, chr(i)))
self.assertIsNotNone(re.match(r"\U%08x0" % i, chr(i)+"0"))
self.assertIsNotNone(re.match(r"\U%08xz" % i, chr(i)+"z"))
self.assertIsNotNone(re.match(r"\0", "\000"))
self.assertIsNotNone(re.match(r"\08", "\0008"))
self.assertIsNotNone(re.match(r"\01", "\001"))
self.assertIsNotNone(re.match(r"\018", "\0018"))
self.assertIsNotNone(re.match(r"\567", chr(0o167)))
self.assertRaises(re.error, re.match, r"\911", "")
self.assertRaises(re.error, re.match, r"\x1", "")
self.assertRaises(re.error, re.match, r"\x1z", "")
self.assertRaises(re.error, re.match, r"\u123", "")
self.assertRaises(re.error, re.match, r"\u123z", "")
self.assertRaises(re.error, re.match, r"\U0001234", "")
self.assertRaises(re.error, re.match, r"\U0001234z", "")
self.assertRaises(re.error, re.match, r"\U00110000", "")
def test_sre_character_class_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255, 256, 0xFFFF, 0x10000, 0x10FFFF]:
if i < 256:
self.assertIsNotNone(re.match(r"[\%o]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\%o8]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\%03o]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\%03o0]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\%03o8]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\x%02x]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\x%02x0]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\x%02xz]" % i, chr(i)))
if i < 0x10000:
self.assertIsNotNone(re.match(r"[\u%04x]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\u%04x0]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\u%04xz]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\U%08x]" % i, chr(i)))
self.assertIsNotNone(re.match(r"[\U%08x0]" % i, chr(i)+"0"))
self.assertIsNotNone(re.match(r"[\U%08xz]" % i, chr(i)+"z"))
self.assertIsNotNone(re.match(r"[\U0001d49c-\U0001d4b5]", "\U0001d49e"))
self.assertRaises(re.error, re.match, r"[\911]", "")
self.assertRaises(re.error, re.match, r"[\x1z]", "")
self.assertRaises(re.error, re.match, r"[\u123z]", "")
self.assertRaises(re.error, re.match, r"[\U0001234z]", "")
self.assertRaises(re.error, re.match, r"[\U00110000]", "")
def test_sre_byte_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertIsNotNone(re.match((r"\%03o" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"\%03o0" % i).encode(), bytes([i])+b"0"))
self.assertIsNotNone(re.match((r"\%03o8" % i).encode(), bytes([i])+b"8"))
self.assertIsNotNone(re.match((r"\x%02x" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"\x%02x0" % i).encode(), bytes([i])+b"0"))
self.assertIsNotNone(re.match((r"\x%02xz" % i).encode(), bytes([i])+b"z"))
self.assertIsNotNone(re.match(br"\u", b'u'))
self.assertIsNotNone(re.match(br"\U", b'U'))
self.assertIsNotNone(re.match(br"\0", b"\000"))
self.assertIsNotNone(re.match(br"\08", b"\0008"))
self.assertIsNotNone(re.match(br"\01", b"\001"))
self.assertIsNotNone(re.match(br"\018", b"\0018"))
self.assertIsNotNone(re.match(br"\567", bytes([0o167])))
self.assertRaises(re.error, re.match, br"\911", b"")
self.assertRaises(re.error, re.match, br"\x1", b"")
self.assertRaises(re.error, re.match, br"\x1z", b"")
def test_sre_byte_class_literals(self):
for i in [0, 8, 16, 32, 64, 127, 128, 255]:
self.assertIsNotNone(re.match((r"[\%o]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"[\%o8]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"[\%03o]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"[\%03o0]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"[\%03o8]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"[\x%02x]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"[\x%02x0]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match((r"[\x%02xz]" % i).encode(), bytes([i])))
self.assertIsNotNone(re.match(br"[\u]", b'u'))
self.assertIsNotNone(re.match(br"[\U]", b'U'))
self.assertRaises(re.error, re.match, br"[\911]", "")
self.assertRaises(re.error, re.match, br"[\x1z]", "")
def test_bug_113254(self):
self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').end(1), -1)
self.assertEqual(re.match(r'(a)|(b)', 'b').span(1), (-1, -1))
def test_bug_527371(self):
# bug described in patches 527371/672491
self.assertEqual(re.match(r'(a)?a','a').lastindex, None)
self.assertEqual(re.match(r'(a)(b)?b','ab').lastindex, 1)
self.assertEqual(re.match(r'(?P<a>a)(?P<b>b)?b','ab').lastgroup, 'a')
self.assertEqual(re.match("(?P<a>a(b))", "ab").lastgroup, 'a')
self.assertEqual(re.match("((a))", "a").lastindex, 1)
def test_bug_545855(self):
# bug 545855 -- This pattern failed to cause a compile error as it
# should, instead provoking a TypeError.
self.assertRaises(re.error, re.compile, 'foo[a-')
def test_bug_418626(self):
# bugs 418626 at al. -- Testing Greg Chapman's addition of op code
# SRE_OP_MIN_REPEAT_ONE for eliminating recursion on simple uses of
# pattern '*?' on a long string.
self.assertEqual(re.match('.*?c', 10000*'ab'+'cd').end(0), 20001)
self.assertEqual(re.match('.*?cd', 5000*'ab'+'c'+5000*'ab'+'cde').end(0),
20003)
self.assertEqual(re.match('.*?cd', 20000*'abc'+'de').end(0), 60001)
# non-simple '*?' still used to hit the recursion limit, before the
# non-recursive scheme was implemented.
self.assertEqual(re.search('(a|b)*?c', 10000*'ab'+'cd').end(0), 20001)
def test_bug_612074(self):
pat="["+re.escape("\u2039")+"]"
self.assertEqual(re.compile(pat) and 1, 1)
def test_stack_overflow(self):
# nasty cases that used to overflow the straightforward recursive
# implementation of repeated groups.
self.assertEqual(re.match('(x)*', 50000*'x').group(1), 'x')
self.assertEqual(re.match('(x)*y', 50000*'x'+'y').group(1), 'x')
self.assertEqual(re.match('(x)*?y', 50000*'x'+'y').group(1), 'x')
def test_unlimited_zero_width_repeat(self):
# Issue #9669
self.assertIsNone(re.match(r'(?:a?)*y', 'z'))
self.assertIsNone(re.match(r'(?:a?)+y', 'z'))
self.assertIsNone(re.match(r'(?:a?){2,}y', 'z'))
self.assertIsNone(re.match(r'(?:a?)*?y', 'z'))
self.assertIsNone(re.match(r'(?:a?)+?y', 'z'))
self.assertIsNone(re.match(r'(?:a?){2,}?y', 'z'))
# def test_scanner(self):
# def s_ident(scanner, token): return token
# def s_operator(scanner, token): return "op%s" % token
# def s_float(scanner, token): return float(token)
# def s_int(scanner, token): return int(token)
#
# scanner = Scanner([
# (r"[a-zA-Z_]\w*", s_ident),
# (r"\d+\.\d*", s_float),
# (r"\d+", s_int),
# (r"=|\+|-|\*|/", s_operator),
# (r"\s+", None),
# ])
#
# self.assertNotEqual(scanner.scanner.scanner("").pattern, None)
#
# self.assertEqual(scanner.scan("sum = 3*foo + 312.50 + bar"),
# (['sum', 'op=', 3, 'op*', 'foo', 'op+', 312.5,
# 'op+', 'bar'], ''))
def test_bug_448951(self):
# bug 448951 (similar to 429357, but with single char match)
# (Also test greedy matches.)
for op in '','?','*':
self.assertEqual(re.match(r'((.%s):)?z'%op, 'z').groups(),
(None, None))
self.assertEqual(re.match(r'((.%s):)?z'%op, 'a:z').groups(),
('a:', 'a'))
def test_bug_725106(self):
# capturing groups in alternatives in repeats
self.assertEqual(re.match('^((a)|b)*', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*', 'abc').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)|b)*?c', 'abc').groups(),
('b', 'a'))
self.assertEqual(re.match('^(([ab])|c)*?d', 'abcd').groups(),
('c', 'b'))
self.assertEqual(re.match('^((d)|[ab])*?c', 'abc').groups(),
('b', None))
self.assertEqual(re.match('^((a)c|[ab])*?c', 'abc').groups(),
('b', None))
def test_bug_725149(self):
# mark_stack_base restoring before restoring marks
self.assertEqual(re.match('(a)(?:(?=(b)*)c)*', 'abb').groups(),
('a', None))
self.assertEqual(re.match('(a)((?!(b)*))*', 'abb').groups(),
('a', None, None))
def test_bug_764548(self):
# bug 764548, re.compile() barfs on str/unicode subclasses
class my_unicode(str): pass
pat = re.compile(my_unicode("abc"))
self.assertEqual(pat.match("xyz"), None)
def test_finditer(self):
iter = re.finditer(r":+", "a:b::c:::d")
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
pat = re.compile(r":+")
iter = pat.finditer("a:b::c:::d", 1, 10)
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
pat = re.compile(r":+")
iter = pat.finditer("a:b::c:::d", pos=1, endpos=10)
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
pat = re.compile(r":+")
iter = pat.finditer("a:b::c:::d", endpos=10, pos=1)
self.assertEqual([item.group(0) for item in iter],
[":", "::", ":::"])
pat = re.compile(r":+")
iter = pat.finditer("a:b::c:::d", pos=3, endpos=8)
self.assertEqual([item.group(0) for item in iter],
["::", "::"])
def test_bug_926075(self):
self.assertTrue(re.compile('bug_926075') is not
re.compile(b'bug_926075'))
def test_bug_931848(self):
pattern = eval('"[\u002E\u3002\uFF0E\uFF61]"')
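        # (the eval() is a leftover from the Python 2 version of this test,
        # where it forced the \uXXXX escapes to be interpreted)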
self.assertEqual(re.compile(pattern).split("a.b.c"),
['a','b','c'])
def test_bug_581080(self):
iter = re.finditer(r"\s", "a b")
self.assertEqual(next(iter).span(), (1,2))
self.assertRaises(StopIteration, next, iter)
scanner = re.compile(r"\s").scanner("a b")
self.assertEqual(scanner.search().span(), (1, 2))
self.assertEqual(scanner.search(), None)
def test_bug_817234(self):
iter = re.finditer(r".*", "asdf")
self.assertEqual(next(iter).span(), (0, 4))
self.assertEqual(next(iter).span(), (4, 4))
self.assertRaises(StopIteration, next, iter)
def test_bug_6561(self):
# '\d' should match characters in Unicode category 'Nd'
# (Number, Decimal Digit), but not those in 'Nl' (Number,
# Letter) or 'No' (Number, Other).
decimal_digits = [
'\u0037', # '\N{DIGIT SEVEN}', category 'Nd'
'\u0e58', # '\N{THAI DIGIT SIX}', category 'Nd'
'\uff10', # '\N{FULLWIDTH DIGIT ZERO}', category 'Nd'
]
for x in decimal_digits:
self.assertEqual(re.match('^\d$', x).group(0), x)
not_decimal_digits = [
'\u2165', # '\N{ROMAN NUMERAL SIX}', category 'Nl'
'\u3039', # '\N{HANGZHOU NUMERAL TWENTY}', category 'Nl'
'\u2082', # '\N{SUBSCRIPT TWO}', category 'No'
'\u32b4', # '\N{CIRCLED NUMBER THIRTY NINE}', category 'No'
]
for x in not_decimal_digits:
self.assertIsNone(re.match('^\d$', x))
def test_empty_array(self):
# SF buf 1647541
import array
for typecode in 'bBuhHiIlLfd':
a = array.array(typecode)
self.assertEqual(re.compile(b"bla").match(a), None)
self.assertEqual(re.compile(b"").match(a).groups(), ())
def test_inline_flags(self):
# Bug #1700
upper_char = chr(0x1ea0) # Latin Capital Letter A with Dot Bellow
lower_char = chr(0x1ea1) # Latin Small Letter A with Dot Bellow
p = re.compile(upper_char, re.I | re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile(lower_char, re.I | re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
p = re.compile('(?i)' + upper_char, re.U)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?i)' + lower_char, re.U)
q = p.match(upper_char)
self.assertNotEqual(q, None)
p = re.compile('(?iu)' + upper_char)
q = p.match(lower_char)
self.assertNotEqual(q, None)
p = re.compile('(?iu)' + lower_char)
q = p.match(upper_char)
self.assertNotEqual(q, None)
def test_dollar_matches_twice(self):
"$ matches the end of string, and just before the terminating \n"
pattern = re.compile('$')
self.assertEqual(pattern.sub('#', 'a\nb\n'), 'a\nb#\n#')
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a\nb\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
pattern = re.compile('$', re.MULTILINE)
self.assertEqual(pattern.sub('#', 'a\nb\n' ), 'a#\nb#\n#' )
self.assertEqual(pattern.sub('#', 'a\nb\nc'), 'a#\nb#\nc#')
self.assertEqual(pattern.sub('#', '\n'), '#\n#')
def test_bytes_str_mixing(self):
# Mixing str and bytes is disallowed
pat = re.compile('.')
bpat = re.compile(b'.')
self.assertRaises(TypeError, pat.match, b'b')
self.assertRaises(TypeError, bpat.match, 'b')
self.assertRaises(TypeError, pat.sub, b'b', 'c')
self.assertRaises(TypeError, pat.sub, 'b', b'c')
self.assertRaises(TypeError, pat.sub, b'b', b'c')
self.assertRaises(TypeError, bpat.sub, b'b', 'c')
self.assertRaises(TypeError, bpat.sub, 'b', b'c')
self.assertRaises(TypeError, bpat.sub, 'b', 'c')
def test_ascii_and_unicode_flag(self):
# String patterns
for flags in (0, re.UNICODE):
pat = re.compile('\xc0', flags | re.IGNORECASE)
self.assertNotEqual(pat.match('\xe0'), None)
pat = re.compile('\w', flags)
self.assertNotEqual(pat.match('\xe0'), None)
pat = re.compile('\xc0', re.ASCII | re.IGNORECASE)
self.assertEqual(pat.match('\xe0'), None)
pat = re.compile('(?a)\xc0', re.IGNORECASE)
self.assertEqual(pat.match('\xe0'), None)
pat = re.compile('\w', re.ASCII)
self.assertEqual(pat.match('\xe0'), None)
pat = re.compile('(?a)\w')
self.assertEqual(pat.match('\xe0'), None)
# Bytes patterns
for flags in (0, re.ASCII):
pat = re.compile(b'\xc0', re.IGNORECASE)
self.assertEqual(pat.match(b'\xe0'), None)
pat = re.compile(b'\w')
self.assertEqual(pat.match(b'\xe0'), None)
# Incompatibilities
self.assertRaises(ValueError, re.compile, b'\w', re.UNICODE)
self.assertRaises(ValueError, re.compile, b'(?u)\w')
self.assertRaises(ValueError, re.compile, '\w', re.UNICODE | re.ASCII)
self.assertRaises(ValueError, re.compile, '(?u)\w', re.ASCII)
self.assertRaises(ValueError, re.compile, '(?a)\w', re.UNICODE)
self.assertRaises(ValueError, re.compile, '(?au)\w')
def test_bug_6509(self):
# Replacement strings of both types must parse properly.
# all strings
pat = re.compile('a(\w)')
self.assertEqual(pat.sub('b\\1', 'ac'), 'bc')
pat = re.compile('a(.)')
self.assertEqual(pat.sub('b\\1', 'a\u1234'), 'b\u1234')
pat = re.compile('..')
self.assertEqual(pat.sub(lambda m: 'str', 'a5'), 'str')
# all bytes
pat = re.compile(b'a(\w)')
self.assertEqual(pat.sub(b'b\\1', b'ac'), b'bc')
pat = re.compile(b'a(.)')
self.assertEqual(pat.sub(b'b\\1', b'a\xCD'), b'b\xCD')
pat = re.compile(b'..')
self.assertEqual(pat.sub(lambda m: b'bytes', b'a5'), b'bytes')
def test_dealloc(self):
# issue 3299: check for segfault in debug build
import _sre
# the overflow limit is different on wide and narrow builds and it
# depends on the definition of SRE_CODE (see sre.h).
# 2**128 should be big enough to overflow on both. For smaller values
# a RuntimeError is raised instead of OverflowError.
long_overflow = 2**128
self.assertRaises(TypeError, re.finditer, "a", {})
self.assertRaises(OverflowError, _sre.compile, "abc", 0, [long_overflow])
self.assertRaises(TypeError, _sre.compile, {}, 0, [])
def test_search_dot_unicode(self):
self.assertIsNotNone(re.search("123.*-", '123abc-'))
self.assertIsNotNone(re.search("123.*-", '123\xe9-'))
self.assertIsNotNone(re.search("123.*-", '123\u20ac-'))
self.assertIsNotNone(re.search("123.*-", '123\U0010ffff-'))
self.assertIsNotNone(re.search("123.*-", '123\xe9\u20ac\U0010ffff-'))
def test_compile(self):
# Test return value when given string and pattern as parameter
pattern = re.compile('random pattern')
self.assertIsInstance(pattern, re._pattern_type)
same_pattern = re.compile(pattern)
self.assertIsInstance(same_pattern, re._pattern_type)
self.assertIs(same_pattern, pattern)
# Test behaviour when not given a string or pattern as parameter
self.assertRaises(TypeError, re.compile, 0)
def test_bug_13899(self):
# Issue #13899: re pattern r"[\A]" should work like "A" but matches
# nothing. Ditto B and Z.
self.assertEqual(re.findall(r'[\A\B\b\C\Z]', 'AB\bCZ'),
['A', 'B', '\b', 'C', 'Z'])
# FIXME: brython: implement test.support
# @bigmemtest(size=_2G, memuse=1)
# def test_large_search(self, size):
# # Issue #10182: indices were 32-bit-truncated.
# s = 'a' * size
# m = re.search('$', s)
# self.assertIsNotNone(m)
# self.assertEqual(m.start(), size)
# self.assertEqual(m.end(), size)
# FIXME: brython: implement test.support
# The huge memuse is because of re.sub() using a list and a join()
# to create the replacement result.
# @bigmemtest(size=_2G, memuse=16 + 2)
# def test_large_subn(self, size):
# # Issue #10182: indices were 32-bit-truncated.
# s = 'a' * size
# r, n = re.subn('', '', s)
# self.assertEqual(r, s)
# self.assertEqual(n, size + 1)
def test_bug_16688(self):
# Issue 16688: Backreferences make case-insensitive regex fail on
# non-ASCII strings.
self.assertEqual(re.findall(r"(?i)(a)\1", "aa \u0100"), ['a'])
self.assertEqual(re.match(r"(?s).{1,3}", "\u0100\u0100").span(), (0, 2))
def test_repeat_minmax_overflow(self):
# Issue #13169
string = "x" * 100000
self.assertEqual(re.match(r".{65535}", string).span(), (0, 65535))
self.assertEqual(re.match(r".{,65535}", string).span(), (0, 65535))
self.assertEqual(re.match(r".{65535,}?", string).span(), (0, 65535))
self.assertEqual(re.match(r".{65536}", string).span(), (0, 65536))
self.assertEqual(re.match(r".{,65536}", string).span(), (0, 65536))
self.assertEqual(re.match(r".{65536,}?", string).span(), (0, 65536))
# 2**128 should be big enough to overflow both SRE_CODE and Py_ssize_t.
self.assertRaises(OverflowError, re.compile, r".{%d}" % 2**128)
self.assertRaises(OverflowError, re.compile, r".{,%d}" % 2**128)
self.assertRaises(OverflowError, re.compile, r".{%d,}?" % 2**128)
self.assertRaises(OverflowError, re.compile, r".{%d,%d}" % (2**129, 2**128))
# FIXME: brython: implement test.support
# @cpython_only
# def test_repeat_minmax_overflow_maxrepeat(self):
# try:
# from _sre import MAXREPEAT
# except ImportError:
# self.skipTest('requires _sre.MAXREPEAT constant')
# string = "x" * 100000
# self.assertIsNone(re.match(r".{%d}" % (MAXREPEAT - 1), string))
# self.assertEqual(re.match(r".{,%d}" % (MAXREPEAT - 1), string).span(),
# (0, 100000))
# self.assertIsNone(re.match(r".{%d,}?" % (MAXREPEAT - 1), string))
# self.assertRaises(OverflowError, re.compile, r".{%d}" % MAXREPEAT)
# self.assertRaises(OverflowError, re.compile, r".{,%d}" % MAXREPEAT)
# self.assertRaises(OverflowError, re.compile, r".{%d,}?" % MAXREPEAT)
def test_backref_group_name_in_exception(self):
# Issue 17341: Poor error message when compiling invalid regex
with self.assertRaisesRegex(sre_constants.error, '<foo>'):
re.compile('(?P=<foo>)')
def test_group_name_in_exception(self):
# Issue 17341: Poor error message when compiling invalid regex
with self.assertRaisesRegex(sre_constants.error, '\?foo'):
re.compile('(?P<?foo>)')
def run_re_tests():
from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR
if verbose:
print('Running re_tests test suite')
else:
# To save time, only run the first and last 10 tests
#tests = tests[:10] + tests[-10:]
pass
for t in tests:
sys.stdout.flush()
pattern = s = outcome = repl = expected = None
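        # Each test tuple is (pattern, string, outcome) or
        # (pattern, string, outcome, repl, expected).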
if len(t) == 5:
pattern, s, outcome, repl, expected = t
elif len(t) == 3:
pattern, s, outcome = t
else:
raise ValueError('Test tuples should have 3 or 5 fields', t)
try:
obj = re.compile(pattern)
except re.error:
if outcome == SYNTAX_ERROR: pass # Expected a syntax error
else:
print('=== Syntax error:', t)
except KeyboardInterrupt: raise KeyboardInterrupt
except:
print('*** Unexpected error ***', t)
if verbose:
traceback.print_exc(file=sys.stdout)
else:
try:
result = obj.search(s)
except re.error as msg:
print('=== Unexpected exception', t, repr(msg))
if outcome == SYNTAX_ERROR:
# This should have been a syntax error; forget it.
pass
elif outcome == FAIL:
if result is None: pass # No match, as expected
else: print('=== Succeeded incorrectly', t)
elif outcome == SUCCEED:
if result is not None:
# Matched, as expected, so now we compute the
# result string and compare it to our expected result.
start, end = result.span(0)
vardict={'found': result.group(0),
'groups': result.group(),
'flags': result.re.flags}
for i in range(1, 100):
try:
gi = result.group(i)
# Special hack because else the string concat fails:
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict['g%d' % i] = gi
for i in result.re.groupindex.keys():
try:
gi = result.group(i)
if gi is None:
gi = "None"
except IndexError:
gi = "Error"
vardict[i] = gi
repl = eval(repl, vardict)
if repl != expected:
print('=== grouping error', t, end=' ')
print(repr(repl) + ' should be ' + repr(expected))
else:
print('=== Failed incorrectly', t)
# Try the match with both pattern and string converted to
# bytes, and check that it still succeeds.
try:
bpat = bytes(pattern, "ascii")
bs = bytes(s, "ascii")
except UnicodeEncodeError:
# skip non-ascii tests
pass
else:
try:
bpat = re.compile(bpat)
except Exception:
print('=== Fails on bytes pattern compile', t)
if verbose:
traceback.print_exc(file=sys.stdout)
else:
bytes_result = bpat.search(bs)
if bytes_result is None:
print('=== Fails on bytes pattern match', t)
# Try the match with the search area limited to the extent
# of the match and see if it still succeeds. \B will
# break (because it won't match at the end or start of a
# string), so we'll ignore patterns that feature it.
if pattern[:2] != '\\B' and pattern[-2:] != '\\B' \
and result is not None:
obj = re.compile(pattern)
result = obj.search(s, result.start(0), result.end(0) + 1)
if result is None:
print('=== Failed on range-limited match', t)
# Try the match with IGNORECASE enabled, and check that it
# still succeeds.
obj = re.compile(pattern, re.IGNORECASE)
result = obj.search(s)
if result is None:
print('=== Fails on case-insensitive match', t)
# Try the match with LOCALE enabled, and check that it
# still succeeds.
if '(?u)' not in pattern:
obj = re.compile(pattern, re.LOCALE)
result = obj.search(s)
if result is None:
print('=== Fails on locale-sensitive match', t)
# Try the match with UNICODE locale enabled, and check
# that it still succeeds.
obj = re.compile(pattern, re.UNICODE)
result = obj.search(s)
if result is None:
print('=== Fails on unicode-sensitive match', t)
def test_main():
# FIXME: brython: implement test.support
# run_unittest(ReTests)
run_re_tests()
if __name__ == "__main__":
test_main()
| gpl-3.0 |
nagyistoce/edx-platform | cms/djangoapps/contentstore/management/commands/export_convert_format.py | 34 | 2454 | """
Script for converting a tar.gz file representing an exported course
to the archive format used by a different version of export.
Sample invocation: ./manage.py export_convert_format mycourse.tar.gz ~/newformat/
"""
import os
from path import path
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from tempfile import mkdtemp
import tarfile
import shutil
from openedx.core.lib.extract_tar import safetar_extractall
from xmodule.modulestore.xml_exporter import convert_between_versions
class Command(BaseCommand):
"""
Convert between export formats.
"""
help = 'Convert between versions 0 and 1 of the course export format'
args = '<tar.gz archive file> <output path>'
def handle(self, *args, **options):
"Execute the command"
if len(args) != 2:
raise CommandError("export requires two arguments: <tar.gz file> <output path>")
source_archive = args[0]
output_path = args[1]
# Create temp directories to extract the source and create the target archive.
temp_source_dir = mkdtemp(dir=settings.DATA_DIR)
temp_target_dir = mkdtemp(dir=settings.DATA_DIR)
try:
extract_source(source_archive, temp_source_dir)
desired_version = convert_between_versions(temp_source_dir, temp_target_dir)
            # Now zip up the target directory.
parts = os.path.basename(source_archive).split('.')
archive_name = path(output_path) / "{source_name}_version_{desired_version}.tar.gz".format(
source_name=parts[0], desired_version=desired_version
)
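            # Create/truncate the archive file first, then write the gzipped
            # tar stream into it.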
with open(archive_name, "w"):
tar_file = tarfile.open(archive_name, mode='w:gz')
try:
for item in os.listdir(temp_target_dir):
tar_file.add(path(temp_target_dir) / item, arcname=item)
finally:
tar_file.close()
print("Created archive {0}".format(archive_name))
except ValueError as err:
raise CommandError(err)
finally:
shutil.rmtree(temp_source_dir)
shutil.rmtree(temp_target_dir)
def extract_source(source_archive, target):
"""
Extract the archive into the given target directory.
"""
with tarfile.open(source_archive) as tar_file:
safetar_extractall(tar_file, target)
| agpl-3.0 |
joshmoore/bioformats | components/xsd-fu/python/generateDS/process_includes.py | 17 | 5947 | #!/usr/bin/env python
# -*- mode: pymode; coding: latin1; -*-
"""
Synopsis:
    Recursively process the include elements in an XML Schema file.
Produce a single file that contains all included content.
Input is read either from a file or from stdin.
Output is written either to a file or to stdout.
Usage:
python process_includes.py [options] [ infile [ outfile ] ]
Options:
-h, --help Display this help message.
-f, --force Force. If outfile exists, overwrite without asking.
    -s, --search Search path for schemas. Colon-separated list of directories where schemas may be found.
Examples:
python process_includes.py infile.xsd
python process_includes.py infile.xsd outfile.xsd
python process_includes.py infile.xsd > outfile.xsd
cat infile.xsd | python process_includes.py > outfile.xsd
"""
#
# Imports
import sys
import os
import getopt
import re
import urllib
#
# Try to import lxml first, and if that fails try ElementTree.
# lxml preserves namespace prefixes, but ElemenTree does not.
#
WhichElementTree = ''
try:
from lxml import etree
WhichElementTree = 'lxml'
except ImportError, e:
from xml.etree import ElementTree as etree
WhichElementTree = 'elementtree'
if WhichElementTree != 'lxml' or etree.LXML_VERSION[0] < 2:
print '***'
print '*** Error: Must install lxml (v. >= 2.0) or use "--no-process-includes".'
print '*** Override this error by modifying the above test.'
print '*** But, see the docs before doing so:'
print '*** http://www.rexx.com/~dkuhlman/generateDS.html#include-file-processing'
print '***'
raise RuntimeError, 'Must install lxml (v. >= 2.0) or use "--no-process-includes".'
#print WhichElementTree, etree
#
# Globals and constants
FORCE = False
NAMESPACE_PAT = re.compile(r'\{.*\}')
DIRPATH = []
#
# Classes
#
# Functions
def process_includes(inpath, outpath):
if inpath:
infile = open(inpath, 'r')
else:
infile = sys.stdin
if outpath:
outfile = make_file(outpath)
else:
outfile = sys.stdout
process_include_files(infile, outfile)
if inpath:
infile.close()
if outpath:
outfile.close()
def process_include_files(infile, outfile):
doc = etree.parse(infile)
root = doc.getroot()
process_include_tree(root)
doc.write(outfile)
def process_path(root, idx, path):
count = idx
doc = etree.parse(path)
node = doc.getroot()
process_include_tree(node)
children1 = node.getchildren()
for child1 in children1:
root.insert(count, child1)
count += 1
return count
def process_include_tree(root):
global DIRPATH
idx = 0
children = root.getchildren()
while idx < len(children):
child = children[idx]
tag = child.tag
if type(tag) == type(""):
tag = NAMESPACE_PAT.sub("", tag)
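            # etree qualifies tags as "{namespace-uri}localname"; strip the
            # namespace portion so bare element names can be compared below.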
else:
tag = None
if tag == 'include' and 'schemaLocation' in child.attrib:
root.remove(child)
            locn = child.attrib['schemaLocation']
            path = locn
if os.path.exists(path):
idx = process_path(root, idx, path)
else:
for d in DIRPATH:
path = os.path.join(d,locn)
if os.path.exists(path):
idx = process_path(root, idx, path)
break
else:
msg = "Can't find include file %s. Aborting." % (path, )
raise IOError(msg)
elif tag == 'import' and 'schemaLocation' in child.attrib:
root.remove(child)
locn = child.attrib['schemaLocation']
if locn.startswith('ftp:') or locn.startswith('http:'):
try:
path, msg = urllib.urlretrieve(locn)
idx = process_path(root, idx, path)
except:
msg = "Can't retrieve import file %s. Aborting." % (locn, )
raise IOError(msg)
else:
if os.path.exists(locn):
idx = process_path(root, idx, locn)
else:
for d in DIRPATH:
path = os.path.join(d,locn)
if os.path.exists(path):
idx = process_path(root, idx, path)
break
else:
msg = "Can't find import file %s. Aborting." % (locn, )
raise IOError(msg)
else:
process_include_tree(child)
idx += 1
children = root.getchildren()
def make_file(outFileName):
global FORCE
outFile = None
if (not FORCE) and os.path.exists(outFileName):
reply = raw_input('File %s exists. Overwrite? (y/n): ' % outFileName)
if reply == 'y':
outFile = open(outFileName, 'w')
else:
outFile = open(outFileName, 'w')
return outFile
USAGE_TEXT = __doc__
def usage():
print USAGE_TEXT
sys.exit(1)
def main():
global FORCE
global DIRPATH
args = sys.argv[1:]
try:
opts, args = getopt.getopt(args, 'hfs:', ['help', 'force', 'search=',])
except:
usage()
name = 'nobody'
for opt, val in opts:
if opt in ('-h', '--help'):
usage()
elif opt in ('-f', '--force'):
FORCE = True
elif opt in ('-s', '--search'):
DIRPATH = val.split(':')
if len(args) == 2:
inpath = args[0]
outpath = args[1]
elif len(args) == 1:
inpath = args[0]
outpath = None
elif len(args) == 0:
inpath = None
outpath = None
else:
usage()
process_includes(inpath, outpath)
if __name__ == '__main__':
#import pdb; pdb.set_trace()
main()
| gpl-2.0 |
jetskijoe/SickGear | lib/unidecode/x06a.py | 252 | 4674 | data = (
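# ASCII transliterations for the Unicode block U+6A00..U+6AFF; the tuple is
# indexed by the low byte of the code point.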
'Di ', # 0x00
'Zhuang ', # 0x01
'Le ', # 0x02
'Lang ', # 0x03
'Chen ', # 0x04
'Cong ', # 0x05
'Li ', # 0x06
'Xiu ', # 0x07
'Qing ', # 0x08
'Shuang ', # 0x09
'Fan ', # 0x0a
'Tong ', # 0x0b
'Guan ', # 0x0c
'Ji ', # 0x0d
'Suo ', # 0x0e
'Lei ', # 0x0f
'Lu ', # 0x10
'Liang ', # 0x11
'Mi ', # 0x12
'Lou ', # 0x13
'Chao ', # 0x14
'Su ', # 0x15
'Ke ', # 0x16
'Shu ', # 0x17
'Tang ', # 0x18
'Biao ', # 0x19
'Lu ', # 0x1a
'Jiu ', # 0x1b
'Shu ', # 0x1c
'Zha ', # 0x1d
'Shu ', # 0x1e
'Zhang ', # 0x1f
'Men ', # 0x20
'Mo ', # 0x21
'Niao ', # 0x22
'Yang ', # 0x23
'Tiao ', # 0x24
'Peng ', # 0x25
'Zhu ', # 0x26
'Sha ', # 0x27
'Xi ', # 0x28
'Quan ', # 0x29
'Heng ', # 0x2a
'Jian ', # 0x2b
'Cong ', # 0x2c
'[?] ', # 0x2d
'Hokuso ', # 0x2e
'Qiang ', # 0x2f
'Tara ', # 0x30
'Ying ', # 0x31
'Er ', # 0x32
'Xin ', # 0x33
'Zhi ', # 0x34
'Qiao ', # 0x35
'Zui ', # 0x36
'Cong ', # 0x37
'Pu ', # 0x38
'Shu ', # 0x39
'Hua ', # 0x3a
'Kui ', # 0x3b
'Zhen ', # 0x3c
'Zun ', # 0x3d
'Yue ', # 0x3e
'Zhan ', # 0x3f
'Xi ', # 0x40
'Xun ', # 0x41
'Dian ', # 0x42
'Fa ', # 0x43
'Gan ', # 0x44
'Mo ', # 0x45
'Wu ', # 0x46
'Qiao ', # 0x47
'Nao ', # 0x48
'Lin ', # 0x49
'Liu ', # 0x4a
'Qiao ', # 0x4b
'Xian ', # 0x4c
'Run ', # 0x4d
'Fan ', # 0x4e
'Zhan ', # 0x4f
'Tuo ', # 0x50
'Lao ', # 0x51
'Yun ', # 0x52
'Shun ', # 0x53
'Tui ', # 0x54
'Cheng ', # 0x55
'Tang ', # 0x56
'Meng ', # 0x57
'Ju ', # 0x58
'Cheng ', # 0x59
'Su ', # 0x5a
'Jue ', # 0x5b
'Jue ', # 0x5c
'Tan ', # 0x5d
'Hui ', # 0x5e
'Ji ', # 0x5f
'Nuo ', # 0x60
'Xiang ', # 0x61
'Tuo ', # 0x62
'Ning ', # 0x63
'Rui ', # 0x64
'Zhu ', # 0x65
'Chuang ', # 0x66
'Zeng ', # 0x67
'Fen ', # 0x68
'Qiong ', # 0x69
'Ran ', # 0x6a
'Heng ', # 0x6b
'Cen ', # 0x6c
'Gu ', # 0x6d
'Liu ', # 0x6e
'Lao ', # 0x6f
'Gao ', # 0x70
'Chu ', # 0x71
'Zusa ', # 0x72
'Nude ', # 0x73
'Ca ', # 0x74
'San ', # 0x75
'Ji ', # 0x76
'Dou ', # 0x77
'Shou ', # 0x78
'Lu ', # 0x79
'[?] ', # 0x7a
'[?] ', # 0x7b
'Yuan ', # 0x7c
'Ta ', # 0x7d
'Shu ', # 0x7e
'Jiang ', # 0x7f
'Tan ', # 0x80
'Lin ', # 0x81
'Nong ', # 0x82
'Yin ', # 0x83
'Xi ', # 0x84
'Sui ', # 0x85
'Shan ', # 0x86
'Zui ', # 0x87
'Xuan ', # 0x88
'Cheng ', # 0x89
'Gan ', # 0x8a
'Ju ', # 0x8b
'Zui ', # 0x8c
'Yi ', # 0x8d
'Qin ', # 0x8e
'Pu ', # 0x8f
'Yan ', # 0x90
'Lei ', # 0x91
'Feng ', # 0x92
'Hui ', # 0x93
'Dang ', # 0x94
'Ji ', # 0x95
'Sui ', # 0x96
'Bo ', # 0x97
'Bi ', # 0x98
'Ding ', # 0x99
'Chu ', # 0x9a
'Zhua ', # 0x9b
'Kuai ', # 0x9c
'Ji ', # 0x9d
'Jie ', # 0x9e
'Jia ', # 0x9f
'Qing ', # 0xa0
'Zhe ', # 0xa1
'Jian ', # 0xa2
'Qiang ', # 0xa3
'Dao ', # 0xa4
'Yi ', # 0xa5
'Biao ', # 0xa6
'Song ', # 0xa7
'She ', # 0xa8
'Lin ', # 0xa9
'Kunugi ', # 0xaa
'Cha ', # 0xab
'Meng ', # 0xac
'Yin ', # 0xad
'Tao ', # 0xae
'Tai ', # 0xaf
'Mian ', # 0xb0
'Qi ', # 0xb1
'Toan ', # 0xb2
'Bin ', # 0xb3
'Huo ', # 0xb4
'Ji ', # 0xb5
'Qian ', # 0xb6
'Mi ', # 0xb7
'Ning ', # 0xb8
'Yi ', # 0xb9
'Gao ', # 0xba
'Jian ', # 0xbb
'Yin ', # 0xbc
'Er ', # 0xbd
'Qing ', # 0xbe
'Yan ', # 0xbf
'Qi ', # 0xc0
'Mi ', # 0xc1
'Zhao ', # 0xc2
'Gui ', # 0xc3
'Chun ', # 0xc4
'Ji ', # 0xc5
'Kui ', # 0xc6
'Po ', # 0xc7
'Deng ', # 0xc8
'Chu ', # 0xc9
'[?] ', # 0xca
'Mian ', # 0xcb
'You ', # 0xcc
'Zhi ', # 0xcd
'Guang ', # 0xce
'Qian ', # 0xcf
'Lei ', # 0xd0
'Lei ', # 0xd1
'Sa ', # 0xd2
'Lu ', # 0xd3
'Li ', # 0xd4
'Cuan ', # 0xd5
'Lu ', # 0xd6
'Mie ', # 0xd7
'Hui ', # 0xd8
'Ou ', # 0xd9
'Lu ', # 0xda
'Jie ', # 0xdb
'Gao ', # 0xdc
'Du ', # 0xdd
'Yuan ', # 0xde
'Li ', # 0xdf
'Fei ', # 0xe0
'Zhuo ', # 0xe1
'Sou ', # 0xe2
'Lian ', # 0xe3
'Tamo ', # 0xe4
'Chu ', # 0xe5
'[?] ', # 0xe6
'Zhu ', # 0xe7
'Lu ', # 0xe8
'Yan ', # 0xe9
'Li ', # 0xea
'Zhu ', # 0xeb
'Chen ', # 0xec
'Jie ', # 0xed
'E ', # 0xee
'Su ', # 0xef
'Huai ', # 0xf0
'Nie ', # 0xf1
'Yu ', # 0xf2
'Long ', # 0xf3
'Lai ', # 0xf4
'[?] ', # 0xf5
'Xian ', # 0xf6
'Kwi ', # 0xf7
'Ju ', # 0xf8
'Xiao ', # 0xf9
'Ling ', # 0xfa
'Ying ', # 0xfb
'Jian ', # 0xfc
'Yin ', # 0xfd
'You ', # 0xfe
'Ying ', # 0xff
)
| gpl-3.0 |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/host/lib/python2.7/test/test_userlist.py | 136 | 1894 | # Check every path through every method of UserList
from UserList import UserList
from test import test_support, list_tests
class UserListTest(list_tests.CommonTest):
type2test = UserList
def test_getslice(self):
super(UserListTest, self).test_getslice()
l = [0, 1, 2, 3, 4]
u = self.type2test(l)
for i in range(-3, 6):
self.assertEqual(u[:i], l[:i])
self.assertEqual(u[i:], l[i:])
for j in xrange(-3, 6):
self.assertEqual(u[i:j], l[i:j])
def test_add_specials(self):
u = UserList("spam")
u2 = u + "eggs"
self.assertEqual(u2, list("spameggs"))
def test_radd_specials(self):
u = UserList("eggs")
u2 = "spam" + u
self.assertEqual(u2, list("spameggs"))
u2 = u.__radd__(UserList("spam"))
self.assertEqual(u2, list("spameggs"))
def test_iadd(self):
super(UserListTest, self).test_iadd()
u = [0, 1]
u += UserList([0, 1])
self.assertEqual(u, [0, 1, 0, 1])
def test_mixedcmp(self):
u = self.type2test([0, 1])
self.assertEqual(u, [0, 1])
self.assertNotEqual(u, [0])
self.assertNotEqual(u, [0, 2])
def test_mixedadd(self):
u = self.type2test([0, 1])
self.assertEqual(u + [], u)
self.assertEqual(u + [2], [0, 1, 2])
def test_getitemoverwriteiter(self):
# Verify that __getitem__ overrides *are* recognized by __iter__
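        # (sequence iteration falls back to __getitem__ with successive
        # integer indexes, so the override is what iter() ends up calling)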
class T(self.type2test):
def __getitem__(self, key):
return str(key) + '!!!'
self.assertEqual(iter(T((1,2))).next(), "0!!!")
def test_main():
with test_support.check_py3k_warnings(
(".+__(get|set|del)slice__ has been removed", DeprecationWarning)):
test_support.run_unittest(UserListTest)
if __name__ == "__main__":
test_main()
| gpl-2.0 |
ofekd/servo | tests/wpt/web-platform-tests/tools/py/testing/code/test_assertion.py | 160 | 7843 | import pytest, py
def exvalue():
return py.std.sys.exc_info()[1]
def f():
return 2
def test_assert():
try:
assert f() == 3
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def test_assert_within_finally():
excinfo = py.test.raises(ZeroDivisionError, """
try:
1/0
finally:
i = 42
""")
s = excinfo.exconly()
assert py.std.re.search("division.+by zero", s) is not None
#def g():
# A.f()
#excinfo = getexcinfo(TypeError, g)
#msg = getmsg(excinfo)
#assert msg.find("must be called with A") != -1
def test_assert_multiline_1():
try:
assert (f() ==
3)
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def test_assert_multiline_2():
try:
assert (f() == (4,
3)[-1])
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 ==')
def test_in():
try:
assert "hi" in [1, 2]
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 'hi' in")
def test_is():
try:
assert 1 is 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 is 2")
@py.test.mark.skipif("sys.version_info < (2,6)")
def test_attrib():
class Foo(object):
b = 1
i = Foo()
try:
assert i.b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
@py.test.mark.skipif("sys.version_info < (2,6)")
def test_attrib_inst():
class Foo(object):
b = 1
try:
assert Foo().b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
def test_len():
l = list(range(42))
try:
assert len(l) == 100
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 42 == 100")
assert "where 42 = len([" in s
def test_assert_keyword_arg():
def f(x=3):
return False
try:
assert f(x=5)
except AssertionError:
e = exvalue()
assert "x=5" in e.msg
# These tests should both fail, but should fail nicely...
class WeirdRepr:
def __repr__(self):
return '<WeirdRepr\nsecond line>'
def bug_test_assert_repr():
v = WeirdRepr()
try:
assert v == 1
except AssertionError:
e = exvalue()
assert e.msg.find('WeirdRepr') != -1
assert e.msg.find('second line') != -1
assert 0
def test_assert_non_string():
try:
assert 0, ['list']
except AssertionError:
e = exvalue()
assert e.msg.find("list") != -1
def test_assert_implicit_multiline():
try:
x = [1,2,3]
assert x != [1,
2, 3]
except AssertionError:
e = exvalue()
assert e.msg.find('assert [1, 2, 3] !=') != -1
def test_assert_with_brokenrepr_arg():
class BrokenRepr:
def __repr__(self): 0 / 0
e = AssertionError(BrokenRepr())
if e.msg.find("broken __repr__") == -1:
py.test.fail("broken __repr__ not handled correctly")
def test_multiple_statements_per_line():
try:
a = 1; assert a == 2
except AssertionError:
e = exvalue()
assert "assert 1 == 2" in e.msg
def test_power():
try:
assert 2**3 == 7
except AssertionError:
e = exvalue()
assert "assert (2 ** 3) == 7" in e.msg
class TestView:
def setup_class(cls):
cls.View = py.test.importorskip("py._code._assertionold").View
def test_class_dispatch(self):
### Use a custom class hierarchy with existing instances
class Picklable(self.View):
pass
class Simple(Picklable):
__view__ = object
def pickle(self):
return repr(self.__obj__)
class Seq(Picklable):
__view__ = list, tuple, dict
def pickle(self):
return ';'.join(
[Picklable(item).pickle() for item in self.__obj__])
class Dict(Seq):
__view__ = dict
def pickle(self):
return Seq.pickle(self) + '!' + Seq(self.values()).pickle()
assert Picklable(123).pickle() == '123'
assert Picklable([1,[2,3],4]).pickle() == '1;2;3;4'
assert Picklable({1:2}).pickle() == '1!2'
def test_viewtype_class_hierarchy(self):
# Use a custom class hierarchy based on attributes of existing instances
class Operation:
"Existing class that I don't want to change."
def __init__(self, opname, *args):
self.opname = opname
self.args = args
existing = [Operation('+', 4, 5),
Operation('getitem', '', 'join'),
Operation('setattr', 'x', 'y', 3),
Operation('-', 12, 1)]
class PyOp(self.View):
def __viewkey__(self):
return self.opname
def generate(self):
return '%s(%s)' % (self.opname, ', '.join(map(repr, self.args)))
class PyBinaryOp(PyOp):
__view__ = ('+', '-', '*', '/')
def generate(self):
return '%s %s %s' % (self.args[0], self.opname, self.args[1])
codelines = [PyOp(op).generate() for op in existing]
assert codelines == ["4 + 5", "getitem('', 'join')",
"setattr('x', 'y', 3)", "12 - 1"]
def test_underscore_api():
py.code._AssertionError
py.code._reinterpret_old # used by pypy
py.code._reinterpret
@py.test.mark.skipif("sys.version_info < (2,6)")
def test_assert_customizable_reprcompare(monkeypatch):
util = pytest.importorskip("_pytest.assertion.util")
monkeypatch.setattr(util, '_reprcompare', lambda *args: 'hello')
try:
assert 3 == 4
except AssertionError:
e = exvalue()
s = str(e)
assert "hello" in s
def test_assert_long_source_1():
try:
assert len == [
(None, ['somet text', 'more text']),
]
except AssertionError:
e = exvalue()
s = str(e)
assert 're-run' not in s
assert 'somet text' in s
def test_assert_long_source_2():
try:
assert(len == [
(None, ['somet text', 'more text']),
])
except AssertionError:
e = exvalue()
s = str(e)
assert 're-run' not in s
assert 'somet text' in s
def test_assert_raise_alias(testdir):
testdir.makepyfile("""
import sys
EX = AssertionError
def test_hello():
raise EX("hello"
"multi"
"line")
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*def test_hello*",
"*raise EX*",
"*1 failed*",
])
@pytest.mark.skipif("sys.version_info < (2,5)")
def test_assert_raise_subclass():
class SomeEx(AssertionError):
def __init__(self, *args):
super(SomeEx, self).__init__()
try:
raise SomeEx("hello")
except AssertionError:
s = str(exvalue())
assert 're-run' not in s
assert 'could not determine' in s
def test_assert_raises_in_nonzero_of_object_pytest_issue10():
class A(object):
def __nonzero__(self):
raise ValueError(42)
def __lt__(self, other):
return A()
def __repr__(self):
return "<MY42 object>"
def myany(x):
return True
try:
assert not(myany(A() < 0))
except AssertionError:
e = exvalue()
s = str(e)
assert "<MY42 object> < 0" in s
| mpl-2.0 |
devilry/devilry-django | devilry/devilry_admin/views/assignment/students/overview.py | 1 | 3478 | # -*- coding: utf-8 -*-
from django.utils.translation import gettext_lazy
from cradmin_legacy import crapp
from cradmin_legacy.crinstance import reverse_cradmin_url
from cradmin_legacy.viewhelpers.listbuilder.itemframe import DefaultSpacingItemFrame
from cradmin_legacy.viewhelpers.listbuilder.lists import RowList
from devilry.devilry_admin.views.assignment.students import groupview_base
from devilry.devilry_cradmin import devilry_listbuilder
from devilry.devilry_cradmin.devilry_listfilter.utils import WithResultValueRenderable
class NonAnonymousGroupItemFrame(devilry_listbuilder.common.GoForwardLinkItemFrame):
valuealias = 'group'
def get_url(self):
return reverse_cradmin_url(
instanceid='devilry_group_admin',
roleid=self.group.id,
appname='feedbackfeed'
)
def get_extra_css_classes_list(self):
return ['devilry-admin-assignment-students-overview-group-linkframe']
class StudentGroupListMatchResultRenderable(WithResultValueRenderable):
def get_object_name_singular(self, num_matches):
return gettext_lazy('student')
def get_object_name_plural(self, num_matches):
return gettext_lazy('students')
class RowListWithMatchResults(RowList):
def append_results_renderable(self):
result_info_renderable = StudentGroupListMatchResultRenderable(
value=None,
num_matches=self.num_matches,
num_total=self.num_total
)
self.renderable_list.insert(0, DefaultSpacingItemFrame(inneritem=result_info_renderable))
def __init__(self, num_matches, num_total, page):
self.num_matches = num_matches
self.num_total = num_total
self.page = page
super(RowListWithMatchResults, self).__init__()
if page == 1:
self.append_results_renderable()
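# Editor's note (not part of the original module): the match-count summary row
# is prepended only when page == 1, so paginated follow-up pages do not repeat
# the "N of M students" header above the group rows.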
class Overview(groupview_base.BaseInfoView):
filterview_name = 'filter'
template_name = 'devilry_admin/assignment/students/overview.django.html'
listbuilder_class = RowListWithMatchResults
def get_frame_renderer_class(self):
devilryrole = self.request.cradmin_instance.get_devilryrole_for_requestuser()
if self.assignment.is_fully_anonymous and devilryrole != 'departmentadmin':
return None
else:
return NonAnonymousGroupItemFrame
#
# Add support for showing results on the top of the list.
#
def get_listbuilder_list_kwargs(self):
kwargs = super(Overview, self).get_listbuilder_list_kwargs()
kwargs['num_matches'] = self.num_matches or 0
kwargs['num_total'] = self.num_total or 0
kwargs['page'] = self.request.GET.get('page', 1)
return kwargs
def get_unfiltered_queryset_for_role(self, role):
queryset = super(Overview, self).get_unfiltered_queryset_for_role(role=role)
# Set unfiltered count on self.
self.num_total = queryset.count()
return queryset
def get_queryset_for_role(self, role):
queryset = super(Overview, self).get_queryset_for_role(role=role)
# Set filtered count on self.
self.num_matches = queryset.count()
return queryset
class App(crapp.App):
appurls = [
crapp.Url(r'^$',
Overview.as_view(),
name=crapp.INDEXVIEW_NAME),
crapp.Url(r'^filter/(?P<filters_string>.+)?$',
Overview.as_view(),
name='filter'),
]
| bsd-3-clause |
teeple/pns_server | work/install/Python-2.7.4/Lib/test/test_bsddb.py | 91 | 11674 | #! /usr/bin/env python
"""Test script for the bsddb C module by Roger E. Masse
Adapted to unittest format and expanded scope by Raymond Hettinger
"""
import os, sys
import unittest
from test import test_support
# Skip test if _bsddb wasn't built.
test_support.import_module('_bsddb')
bsddb = test_support.import_module('bsddb', deprecated=True)
# Just so we know it's imported:
test_support.import_module('dbhash', deprecated=True)
class TestBSDDB(unittest.TestCase):
openflag = 'c'
def setUp(self):
self.f = self.openmethod[0](self.fname, self.openflag, cachesize=32768)
self.d = dict(q='Guido', w='van', e='Rossum', r='invented', t='Python', y='')
for k, v in self.d.iteritems():
self.f[k] = v
def tearDown(self):
self.f.sync()
self.f.close()
if self.fname is None:
return
try:
os.remove(self.fname)
except os.error:
pass
def test_getitem(self):
for k, v in self.d.iteritems():
self.assertEqual(self.f[k], v)
def test_len(self):
self.assertEqual(len(self.f), len(self.d))
def test_change(self):
self.f['r'] = 'discovered'
self.assertEqual(self.f['r'], 'discovered')
self.assertIn('r', self.f.keys())
self.assertIn('discovered', self.f.values())
def test_close_and_reopen(self):
if self.fname is None:
# if we're using an in-memory only db, we can't reopen it
# so finish here.
return
self.f.close()
self.f = self.openmethod[0](self.fname, 'w')
for k, v in self.d.iteritems():
self.assertEqual(self.f[k], v)
def assertSetEquals(self, seqn1, seqn2):
self.assertEqual(set(seqn1), set(seqn2))
def test_mapping_iteration_methods(self):
f = self.f
d = self.d
self.assertSetEquals(d, f)
self.assertSetEquals(d.keys(), f.keys())
self.assertSetEquals(d.values(), f.values())
self.assertSetEquals(d.items(), f.items())
self.assertSetEquals(d.iterkeys(), f.iterkeys())
self.assertSetEquals(d.itervalues(), f.itervalues())
self.assertSetEquals(d.iteritems(), f.iteritems())
def test_iter_while_modifying_values(self):
di = iter(self.d)
while 1:
try:
key = di.next()
self.d[key] = 'modified '+key
except StopIteration:
break
# it should behave the same as a dict. modifying values
# of existing keys should not break iteration. (adding
# or removing keys should)
loops_left = len(self.f)
fi = iter(self.f)
while 1:
try:
key = fi.next()
self.f[key] = 'modified '+key
loops_left -= 1
except StopIteration:
break
self.assertEqual(loops_left, 0)
self.test_mapping_iteration_methods()
def test_iter_abort_on_changed_size(self):
def DictIterAbort():
di = iter(self.d)
while 1:
try:
di.next()
self.d['newkey'] = 'SPAM'
except StopIteration:
break
self.assertRaises(RuntimeError, DictIterAbort)
def DbIterAbort():
fi = iter(self.f)
while 1:
try:
fi.next()
self.f['newkey'] = 'SPAM'
except StopIteration:
break
self.assertRaises(RuntimeError, DbIterAbort)
def test_iteritems_abort_on_changed_size(self):
def DictIteritemsAbort():
di = self.d.iteritems()
while 1:
try:
di.next()
self.d['newkey'] = 'SPAM'
except StopIteration:
break
self.assertRaises(RuntimeError, DictIteritemsAbort)
def DbIteritemsAbort():
fi = self.f.iteritems()
while 1:
try:
key, value = fi.next()
del self.f[key]
except StopIteration:
break
self.assertRaises(RuntimeError, DbIteritemsAbort)
def test_iteritems_while_modifying_values(self):
di = self.d.iteritems()
while 1:
try:
k, v = di.next()
self.d[k] = 'modified '+v
except StopIteration:
break
# it should behave the same as a dict. modifying values
# of existing keys should not break iteration. (adding
# or removing keys should)
loops_left = len(self.f)
fi = self.f.iteritems()
while 1:
try:
k, v = fi.next()
self.f[k] = 'modified '+v
loops_left -= 1
except StopIteration:
break
self.assertEqual(loops_left, 0)
self.test_mapping_iteration_methods()
def test_first_next_looping(self):
items = [self.f.first()]
for i in xrange(1, len(self.f)):
items.append(self.f.next())
self.assertSetEquals(items, self.d.items())
def test_previous_last_looping(self):
items = [self.f.last()]
for i in xrange(1, len(self.f)):
items.append(self.f.previous())
self.assertSetEquals(items, self.d.items())
def test_first_while_deleting(self):
# Test for bug 1725856
self.assertTrue(len(self.d) >= 2, "test requires >=2 items")
for _ in self.d:
key = self.f.first()[0]
del self.f[key]
self.assertEqual([], self.f.items(), "expected empty db after test")
def test_last_while_deleting(self):
# Test for bug 1725856's evil twin
self.assertTrue(len(self.d) >= 2, "test requires >=2 items")
for _ in self.d:
key = self.f.last()[0]
del self.f[key]
self.assertEqual([], self.f.items(), "expected empty db after test")
def test_set_location(self):
self.assertEqual(self.f.set_location('e'), ('e', self.d['e']))
def test_contains(self):
for k in self.d:
self.assertIn(k, self.f)
self.assertNotIn('not here', self.f)
def test_has_key(self):
for k in self.d:
self.assertTrue(self.f.has_key(k))
self.assertTrue(not self.f.has_key('not here'))
def test_clear(self):
self.f.clear()
self.assertEqual(len(self.f), 0)
def test__no_deadlock_first(self, debug=0):
# do this so that testers can see what function we're in, in
# verbose mode when we deadlock.
sys.stdout.flush()
# in pybsddb's _DBWithCursor this causes an internal DBCursor
# object to be created. Other test_ methods in this class could
# inadvertently cause the deadlock but an explicit test is needed.
if debug: print "A"
k,v = self.f.first()
if debug: print "B", k
self.f[k] = "deadlock. do not pass go. do not collect $200."
if debug: print "C"
# if the bsddb implementation leaves the DBCursor open during
# the database write and locking+threading support is enabled
# the cursor's read lock will deadlock the write lock request.
# test the iterator interface
if True:
if debug: print "D"
i = self.f.iteritems()
k,v = i.next()
if debug: print "E"
self.f[k] = "please don't deadlock"
if debug: print "F"
while 1:
try:
k,v = i.next()
except StopIteration:
break
if debug: print "F2"
i = iter(self.f)
if debug: print "G"
while i:
try:
if debug: print "H"
k = i.next()
if debug: print "I"
self.f[k] = "deadlocks-r-us"
if debug: print "J"
except StopIteration:
i = None
if debug: print "K"
# test the legacy cursor interface mixed with writes
self.assertIn(self.f.first()[0], self.d)
k = self.f.next()[0]
self.assertIn(k, self.d)
self.f[k] = "be gone with ye deadlocks"
self.assertTrue(self.f[k], "be gone with ye deadlocks")
def test_for_cursor_memleak(self):
# do the bsddb._DBWithCursor iterator internals leak cursors?
nc1 = len(self.f._cursor_refs)
# create iterator
i = self.f.iteritems()
nc2 = len(self.f._cursor_refs)
# use the iterator (should run to the first yield, creating the cursor)
k, v = i.next()
nc3 = len(self.f._cursor_refs)
# destroy the iterator; this should cause the weakref callback
# to remove the cursor object from self.f._cursor_refs
del i
nc4 = len(self.f._cursor_refs)
self.assertEqual(nc1, nc2)
self.assertEqual(nc1, nc4)
self.assertTrue(nc3 == nc1+1)
def test_popitem(self):
k, v = self.f.popitem()
self.assertIn(k, self.d)
self.assertIn(v, self.d.values())
self.assertNotIn(k, self.f)
self.assertEqual(len(self.d)-1, len(self.f))
def test_pop(self):
k = 'w'
v = self.f.pop(k)
self.assertEqual(v, self.d[k])
self.assertNotIn(k, self.f)
self.assertNotIn(v, self.f.values())
self.assertEqual(len(self.d)-1, len(self.f))
def test_get(self):
self.assertEqual(self.f.get('NotHere'), None)
self.assertEqual(self.f.get('NotHere', 'Default'), 'Default')
self.assertEqual(self.f.get('q', 'Default'), self.d['q'])
def test_setdefault(self):
self.assertEqual(self.f.setdefault('new', 'dog'), 'dog')
self.assertEqual(self.f.setdefault('r', 'cat'), self.d['r'])
def test_update(self):
new = dict(y='life', u='of', i='brian')
self.f.update(new)
self.d.update(new)
for k, v in self.d.iteritems():
self.assertEqual(self.f[k], v)
def test_keyordering(self):
if self.openmethod[0] is not bsddb.btopen:
return
keys = self.d.keys()
keys.sort()
self.assertEqual(self.f.first()[0], keys[0])
self.assertEqual(self.f.next()[0], keys[1])
self.assertEqual(self.f.last()[0], keys[-1])
self.assertEqual(self.f.previous()[0], keys[-2])
self.assertEqual(list(self.f), keys)
class TestBTree(TestBSDDB):
fname = test_support.TESTFN
openmethod = [bsddb.btopen]
class TestBTree_InMemory(TestBSDDB):
fname = None
openmethod = [bsddb.btopen]
class TestBTree_InMemory_Truncate(TestBSDDB):
fname = None
openflag = 'n'
openmethod = [bsddb.btopen]
class TestHashTable(TestBSDDB):
fname = test_support.TESTFN
openmethod = [bsddb.hashopen]
class TestHashTable_InMemory(TestBSDDB):
fname = None
openmethod = [bsddb.hashopen]
## # (bsddb.rnopen,'Record Numbers'), 'put' for RECNO for bsddb 1.85
## # appears broken... at least on
## # Solaris Intel - rmasse 1/97
def test_main(verbose=None):
test_support.run_unittest(
TestBTree,
TestHashTable,
TestBTree_InMemory,
TestHashTable_InMemory,
TestBTree_InMemory_Truncate,
)
if __name__ == "__main__":
test_main(verbose=True)
| gpl-2.0 |
kuiwei/edx-platform | lms/djangoapps/instructor/views/api.py | 6 | 78005 | """
Instructor Dashboard API views
JSON views which the instructor dashboard requests.
Many of these GETs may become PUTs in the future.
"""
import StringIO
import json
import logging
import re
import time
import requests
from django.conf import settings
from django_future.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_POST
from django.views.decorators.cache import cache_control
from django.core.exceptions import ValidationError
from django.core.mail.message import EmailMessage
from django.db import IntegrityError
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.utils.translation import ugettext as _
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.utils.html import strip_tags
import string # pylint: disable=W0402
import random
import urllib
from util.json_request import JsonResponse
from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from microsite_configuration import microsite
from courseware.access import has_access
from courseware.courses import get_course_with_access, get_course_by_id
from django.contrib.auth.models import User
from django_comment_client.utils import has_forum_access
from django_comment_common.models import (
Role,
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_MODERATOR,
FORUM_ROLE_COMMUNITY_TA,
)
from edxmako.shortcuts import render_to_response, render_to_string
from courseware.models import StudentModule
from shoppingcart.models import Coupon, CourseRegistrationCode, RegistrationCodeRedemption, Invoice, CourseMode
from student.models import CourseEnrollment, unique_id_for_user, anonymous_id_for_user
import instructor_task.api
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.models import ReportStore
import instructor.enrollment as enrollment
from instructor.enrollment import (
enroll_email,
send_mail_to_student,
get_email_params,
send_beta_role_email,
unenroll_email
)
from instructor.access import list_with_level, allow_access, revoke_access, update_forum_role
from instructor.offline_gradecalc import student_grades
import instructor_analytics.basic
import instructor_analytics.distributions
import instructor_analytics.csvs
import csv
from user_api.models import UserPreference
from instructor.views import INVOICE_KEY
from submissions import api as sub_api # installed from the edx-submissions repository
from bulk_email.models import CourseEmail
from .tools import (
dump_student_extensions,
dump_module_extensions,
find_unit,
get_student_from_identifier,
require_student_from_identifier,
handle_dashboard_error,
parse_datetime,
set_due_date_extension,
strip_if_string,
bulk_email_is_enabled_for_course,
add_block_ids,
)
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys import InvalidKeyError
from student.models import UserProfile, Registration
log = logging.getLogger(__name__)
def common_exceptions_400(func):
"""
Catches common exceptions and renders matching 400 errors.
(decorator without arguments)
"""
def wrapped(request, *args, **kwargs): # pylint: disable=C0111
use_json = (request.is_ajax() or
request.META.get("HTTP_ACCEPT", "").startswith("application/json"))
try:
return func(request, *args, **kwargs)
except User.DoesNotExist:
message = _("User does not exist.")
if use_json:
return JsonResponse({"error": message}, 400)
else:
return HttpResponseBadRequest(message)
except AlreadyRunningError:
message = _("Task is already running.")
if use_json:
return JsonResponse({"error": message}, 400)
else:
return HttpResponseBadRequest(message)
return wrapped
def require_query_params(*args, **kwargs):
"""
Checks for required parameters or renders a 400 error.
(decorator with arguments)
`args` is a *list of required GET parameter names.
`kwargs` is a **dict of required GET parameter names
to string explanations of the parameter
"""
required_params = []
required_params += [(arg, None) for arg in args]
required_params += [(key, kwargs[key]) for key in kwargs]
# required_params = e.g. [('action', 'enroll or unenroll'), ('emails', None)]
def decorator(func): # pylint: disable=C0111
def wrapped(*args, **kwargs): # pylint: disable=C0111
request = args[0]
error_response_data = {
'error': 'Missing required query parameter(s)',
'parameters': [],
'info': {},
}
for (param, extra) in required_params:
default = object()
if request.GET.get(param, default) == default:
error_response_data['parameters'].append(param)
error_response_data['info'][param] = extra
if len(error_response_data['parameters']) > 0:
return JsonResponse(error_response_data, status=400)
else:
return func(*args, **kwargs)
return wrapped
return decorator
def require_post_params(*args, **kwargs):
"""
Checks for required parameters or renders a 400 error.
(decorator with arguments)
Functions like 'require_query_params', but checks for
POST parameters rather than GET parameters.
"""
required_params = []
required_params += [(arg, None) for arg in args]
required_params += [(key, kwargs[key]) for key in kwargs]
# required_params = e.g. [('action', 'enroll or unenroll'), ('emails', None)]
def decorator(func): # pylint: disable=C0111
def wrapped(*args, **kwargs): # pylint: disable=C0111
request = args[0]
error_response_data = {
'error': 'Missing required POST parameter(s)',
'parameters': [],
'info': {},
}
for (param, extra) in required_params:
default = object()
if request.POST.get(param, default) == default:
error_response_data['parameters'].append(param)
error_response_data['info'][param] = extra
if len(error_response_data['parameters']) > 0:
return JsonResponse(error_response_data, status=400)
else:
return func(*args, **kwargs)
return wrapped
return decorator
def require_level(level):
"""
Decorator with argument that requires an access level of the requesting
user. If the requirement is not satisfied, returns an
HttpResponseForbidden (403).
Assumes that request is in args[0].
Assumes that course_id is in kwargs['course_id'].
`level` is in ['instructor', 'staff']
if `level` is 'staff', instructors will also be allowed, even
if they are not in the staff group.
"""
if level not in ['instructor', 'staff']:
raise ValueError("unrecognized level '{}'".format(level))
def decorator(func): # pylint: disable=C0111
def wrapped(*args, **kwargs): # pylint: disable=C0111
request = args[0]
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(kwargs['course_id']))
if has_access(request.user, level, course):
return func(*args, **kwargs)
else:
return HttpResponseForbidden()
return wrapped
return decorator
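# Editor's illustration (not part of the original module): the decorators
# defined above compose on a Django view the same way they are applied
# throughout this file; `example_view` is a hypothetical name.
#
#     @require_level('staff')
#     @require_post_params(action="enroll or unenroll")
#     @common_exceptions_400
#     def example_view(request, course_id):
#         return JsonResponse({'action': request.POST.get('action')})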
EMAIL_INDEX = 0
USERNAME_INDEX = 1
NAME_INDEX = 2
COUNTRY_INDEX = 3
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def register_and_enroll_students(request, course_id): # pylint: disable=R0915
"""
Create new account and Enroll students in this course.
Passing a csv file that contains a list of students.
Order in csv should be the following email = 0; username = 1; name = 2; country = 3.
Requires staff access.
-If the email address and username already exist and the user is enrolled in the course,
do nothing (no email is sent out).
-If the email address already exists, but the username is different,
match on the email address only and continue to enroll the user in the course using the
email address as the matching criterion. Note the change of username in a warning message
(but not a failure), and send the standard enrollment email used for manual enrollment.
-If the username already exists (but not the email), assume it is a different user and fail
to create the new account. The failure is reported in a response in the browser.
"""
if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False)):
return HttpResponseForbidden()
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
warnings = []
row_errors = []
general_errors = []
if 'students_list' in request.FILES:
students = []
try:
upload_file = request.FILES.get('students_list')
students = [row for row in csv.reader(upload_file.read().splitlines())]
except Exception: # pylint: disable=W0703
general_errors.append({
'username': '', 'email': '', 'response': _('Could not read uploaded file.')
})
finally:
upload_file.close()
generated_passwords = []
course = get_course_by_id(course_id)
row_num = 0
for student in students:
row_num = row_num + 1
# verify that we have exactly four columns in every row but allow for blank lines
if len(student) != 4:
if len(student) > 0:
general_errors.append({
'username': '',
'email': '',
'response': _('Data in row #{row_num} must have exactly four columns: email, username, full name, and country').format(row_num=row_num)
})
continue
# Iterate each student in the uploaded csv file.
email = student[EMAIL_INDEX]
username = student[USERNAME_INDEX]
name = student[NAME_INDEX]
country = student[COUNTRY_INDEX][:2]
email_params = get_email_params(course, True, secure=request.is_secure())
try:
validate_email(email) # Raises ValidationError if invalid
except ValidationError:
row_errors.append({
'username': username, 'email': email, 'response': _('Invalid email {email_address}.').format(email_address=email)})
else:
if User.objects.filter(email=email).exists():
# Email address already exists. Assume it is the correct user
# and just register the user in the course and send an enrollment email.
user = User.objects.get(email=email)
# see if it is an exact match with email and username
# if it's not an exact match then just display a warning message, but continue onwards
if not User.objects.filter(email=email, username=username).exists():
warning_message = _(
'An account with email {email} exists but the provided username {username} '
'is different. Enrolling anyway with {email}.'
).format(email=email, username=username)
warnings.append({
'username': username, 'email': email, 'response': warning_message})
log.warning('email {email} already exists'.format(email=email))
else:
log.info("user already exists with username '{username}' and email '{email}'".format(email=email, username=username))
# make sure user is enrolled in course
if not CourseEnrollment.is_enrolled(user, course_id):
CourseEnrollment.enroll(user, course_id)
log.info('user {username} enrolled in the course {course}'.format(username=username, course=course.id))
enroll_email(course_id=course_id, student_email=email, auto_enroll=True, email_students=True, email_params=email_params)
else:
# This email does not yet exist, so we need to create a new account
# If username already exists in the database, then create_and_enroll_user
# will raise an IntegrityError exception.
password = generate_unique_password(generated_passwords)
try:
create_and_enroll_user(email, username, name, country, password, course_id)
except IntegrityError:
row_errors.append({
'username': username, 'email': email, 'response': _('Username {user} already exists.').format(user=username)})
except Exception as ex:
log.exception(type(ex).__name__)
row_errors.append({
'username': username, 'email': email, 'response': _(type(ex).__name__)})
else:
# It's a new user, an email will be sent to each newly created user.
email_params['message'] = 'account_creation_and_enrollment'
email_params['email_address'] = email
email_params['password'] = password
email_params['platform_name'] = microsite.get_value('platform_name', settings.PLATFORM_NAME)
send_mail_to_student(email, email_params)
log.info('email sent to newly created user at {email}'.format(email=email))
else:
general_errors.append({
'username': '', 'email': '', 'response': _('File is not attached.')
})
results = {
'row_errors': row_errors,
'general_errors': general_errors,
'warnings': warnings
}
return JsonResponse(results)
def generate_random_string(length):
"""
Create a string of random characters of specified length
"""
chars = [
char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
if char not in 'aAeEiIoOuU1l'
]
return string.join((random.choice(chars) for __ in range(length)), '')
def generate_unique_password(generated_passwords, password_length=12):
"""
generate a unique password for each student.
"""
password = generate_random_string(password_length)
while password in generated_passwords:
password = generate_random_string(password_length)
generated_passwords.append(password)
return password
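# Editor's illustration: generating a small batch of unique passwords with the
# helpers above. generate_random_string excludes 'aAeEiIoOuU1l', which avoids
# visually ambiguous characters (1/l) and accidental words.
#
#     passwords = []
#     for __ in range(3):
#         passwords.append(generate_unique_password(passwords))
#     assert len(set(passwords)) == 3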
def create_and_enroll_user(email, username, name, country, password, course_id):
""" Creates a user and enroll him/her in the course"""
user = User.objects.create_user(username, email, password)
reg = Registration()
reg.register(user)
profile = UserProfile(user=user)
profile.name = name
profile.country = country
profile.save()
# try to enroll the user in this course
CourseEnrollment.enroll(user, course_id)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(action="enroll or unenroll", identifiers="stringified list of emails and/or usernames")
def students_update_enrollment(request, course_id):
"""
Enroll or unenroll students by email.
Requires staff access.
Query Parameters:
- action in ['enroll', 'unenroll']
- identifiers is string containing a list of emails and/or usernames separated by anything split_input_list can handle.
- auto_enroll is a boolean (defaults to false)
If auto_enroll is false, students will be allowed to enroll.
If auto_enroll is true, students will be enrolled as soon as they register.
- email_students is a boolean (defaults to false)
If email_students is true, students will be sent email notification
If email_students is false, students will not be sent email notification
Returns an analog to this JSON structure: {
"action": "enroll",
"auto_enroll": false,
"results": [
{
"email": "[email protected]",
"before": {
"enrollment": false,
"auto_enroll": false,
"user": true,
"allowed": false
},
"after": {
"enrollment": true,
"auto_enroll": false,
"user": true,
"allowed": false
}
}
]
}
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
action = request.POST.get('action')
identifiers_raw = request.POST.get('identifiers')
identifiers = _split_input_list(identifiers_raw)
auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
email_students = request.POST.get('email_students') in ['true', 'True', True]
email_params = {}
if email_students:
course = get_course_by_id(course_id)
email_params = get_email_params(course, auto_enroll, secure=request.is_secure())
results = []
for identifier in identifiers:
# First try to get a user object from the identifier
user = None
email = None
try:
user = get_student_from_identifier(identifier)
except User.DoesNotExist:
email = identifier
else:
email = user.email
try:
# Use django.core.validators.validate_email to check email address
# validity (obviously, cannot check if email actually /exists/,
# simply that it is plausibly valid)
validate_email(email) # Raises ValidationError if invalid
if action == 'enroll':
before, after = enroll_email(course_id, email, auto_enroll, email_students, email_params)
elif action == 'unenroll':
before, after = unenroll_email(course_id, email, email_students, email_params)
else:
return HttpResponseBadRequest(strip_tags(
"Unrecognized action '{}'".format(action)
))
except ValidationError:
# Flag this email as an error if invalid, but continue checking
# the remaining in the list
results.append({
'identifier': identifier,
'invalidIdentifier': True,
})
except Exception as exc: # pylint: disable=W0703
# catch and log any exceptions
# so that one error doesn't cause a 500.
log.exception("Error while #{}ing student")
log.exception(exc)
results.append({
'identifier': identifier,
'error': True,
})
else:
results.append({
'identifier': identifier,
'before': before.to_dict(),
'after': after.to_dict(),
})
response_payload = {
'action': action,
'results': results,
'auto_enroll': auto_enroll,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_post_params(
identifiers="stringified list of emails and/or usernames",
action="add or remove",
)
def bulk_beta_modify_access(request, course_id):
"""
Enroll or unenroll users in beta testing program.
Query parameters:
- identifiers is string containing a list of emails and/or usernames separated by
anything split_input_list can handle.
- action is one of ['add', 'remove']
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
action = request.POST.get('action')
identifiers_raw = request.POST.get('identifiers')
identifiers = _split_input_list(identifiers_raw)
email_students = request.POST.get('email_students') in ['true', 'True', True]
auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]
results = []
rolename = 'beta'
course = get_course_by_id(course_id)
email_params = {}
if email_students:
secure = request.is_secure()
email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure)
for identifier in identifiers:
try:
error = False
user_does_not_exist = False
user = get_student_from_identifier(identifier)
if action == 'add':
allow_access(course, user, rolename)
elif action == 'remove':
revoke_access(course, user, rolename)
else:
return HttpResponseBadRequest(strip_tags(
"Unrecognized action '{}'".format(action)
))
except User.DoesNotExist:
error = True
user_does_not_exist = True
# catch and log any unexpected exceptions
# so that one error doesn't cause a 500.
except Exception as exc: # pylint: disable=broad-except
log.exception("Error while #{}ing student")
log.exception(exc)
error = True
else:
# If no exception thrown, see if we should send an email
if email_students:
send_beta_role_email(action, user, email_params)
# See if we should autoenroll the student
if auto_enroll:
# Check if student is already enrolled
if not CourseEnrollment.is_enrolled(user, course_id):
CourseEnrollment.enroll(user, course_id)
finally:
# Tabulate the action result of this email address
results.append({
'identifier': identifier,
'error': error,
'userDoesNotExist': user_does_not_exist
})
response_payload = {
'action': action,
'results': results,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@common_exceptions_400
@require_query_params(
unique_student_identifier="email or username of user to change access",
rolename="'instructor', 'staff', or 'beta'",
action="'allow' or 'revoke'"
)
def modify_access(request, course_id):
"""
Modify staff/instructor access of other user.
Requires instructor access.
NOTE: instructors cannot remove their own instructor access.
Query parameters:
unique_student_identifier is the target user's username or email
rolename is one of ['instructor', 'staff', 'beta']
action is one of ['allow', 'revoke']
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'instructor', course_id, depth=None
)
try:
user = get_student_from_identifier(request.GET.get('unique_student_identifier'))
except User.DoesNotExist:
response_payload = {
'unique_student_identifier': request.GET.get('unique_student_identifier'),
'userDoesNotExist': True,
}
return JsonResponse(response_payload)
# Check that user is active, because add_users
# in common/djangoapps/student/roles.py fails
# silently when we try to add an inactive user.
if not user.is_active:
response_payload = {
'unique_student_identifier': user.username,
'inactiveUser': True,
}
return JsonResponse(response_payload)
rolename = request.GET.get('rolename')
action = request.GET.get('action')
if rolename not in ['instructor', 'staff', 'beta']:
return HttpResponseBadRequest(strip_tags(
"unknown rolename '{}'".format(rolename)
))
# disallow instructors from removing their own instructor access.
if rolename == 'instructor' and user == request.user and action != 'allow':
response_payload = {
'unique_student_identifier': user.username,
'rolename': rolename,
'action': action,
'removingSelfAsInstructor': True,
}
return JsonResponse(response_payload)
if action == 'allow':
allow_access(course, user, rolename)
elif action == 'revoke':
revoke_access(course, user, rolename)
else:
return HttpResponseBadRequest(strip_tags(
"unrecognized action '{}'".format(action)
))
response_payload = {
'unique_student_identifier': user.username,
'rolename': rolename,
'action': action,
'success': 'yes',
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(rolename="'instructor', 'staff', or 'beta'")
def list_course_role_members(request, course_id):
"""
List instructors and staff.
Requires instructor access.
rolename is one of ['instructor', 'staff', 'beta']
Returns JSON of the form {
"course_id": "some/course/id",
"staff": [
{
"username": "staff1",
"email": "[email protected]",
"first_name": "Joe",
"last_name": "Shmoe",
}
]
}
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'instructor', course_id, depth=None
)
rolename = request.GET.get('rolename')
if rolename not in ['instructor', 'staff', 'beta']:
return HttpResponseBadRequest()
def extract_user_info(user):
""" convert user into dicts for json view """
return {
'username': user.username,
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
}
response_payload = {
'course_id': course_id.to_deprecated_string(),
rolename: map(extract_user_info, list_with_level(
course, rolename
)),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_grading_config(request, course_id):
"""
Respond with json which contains a html formatted grade summary.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
grading_config_summary = instructor_analytics.basic.dump_grading_context(course)
response_payload = {
'course_id': course_id.to_deprecated_string(),
'grading_config_summary': grading_config_summary,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_records(request, course_id, csv=False): # pylint: disable=W0613, W0621
"""
Return a summary of all sale records for a particular course.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
query_features = [
'company_name', 'company_contact_name', 'company_contact_email', 'total_codes', 'total_used_codes',
'total_amount', 'created_at', 'customer_reference_number', 'recipient_name', 'recipient_email', 'created_by',
'internal_reference', 'invoice_number', 'codes', 'course_id'
]
sale_data = instructor_analytics.basic.sale_record_features(course_id, query_features)
if not csv:
for item in sale_data:
item['created_by'] = item['created_by'].username
response_payload = {
'course_id': course_id.to_deprecated_string(),
'sale': sale_data,
'queried_features': query_features
}
return JsonResponse(response_payload)
else:
header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, query_features)
return instructor_analytics.csvs.create_csv_response("e-commerce_sale_invoice_records.csv", header, datarows)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_sale_order_records(request, course_id): # pylint: disable=W0613, W0621
"""
Return a summary of all sale order records for a particular course.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
query_features = [
('id', 'Order Id'),
('company_name', 'Company Name'),
('company_contact_name', 'Company Contact Name'),
('company_contact_email', 'Company Contact Email'),
('total_amount', 'Total Amount'),
('total_codes', 'Total Codes'),
('total_used_codes', 'Total Used Codes'),
('logged_in_username', 'Login Username'),
('logged_in_email', 'Login User Email'),
('purchase_time', 'Date of Sale'),
('customer_reference_number', 'Customer Reference Number'),
('recipient_name', 'Recipient Name'),
('recipient_email', 'Recipient Email'),
('bill_to_street1', 'Street 1'),
('bill_to_street2', 'Street 2'),
('bill_to_city', 'City'),
('bill_to_state', 'State'),
('bill_to_postalcode', 'Postal Code'),
('bill_to_country', 'Country'),
('order_type', 'Order Type'),
('codes', 'Registration Codes'),
('course_id', 'Course Id')
]
db_columns = [x[0] for x in query_features]
csv_columns = [x[1] for x in query_features]
sale_data = instructor_analytics.basic.sale_order_record_features(course_id, db_columns)
header, datarows = instructor_analytics.csvs.format_dictlist(sale_data, db_columns) # pylint: disable=W0612
return instructor_analytics.csvs.create_csv_response("e-commerce_sale_order_records.csv", csv_columns, datarows)
@require_level('staff')
@require_POST
def sale_validation(request, course_id):
"""
This method either invalidates or re-validates the sale against the invoice number, depending upon the event type.
"""
try:
invoice_number = request.POST["invoice_number"]
except KeyError:
return HttpResponseBadRequest("Missing required invoice_number parameter")
try:
invoice_number = int(invoice_number)
except ValueError:
return HttpResponseBadRequest(
"invoice_number must be an integer, {value} provided".format(
value=invoice_number
)
)
try:
event_type = request.POST["event_type"]
except KeyError:
return HttpResponseBadRequest("Missing required event_type parameter")
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
obj_invoice = Invoice.objects.select_related('is_valid').get(id=invoice_number, course_id=course_id)
except Invoice.DoesNotExist:
return HttpResponseNotFound(_("Invoice number '{0}' does not exist.").format(invoice_number))
if event_type == "invalidate":
return invalidate_invoice(obj_invoice)
else:
return re_validate_invoice(obj_invoice)
def invalidate_invoice(obj_invoice):
"""
This method invalidates the sale against the invoice number.
"""
if not obj_invoice.is_valid:
return HttpResponseBadRequest(_("The sale associated with this invoice has already been invalidated."))
obj_invoice.is_valid = False
obj_invoice.save()
message = _('Invoice number {0} has been invalidated.').format(obj_invoice.id)
return JsonResponse({'message': message})
def re_validate_invoice(obj_invoice):
"""
This method re-validates the sale against the invoice number.
"""
if obj_invoice.is_valid:
return HttpResponseBadRequest(_("This invoice is already active."))
obj_invoice.is_valid = True
obj_invoice.save()
message = _('The registration codes for invoice {0} have been re-activated.').format(obj_invoice.id)
return JsonResponse({'message': message})
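# Editor's sketch of the request sale_validation expects, per its
# request.POST lookups above; any event_type other than "invalidate"
# re-validates the invoice.
#
#     # POST body:
#     #   invoice_number=42&event_type=invalidate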
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_purchase_transaction(request, course_id, csv=False): # pylint: disable=W0613, W0621
"""
Return a summary of all purchase transactions for a particular course.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
query_features = [
'id', 'username', 'email', 'course_id', 'list_price', 'coupon_code',
'unit_cost', 'purchase_time', 'orderitem_id',
'order_id',
]
student_data = instructor_analytics.basic.purchase_transactions(course_id, query_features)
if not csv:
response_payload = {
'course_id': course_id.to_deprecated_string(),
'students': student_data,
'queried_features': query_features
}
return JsonResponse(response_payload)
else:
header, datarows = instructor_analytics.csvs.format_dictlist(student_data, query_features)
return instructor_analytics.csvs.create_csv_response("e-commerce_purchase_transactions.csv", header, datarows)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_students_features(request, course_id, csv=False): # pylint: disable=W0613, W0621
"""
Respond with json which contains a summary of all enrolled students' profile information.
Responds with JSON
{"students": [{-student-info-}, ...]}
TODO: accept requests for different attribute sets.
"""
course_key = CourseKey.from_string(course_id)
course = get_course_by_id(course_key)
available_features = instructor_analytics.basic.AVAILABLE_FEATURES
# Allow microsites to define additional columns
query_features = microsite.get_value('student_profile_download_fields')
if not query_features:
query_features = [
'id', 'username', 'name', 'email', 'language', 'location',
'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
'goals'
]
# Provide human-friendly and translatable names for these features. These names
# will be displayed in the table generated in data_download.coffee. It is not (yet)
# used as the header row in the CSV, but could be in the future.
query_features_names = {
'id': _('User ID'),
'username': _('Username'),
'name': _('Name'),
'email': _('Email'),
'language': _('Language'),
'location': _('Location'),
'year_of_birth': _('Birth Year'),
'gender': _('Gender'),
'level_of_education': _('Level of Education'),
'mailing_address': _('Mailing Address'),
'goals': _('Goals'),
}
if course.is_cohorted:
# Translators: 'Cohort' refers to a group of students within a course.
query_features.append('cohort')
query_features_names['cohort'] = _('Cohort')
if not csv:
student_data = instructor_analytics.basic.enrolled_students_features(course_key, query_features)
response_payload = {
'course_id': unicode(course_key),
'students': student_data,
'students_count': len(student_data),
'queried_features': query_features,
'feature_names': query_features_names,
'available_features': available_features,
}
return JsonResponse(response_payload)
else:
try:
instructor_task.api.submit_calculate_students_features_csv(request, course_key, query_features)
success_status = _("Your enrolled student profile report is being generated! You can view the status of the generation task in the 'Pending Instructor Tasks' section.")
return JsonResponse({"status": success_status})
except AlreadyRunningError:
already_running_status = _("An enrolled student profile report generation task is already in progress. Check the 'Pending Instructor Tasks' table for the status of the task. When completed, the report will be available for download in the table below.")
return JsonResponse({"status": already_running_status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_coupon_codes(request, course_id): # pylint: disable=W0613
"""
Respond with csv which contains a summary of all Active Coupons.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
active_coupons = Coupon.objects.filter(course_id=course_id, is_active=True)
query_features = [
'course_id', 'percentage_discount', 'code_redeemed_count', 'description'
]
coupons_list = instructor_analytics.basic.coupon_codes_features(query_features, active_coupons)
header, data_rows = instructor_analytics.csvs.format_dictlist(coupons_list, query_features)
return instructor_analytics.csvs.create_csv_response('Coupons.csv', header, data_rows)
def save_registration_code(user, course_id, invoice=None, order=None):
"""
Recursive function that generates a new code each attempt and saves it in the
Course Registration table once the validation checks pass.
"""
code = random_code_generator()
# check if the generated code is in the Coupon Table
matching_coupons = Coupon.objects.filter(code=code, is_active=True)
if matching_coupons:
return save_registration_code(user, course_id, invoice, order)
course_registration = CourseRegistrationCode(
code=code, course_id=course_id.to_deprecated_string(), created_by=user, invoice=invoice, order=order
)
try:
course_registration.save()
return course_registration
except IntegrityError:
return save_registration_code(user, course_id, invoice, order)
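# Design note (editor's comment): save_registration_code retries on a code
# collision by recursing; the database's unique constraint (IntegrityError)
# is the final arbiter, which avoids a check-then-insert race. An equivalent
# iterative sketch under the same assumptions:
#
#     while True:
#         code = random_code_generator()
#         if Coupon.objects.filter(code=code, is_active=True).exists():
#             continue
#         try:
#             return CourseRegistrationCode.objects.create(
#                 code=code, course_id=course_id.to_deprecated_string(),
#                 created_by=user, invoice=invoice, order=order)
#         except IntegrityError:
#             continue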
def registration_codes_csv(file_name, codes_list, csv_type=None):
"""
Respond with the csv headers and data rows
given a dict of codes list
:param file_name: filename for the generated CSV response
:param codes_list: iterable of CourseRegistrationCode objects to export
:param csv_type: kind of export ('download' or 'spent'), or None
"""
# csv headers
query_features = [
'code', 'course_id', 'company_name', 'created_by',
'redeemed_by', 'invoice_id', 'purchaser', 'customer_reference_number', 'internal_reference'
]
registration_codes = instructor_analytics.basic.course_registration_features(query_features, codes_list, csv_type)
header, data_rows = instructor_analytics.csvs.format_dictlist(registration_codes, query_features)
return instructor_analytics.csvs.create_csv_response(file_name, header, data_rows)
def random_code_generator():
"""
Generate a random alphanumeric code of the length defined by the
REGISTRATION_CODE_LENGTH setting (default 8).
"""
code_length = getattr(settings, 'REGISTRATION_CODE_LENGTH', 8)
return generate_random_string(code_length)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def get_registration_codes(request, course_id): # pylint: disable=W0613
"""
Respond with csv which contains a summary of all Registration Codes.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
# filter all the course registration codes
registration_codes = CourseRegistrationCode.objects.filter(course_id=course_id).order_by('invoice__company_name')
company_name = request.POST['download_company_name']
if company_name:
registration_codes = registration_codes.filter(invoice__company_name=company_name)
csv_type = 'download'
return registration_codes_csv("Registration_Codes.csv", registration_codes, csv_type)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def generate_registration_codes(request, course_id):
"""
Respond with csv which contains a summary of all Generated Codes.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
invoice_copy = False
# convert the course registration code number into an integer
try:
course_code_number = int(request.POST['total_registration_codes'])
except ValueError:
course_code_number = int(float(request.POST['total_registration_codes']))
company_name = request.POST['company_name']
company_contact_name = request.POST['company_contact_name']
company_contact_email = request.POST['company_contact_email']
sale_price = request.POST['sale_price']
recipient_name = request.POST['recipient_name']
recipient_email = request.POST['recipient_email']
address_line_1 = request.POST['address_line_1']
address_line_2 = request.POST['address_line_2']
address_line_3 = request.POST['address_line_3']
city = request.POST['city']
state = request.POST['state']
zip_code = request.POST['zip']
country = request.POST['country']
internal_reference = request.POST['internal_reference']
customer_reference_number = request.POST['customer_reference_number']
recipient_list = [recipient_email]
if request.POST.get('invoice', False):
recipient_list.append(request.user.email)
invoice_copy = True
UserPreference.set_preference(request.user, INVOICE_KEY, invoice_copy)
sale_invoice = Invoice.objects.create(
total_amount=sale_price, company_name=company_name, company_contact_email=company_contact_email,
company_contact_name=company_contact_name, course_id=course_id, recipient_name=recipient_name,
recipient_email=recipient_email, address_line_1=address_line_1, address_line_2=address_line_2,
address_line_3=address_line_3, city=city, state=state, zip=zip_code, country=country,
internal_reference=internal_reference, customer_reference_number=customer_reference_number
)
registration_codes = []
for _ in range(course_code_number): # pylint: disable=W0621
generated_registration_code = save_registration_code(request.user, course_id, sale_invoice, order=None)
registration_codes.append(generated_registration_code)
site_name = microsite.get_value('SITE_NAME', 'localhost')
course = get_course_by_id(course_id, depth=None)
course_honor_mode = CourseMode.mode_for_course(course_id, 'honor')
course_price = course_honor_mode.min_price
quantity = course_code_number
discount = (float(quantity * course_price) - float(sale_price))
course_url = '{base_url}{course_about}'.format(
base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
course_about=reverse('about_course', kwargs={'course_id': course_id.to_deprecated_string()})
)
dashboard_url = '{base_url}{dashboard}'.format(
base_url=microsite.get_value('SITE_NAME', settings.SITE_NAME),
dashboard=reverse('dashboard')
)
from_address = microsite.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
context = {
'invoice': sale_invoice,
'site_name': site_name,
'course': course,
'course_price': course_price,
'sub_total': course_price * quantity,
'discount': discount,
'sale_price': sale_price,
'quantity': quantity,
'registration_codes': registration_codes,
'course_url': course_url,
'platform_name': microsite.get_value('platform_name', settings.PLATFORM_NAME),
'dashboard_url': dashboard_url,
'contact_email': from_address,
'corp_address': microsite.get_value('invoice_corp_address', settings.INVOICE_CORP_ADDRESS),
'payment_instructions': microsite.get_value('invoice_payment_instructions', settings.INVOICE_PAYMENT_INSTRUCTIONS),
'date': time.strftime("%m/%d/%Y")
}
# composes registration codes invoice email
subject = u'Confirmation and Invoice for {course_name}'.format(course_name=course.display_name)
message = render_to_string('emails/registration_codes_sale_email.txt', context)
invoice_attachment = render_to_string('emails/registration_codes_sale_invoice_attachment.txt', context)
#send_mail(subject, message, from_address, recipient_list, fail_silently=False)
csv_file = StringIO.StringIO()
csv_writer = csv.writer(csv_file)
for registration_code in registration_codes:
csv_writer.writerow([registration_code.code])
# send a unique email for each recipient, don't put all email addresses in a single email
for recipient in recipient_list:
email = EmailMessage()
email.subject = subject
email.body = message
email.from_email = from_address
email.to = [recipient]
email.attach(u'RegistrationCodes.csv', csv_file.getvalue(), 'text/csv')
email.attach(u'Invoice.txt', invoice_attachment, 'text/plain')
email.send()
return registration_codes_csv("Registration_Codes.csv", registration_codes)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def active_registration_codes(request, course_id): # pylint: disable=W0613
"""
Respond with csv which contains a summary of all Active Registration Codes.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
# find all the registration codes in this course
registration_codes_list = CourseRegistrationCode.objects.filter(course_id=course_id).order_by('invoice__company_name')
company_name = request.POST['active_company_name']
if company_name:
registration_codes_list = registration_codes_list.filter(invoice__company_name=company_name)
# find the redeemed registration codes if any exist in the db
code_redemption_set = RegistrationCodeRedemption.objects.select_related('registration_code').filter(registration_code__course_id=course_id)
if code_redemption_set.exists():
redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
# exclude the redeemed registration codes from the registration codes list and you will get
# all the registration codes that are active
registration_codes_list = registration_codes_list.exclude(code__in=redeemed_registration_codes)
return registration_codes_csv("Active_Registration_Codes.csv", registration_codes_list)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_POST
def spent_registration_codes(request, course_id): # pylint: disable=W0613
"""
Respond with csv which contains a summary of all Spent(used) Registration Codes.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
# find the redeemed registration codes if any exist in the db
code_redemption_set = RegistrationCodeRedemption.objects.select_related('registration_code').filter(
registration_code__course_id=course_id
)
spent_codes_list = []
if code_redemption_set.exists():
redeemed_registration_codes = [code.registration_code.code for code in code_redemption_set]
# filter the Registration Codes by course id and the redeemed codes and
# you will get a list of all the spent(Redeemed) Registration Codes
spent_codes_list = CourseRegistrationCode.objects.filter(
course_id=course_id, code__in=redeemed_registration_codes
).order_by('invoice__company_name')
company_name = request.POST['spent_company_name']
if company_name:
spent_codes_list = spent_codes_list.filter(invoice__company_name=company_name) # pylint: disable=E1103
csv_type = 'spent'
return registration_codes_csv("Spent_Registration_Codes.csv", spent_codes_list, csv_type)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_anon_ids(request, course_id): # pylint: disable=W0613
"""
Respond with 2-column CSV output of user-id, anonymized-user-id
"""
# TODO: the User.objects query and CSV generation here could be
# centralized into instructor_analytics. Currently instructor_analytics
# has similar functionality but not quite what's needed.
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
def csv_response(filename, header, rows):
"""Returns a CSV http response for the given header and rows (excel/utf-8)."""
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename={0}'.format(unicode(filename).encode('utf-8'))
writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
# In practice, there should not be non-ascii data in this query,
# but trying to do the right thing anyway.
encoded = [unicode(s).encode('utf-8') for s in header]
writer.writerow(encoded)
for row in rows:
encoded = [unicode(s).encode('utf-8') for s in row]
writer.writerow(encoded)
return response
students = User.objects.filter(
courseenrollment__course_id=course_id,
).order_by('id')
header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
rows = [[s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, course_id, save=False)] for s in students]
return csv_response(course_id.to_deprecated_string().replace('/', '-') + '-anon-ids.csv', header, rows)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def get_distribution(request, course_id):
"""
Respond with json of the distribution of students over selected features which have choices.
Ask for a feature through the `feature` query parameter.
If no `feature` is supplied, will return response with an
empty response['feature_results'] object.
    A list of available features will be returned in response['available_features'].
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
feature = request.GET.get('feature')
# alternate notations of None
if feature in (None, 'null', ''):
feature = None
else:
feature = str(feature)
available_features = instructor_analytics.distributions.AVAILABLE_PROFILE_FEATURES
# allow None so that requests for no feature can list available features
if not feature in available_features + (None,):
return HttpResponseBadRequest(strip_tags(
"feature '{}' not available.".format(feature)
))
response_payload = {
'course_id': course_id.to_deprecated_string(),
'queried_feature': feature,
'available_features': available_features,
'feature_display_names': instructor_analytics.distributions.DISPLAY_NAMES,
}
p_dist = None
if not feature is None:
p_dist = instructor_analytics.distributions.profile_distribution(course_id, feature)
response_payload['feature_results'] = {
'feature': p_dist.feature,
'feature_display_name': p_dist.feature_display_name,
'data': p_dist.data,
'type': p_dist.type,
}
if p_dist.type == 'EASY_CHOICE':
response_payload['feature_results']['choices_display_names'] = p_dist.choices_display_names
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@common_exceptions_400
@require_level('staff')
@require_query_params(
unique_student_identifier="email or username of student for whom to get progress url"
)
def get_student_progress_url(request, course_id):
"""
Get the progress url of a student.
Limited to staff access.
    Takes query parameter unique_student_identifier and, if the student exists,
returns e.g. {
'progress_url': '/../...'
}
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
user = get_student_from_identifier(request.GET.get('unique_student_identifier'))
progress_url = reverse('student_progress', kwargs={'course_id': course_id.to_deprecated_string(), 'student_id': user.id})
response_payload = {
'course_id': course_id.to_deprecated_string(),
'progress_url': progress_url,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
problem_to_reset="problem urlname to reset"
)
@common_exceptions_400
def reset_student_attempts(request, course_id):
"""
    Resets a student's attempts counter or starts a task to reset all students'
    attempts counters. Optionally deletes student state for a problem. Limited
    to staff access. Some sub-methods limited to instructor access.
    Takes some of the following query parameters
        - problem_to_reset is a urlname of a problem
        - unique_student_identifier is an email or username
        - all_students is a boolean
            requires instructor access
            mutually exclusive with delete_module
- delete_module is a boolean
requires instructor access
mutually exclusive with all_students
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(
request.user, 'staff', course_id, depth=None
)
problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
student_identifier = request.GET.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = request.GET.get('all_students', False) in ['true', 'True', True]
delete_module = request.GET.get('delete_module', False) in ['true', 'True', True]
# parameter combinations
if all_students and student:
return HttpResponseBadRequest(
"all_students and unique_student_identifier are mutually exclusive."
)
if all_students and delete_module:
return HttpResponseBadRequest(
"all_students and delete_module are mutually exclusive."
)
# instructor authorization
if all_students or delete_module:
if not has_access(request.user, 'instructor', course):
return HttpResponseForbidden("Requires instructor access.")
try:
module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
except InvalidKeyError:
return HttpResponseBadRequest()
response_payload = {}
response_payload['problem_to_reset'] = problem_to_reset
if student:
try:
enrollment.reset_student_attempts(course_id, student, module_state_key, delete_module=delete_module)
except StudentModule.DoesNotExist:
return HttpResponseBadRequest(_("Module does not exist."))
except sub_api.SubmissionError:
# Trust the submissions API to log the error
error_msg = _("An error occurred while deleting the score.")
return HttpResponse(error_msg, status=500)
response_payload['student'] = student_identifier
elif all_students:
instructor_task.api.submit_reset_problem_attempts_for_all_students(request, module_state_key)
response_payload['task'] = 'created'
response_payload['student'] = 'All Students'
else:
return HttpResponseBadRequest()
return JsonResponse(response_payload)
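# Illustrative only: hypothetical querystrings showing the parameter
# combinations accepted by reset_student_attempts above (the parameter names
# match the view; the course/problem/user values are placeholders):
#
#   ?problem_to_reset=some_problem&unique_student_identifier=jane    -> reset one student
#   ?problem_to_reset=some_problem&all_students=true                 -> task for all students
#   ?problem_to_reset=some_problem&unique_student_identifier=jane&delete_module=true
#                                                                    -> also delete state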
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('instructor')
@require_query_params(problem_to_reset="problem urlname to reset")
@common_exceptions_400
def rescore_problem(request, course_id):
"""
    Starts a background process to rescore a problem for one student or for
    all students. Limited to instructor access.
    Takes the following query parameters
- problem_to_reset is a urlname of a problem
- unique_student_identifier is an email or username
- all_students is a boolean
all_students and unique_student_identifier cannot both be present.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
problem_to_reset = strip_if_string(request.GET.get('problem_to_reset'))
student_identifier = request.GET.get('unique_student_identifier', None)
student = None
if student_identifier is not None:
student = get_student_from_identifier(student_identifier)
all_students = request.GET.get('all_students') in ['true', 'True', True]
if not (problem_to_reset and (all_students or student)):
return HttpResponseBadRequest("Missing query parameters.")
if all_students and student:
return HttpResponseBadRequest(
"Cannot rescore with all_students and unique_student_identifier."
)
try:
module_state_key = course_id.make_usage_key_from_deprecated_string(problem_to_reset)
except InvalidKeyError:
return HttpResponseBadRequest("Unable to parse problem id")
response_payload = {}
response_payload['problem_to_reset'] = problem_to_reset
if student:
response_payload['student'] = student_identifier
instructor_task.api.submit_rescore_problem_for_student(request, module_state_key, student)
response_payload['task'] = 'created'
elif all_students:
instructor_task.api.submit_rescore_problem_for_all_students(request, module_state_key)
response_payload['task'] = 'created'
else:
return HttpResponseBadRequest()
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_background_email_tasks(request, course_id): # pylint: disable=unused-argument
"""
List background email tasks.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
task_type = 'bulk_course_email'
# Specifying for the history of a single task type
tasks = instructor_task.api.get_instructor_task_history(course_id, task_type=task_type)
response_payload = {
'tasks': map(extract_task_features, tasks),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_email_content(request, course_id): # pylint: disable=unused-argument
"""
List the content of bulk emails sent
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
task_type = 'bulk_course_email'
# First get tasks list of bulk emails sent
emails = instructor_task.api.get_instructor_task_history(course_id, task_type=task_type)
response_payload = {
'emails': map(extract_email_features, emails),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_instructor_tasks(request, course_id):
"""
List instructor tasks.
    Takes optional query parameters.
- With no arguments, lists running tasks.
- `problem_location_str` lists task history for problem
- `problem_location_str` and `unique_student_identifier` lists task
history for problem AND student (intersection)
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
problem_location_str = strip_if_string(request.GET.get('problem_location_str', False))
student = request.GET.get('unique_student_identifier', None)
if student is not None:
student = get_student_from_identifier(student)
if student and not problem_location_str:
return HttpResponseBadRequest(
"unique_student_identifier must accompany problem_location_str"
)
if problem_location_str:
try:
module_state_key = course_id.make_usage_key_from_deprecated_string(problem_location_str)
except InvalidKeyError:
return HttpResponseBadRequest()
if student:
# Specifying for a single student's history on this problem
tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key, student)
else:
# Specifying for single problem's history
tasks = instructor_task.api.get_instructor_task_history(course_id, module_state_key)
else:
# If no problem or student, just get currently running tasks
tasks = instructor_task.api.get_running_instructor_tasks(course_id)
response_payload = {
'tasks': map(extract_task_features, tasks),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_report_downloads(_request, course_id):
"""
List grade CSV files that are available for download for this course.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
report_store = ReportStore.from_config()
response_payload = {
'downloads': [
dict(name=name, url=url, link='<a href="{}">{}</a>'.format(url, name))
for name, url in report_store.links_for(course_id)
]
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def calculate_grades_csv(request, course_id):
"""
AlreadyRunningError is raised if the course's grades are already being updated.
"""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
try:
instructor_task.api.submit_calculate_grades_csv(request, course_key)
success_status = _("Your grade report is being generated! You can view the status of the generation task in the 'Pending Instructor Tasks' section.")
return JsonResponse({"status": success_status})
except AlreadyRunningError:
already_running_status = _("A grade report generation task is already in progress. Check the 'Pending Instructor Tasks' table for the status of the task. When completed, the report will be available for download in the table below.")
return JsonResponse({
"status": already_running_status
})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('rolename')
def list_forum_members(request, course_id):
"""
Lists forum members of a certain rolename.
    Limited to staff access.
Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
which is limited to instructors.
Takes query parameter `rolename`.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_by_id(course_id)
has_instructor_access = has_access(request.user, 'instructor', course)
has_forum_admin = has_forum_access(
request.user, course_id, FORUM_ROLE_ADMINISTRATOR
)
rolename = request.GET.get('rolename')
# default roles require either (staff & forum admin) or (instructor)
if not (has_forum_admin or has_instructor_access):
return HttpResponseBadRequest(
"Operation requires staff & forum admin or instructor access"
)
# EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
return HttpResponseBadRequest("Operation requires instructor access.")
# filter out unsupported for roles
if not rolename in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
return HttpResponseBadRequest(strip_tags(
"Unrecognized rolename '{}'.".format(rolename)
))
try:
role = Role.objects.get(name=rolename, course_id=course_id)
users = role.users.all().order_by('username')
except Role.DoesNotExist:
users = []
def extract_user_info(user):
""" Convert user to dict for json rendering. """
return {
'username': user.username,
'email': user.email,
'first_name': user.first_name,
'last_name': user.last_name,
}
response_payload = {
'course_id': course_id.to_deprecated_string(),
rolename: map(extract_user_info, users),
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_post_params(send_to="sending to whom", subject="subject line", message="message text")
def send_email(request, course_id):
"""
Send an email to self, staff, or everyone involved in a course.
Query Parameters:
- 'send_to' specifies what group the email should be sent to
Options are defined by the CourseEmail model in
lms/djangoapps/bulk_email/models.py
- 'subject' specifies email's subject
- 'message' specifies email's content
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
if not bulk_email_is_enabled_for_course(course_id):
return HttpResponseForbidden("Email is not enabled for this course.")
send_to = request.POST.get("send_to")
subject = request.POST.get("subject")
message = request.POST.get("message")
# allow two branding points to come from Microsites: which CourseEmailTemplate should be used
# and what the 'from' field in the email should be
#
# If these are None (because we are not in a Microsite or they are undefined in Microsite config) than
# the system will use normal system defaults
template_name = microsite.get_value('course_email_template_name')
from_addr = microsite.get_value('course_email_from_addr')
# Create the CourseEmail object. This is saved immediately, so that
# any transaction that has been pending up to this point will also be
# committed.
email = CourseEmail.create(
course_id,
request.user,
send_to,
subject, message,
template_name=template_name,
from_addr=from_addr
)
# Submit the task, so that the correct InstructorTask object gets created (for monitoring purposes)
instructor_task.api.submit_bulk_course_email(request, course_id, email.id) # pylint: disable=E1101
response_payload = {
'course_id': course_id.to_deprecated_string(),
'success': True,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
unique_student_identifier="email or username of user to change access",
rolename="the forum role",
action="'allow' or 'revoke'",
)
@common_exceptions_400
def update_forum_role_membership(request, course_id):
"""
Modify user's forum role.
The requesting user must be at least staff.
Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
which is limited to instructors.
    No one can revoke an instructor's FORUM_ROLE_ADMINISTRATOR status.
Query parameters:
    - `unique_student_identifier` is the target user's email or username
- `rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]
- `action` is one of ['allow', 'revoke']
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_by_id(course_id)
has_instructor_access = has_access(request.user, 'instructor', course)
has_forum_admin = has_forum_access(
request.user, course_id, FORUM_ROLE_ADMINISTRATOR
)
unique_student_identifier = request.GET.get('unique_student_identifier')
rolename = request.GET.get('rolename')
action = request.GET.get('action')
# default roles require either (staff & forum admin) or (instructor)
if not (has_forum_admin or has_instructor_access):
return HttpResponseBadRequest(
"Operation requires staff & forum admin or instructor access"
)
# EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
return HttpResponseBadRequest("Operation requires instructor access.")
if not rolename in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]:
return HttpResponseBadRequest(strip_tags(
"Unrecognized rolename '{}'.".format(rolename)
))
user = get_student_from_identifier(unique_student_identifier)
try:
update_forum_role(course_id, user, rolename, action)
except Role.DoesNotExist:
return HttpResponseBadRequest("Role does not exist.")
response_payload = {
'course_id': course_id.to_deprecated_string(),
'action': action,
}
return JsonResponse(response_payload)
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params(
aname="name of analytic to query",
)
@common_exceptions_400
def proxy_legacy_analytics(request, course_id):
"""
Proxies to the analytics cron job server.
`aname` is a query parameter specifying which analytic to query.
"""
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)
analytics_name = request.GET.get('aname')
# abort if misconfigured
if not (hasattr(settings, 'ANALYTICS_SERVER_URL') and
hasattr(settings, 'ANALYTICS_API_KEY') and
settings.ANALYTICS_SERVER_URL and settings.ANALYTICS_API_KEY):
return HttpResponse("Analytics service not configured.", status=501)
url = "{}get?aname={}&course_id={}&apikey={}".format(
settings.ANALYTICS_SERVER_URL,
analytics_name,
urllib.quote(unicode(course_id)),
settings.ANALYTICS_API_KEY,
)
try:
res = requests.get(url)
except Exception: # pylint: disable=broad-except
log.exception("Error requesting from analytics server at %s", url)
return HttpResponse("Error requesting from analytics server.", status=500)
    if res.status_code == 200:
payload = json.loads(res.content)
add_block_ids(payload)
content = json.dumps(payload)
# return the successful request content
return HttpResponse(content, content_type="application/json")
    elif res.status_code == 404:
# forward the 404 and content
return HttpResponse(res.content, content_type="application/json", status=404)
else:
# 500 on all other unexpected status codes.
log.error(
"Error fetching {}, code: {}, msg: {}".format(
url, res.status_code, res.content
)
)
return HttpResponse(
"Error from analytics server ({}).".format(res.status_code),
status=500
)
@require_POST
def get_user_invoice_preference(request, course_id): # pylint: disable=W0613
"""
Gets invoice copy user's preferences.
"""
invoice_copy_preference = True
if UserPreference.get_preference(request.user, INVOICE_KEY) is not None:
invoice_copy_preference = UserPreference.get_preference(request.user, INVOICE_KEY) == 'True'
return JsonResponse({
'invoice_copy': invoice_copy_preference
})
def _display_unit(unit):
"""
Gets string for displaying unit to user.
"""
name = getattr(unit, 'display_name', None)
if name:
return u'{0} ({1})'.format(name, unit.location.to_deprecated_string())
else:
return unit.location.to_deprecated_string()
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student', 'url', 'due_datetime')
def change_due_date(request, course_id):
"""
Grants a due date extension to a student for a particular unit.
"""
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
student = require_student_from_identifier(request.GET.get('student'))
unit = find_unit(course, request.GET.get('url'))
due_date = parse_datetime(request.GET.get('due_datetime'))
set_due_date_extension(course, unit, student, due_date)
return JsonResponse(_(
'Successfully changed due date for student {0} for {1} '
'to {2}').format(student.profile.name, _display_unit(unit),
due_date.strftime('%Y-%m-%d %H:%M')))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student', 'url')
def reset_due_date(request, course_id):
"""
Rescinds a due date extension for a student on a particular unit.
"""
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
student = require_student_from_identifier(request.GET.get('student'))
unit = find_unit(course, request.GET.get('url'))
set_due_date_extension(course, unit, student, None)
if not getattr(unit, "due", None):
# It's possible the normal due date was deleted after an extension was granted:
return JsonResponse(
_("Successfully removed invalid due date extension (unit has no due date).")
)
original_due_date_str = unit.due.strftime('%Y-%m-%d %H:%M')
return JsonResponse(_(
'Successfully reset due date for student {0} for {1} '
'to {2}').format(student.profile.name, _display_unit(unit),
original_due_date_str))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('url')
def show_unit_extensions(request, course_id):
"""
Shows all of the students which have due date extensions for the given unit.
"""
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
unit = find_unit(course, request.GET.get('url'))
return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('student')
def show_student_extensions(request, course_id):
"""
Shows all of the due date extensions granted to a particular student in a
particular course.
"""
student = require_student_from_identifier(request.GET.get('student'))
course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(course_id))
return JsonResponse(dump_student_extensions(course, student))
def _split_input_list(str_list):
"""
    Separate out individual student emails from a comma-, newline-, or space-separated string.
e.g.
in: "[email protected], [email protected]\[email protected]\r [email protected]\r, [email protected]"
out: ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']
`str_list` is a string coming from an input text area
returns a list of separated values
"""
new_list = re.split(r'[\n\r\s,]', str_list)
new_list = [s.strip() for s in new_list]
new_list = [s for s in new_list if s != '']
return new_list
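# A minimal sketch of why the filtering steps above are needed: re.split on a
# class of single separator characters yields empty strings for runs like
# ", ", so the comprehensions strip and drop them. (Standalone illustration
# with placeholder addresses, not part of the module's API.)
#
#   >>> import re
#   >>> re.split(r'[\n\r\s,]', 'a@x.com, b@x.com')
#   ['a@x.com', '', 'b@x.com']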
#---- Gradebook (shown to small courses only) ----
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def spoc_gradebook(request, course_id):
"""
Show the gradebook for this course:
- Only shown for courses with enrollment < settings.FEATURES.get("MAX_ENROLLMENT_INSTR_BUTTONS")
- Only displayed to course staff
"""
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
course = get_course_with_access(request.user, 'staff', course_key, depth=None)
enrolled_students = User.objects.filter(
courseenrollment__course_id=course_key,
courseenrollment__is_active=1
).order_by('username').select_related("profile")
# possible extension: implement pagination to show to large courses
student_info = [
{
'username': student.username,
'id': student.id,
'email': student.email,
'grade_summary': student_grades(student, request, course),
'realname': student.profile.name,
}
for student in enrolled_students
]
return render_to_response('courseware/gradebook.html', {
'students': student_info,
'course': course,
'course_id': course_key,
# Checked above
'staff_access': True,
'ordered_grades': sorted(course.grade_cutoffs.items(), key=lambda i: i[1], reverse=True),
})
| agpl-3.0 |
leesavide/pythonista-docs | Documentation/matplotlib/mpl_examples/pylab_examples/boxplot_demo.py | 12 | 1162 | #!/usr/bin/python
#
# Example boxplot code
#
from pylab import *
# fake up some data
spread = rand(50) * 100
center = ones(25) * 50
flier_high = rand(10) * 100 + 100
flier_low = rand(10) * -100
data = concatenate((spread, center, flier_high, flier_low), 0)
# basic plot
boxplot(data)
# notched plot
figure()
boxplot(data,1)
# change outlier point symbols
figure()
boxplot(data,0,'gD')
# don't show outlier points
figure()
boxplot(data,0,'')
# horizontal boxes
figure()
boxplot(data,0,'rs',0)
# change whisker length
figure()
boxplot(data,0,'rs',0,0.75)
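# Note: the positional arguments above follow the classic pylab signature,
# roughly boxplot(x, notch=0, sym='b+', vert=1, whis=1.5), so the previous
# call draws horizontal (vert=0) boxes with red-square fliers and a 0.75
# whisker length.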
# fake up some more data
spread = rand(50) * 100
center = ones(25) * 40
flier_high = rand(10) * 100 + 100
flier_low = rand(10) * -100
d2 = concatenate((spread, center, flier_high, flier_low), 0)
data.shape = (-1, 1)
d2.shape = (-1, 1)
#data = concatenate( (data, d2), 1 )
# Making a 2-D array only works if all the columns are the
# same length. If they are not, then use a list instead.
# This is actually more efficient because boxplot converts
# a 2-D array into a list of vectors internally anyway.
data = [data, d2, d2[::2,0]]
# multiple box plots on one figure
figure()
boxplot(data)
show()
| apache-2.0 |
Hodorable/0602 | openstack_dashboard/dashboards/project/networks/ports/forms.py | 25 | 4064 | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
VNIC_TYPES = [('normal', _('Normal')), ('direct', _('Direct')),
('macvtap', _('MacVTap'))]
class UpdatePort(forms.SelfHandlingForm):
network_id = forms.CharField(widget=forms.HiddenInput())
port_id = forms.CharField(label=_("ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
name = forms.CharField(max_length=255,
label=_("Name"),
required=False)
admin_state = forms.ChoiceField(choices=[(True, _('UP')),
(False, _('DOWN'))],
label=_("Admin State"))
failure_url = 'horizon:project:networks:detail'
def __init__(self, request, *args, **kwargs):
super(UpdatePort, self).__init__(request, *args, **kwargs)
if api.neutron.is_extension_supported(request, 'binding'):
neutron_settings = getattr(settings,
'OPENSTACK_NEUTRON_NETWORK', {})
supported_vnic_types = neutron_settings.get(
'supported_vnic_types', ['*'])
if supported_vnic_types == ['*']:
vnic_type_choices = VNIC_TYPES
else:
vnic_type_choices = [
vnic_type for vnic_type in VNIC_TYPES
if vnic_type[0] in supported_vnic_types
]
self.fields['binding__vnic_type'] = forms.ChoiceField(
choices=vnic_type_choices,
label=_("Binding: VNIC Type"),
help_text=_("The VNIC type that is bound to the neutron port"),
required=False)
if api.neutron.is_extension_supported(request, 'mac-learning'):
self.fields['mac_state'] = forms.BooleanField(
label=_("MAC Learning State"), initial=False, required=False)
def handle(self, request, data):
data['admin_state'] = (data['admin_state'] == 'True')
try:
LOG.debug('params = %s' % data)
extension_kwargs = {}
if 'binding__vnic_type' in data:
extension_kwargs['binding__vnic_type'] = \
data['binding__vnic_type']
if 'mac_state' in data:
extension_kwargs['mac_learning_enabled'] = data['mac_state']
port = api.neutron.port_update(request,
data['port_id'],
name=data['name'],
admin_state_up=data['admin_state'],
**extension_kwargs)
msg = _('Port %s was successfully updated.') % data['port_id']
LOG.debug(msg)
messages.success(request, msg)
return port
except Exception:
msg = _('Failed to update port %s') % data['port_id']
LOG.info(msg)
redirect = reverse(self.failure_url,
args=[data['network_id']])
exceptions.handle(request, msg, redirect=redirect)
| apache-2.0 |
diegocortassa/TACTIC | src/tactic/ui/widget/swap_display_wdg.py | 1 | 7595 | ###########################################################
#
# Copyright (c) 2009, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ["SwapDisplayWdg", "TestSwapWdg"]
from pyasm.web import DivWdg, HtmlElement, SpanWdg, Table
from pyasm.widget import IconWdg
from tactic.ui.common import BaseRefreshWdg
class SwapDisplayWdg(BaseRefreshWdg):
'''This swap display is a very light version which makes use of
relay behaviors to significantly reduce the number of behaviors
required'''
def init(self):
self.on_wdg = None
self.off_wdg = None
self.title_wdg = None
self.behavior_top = None
self.content_id = None
self.inner = DivWdg()
def set_default_wdg(self):
theme = DivWdg().get_theme()
if theme == "default":
self.on_wdg = IconWdg('open', IconWdg.ARROWHEAD_DARK_DOWN)
self.off_wdg = IconWdg('closed', IconWdg.ARROWHEAD_DARK_RIGHT)
else:
self.on_wdg = IconWdg('open', IconWdg.INFO_OPEN_SMALL)
self.off_wdg = IconWdg('closed', IconWdg.INFO_CLOSED_SMALL)
def set_display_wdgs(self, on_wdg, off_wdg):
self.on_wdg = on_wdg
self.off_wdg = off_wdg
def set_on(self, flag=True):
self.kwargs["is_on"] = flag
def set_off(self, flag=False):
self.kwargs["is_on"] = flag
def set_title_wdg(self, title):
self.title_wdg = title
def set_content_id(self, content_id):
self.content_id = content_id
def set_behavior_top(self, behavior_top):
self.behavior_top = behavior_top
def handle_top(cls, top):
behavior = {
'type': 'click',
'bvr_match_class': 'spt_swap_top',
'cbjs_action': '''
var top = bvr.src_el;
var on = top.getElement(".SPT_SWAP_ON");
var off = top.getElement(".SPT_SWAP_OFF");
var state = top.getAttribute("spt_state");
if (state == 'on')
state = 'off';
else
state = 'on';
if (state == 'on') {
spt.show(on);
spt.hide(off);
top.setAttribute("spt_state", "on");
}
else {
spt.show(off);
spt.hide(on);
top.setAttribute("spt_state", "off");
}
var content_id = top.getAttribute("spt_content_id");
if (content_id) {
if (state == 'on') {
spt.show( content_id )
}
else {
spt.hide( content_id )
}
}
'''
}
#top.add_behavior(behavior)
top.add_relay_behavior(behavior)
handle_top = classmethod(handle_top)
def add_behavior(self, behavior):
self.top.add_behavior(behavior)
def add_class(self, class_name):
self.inner.add_class(class_name)
def add_attr(self, name, value):
self.inner.add_attr(name, value)
def get_display(self):
top = self.top
top.add_class("hand")
inner = self.inner
top.add(inner)
inner.add_class("spt_swap_top")
table = Table()
inner.add(table)
table.add_color("color", "color")
table.add_class("SPT_DTS")
table.add_row()
td = table.add_cell()
title = self.kwargs.get("title")
# determine whether this widget is on or off
is_on = self.kwargs.get("is_on")
if is_on in [True, "true"]:
is_on = True
else:
is_on = False
if not self.on_wdg or not self.off_wdg:
self.set_default_wdg()
# add the content id
if self.content_id:
inner.add_attr("spt_content_id", self.content_id)
# add the behaviors
if not self.behavior_top:
self.handle_top(top)
on_div = DivWdg()
td.add(on_div)
on_div.add_class("SPT_SWAP_ON")
off_div = DivWdg()
td.add(off_div)
off_div.add_class("SPT_SWAP_OFF")
if is_on:
off_div.add_style("display: none")
inner.add_attr("spt_state", "on")
else:
on_div.add_style("display: none")
inner.add_attr("spt_state", "off")
on_div.add( self.on_wdg )
off_div.add( self.off_wdg )
# handle an icon
icon_str = self.kwargs.get("icon")
if icon_str and isinstance(icon_str, basestring):
icon_div = DivWdg()
if icon_str.startswith("BS_"):
icon = IconWdg(name=title, icon=icon_str, size=12 )
icon_div.add_style("margin: -2px 10px 0px 10px")
icon_div.add_style("margin-left: -3px")
else:
icon = IconWdg(name=title, icon=icon_str )
icon_div.add_style("margin-left: -6px")
icon_div.add(icon)
td = table.add_cell(icon_div)
elif icon_str:
td = table.add_cell(icon_str)
icon_str.add_style("margin-left: -6px")
else:
show_border = self.kwargs.get("show_border")
if show_border in [True, 'true']:
on_div.add_border()
off_div.add_border()
on_div.add_style("width: 16")
on_div.add_style("height: 16")
on_div.add_style("overflow: hidden")
off_div.add_style("width: 16")
off_div.add_style("height: 16")
off_div.add_style("overflow: hidden")
if self.title_wdg:
td = table.add_cell(self.title_wdg)
else:
td = table.add_cell(title)
return top
class TestSwapWdg(BaseRefreshWdg):
def get_display(self):
top = self.top
top.add_color("background", "background")
top.add_style("padding: 5px")
top.add_border()
SwapDisplayWdg.handle_top(top)
top.add_relay_behavior( {
'type': 'click',
'bvr_match_class': 'spt_swap_top',
'cbjs_action': '''var top = bvr.src_el;
if (['on', null].contains(top.getAttribute("spt_state")))
spt.alert('clicked open')
'''
} )
for title in ['First', 'Second', 'Third', 'Fourth', 'Fifth', 'Sixth', 'Seventh']:
swap = SwapDisplayWdg(title=title, icon='FILM')
top.add(swap)
swap.set_behavior_top(top)
# handle hover behavior
hover = top.get_color("background", -10)
behavior = {
'type': 'hover',
'bvr_match_class': 'spt_swap_top',
'hover': hover,
'cbjs_action_over': '''bvr.src_el.setStyle('background', bvr.hover)''',
'cbjs_action_out': '''bvr.src_el.setStyle('background', '')''',
}
swap.add_behavior(behavior)
content = DivWdg()
unique_id = content.set_unique_id("content")
swap.set_content_id(unique_id)
content.add("This is content!!!!")
top.add(content)
content.add_style("display: none")
return top
| epl-1.0 |
XiaodunServerGroup/xiaodun-platform | common/djangoapps/edxmako/tests.py | 30 | 1489 | from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from edxmako import add_lookup, LOOKUP
from edxmako.shortcuts import marketing_link
from mock import patch
from util.testing import UrlResetMixin
class ShortcutsTests(UrlResetMixin, TestCase):
"""
Test the edxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
class AddLookupTests(TestCase):
"""
Test the `add_lookup` function.
"""
@patch('edxmako.LOOKUP', {})
def test_with_package(self):
add_lookup('test', 'management', __name__)
dirs = LOOKUP['test'].directories
self.assertEqual(len(dirs), 1)
self.assertTrue(dirs[0].endswith('management'))
| agpl-3.0 |
ClearCorp/odoo-clearcorp | TODO-6.1/ccorp_account/__init__.py | 4 | 2003 | # -*- encoding: utf-8 -*-
##############################################################################
#
# __init__.py
# ccorp_account
# First author: Carlos Vásquez <[email protected]> (ClearCorp S.A.)
# Copyright (c) 2010-TODAY ClearCorp S.A. (http://clearcorp.co.cr). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of ClearCorp S.A..
#
##############################################################################
import wizard
import report
| agpl-3.0 |
fjbatresv/odoo | addons/website_mail_group/models/mail_group.py | 321 | 2678 | # -*- coding: utf-8 -*-
from openerp.osv import osv
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
from openerp.addons.website.models.website import slug
class MailGroup(osv.Model):
_inherit = 'mail.group'
def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None):
res = super(MailGroup, self).message_get_email_values(cr, uid, id, notif_mail=notif_mail, context=context)
group = self.browse(cr, uid, id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
headers = {}
if res.get('headers'):
try:
headers = eval(res['headers'])
except Exception:
pass
headers.update({
'List-Archive': '<%s/groups/%s>' % (base_url, slug(group)),
'List-Subscribe': '<%s/groups>' % (base_url),
'List-Unsubscribe': '<%s/groups?unsubscribe>' % (base_url,),
})
res['headers'] = repr(headers)
return res
class MailMail(osv.Model):
_inherit = 'mail.mail'
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
""" Short-circuit parent method for mail groups, replace the default
footer with one appropriate for mailing-lists."""
if mail.model == 'mail.group' and mail.res_id:
# no super() call on purpose, no private links that could be quoted!
group = self.pool['mail.group'].browse(cr, uid, mail.res_id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
vals = {
'maillist': _('Mailing-List'),
'post_to': _('Post to'),
'unsub': _('Unsubscribe'),
'mailto': 'mailto:%s@%s' % (group.alias_name, group.alias_domain),
'group_url': '%s/groups/%s' % (base_url, slug(group)),
'unsub_url': '%s/groups?unsubscribe' % (base_url,),
}
footer = """_______________________________________________
%(maillist)s: %(group_url)s
%(post_to)s: %(mailto)s
%(unsub)s: %(unsub_url)s
""" % vals
body = tools.append_content_to_html(mail.body, footer, container_tag='div')
return body
else:
return super(MailMail, self).send_get_mail_body(cr, uid, mail,
partner=partner,
context=context)
| agpl-3.0 |
achals/servo | tests/wpt/css-tests/css21_dev/html4/reference/support/fonts/makegsubfonts.py | 1616 | 14125 |
import os
import textwrap
from xml.etree import ElementTree
from fontTools.ttLib import TTFont, newTable
from fontTools.misc.psCharStrings import T2CharString
from fontTools.ttLib.tables.otTables import GSUB,\
ScriptList, ScriptRecord, Script, DefaultLangSys,\
FeatureList, FeatureRecord, Feature,\
LookupList, Lookup, AlternateSubst, SingleSubst
# paths
directory = os.path.dirname(__file__)
shellSourcePath = os.path.join(directory, "gsubtest-shell.ttx")
shellTempPath = os.path.join(directory, "gsubtest-shell.otf")
featureList = os.path.join(directory, "gsubtest-features.txt")
javascriptData = os.path.join(directory, "gsubtest-features.js")
outputPath = os.path.join(os.path.dirname(directory), "gsubtest-lookup%d")
baseCodepoint = 0xe000
# -------
# Features
# -------
f = open(featureList, "rb")
text = f.read()
f.close()
mapping = []
for line in text.splitlines():
line = line.strip()
if not line:
continue
if line.startswith("#"):
continue
# parse
values = line.split("\t")
tag = values.pop(0)
mapping.append(tag);
# --------
# Outlines
# --------
def addGlyphToCFF(glyphName=None, program=None, private=None, globalSubrs=None, charStringsIndex=None, topDict=None, charStrings=None):
charString = T2CharString(program=program, private=private, globalSubrs=globalSubrs)
charStringsIndex.append(charString)
glyphID = len(topDict.charset)
charStrings.charStrings[glyphName] = glyphID
topDict.charset.append(glyphName)
def makeLookup1():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup1")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
# bump this up so that the sequence is the same as the lookup 3 font
cp += 3
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 1
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = SingleSubst()
subtable.Format = 2
subtable.LookupType = 1
subtable.mapping = {
"%s.pass" % tag : "%s.fail" % tag,
"%s.fail" % tag : "%s.pass" % tag,
}
lookup.SubTable.append(subtable)
path = outputPath % 1 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
def makeLookup3():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup3")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
# tag.default
glyphName = "%s.default" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.alt1,2,3
for i in range(1,4):
glyphName = "%s.alt%d" % (tag, i)
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 3
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = AlternateSubst()
subtable.Format = 1
subtable.LookupType = 3
subtable.alternates = {
"%s.default" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt1" % tag : ["%s.pass" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt2" % tag : ["%s.fail" % tag, "%s.pass" % tag, "%s.fail" % tag],
"%s.alt3" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.pass" % tag]
}
lookup.SubTable.append(subtable)
path = outputPath % 3 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
def makeJavascriptData():
features = sorted(mapping)
outStr = []
outStr.append("")
outStr.append("/* This file is autogenerated by makegsubfonts.py */")
outStr.append("")
outStr.append("/* ")
outStr.append(" Features defined in gsubtest fonts with associated base")
outStr.append(" codepoints for each feature:")
outStr.append("")
outStr.append(" cp = codepoint for feature featX")
outStr.append("")
outStr.append(" cp default PASS")
outStr.append(" cp featX=1 FAIL")
outStr.append(" cp featX=2 FAIL")
outStr.append("")
outStr.append(" cp+1 default FAIL")
outStr.append(" cp+1 featX=1 PASS")
outStr.append(" cp+1 featX=2 FAIL")
outStr.append("")
outStr.append(" cp+2 default FAIL")
outStr.append(" cp+2 featX=1 FAIL")
outStr.append(" cp+2 featX=2 PASS")
outStr.append("")
outStr.append("*/")
outStr.append("")
outStr.append("var gFeatures = {");
cp = baseCodepoint
taglist = []
for tag in features:
taglist.append("\"%s\": 0x%x" % (tag, cp))
cp += 4
outStr.append(textwrap.fill(", ".join(taglist), initial_indent=" ", subsequent_indent=" "))
outStr.append("};");
outStr.append("");
if os.path.exists(javascriptData):
os.remove(javascriptData)
f = open(javascriptData, "wb")
f.write("\n".join(outStr))
f.close()
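# A small helper sketch (assumption: it mirrors the cp += 4 stride used in
# makeJavascriptData above and the 4-codepoint blocks written per feature by
# makeLookup3): recompute the base codepoint assigned to a given feature tag.
def codepointForFeature(tag):
    # features are assigned consecutive 4-codepoint blocks in sorted order
    return baseCodepoint + 4 * sorted(mapping).index(tag)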
# build fonts
print "Making lookup type 1 font..."
makeLookup1()
print "Making lookup type 3 font..."
makeLookup3()
# output javascript data
print "Making javascript data file..."
makeJavascriptData()
| mpl-2.0 |
danalec/dotfiles | sublime/.config/sublime-text-3/Packages/pygments/all/pygments/regexopt.py | 50 | 3067 | # -*- coding: utf-8 -*-
"""
pygments.regexopt
~~~~~~~~~~~~~~~~~
An algorithm that generates optimized regexes for matching long lists of
literal strings.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from re import escape
from os.path import commonprefix
from itertools import groupby
from operator import itemgetter
CS_ESCAPE = re.compile(r'[\^\\\-\]]')
FIRST_ELEMENT = itemgetter(0)
def make_charset(letters):
return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
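# For instance (a throwaway illustration, not part of the public API):
# make_charset('ab-') yields the pattern [ab\-] -- the dash is escaped so it
# is matched literally rather than read as a character range.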
def regex_opt_inner(strings, open_paren):
"""Return a regex that matches any string in the sorted list of strings."""
close_paren = open_paren and ')' or ''
# print strings, repr(open_paren)
if not strings:
# print '-> nothing left'
return ''
first = strings[0]
if len(strings) == 1:
# print '-> only 1 string'
return open_paren + escape(first) + close_paren
if not first:
# print '-> first string empty'
return open_paren + regex_opt_inner(strings[1:], '(?:') \
+ '?' + close_paren
if len(first) == 1:
# multiple one-char strings? make a charset
oneletter = []
rest = []
for s in strings:
if len(s) == 1:
oneletter.append(s)
else:
rest.append(s)
if len(oneletter) > 1: # do we have more than one oneletter string?
if rest:
# print '-> 1-character + rest'
return open_paren + regex_opt_inner(rest, '') + '|' \
+ make_charset(oneletter) + close_paren
# print '-> only 1-character'
return make_charset(oneletter)
prefix = commonprefix(strings)
if prefix:
plen = len(prefix)
# we have a prefix for all strings
# print '-> prefix:', prefix
return open_paren + escape(prefix) \
+ regex_opt_inner([s[plen:] for s in strings], '(?:') \
+ close_paren
# is there a suffix?
strings_rev = [s[::-1] for s in strings]
suffix = commonprefix(strings_rev)
if suffix:
slen = len(suffix)
# print '-> suffix:', suffix[::-1]
return open_paren \
+ regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
+ escape(suffix[::-1]) + close_paren
# recurse on common 1-string prefixes
# print '-> last resort'
return open_paren + \
'|'.join(regex_opt_inner(list(group[1]), '')
for group in groupby(strings, lambda s: s[0] == first[0])) \
+ close_paren
def regex_opt(strings, prefix='', suffix=''):
"""Return a compiled regex that matches any string in the given list.
The strings to match must be literal strings, not regexes. They will be
regex-escaped.
*prefix* and *suffix* are pre- and appended to the final regex.
"""
strings = sorted(strings)
return prefix + regex_opt_inner(strings, '(') + suffix
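# Editor's sketch (not part of the upstream module): regex_opt returns a
# pattern *string*, which the caller compiles. A minimal self-check, run
# only when this file is executed directly:
if __name__ == '__main__':
    _pat = regex_opt(['if', 'in', 'import'], prefix=r'\b', suffix=r'\b')
    _word_re = re.compile(_pat)
    assert _word_re.match('import')
    assert _word_re.match('in')
    assert not _word_re.match('elif')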
| mit |
ralphtheninja/cjdns | contrib/python/cjdnsadmin/publicToIp6.py | 18 | 2228 | #!/usr/bin/env python2
# You may redistribute this program and/or modify it under the terms of
# the GNU General Public License as published by the Free Software Foundation,
# either version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from hashlib import sha512;
# see util/Base32.h
def Base32_decode(input):
output = bytearray(len(input));
numForAscii = [
99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,
99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,
99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,99,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,99,99,99,99,99,99,
99,99,10,11,12,99,13,14,15,99,16,17,18,19,20,99,
21,22,23,24,25,26,27,28,29,30,31,99,99,99,99,99,
99,99,10,11,12,99,13,14,15,99,16,17,18,19,20,99,
21,22,23,24,25,26,27,28,29,30,31,99,99,99,99,99,
];
outputIndex = 0;
inputIndex = 0;
nextByte = 0;
bits = 0;
while (inputIndex < len(input)):
o = ord(input[inputIndex]);
if (o & 0x80): raise ValueError;
        b = numForAscii[o];
        if (b > 31): raise ValueError("bad character " + input[inputIndex]);
        inputIndex += 1;
nextByte |= (b << bits);
bits += 5;
if (bits >= 8):
output[outputIndex] = nextByte & 0xff;
outputIndex += 1;
bits -= 8;
nextByte >>= 8;
if (bits >= 5 or nextByte):
raise ValueError("bits is " + str(bits) + " and nextByte is " + str(nextByte));
return buffer(output, 0, outputIndex);
def PublicToIp6_convert(pubKey):
if pubKey[-2:] != ".k":
raise ValueError("key does not end with .k")
keyBytes = Base32_decode(pubKey[:-2])
hashOne = sha512(keyBytes).digest()
hashTwo = sha512(hashOne).hexdigest()
return ":".join([hashTwo[i:i+4] for i in range(0, 32, 4)])
| gpl-3.0 |
llvtt/mongo-python-driver | test/test_common.py | 1 | 8501 | # Copyright 2011-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the pymongo common module."""
import sys
import uuid
sys.path[0:0] = [""]
from bson.binary import UUIDLegacy, PYTHON_LEGACY, STANDARD
from bson.code import Code
from bson.codec_options import CodecOptions
from bson.objectid import ObjectId
from pymongo.mongo_client import MongoClient
from pymongo.errors import OperationFailure
from pymongo.write_concern import WriteConcern
from test import client_context, pair, unittest, IntegrationTest
from test.utils import connected, rs_or_single_client, single_client
@client_context.require_connection
def setUpModule():
pass
class TestCommon(IntegrationTest):
def test_uuid_representation(self):
coll = self.db.uuid
coll.drop()
# Test property
self.assertEqual(PYTHON_LEGACY,
coll.codec_options.uuid_representation)
# Test basic query
uu = uuid.uuid4()
# Insert as binary subtype 3
coll.insert_one({'uu': uu})
self.assertEqual(uu, coll.find_one({'uu': uu})['uu'])
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=STANDARD))
self.assertEqual(STANDARD, coll.codec_options.uuid_representation)
self.assertEqual(None, coll.find_one({'uu': uu}))
self.assertEqual(uu, coll.find_one({'uu': UUIDLegacy(uu)})['uu'])
# Test Cursor.count
self.assertEqual(0, coll.find({'uu': uu}).count())
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=PYTHON_LEGACY))
self.assertEqual(1, coll.find({'uu': uu}).count())
# Test delete
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=STANDARD))
coll.delete_one({'uu': uu})
self.assertEqual(1, coll.count())
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=PYTHON_LEGACY))
coll.delete_one({'uu': uu})
self.assertEqual(0, coll.count())
# Test update_one
coll.insert_one({'_id': uu, 'i': 1})
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=STANDARD))
coll.update_one({'_id': uu}, {'$set': {'i': 2}})
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=PYTHON_LEGACY))
self.assertEqual(1, coll.find_one({'_id': uu})['i'])
coll.update_one({'_id': uu}, {'$set': {'i': 2}})
self.assertEqual(2, coll.find_one({'_id': uu})['i'])
# Test Cursor.distinct
self.assertEqual([2], coll.find({'_id': uu}).distinct('i'))
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=STANDARD))
self.assertEqual([], coll.find({'_id': uu}).distinct('i'))
# Test findAndModify
self.assertEqual(None, coll.find_one_and_update({'_id': uu},
{'$set': {'i': 5}}))
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=PYTHON_LEGACY))
self.assertEqual(2, coll.find_one_and_update({'_id': uu},
{'$set': {'i': 5}})['i'])
self.assertEqual(5, coll.find_one({'_id': uu})['i'])
# Test command
self.assertEqual(5, self.db.command('findAndModify', 'uuid',
update={'$set': {'i': 6}},
query={'_id': uu})['value']['i'])
self.assertEqual(6, self.db.command(
'findAndModify', 'uuid',
update={'$set': {'i': 7}},
query={'_id': UUIDLegacy(uu)})['value']['i'])
# Test (inline)_map_reduce
coll.drop()
coll.insert_one({"_id": uu, "x": 1, "tags": ["dog", "cat"]})
coll.insert_one({"_id": uuid.uuid4(), "x": 3,
"tags": ["mouse", "cat", "dog"]})
map = Code("function () {"
" this.tags.forEach(function(z) {"
" emit(z, 1);"
" });"
"}")
reduce = Code("function (key, values) {"
" var total = 0;"
" for (var i = 0; i < values.length; i++) {"
" total += values[i];"
" }"
" return total;"
"}")
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=STANDARD))
q = {"_id": uu}
result = coll.inline_map_reduce(map, reduce, query=q)
self.assertEqual([], result)
result = coll.map_reduce(map, reduce, "results", query=q)
self.assertEqual(0, self.db.results.count())
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=PYTHON_LEGACY))
q = {"_id": uu}
result = coll.inline_map_reduce(map, reduce, query=q)
self.assertEqual(2, len(result))
result = coll.map_reduce(map, reduce, "results", query=q)
self.assertEqual(2, self.db.results.count())
self.db.drop_collection("result")
coll.drop()
# Test group
coll.insert_one({"_id": uu, "a": 2})
coll.insert_one({"_id": uuid.uuid4(), "a": 1})
reduce = "function (obj, prev) { prev.count++; }"
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=STANDARD))
self.assertEqual([],
coll.group([], {"_id": uu},
{"count": 0}, reduce))
coll = self.db.get_collection(
"uuid", CodecOptions(uuid_representation=PYTHON_LEGACY))
self.assertEqual([{"count": 1}],
coll.group([], {"_id": uu},
{"count": 0}, reduce))
def test_write_concern(self):
c = MongoClient(connect=False)
self.assertEqual(WriteConcern(), c.write_concern)
c = MongoClient(connect=False, w=2, wtimeout=1000)
wc = WriteConcern(w=2, wtimeout=1000)
self.assertEqual(wc, c.write_concern)
db = c.pymongo_test
self.assertEqual(wc, db.write_concern)
coll = db.test
self.assertEqual(wc, coll.write_concern)
cwc = WriteConcern(j=True)
coll = db.get_collection('test', write_concern=cwc)
self.assertEqual(cwc, coll.write_concern)
self.assertEqual(wc, db.write_concern)
def test_mongo_client(self):
m = rs_or_single_client(w=0)
coll = m.pymongo_test.write_concern_test
coll.drop()
doc = {"_id": ObjectId()}
coll.insert_one(doc)
self.assertTrue(coll.insert_one(doc))
coll = coll.with_options(write_concern=WriteConcern(w=1))
self.assertRaises(OperationFailure, coll.insert_one, doc)
m = rs_or_single_client()
coll = m.pymongo_test.write_concern_test
new_coll = coll.with_options(write_concern=WriteConcern(w=0))
self.assertTrue(new_coll.insert_one(doc))
self.assertRaises(OperationFailure, coll.insert_one, doc)
m = MongoClient("mongodb://%s/" % (pair,),
replicaSet=client_context.replica_set_name)
coll = m.pymongo_test.write_concern_test
self.assertRaises(OperationFailure, coll.insert_one, doc)
m = MongoClient("mongodb://%s/?w=0" % (pair,),
replicaSet=client_context.replica_set_name)
coll = m.pymongo_test.write_concern_test
coll.insert_one(doc)
# Equality tests
direct = connected(single_client(w=0))
self.assertEqual(direct,
connected(MongoClient("mongodb://%s/?w=0" % (pair,))))
self.assertFalse(direct !=
connected(MongoClient("mongodb://%s/?w=0" % (pair,))))
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
taaviteska/django | tests/template_tests/syntax_tests/test_cycle.py | 79 | 6974 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class CycleTagTests(SimpleTestCase):
@setup({'cycle01': '{% cycle a %}'})
def test_cycle01(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('cycle01')
@setup({'cycle05': '{% cycle %}'})
def test_cycle05(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('cycle05')
@setup({'cycle06': '{% cycle a %}'})
def test_cycle06(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('cycle06')
@setup({'cycle07': '{% cycle a,b,c as foo %}{% cycle bar %}'})
def test_cycle07(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('cycle07')
@setup({'cycle10': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}"})
def test_cycle10(self):
output = self.engine.render_to_string('cycle10')
self.assertEqual(output, 'ab')
@setup({'cycle11': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}"})
def test_cycle11(self):
output = self.engine.render_to_string('cycle11')
self.assertEqual(output, 'abc')
@setup({'cycle12': "{% cycle 'a' 'b' 'c' as abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}"})
def test_cycle12(self):
output = self.engine.render_to_string('cycle12')
self.assertEqual(output, 'abca')
@setup({'cycle13': "{% for i in test %}{% cycle 'a' 'b' %}{{ i }},{% endfor %}"})
def test_cycle13(self):
output = self.engine.render_to_string('cycle13', {'test': list(range(5))})
self.assertEqual(output, 'a0,b1,a2,b3,a4,')
@setup({'cycle14': '{% cycle one two as foo %}{% cycle foo %}'})
def test_cycle14(self):
output = self.engine.render_to_string('cycle14', {'one': '1', 'two': '2'})
self.assertEqual(output, '12')
@setup({'cycle15': '{% for i in test %}{% cycle aye bee %}{{ i }},{% endfor %}'})
def test_cycle15(self):
output = self.engine.render_to_string('cycle15', {'test': list(range(5)), 'aye': 'a', 'bee': 'b'})
self.assertEqual(output, 'a0,b1,a2,b3,a4,')
@setup({'cycle16': '{% cycle one|lower two as foo %}{% cycle foo %}'})
def test_cycle16(self):
output = self.engine.render_to_string('cycle16', {'one': 'A', 'two': '2'})
self.assertEqual(output, 'a2')
@setup({'cycle17': "{% cycle 'a' 'b' 'c' as abc silent %}"
"{% cycle abc %}{% cycle abc %}{% cycle abc %}{% cycle abc %}"})
def test_cycle17(self):
output = self.engine.render_to_string('cycle17')
self.assertEqual(output, '')
@setup({'cycle18': "{% cycle 'a' 'b' 'c' as foo invalid_flag %}"})
def test_cycle18(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('cycle18')
@setup({'cycle19': "{% cycle 'a' 'b' as silent %}{% cycle silent %}"})
def test_cycle19(self):
output = self.engine.render_to_string('cycle19')
self.assertEqual(output, 'ab')
@setup({'cycle20': '{% cycle one two as foo %} & {% cycle foo %}'})
def test_cycle20(self):
output = self.engine.render_to_string('cycle20', {'two': 'C & D', 'one': 'A & B'})
self.assertEqual(output, 'A & B & C & D')
@setup({'cycle21': '{% filter force_escape %}'
'{% cycle one two as foo %} & {% cycle foo %}{% endfilter %}'})
def test_cycle21(self):
output = self.engine.render_to_string('cycle21', {'two': 'C & D', 'one': 'A & B'})
self.assertEqual(output, 'A &amp; B & C &amp; D')
@setup({'cycle22': "{% for x in values %}{% cycle 'a' 'b' 'c' as abc silent %}{{ x }}{% endfor %}"})
def test_cycle22(self):
output = self.engine.render_to_string('cycle22', {'values': [1, 2, 3, 4]})
self.assertEqual(output, '1234')
@setup({'cycle23': "{% for x in values %}"
"{% cycle 'a' 'b' 'c' as abc silent %}{{ abc }}{{ x }}{% endfor %}"})
def test_cycle23(self):
output = self.engine.render_to_string('cycle23', {'values': [1, 2, 3, 4]})
self.assertEqual(output, 'a1b2c3a4')
@setup({
'cycle24': "{% for x in values %}"
"{% cycle 'a' 'b' 'c' as abc silent %}{% include 'included-cycle' %}{% endfor %}",
'included-cycle': '{{ abc }}',
})
def test_cycle24(self):
output = self.engine.render_to_string('cycle24', {'values': [1, 2, 3, 4]})
self.assertEqual(output, 'abca')
@setup({'cycle25': '{% cycle a as abc %}'})
def test_cycle25(self):
output = self.engine.render_to_string('cycle25', {'a': '<'})
self.assertEqual(output, '<')
@setup({'cycle26': '{% cycle a b as ab %}{% cycle ab %}'})
def test_cycle26(self):
output = self.engine.render_to_string('cycle26', {'a': '<', 'b': '>'})
self.assertEqual(output, '<>')
@setup({'cycle27': '{% autoescape off %}{% cycle a b as ab %}{% cycle ab %}{% endautoescape %}'})
def test_cycle27(self):
output = self.engine.render_to_string('cycle27', {'a': '<', 'b': '>'})
self.assertEqual(output, '<>')
@setup({'cycle28': '{% cycle a|safe b as ab %}{% cycle ab %}'})
def test_cycle28(self):
output = self.engine.render_to_string('cycle28', {'a': '<', 'b': '>'})
self.assertEqual(output, '<>')
@setup({
'cycle29': "{% cycle 'a' 'b' 'c' as cycler silent %}"
"{% for x in values %}"
"{% ifchanged x %}"
"{% cycle cycler %}{{ cycler }}"
"{% else %}"
"{{ cycler }}"
"{% endifchanged %}"
"{% endfor %}"
})
def test_cycle29(self):
"""
A named {% cycle %} tag works inside an {% ifchanged %} block and a
{% for %} loop.
"""
output = self.engine.render_to_string('cycle29', {'values': [1, 2, 3, 4, 5, 6, 7, 8, 8, 8, 9, 9]})
self.assertEqual(output, 'bcabcabcccaa')
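        # Editor's trace of the expected output: the cycle only advances
        # inside {% ifchanged %}, so 1..7 yield 'bcabcab', the repeated 8s
        # re-emit 'c' twice more ('ccc'), and 9,9 yield 'aa'.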
@setup({
'cycle30': "{% cycle 'a' 'b' 'c' as cycler silent %}"
"{% for x in values %}"
"{% with doesnothing=irrelevant %}"
"{% ifchanged x %}"
"{% cycle cycler %}{{ cycler }}"
"{% else %}"
"{{ cycler }}"
"{% endifchanged %}"
"{% endwith %}"
"{% endfor %}"})
def test_cycle30(self):
"""
A {% with %} tag shouldn't reset the {% cycle %} variable.
"""
output = self.engine.render_to_string(
'cycle30', {
'irrelevant': 1,
'values': [1, 2, 3, 4, 5, 6, 7, 8, 8, 8, 9, 9]
})
self.assertEqual(output, 'bcabcabcccaa')
| bsd-3-clause |
lmtierney/watir-snake | tests/browser/elements/hn_tests.py | 1 | 2878 | from re import compile
import pytest
from nerodia.exception import UnknownObjectException
pytestmark = pytest.mark.page('non_control_elements.html')
class TestHnExist(object):
def test_returns_true_if_the_element_exists(self, browser):
assert browser.h1(id='header1').exists is True
assert browser.h2(id=compile(r'header2')).exists is True
assert browser.h3(text='Header 3').exists is True
assert browser.h4(text=compile(r'Header 4')).exists is True
assert browser.h5(index=0).exists is True
assert browser.h6(index=0).exists is True
assert browser.h1(xpath="//h1[@id='first_header']").exists is True
def test_returns_the_first_h1_if_given_no_args(self, browser):
assert browser.h1().exists
def test_returns_false_if_the_element_does_not_exist(self, browser):
assert browser.h1(id='no_such_id').exists is False
assert browser.h1(id=compile(r'no_such_id')).exists is False
assert browser.h1(text='no_such_text').exists is False
assert browser.h1(text=compile(r'no_such_text 1')).exists is False
assert browser.h1(index=1337).exists is False
assert browser.h1(xpath="//h1[@id='no_such_id']").exists is False
def test_raises_correct_exception_when_what_argument_is_invalid(self, browser):
with pytest.raises(TypeError):
browser.h1(id=3.14).exists
class TestHnAttributes(object):
# id
def test_returns_the_id_if_the_element_exists_and_has_id(self, browser):
assert browser.h1(index=0).id == 'first_header'
def test_returns_an_empty_string_if_the_element_exists_and_the_id_doesnt(self, browser):
assert browser.h3(index=0).id == ''
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_id_if_the_element_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.h1(id='no_such_id').id
with pytest.raises(UnknownObjectException):
browser.h1(index=1337).id
# text
def test_returns_the_text_if_the_element_exists_and_has_name(self, browser):
assert browser.h1(index=1).text == 'Header 1'
def test_returns_an_empty_string_if_the_element_exists_and_the_text_doesnt(self, browser):
assert browser.h6(id='empty_header').text == ''
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_text_if_the_element_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.h1(id='no_such_id').text
with pytest.raises(UnknownObjectException):
browser.h1(xpath="//h1[@id='no_such_id']").text
def test_finds_all_attribute_methods(browser):
assert hasattr(browser.h1(index=1), 'class_name')
assert hasattr(browser.h1(index=1), 'id')
assert hasattr(browser.h1(index=1), 'text')
| mit |
eemirtekin/edx-platform | common/djangoapps/xblock_django/user_service.py | 104 | 2842 | """
Support for converting a django user to an XBlock user
"""
from django.contrib.auth.models import User
from opaque_keys.edx.keys import CourseKey
from xblock.reference.user_service import XBlockUser, UserService
from student.models import anonymous_id_for_user, get_user_by_username_or_email
ATTR_KEY_IS_AUTHENTICATED = 'edx-platform.is_authenticated'
ATTR_KEY_USER_ID = 'edx-platform.user_id'
ATTR_KEY_USERNAME = 'edx-platform.username'
ATTR_KEY_USER_IS_STAFF = 'edx-platform.user_is_staff'
class DjangoXBlockUserService(UserService):
"""
A user service that converts Django users to XBlockUser
"""
def __init__(self, django_user, **kwargs):
super(DjangoXBlockUserService, self).__init__(**kwargs)
self._django_user = django_user
if self._django_user:
self._django_user.user_is_staff = kwargs.get('user_is_staff', False)
def get_current_user(self):
"""
Returns the currently-logged in user, as an instance of XBlockUser
"""
return self._convert_django_user_to_xblock_user(self._django_user)
def get_anonymous_user_id(self, username, course_id):
"""
Get the anonymous user id for a user.
Args:
username(str): username of a user.
course_id(str): course id of particular course.
Returns:
A unique anonymous_user_id for (user, course) pair.
None for Non-staff users.
"""
if not self.get_current_user().opt_attrs.get(ATTR_KEY_USER_IS_STAFF):
return None
try:
user = get_user_by_username_or_email(username_or_email=username)
except User.DoesNotExist:
return None
course_id = CourseKey.from_string(course_id)
return anonymous_id_for_user(user=user, course_id=course_id, save=False)
def _convert_django_user_to_xblock_user(self, django_user):
"""
A function that returns an XBlockUser from the current Django request.user
"""
xblock_user = XBlockUser(is_current_user=True)
if django_user is not None and django_user.is_authenticated():
# This full_name is dependent on edx-platform's profile implementation
full_name = getattr(django_user.profile, 'name') if hasattr(django_user, 'profile') else None
xblock_user.full_name = full_name
xblock_user.emails = [django_user.email]
xblock_user.opt_attrs[ATTR_KEY_IS_AUTHENTICATED] = True
xblock_user.opt_attrs[ATTR_KEY_USER_ID] = django_user.id
xblock_user.opt_attrs[ATTR_KEY_USERNAME] = django_user.username
xblock_user.opt_attrs[ATTR_KEY_USER_IS_STAFF] = django_user.user_is_staff
else:
xblock_user.opt_attrs[ATTR_KEY_IS_AUTHENTICATED] = False
return xblock_user
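# Illustrative usage (editor's sketch; the request wiring is assumed, not
# part of this module):
#
#     service = DjangoXBlockUserService(request.user, user_is_staff=True)
#     xb_user = service.get_current_user()
#     if xb_user.opt_attrs.get(ATTR_KEY_IS_AUTHENTICATED):
#         username = xb_user.opt_attrs[ATTR_KEY_USERNAME]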
| agpl-3.0 |
aayushidwivedi01/spark-tk | regression-tests/sparktkregtests/testcases/frames/lda_groupby_flow_test.py | 11 | 3240 | # vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Sample LDA/Groupby example"""
import unittest
from sparktkregtests.lib import sparktk_test
import numpy
class LDAExample(sparktk_test.SparkTKTestCase):
def test_lda_example(self):
"""LDA demo from examples directory"""
# this is a full worked example of lda and groupby
# with known correct values
data = [['nytimes', 'harry', 3], ['nytimes', 'economy', 35], ['nytimes', 'jobs', 40], ['nytimes', 'magic', 1],
['nytimes', 'realestate', 15], ['nytimes', 'movies', 6], ['economist', 'economy', 50],
['economist', 'jobs', 35], ['economist', 'realestate', 20], ['economist', 'movies', 1],
['economist', 'harry', 1], ['economist', 'magic', 1], ['harrypotter', 'harry', 40],
['harrypotter', 'magic', 30], ['harrypotter', 'chamber', 20], ['harrypotter', 'secrets', 30]]
frame = self.context.frame.create(
data,
schema=[('doc_id', str),
('word_id', str),
('word_count', long)])
model = self.context.models.clustering.lda.train(
frame, "doc_id", "word_id", "word_count", max_iterations=3, num_topics=2)
doc_results = model.topics_given_doc_frame
word_results = model.word_given_topics_frame
doc_results.rename_columns({'topic_probabilities': 'lda_results_doc'})
word_results.rename_columns(
{'topic_probabilities': 'lda_results_word'})
frame = frame.join_left(
doc_results, left_on="doc_id", right_on="doc_id")
frame = frame.join_left(
word_results, left_on="word_id", right_on="word_id")
# similar to calling predict on a model
frame.dot_product(
['lda_results_doc'], ['lda_results_word'], 'lda_score')
word_hist = frame.histogram('word_count', 4)
lda_hist = frame.histogram('lda_score', 2)
group_frame = frame.group_by(
'word_id_L',
{'word_count': self.context.agg.histogram(
cutoffs=word_hist.cutoffs,
include_lowest=True,
strict_binning=False),
'lda_score': self.context.agg.histogram(lda_hist.cutoffs)})
pandas = group_frame.to_pandas()
for (index, row) in pandas.iterrows():
if str(row["word_id_L"]) == "magic":
numpy.testing.assert_equal(list(row["word_count_HISTOGRAM"]), [float(2.0/3.0), 0, float(1.0/3.0), 0])
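        # Editor's note: 'magic' appears with word_count 1, 1 and 30; with
        # four equal-width bins over word_count (1..50) the two 1s land in
        # bin 0 and the 30 in bin 2, giving the normalized
        # [2/3, 0, 1/3, 0] expected above.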
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
Work4Labs/lettuce | tests/integration/lib/Django-1.3/django/contrib/localflavor/tr/forms.py | 159 | 3430 | """
TR-specific Form helpers
"""
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select, CharField
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
import re
phone_digits_re = re.compile(r'^(\+90|0)? ?(([1-9]\d{2})|\([1-9]\d{2}\)) ?([2-9]\d{2} ?\d{2} ?\d{2})$')
class TRPostalCodeField(RegexField):
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XXXXX.'),
}
def __init__(self, *args, **kwargs):
super(TRPostalCodeField, self).__init__(r'^\d{5}$',
max_length=5, min_length=5, *args, **kwargs)
def clean(self, value):
value = super(TRPostalCodeField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if len(value) != 5:
raise ValidationError(self.error_messages['invalid'])
province_code = int(value[:2])
if province_code == 0 or province_code > 81:
raise ValidationError(self.error_messages['invalid'])
return value
class TRPhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Phone numbers must be in 0XXX XXX XXXX format.'),
}
def clean(self, value):
super(TRPhoneNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
        value = re.sub(r'(\(|\)|\s+)', '', smart_unicode(value))
m = phone_digits_re.search(value)
if m:
return u'%s%s' % (m.group(2), m.group(4))
raise ValidationError(self.error_messages['invalid'])
class TRIdentificationNumberField(Field):
"""
A Turkey Identification Number number.
See: http://tr.wikipedia.org/wiki/T%C3%BCrkiye_Cumhuriyeti_Kimlik_Numaras%C4%B1
Checks the following rules to determine whether the number is valid:
* The number is 11-digits.
* First digit is not 0.
* Conforms to the following two formula:
(sum(1st, 3rd, 5th, 7th, 9th)*7 - sum(2nd,4th,6th,8th)) % 10 = 10th digit
sum(1st to 10th) % 10 = 11th digit
"""
default_error_messages = {
'invalid': _(u'Enter a valid Turkish Identification number.'),
'not_11': _(u'Turkish Identification number must be 11 digits.'),
}
def clean(self, value):
super(TRIdentificationNumberField, self).clean(value)
if value in EMPTY_VALUES:
return u''
if len(value) != 11:
raise ValidationError(self.error_messages['not_11'])
if not re.match(r'^\d{11}$', value):
raise ValidationError(self.error_messages['invalid'])
if int(value[0]) == 0:
raise ValidationError(self.error_messages['invalid'])
chksum = (sum([int(value[i]) for i in xrange(0,9,2)])*7-
sum([int(value[i]) for i in xrange(1,9,2)])) % 10
if chksum != int(value[9]) or \
(sum([int(value[i]) for i in xrange(10)]) % 10) != int(value[10]):
raise ValidationError(self.error_messages['invalid'])
return value
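    # Worked example (editor's sketch) with the sample value 10000000146:
    # the odd-position digits sum to 2 and the even-position digits to 0, so
    # (2*7 - 0) % 10 == 4 matches the 10th digit, and the first ten digits
    # sum to 6, matching the 11th.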
class TRProvinceSelect(Select):
"""
A Select widget that uses a list of provinces in Turkey as its choices.
"""
def __init__(self, attrs=None):
from tr_provinces import PROVINCE_CHOICES
super(TRProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
| gpl-3.0 |
mattgiguere/scikit-learn | sklearn/utils/arpack.py | 265 | 64837 | """
This contains a copy of the future version of
scipy.sparse.linalg.eigen.arpack.eigsh
It's an upgraded wrapper of the ARPACK library which
allows the use of shift-invert mode for symmetric matrices.
Find a few eigenvectors and eigenvalues of a matrix.
Uses ARPACK: http://www.caam.rice.edu/software/ARPACK/
"""
# Wrapper implementation notes
#
# ARPACK Entry Points
# -------------------
# The entry points to ARPACK are
# - (s,d)seupd : single and double precision symmetric matrix
# - (s,d,c,z)neupd: single,double,complex,double complex general matrix
# This wrapper puts the *neupd (general matrix) interfaces in eigs()
# and the *seupd (symmetric matrix) in eigsh().
# There is no Hermitian complex/double complex interface.
# To find eigenvalues of a Hermitian matrix you
# must use eigs() and not eigsh()
# It might be desirable to handle the Hermitian case differently
# and, for example, return real eigenvalues.
# Number of eigenvalues returned and complex eigenvalues
# ------------------------------------------------------
# The ARPACK nonsymmetric real and double interface (s,d)naupd return
# eigenvalues and eigenvectors in real (float,double) arrays.
# Since the eigenvalues and eigenvectors are, in general, complex
# ARPACK puts the real and imaginary parts in consecutive entries
# in real-valued arrays. This wrapper puts the real entries
# into complex data types and attempts to return the requested eigenvalues
# and eigenvectors.
# Solver modes
# ------------
# ARPACK can handle shifted and shift-invert computations
# for eigenvalues by providing a shift (sigma) and a solver.
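# Illustrative use of the exported interface (editor's sketch; `eigsh` is
# assembled from the machinery below):
#
#     from scipy.sparse import diags
#     A = diags([1., 2., 3., 4., 5.])
#     vals, vecs = eigsh(A, k=2, sigma=2.1)  # shift-invert mode: the two
#     # eigenvalues nearest 2.1, here approximately [2., 3.]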
__docformat__ = "restructuredtext en"
__all__ = ['eigs', 'eigsh', 'svds', 'ArpackError', 'ArpackNoConvergence']
import warnings
from scipy.sparse.linalg.eigen.arpack import _arpack
import numpy as np
from scipy.sparse.linalg.interface import aslinearoperator, LinearOperator
from scipy.sparse import identity, isspmatrix, isspmatrix_csr
from scipy.linalg import lu_factor, lu_solve
from scipy.sparse.sputils import isdense
from scipy.sparse.linalg import gmres, splu
import scipy
from distutils.version import LooseVersion
_type_conv = {'f': 's', 'd': 'd', 'F': 'c', 'D': 'z'}
_ndigits = {'f': 5, 'd': 12, 'F': 5, 'D': 12}
DNAUPD_ERRORS = {
0: "Normal exit.",
1: "Maximum number of iterations taken. "
"All possible eigenvalues of OP has been found. IPARAM(5) "
"returns the number of wanted converged Ritz values.",
2: "No longer an informational error. Deprecated starting "
"with release 2 of ARPACK.",
3: "No shifts could be applied during a cycle of the "
"Implicitly restarted Arnoldi iteration. One possibility "
"is to increase the size of NCV relative to NEV. ",
-1: "N must be positive.",
-2: "NEV must be positive.",
-3: "NCV-NEV >= 2 and less than or equal to N.",
-4: "The maximum number of Arnoldi update iterations allowed "
"must be greater than zero.",
-5: " WHICH must be one of 'LM', 'SM', 'LR', 'SR', 'LI', 'SI'",
-6: "BMAT must be one of 'I' or 'G'.",
-7: "Length of private work array WORKL is not sufficient.",
-8: "Error return from LAPACK eigenvalue calculation;",
-9: "Starting vector is zero.",
-10: "IPARAM(7) must be 1,2,3,4.",
-11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.",
-12: "IPARAM(1) must be equal to 0 or 1.",
-13: "NEV and WHICH = 'BE' are incompatible.",
-9999: "Could not build an Arnoldi factorization. "
"IPARAM(5) returns the size of the current Arnoldi "
"factorization. The user is advised to check that "
"enough workspace and array storage has been allocated."
}
SNAUPD_ERRORS = DNAUPD_ERRORS
ZNAUPD_ERRORS = DNAUPD_ERRORS.copy()
ZNAUPD_ERRORS[-10] = "IPARAM(7) must be 1,2,3."
CNAUPD_ERRORS = ZNAUPD_ERRORS
DSAUPD_ERRORS = {
0: "Normal exit.",
1: "Maximum number of iterations taken. "
"All possible eigenvalues of OP has been found.",
2: "No longer an informational error. Deprecated starting with "
"release 2 of ARPACK.",
3: "No shifts could be applied during a cycle of the Implicitly "
"restarted Arnoldi iteration. One possibility is to increase "
"the size of NCV relative to NEV. ",
-1: "N must be positive.",
-2: "NEV must be positive.",
-3: "NCV must be greater than NEV and less than or equal to N.",
-4: "The maximum number of Arnoldi update iterations allowed "
"must be greater than zero.",
-5: "WHICH must be one of 'LM', 'SM', 'LA', 'SA' or 'BE'.",
-6: "BMAT must be one of 'I' or 'G'.",
-7: "Length of private work array WORKL is not sufficient.",
-8: "Error return from trid. eigenvalue calculation; "
"Informational error from LAPACK routine dsteqr .",
-9: "Starting vector is zero.",
-10: "IPARAM(7) must be 1,2,3,4,5.",
-11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.",
-12: "IPARAM(1) must be equal to 0 or 1.",
-13: "NEV and WHICH = 'BE' are incompatible. ",
-9999: "Could not build an Arnoldi factorization. "
"IPARAM(5) returns the size of the current Arnoldi "
"factorization. The user is advised to check that "
"enough workspace and array storage has been allocated.",
}
SSAUPD_ERRORS = DSAUPD_ERRORS
DNEUPD_ERRORS = {
0: "Normal exit.",
1: "The Schur form computed by LAPACK routine dlahqr "
"could not be reordered by LAPACK routine dtrsen. "
"Re-enter subroutine dneupd with IPARAM(5)NCV and "
"increase the size of the arrays DR and DI to have "
"dimension at least dimension NCV and allocate at least NCV "
"columns for Z. NOTE: Not necessary if Z and V share "
"the same space. Please notify the authors if this error "
"occurs.",
-1: "N must be positive.",
-2: "NEV must be positive.",
-3: "NCV-NEV >= 2 and less than or equal to N.",
-5: "WHICH must be one of 'LM', 'SM', 'LR', 'SR', 'LI', 'SI'",
-6: "BMAT must be one of 'I' or 'G'.",
-7: "Length of private work WORKL array is not sufficient.",
-8: "Error return from calculation of a real Schur form. "
"Informational error from LAPACK routine dlahqr .",
-9: "Error return from calculation of eigenvectors. "
"Informational error from LAPACK routine dtrevc.",
-10: "IPARAM(7) must be 1,2,3,4.",
-11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.",
-12: "HOWMNY = 'S' not yet implemented",
-13: "HOWMNY must be one of 'A' or 'P' if RVEC = .true.",
-14: "DNAUPD did not find any eigenvalues to sufficient "
"accuracy.",
-15: "DNEUPD got a different count of the number of converged "
"Ritz values than DNAUPD got. This indicates the user "
"probably made an error in passing data from DNAUPD to "
"DNEUPD or that the data was modified before entering "
"DNEUPD",
}
SNEUPD_ERRORS = DNEUPD_ERRORS.copy()
SNEUPD_ERRORS[1] = ("The Schur form computed by LAPACK routine slahqr "
"could not be reordered by LAPACK routine strsen . "
"Re-enter subroutine dneupd with IPARAM(5)=NCV and "
"increase the size of the arrays DR and DI to have "
"dimension at least dimension NCV and allocate at least "
"NCV columns for Z. NOTE: Not necessary if Z and V share "
"the same space. Please notify the authors if this error "
"occurs.")
SNEUPD_ERRORS[-14] = ("SNAUPD did not find any eigenvalues to sufficient "
"accuracy.")
SNEUPD_ERRORS[-15] = ("SNEUPD got a different count of the number of "
"converged Ritz values than SNAUPD got. This indicates "
"the user probably made an error in passing data from "
"SNAUPD to SNEUPD or that the data was modified before "
"entering SNEUPD")
ZNEUPD_ERRORS = {0: "Normal exit.",
1: "The Schur form computed by LAPACK routine csheqr "
"could not be reordered by LAPACK routine ztrsen. "
"Re-enter subroutine zneupd with IPARAM(5)=NCV and "
"increase the size of the array D to have "
"dimension at least dimension NCV and allocate at least "
"NCV columns for Z. NOTE: Not necessary if Z and V share "
"the same space. Please notify the authors if this error "
"occurs.",
-1: "N must be positive.",
-2: "NEV must be positive.",
-3: "NCV-NEV >= 1 and less than or equal to N.",
-5: "WHICH must be one of 'LM', 'SM', 'LR', 'SR', 'LI', 'SI'",
-6: "BMAT must be one of 'I' or 'G'.",
-7: "Length of private work WORKL array is not sufficient.",
-8: "Error return from LAPACK eigenvalue calculation. "
"This should never happened.",
-9: "Error return from calculation of eigenvectors. "
"Informational error from LAPACK routine ztrevc.",
-10: "IPARAM(7) must be 1,2,3",
-11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.",
-12: "HOWMNY = 'S' not yet implemented",
-13: "HOWMNY must be one of 'A' or 'P' if RVEC = .true.",
-14: "ZNAUPD did not find any eigenvalues to sufficient "
"accuracy.",
-15: "ZNEUPD got a different count of the number of "
"converged Ritz values than ZNAUPD got. This "
"indicates the user probably made an error in passing "
"data from ZNAUPD to ZNEUPD or that the data was "
"modified before entering ZNEUPD"}
CNEUPD_ERRORS = ZNEUPD_ERRORS.copy()
CNEUPD_ERRORS[-14] = ("CNAUPD did not find any eigenvalues to sufficient "
"accuracy.")
CNEUPD_ERRORS[-15] = ("CNEUPD got a different count of the number of "
"converged Ritz values than CNAUPD got. This indicates "
"the user probably made an error in passing data from "
"CNAUPD to CNEUPD or that the data was modified before "
"entering CNEUPD")
DSEUPD_ERRORS = {
0: "Normal exit.",
-1: "N must be positive.",
-2: "NEV must be positive.",
-3: "NCV must be greater than NEV and less than or equal to N.",
-5: "WHICH must be one of 'LM', 'SM', 'LA', 'SA' or 'BE'.",
-6: "BMAT must be one of 'I' or 'G'.",
-7: "Length of private work WORKL array is not sufficient.",
-8: ("Error return from trid. eigenvalue calculation; "
"Information error from LAPACK routine dsteqr."),
-9: "Starting vector is zero.",
-10: "IPARAM(7) must be 1,2,3,4,5.",
-11: "IPARAM(7) = 1 and BMAT = 'G' are incompatible.",
-12: "NEV and WHICH = 'BE' are incompatible.",
-14: "DSAUPD did not find any eigenvalues to sufficient accuracy.",
-15: "HOWMNY must be one of 'A' or 'S' if RVEC = .true.",
-16: "HOWMNY = 'S' not yet implemented",
-17: ("DSEUPD got a different count of the number of converged "
"Ritz values than DSAUPD got. This indicates the user "
"probably made an error in passing data from DSAUPD to "
"DSEUPD or that the data was modified before entering "
"DSEUPD.")
}
SSEUPD_ERRORS = DSEUPD_ERRORS.copy()
SSEUPD_ERRORS[-14] = ("SSAUPD did not find any eigenvalues "
"to sufficient accuracy.")
SSEUPD_ERRORS[-17] = ("SSEUPD got a different count of the number of "
"converged "
"Ritz values than SSAUPD got. This indicates the user "
"probably made an error in passing data from SSAUPD to "
"SSEUPD or that the data was modified before entering "
"SSEUPD.")
_SAUPD_ERRORS = {'d': DSAUPD_ERRORS,
's': SSAUPD_ERRORS}
_NAUPD_ERRORS = {'d': DNAUPD_ERRORS,
's': SNAUPD_ERRORS,
'z': ZNAUPD_ERRORS,
'c': CNAUPD_ERRORS}
_SEUPD_ERRORS = {'d': DSEUPD_ERRORS,
's': SSEUPD_ERRORS}
_NEUPD_ERRORS = {'d': DNEUPD_ERRORS,
's': SNEUPD_ERRORS,
'z': ZNEUPD_ERRORS,
'c': CNEUPD_ERRORS}
# accepted values of parameter WHICH in _SEUPD
_SEUPD_WHICH = ['LM', 'SM', 'LA', 'SA', 'BE']
# accepted values of parameter WHICH in _NAUPD
_NEUPD_WHICH = ['LM', 'SM', 'LR', 'SR', 'LI', 'SI']
class ArpackError(RuntimeError):
"""
ARPACK error
"""
def __init__(self, info, infodict=_NAUPD_ERRORS):
msg = infodict.get(info, "Unknown error")
RuntimeError.__init__(self, "ARPACK error %d: %s" % (info, msg))
class ArpackNoConvergence(ArpackError):
"""
ARPACK iteration did not converge
Attributes
----------
eigenvalues : ndarray
Partial result. Converged eigenvalues.
eigenvectors : ndarray
Partial result. Converged eigenvectors.
"""
def __init__(self, msg, eigenvalues, eigenvectors):
ArpackError.__init__(self, -1, {-1: msg})
self.eigenvalues = eigenvalues
self.eigenvectors = eigenvectors
class _ArpackParams(object):
def __init__(self, n, k, tp, mode=1, sigma=None,
ncv=None, v0=None, maxiter=None, which="LM", tol=0):
if k <= 0:
raise ValueError("k must be positive, k=%d" % k)
if maxiter is None:
maxiter = n * 10
if maxiter <= 0:
raise ValueError("maxiter must be positive, maxiter=%d" % maxiter)
if tp not in 'fdFD':
raise ValueError("matrix type must be 'f', 'd', 'F', or 'D'")
if v0 is not None:
# ARPACK overwrites its initial resid, make a copy
self.resid = np.array(v0, copy=True)
info = 1
else:
self.resid = np.zeros(n, tp)
info = 0
if sigma is None:
#sigma not used
self.sigma = 0
else:
self.sigma = sigma
if ncv is None:
ncv = 2 * k + 1
ncv = min(ncv, n)
self.v = np.zeros((n, ncv), tp) # holds Ritz vectors
self.iparam = np.zeros(11, "int")
# set solver mode and parameters
ishfts = 1
self.mode = mode
self.iparam[0] = ishfts
self.iparam[2] = maxiter
self.iparam[3] = 1
self.iparam[6] = mode
self.n = n
self.tol = tol
self.k = k
self.maxiter = maxiter
self.ncv = ncv
self.which = which
self.tp = tp
self.info = info
self.converged = False
self.ido = 0
def _raise_no_convergence(self):
msg = "No convergence (%d iterations, %d/%d eigenvectors converged)"
k_ok = self.iparam[4]
num_iter = self.iparam[2]
try:
ev, vec = self.extract(True)
except ArpackError as err:
msg = "%s [%s]" % (msg, err)
ev = np.zeros((0,))
vec = np.zeros((self.n, 0))
k_ok = 0
raise ArpackNoConvergence(msg % (num_iter, k_ok, self.k), ev, vec)
class _SymmetricArpackParams(_ArpackParams):
def __init__(self, n, k, tp, matvec, mode=1, M_matvec=None,
Minv_matvec=None, sigma=None,
ncv=None, v0=None, maxiter=None, which="LM", tol=0):
# The following modes are supported:
# mode = 1:
# Solve the standard eigenvalue problem:
# A*x = lambda*x :
# A - symmetric
# Arguments should be
# matvec = left multiplication by A
# M_matvec = None [not used]
# Minv_matvec = None [not used]
#
# mode = 2:
# Solve the general eigenvalue problem:
# A*x = lambda*M*x
# A - symmetric
# M - symmetric positive definite
# Arguments should be
# matvec = left multiplication by A
# M_matvec = left multiplication by M
# Minv_matvec = left multiplication by M^-1
#
# mode = 3:
# Solve the general eigenvalue problem in shift-invert mode:
# A*x = lambda*M*x
# A - symmetric
# M - symmetric positive semi-definite
# Arguments should be
# matvec = None [not used]
# M_matvec = left multiplication by M
# or None, if M is the identity
# Minv_matvec = left multiplication by [A-sigma*M]^-1
#
# mode = 4:
# Solve the general eigenvalue problem in Buckling mode:
# A*x = lambda*AG*x
# A - symmetric positive semi-definite
# AG - symmetric indefinite
# Arguments should be
# matvec = left multiplication by A
# M_matvec = None [not used]
# Minv_matvec = left multiplication by [A-sigma*AG]^-1
#
# mode = 5:
# Solve the general eigenvalue problem in Cayley-transformed mode:
# A*x = lambda*M*x
# A - symmetric
# M - symmetric positive semi-definite
# Arguments should be
# matvec = left multiplication by A
# M_matvec = left multiplication by M
# or None, if M is the identity
# Minv_matvec = left multiplication by [A-sigma*M]^-1
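        # Editor's note (sketch): via scipy's public eigsh these modes map
        # roughly to keyword combinations -- mode 1 is the plain call,
        # mode 2 adds M without sigma, and with sigma set, mode='normal',
        # 'buckling' and 'cayley' select modes 3, 4 and 5 respectively.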
if mode == 1:
if matvec is None:
raise ValueError("matvec must be specified for mode=1")
if M_matvec is not None:
raise ValueError("M_matvec cannot be specified for mode=1")
if Minv_matvec is not None:
raise ValueError("Minv_matvec cannot be specified for mode=1")
self.OP = matvec
self.B = lambda x: x
self.bmat = 'I'
elif mode == 2:
if matvec is None:
raise ValueError("matvec must be specified for mode=2")
if M_matvec is None:
raise ValueError("M_matvec must be specified for mode=2")
if Minv_matvec is None:
raise ValueError("Minv_matvec must be specified for mode=2")
self.OP = lambda x: Minv_matvec(matvec(x))
self.OPa = Minv_matvec
self.OPb = matvec
self.B = M_matvec
self.bmat = 'G'
elif mode == 3:
if matvec is not None:
raise ValueError("matvec must not be specified for mode=3")
if Minv_matvec is None:
raise ValueError("Minv_matvec must be specified for mode=3")
if M_matvec is None:
self.OP = Minv_matvec
self.OPa = Minv_matvec
self.B = lambda x: x
self.bmat = 'I'
else:
self.OP = lambda x: Minv_matvec(M_matvec(x))
self.OPa = Minv_matvec
self.B = M_matvec
self.bmat = 'G'
elif mode == 4:
if matvec is None:
raise ValueError("matvec must be specified for mode=4")
if M_matvec is not None:
raise ValueError("M_matvec must not be specified for mode=4")
if Minv_matvec is None:
raise ValueError("Minv_matvec must be specified for mode=4")
self.OPa = Minv_matvec
self.OP = lambda x: self.OPa(matvec(x))
self.B = matvec
self.bmat = 'G'
elif mode == 5:
if matvec is None:
raise ValueError("matvec must be specified for mode=5")
if Minv_matvec is None:
raise ValueError("Minv_matvec must be specified for mode=5")
self.OPa = Minv_matvec
self.A_matvec = matvec
if M_matvec is None:
self.OP = lambda x: Minv_matvec(matvec(x) + sigma * x)
self.B = lambda x: x
self.bmat = 'I'
else:
self.OP = lambda x: Minv_matvec(matvec(x)
+ sigma * M_matvec(x))
self.B = M_matvec
self.bmat = 'G'
else:
raise ValueError("mode=%i not implemented" % mode)
if which not in _SEUPD_WHICH:
raise ValueError("which must be one of %s"
% ' '.join(_SEUPD_WHICH))
if k >= n:
raise ValueError("k must be less than rank(A), k=%d" % k)
_ArpackParams.__init__(self, n, k, tp, mode, sigma,
ncv, v0, maxiter, which, tol)
if self.ncv > n or self.ncv <= k:
raise ValueError("ncv must be k<ncv<=n, ncv=%s" % self.ncv)
self.workd = np.zeros(3 * n, self.tp)
self.workl = np.zeros(self.ncv * (self.ncv + 8), self.tp)
ltr = _type_conv[self.tp]
if ltr not in ["s", "d"]:
raise ValueError("Input matrix is not real-valued.")
self._arpack_solver = _arpack.__dict__[ltr + 'saupd']
self._arpack_extract = _arpack.__dict__[ltr + 'seupd']
self.iterate_infodict = _SAUPD_ERRORS[ltr]
self.extract_infodict = _SEUPD_ERRORS[ltr]
self.ipntr = np.zeros(11, "int")
def iterate(self):
self.ido, self.resid, self.v, self.iparam, self.ipntr, self.info = \
self._arpack_solver(self.ido, self.bmat, self.which, self.k,
self.tol, self.resid, self.v, self.iparam,
self.ipntr, self.workd, self.workl, self.info)
xslice = slice(self.ipntr[0] - 1, self.ipntr[0] - 1 + self.n)
yslice = slice(self.ipntr[1] - 1, self.ipntr[1] - 1 + self.n)
if self.ido == -1:
# initialization
self.workd[yslice] = self.OP(self.workd[xslice])
elif self.ido == 1:
# compute y = Op*x
if self.mode == 1:
self.workd[yslice] = self.OP(self.workd[xslice])
elif self.mode == 2:
self.workd[xslice] = self.OPb(self.workd[xslice])
self.workd[yslice] = self.OPa(self.workd[xslice])
elif self.mode == 5:
Bxslice = slice(self.ipntr[2] - 1, self.ipntr[2] - 1 + self.n)
Ax = self.A_matvec(self.workd[xslice])
self.workd[yslice] = self.OPa(Ax + (self.sigma *
self.workd[Bxslice]))
else:
Bxslice = slice(self.ipntr[2] - 1, self.ipntr[2] - 1 + self.n)
self.workd[yslice] = self.OPa(self.workd[Bxslice])
elif self.ido == 2:
self.workd[yslice] = self.B(self.workd[xslice])
elif self.ido == 3:
raise ValueError("ARPACK requested user shifts. Assure ISHIFT==0")
else:
self.converged = True
if self.info == 0:
pass
elif self.info == 1:
self._raise_no_convergence()
else:
raise ArpackError(self.info, infodict=self.iterate_infodict)
def extract(self, return_eigenvectors):
rvec = return_eigenvectors
ierr = 0
howmny = 'A' # return all eigenvectors
sselect = np.zeros(self.ncv, 'int') # unused
d, z, ierr = self._arpack_extract(rvec, howmny, sselect, self.sigma,
self.bmat, self.which, self.k,
self.tol, self.resid, self.v,
self.iparam[0:7], self.ipntr,
self.workd[0:2 * self.n],
self.workl, ierr)
if ierr != 0:
raise ArpackError(ierr, infodict=self.extract_infodict)
k_ok = self.iparam[4]
d = d[:k_ok]
z = z[:, :k_ok]
if return_eigenvectors:
return d, z
else:
return d
class _UnsymmetricArpackParams(_ArpackParams):
def __init__(self, n, k, tp, matvec, mode=1, M_matvec=None,
Minv_matvec=None, sigma=None,
ncv=None, v0=None, maxiter=None, which="LM", tol=0):
# The following modes are supported:
# mode = 1:
# Solve the standard eigenvalue problem:
# A*x = lambda*x
# A - square matrix
# Arguments should be
# matvec = left multiplication by A
# M_matvec = None [not used]
# Minv_matvec = None [not used]
#
# mode = 2:
# Solve the generalized eigenvalue problem:
# A*x = lambda*M*x
# A - square matrix
# M - symmetric, positive semi-definite
# Arguments should be
# matvec = left multiplication by A
# M_matvec = left multiplication by M
# Minv_matvec = left multiplication by M^-1
#
# mode = 3,4:
# Solve the general eigenvalue problem in shift-invert mode:
# A*x = lambda*M*x
# A - square matrix
# M - symmetric, positive semi-definite
# Arguments should be
# matvec = None [not used]
# M_matvec = left multiplication by M
# or None, if M is the identity
# Minv_matvec = left multiplication by [A-sigma*M]^-1
# if A is real and mode==3, use the real part of Minv_matvec
# if A is real and mode==4, use the imag part of Minv_matvec
# if A is complex and mode==3,
# use real and imag parts of Minv_matvec
if mode == 1:
if matvec is None:
raise ValueError("matvec must be specified for mode=1")
if M_matvec is not None:
raise ValueError("M_matvec cannot be specified for mode=1")
if Minv_matvec is not None:
raise ValueError("Minv_matvec cannot be specified for mode=1")
self.OP = matvec
self.B = lambda x: x
self.bmat = 'I'
elif mode == 2:
if matvec is None:
raise ValueError("matvec must be specified for mode=2")
if M_matvec is None:
raise ValueError("M_matvec must be specified for mode=2")
if Minv_matvec is None:
raise ValueError("Minv_matvec must be specified for mode=2")
self.OP = lambda x: Minv_matvec(matvec(x))
self.OPa = Minv_matvec
self.OPb = matvec
self.B = M_matvec
self.bmat = 'G'
elif mode in (3, 4):
if matvec is None:
raise ValueError("matvec must be specified "
"for mode in (3,4)")
if Minv_matvec is None:
raise ValueError("Minv_matvec must be specified "
"for mode in (3,4)")
self.matvec = matvec
if tp in 'DF': # complex type
if mode == 3:
self.OPa = Minv_matvec
else:
raise ValueError("mode=4 invalid for complex A")
else: # real type
if mode == 3:
self.OPa = lambda x: np.real(Minv_matvec(x))
else:
self.OPa = lambda x: np.imag(Minv_matvec(x))
if M_matvec is None:
self.B = lambda x: x
self.bmat = 'I'
self.OP = self.OPa
else:
self.B = M_matvec
self.bmat = 'G'
self.OP = lambda x: self.OPa(M_matvec(x))
else:
raise ValueError("mode=%i not implemented" % mode)
if which not in _NEUPD_WHICH:
raise ValueError("Parameter which must be one of %s"
% ' '.join(_NEUPD_WHICH))
if k >= n - 1:
raise ValueError("k must be less than rank(A)-1, k=%d" % k)
_ArpackParams.__init__(self, n, k, tp, mode, sigma,
ncv, v0, maxiter, which, tol)
if self.ncv > n or self.ncv <= k + 1:
raise ValueError("ncv must be k+1<ncv<=n, ncv=%s" % self.ncv)
self.workd = np.zeros(3 * n, self.tp)
self.workl = np.zeros(3 * self.ncv * (self.ncv + 2), self.tp)
ltr = _type_conv[self.tp]
self._arpack_solver = _arpack.__dict__[ltr + 'naupd']
self._arpack_extract = _arpack.__dict__[ltr + 'neupd']
self.iterate_infodict = _NAUPD_ERRORS[ltr]
self.extract_infodict = _NEUPD_ERRORS[ltr]
self.ipntr = np.zeros(14, "int")
if self.tp in 'FD':
self.rwork = np.zeros(self.ncv, self.tp.lower())
else:
self.rwork = None
def iterate(self):
if self.tp in 'fd':
self.ido, self.resid, self.v, self.iparam, self.ipntr, self.info =\
self._arpack_solver(self.ido, self.bmat, self.which, self.k,
self.tol, self.resid, self.v, self.iparam,
self.ipntr, self.workd, self.workl,
self.info)
else:
self.ido, self.resid, self.v, self.iparam, self.ipntr, self.info =\
self._arpack_solver(self.ido, self.bmat, self.which, self.k,
self.tol, self.resid, self.v, self.iparam,
self.ipntr, self.workd, self.workl,
self.rwork, self.info)
xslice = slice(self.ipntr[0] - 1, self.ipntr[0] - 1 + self.n)
yslice = slice(self.ipntr[1] - 1, self.ipntr[1] - 1 + self.n)
if self.ido == -1:
# initialization
self.workd[yslice] = self.OP(self.workd[xslice])
elif self.ido == 1:
# compute y = Op*x
if self.mode in (1, 2):
self.workd[yslice] = self.OP(self.workd[xslice])
else:
Bxslice = slice(self.ipntr[2] - 1, self.ipntr[2] - 1 + self.n)
self.workd[yslice] = self.OPa(self.workd[Bxslice])
elif self.ido == 2:
self.workd[yslice] = self.B(self.workd[xslice])
elif self.ido == 3:
raise ValueError("ARPACK requested user shifts. Assure ISHIFT==0")
else:
self.converged = True
if self.info == 0:
pass
elif self.info == 1:
self._raise_no_convergence()
else:
raise ArpackError(self.info, infodict=self.iterate_infodict)
def extract(self, return_eigenvectors):
k, n = self.k, self.n
ierr = 0
howmny = 'A' # return all eigenvectors
sselect = np.zeros(self.ncv, 'int') # unused
sigmar = np.real(self.sigma)
sigmai = np.imag(self.sigma)
workev = np.zeros(3 * self.ncv, self.tp)
if self.tp in 'fd':
dr = np.zeros(k + 1, self.tp)
di = np.zeros(k + 1, self.tp)
zr = np.zeros((n, k + 1), self.tp)
dr, di, zr, ierr = \
self._arpack_extract(
return_eigenvectors, howmny, sselect, sigmar, sigmai,
workev, self.bmat, self.which, k, self.tol, self.resid,
self.v, self.iparam, self.ipntr, self.workd, self.workl,
self.info)
if ierr != 0:
raise ArpackError(ierr, infodict=self.extract_infodict)
nreturned = self.iparam[4] # number of good eigenvalues returned
# Build complex eigenvalues from real and imaginary parts
d = dr + 1.0j * di
# Arrange the eigenvectors: complex eigenvectors are stored as
# real,imaginary in consecutive columns
z = zr.astype(self.tp.upper())
# The ARPACK nonsymmetric real and double interface (s,d)naupd
# return eigenvalues and eigenvectors in real (float,double)
# arrays.
# Efficiency: this should check that return_eigenvectors == True
# before going through this construction.
if sigmai == 0:
i = 0
while i <= k:
# check if complex
if abs(d[i].imag) != 0:
# this is a complex conjugate pair with eigenvalues
# in consecutive columns
if i < k:
z[:, i] = zr[:, i] + 1.0j * zr[:, i + 1]
z[:, i + 1] = z[:, i].conjugate()
i += 1
else:
#last eigenvalue is complex: the imaginary part of
# the eigenvector has not been returned
#this can only happen if nreturned > k, so we'll
# throw out this case.
nreturned -= 1
i += 1
else:
# real matrix, mode 3 or 4, imag(sigma) is nonzero:
# see remark 3 in <s,d>neupd.f
# Build complex eigenvalues from real and imaginary parts
i = 0
while i <= k:
if abs(d[i].imag) == 0:
d[i] = np.dot(zr[:, i], self.matvec(zr[:, i]))
else:
if i < k:
z[:, i] = zr[:, i] + 1.0j * zr[:, i + 1]
z[:, i + 1] = z[:, i].conjugate()
d[i] = ((np.dot(zr[:, i],
self.matvec(zr[:, i]))
+ np.dot(zr[:, i + 1],
self.matvec(zr[:, i + 1])))
+ 1j * (np.dot(zr[:, i],
self.matvec(zr[:, i + 1]))
- np.dot(zr[:, i + 1],
self.matvec(zr[:, i]))))
d[i + 1] = d[i].conj()
i += 1
else:
#last eigenvalue is complex: the imaginary part of
# the eigenvector has not been returned
#this can only happen if nreturned > k, so we'll
# throw out this case.
nreturned -= 1
i += 1
# Now we have k+1 possible eigenvalues and eigenvectors
# Return the ones specified by the keyword "which"
if nreturned <= k:
            # we got as many eigenvalues as we wanted, or fewer
d = d[:nreturned]
z = z[:, :nreturned]
else:
# we got one extra eigenvalue (likely a cc pair, but which?)
# cut at approx precision for sorting
rd = np.round(d, decimals=_ndigits[self.tp])
if self.which in ['LR', 'SR']:
ind = np.argsort(rd.real)
elif self.which in ['LI', 'SI']:
# for LI,SI ARPACK returns largest,smallest
# abs(imaginary) why?
ind = np.argsort(abs(rd.imag))
else:
ind = np.argsort(abs(rd))
if self.which in ['LR', 'LM', 'LI']:
d = d[ind[-k:]]
z = z[:, ind[-k:]]
if self.which in ['SR', 'SM', 'SI']:
d = d[ind[:k]]
z = z[:, ind[:k]]
else:
# complex is so much simpler...
d, z, ierr =\
self._arpack_extract(
return_eigenvectors, howmny, sselect, self.sigma, workev,
self.bmat, self.which, k, self.tol, self.resid, self.v,
self.iparam, self.ipntr, self.workd, self.workl,
self.rwork, ierr)
if ierr != 0:
raise ArpackError(ierr, infodict=self.extract_infodict)
k_ok = self.iparam[4]
d = d[:k_ok]
z = z[:, :k_ok]
if return_eigenvectors:
return d, z
else:
return d
def _aslinearoperator_with_dtype(m):
m = aslinearoperator(m)
if not hasattr(m, 'dtype'):
x = np.zeros(m.shape[1])
m.dtype = (m * x).dtype
return m
class SpLuInv(LinearOperator):
"""
SpLuInv:
helper class to repeatedly solve M*x=b
       using a sparse LU-decomposition of M
"""
def __init__(self, M):
self.M_lu = splu(M)
LinearOperator.__init__(self, M.shape, self._matvec, dtype=M.dtype)
self.isreal = not np.issubdtype(self.dtype, np.complexfloating)
def _matvec(self, x):
# careful here: splu.solve will throw away imaginary
# part of x if M is real
if self.isreal and np.issubdtype(x.dtype, np.complexfloating):
return (self.M_lu.solve(np.real(x))
+ 1j * self.M_lu.solve(np.imag(x)))
else:
return self.M_lu.solve(x)
class LuInv(LinearOperator):
"""
LuInv:
helper class to repeatedly solve M*x=b
using an LU-decomposition of M
"""
def __init__(self, M):
self.M_lu = lu_factor(M)
LinearOperator.__init__(self, M.shape, self._matvec, dtype=M.dtype)
def _matvec(self, x):
return lu_solve(self.M_lu, x)
class IterInv(LinearOperator):
"""
IterInv:
helper class to repeatedly solve M*x=b
using an iterative method.
"""
def __init__(self, M, ifunc=gmres, tol=0):
if tol <= 0:
# when tol=0, ARPACK uses machine tolerance as calculated
# by LAPACK's _LAMCH function. We should match this
tol = np.finfo(M.dtype).eps
self.M = M
self.ifunc = ifunc
self.tol = tol
if hasattr(M, 'dtype'):
dtype = M.dtype
else:
x = np.zeros(M.shape[1])
dtype = (M * x).dtype
LinearOperator.__init__(self, M.shape, self._matvec, dtype=dtype)
def _matvec(self, x):
b, info = self.ifunc(self.M, x, tol=self.tol)
if info != 0:
raise ValueError("Error in inverting M: function "
"%s did not converge (info = %i)."
% (self.ifunc.__name__, info))
return b
class IterOpInv(LinearOperator):
"""
IterOpInv:
helper class to repeatedly solve [A-sigma*M]*x = b
using an iterative method
"""
def __init__(self, A, M, sigma, ifunc=gmres, tol=0):
if tol <= 0:
# when tol=0, ARPACK uses machine tolerance as calculated
# by LAPACK's _LAMCH function. We should match this
tol = np.finfo(A.dtype).eps
self.A = A
self.M = M
self.sigma = sigma
self.ifunc = ifunc
self.tol = tol
x = np.zeros(A.shape[1])
if M is None:
dtype = self.mult_func_M_None(x).dtype
self.OP = LinearOperator(self.A.shape,
self.mult_func_M_None,
dtype=dtype)
else:
dtype = self.mult_func(x).dtype
self.OP = LinearOperator(self.A.shape,
self.mult_func,
dtype=dtype)
LinearOperator.__init__(self, A.shape, self._matvec, dtype=dtype)
def mult_func(self, x):
return self.A.matvec(x) - self.sigma * self.M.matvec(x)
def mult_func_M_None(self, x):
return self.A.matvec(x) - self.sigma * x
def _matvec(self, x):
b, info = self.ifunc(self.OP, x, tol=self.tol)
if info != 0:
raise ValueError("Error in inverting [A-sigma*M]: function "
"%s did not converge (info = %i)."
% (self.ifunc.__name__, info))
return b
def get_inv_matvec(M, symmetric=False, tol=0):
if isdense(M):
return LuInv(M).matvec
elif isspmatrix(M):
if isspmatrix_csr(M) and symmetric:
M = M.T
return SpLuInv(M).matvec
else:
return IterInv(M, tol=tol).matvec
def get_OPinv_matvec(A, M, sigma, symmetric=False, tol=0):
if sigma == 0:
return get_inv_matvec(A, symmetric=symmetric, tol=tol)
if M is None:
#M is the identity matrix
if isdense(A):
if (np.issubdtype(A.dtype, np.complexfloating)
or np.imag(sigma) == 0):
A = np.copy(A)
else:
A = A + 0j
A.flat[::A.shape[1] + 1] -= sigma
return LuInv(A).matvec
elif isspmatrix(A):
A = A - sigma * identity(A.shape[0])
if symmetric and isspmatrix_csr(A):
A = A.T
return SpLuInv(A.tocsc()).matvec
else:
return IterOpInv(_aslinearoperator_with_dtype(A), M, sigma,
tol=tol).matvec
else:
if ((not isdense(A) and not isspmatrix(A)) or
(not isdense(M) and not isspmatrix(M))):
return IterOpInv(_aslinearoperator_with_dtype(A),
_aslinearoperator_with_dtype(M), sigma,
tol=tol).matvec
elif isdense(A) or isdense(M):
return LuInv(A - sigma * M).matvec
else:
OP = A - sigma * M
if symmetric and isspmatrix_csr(OP):
OP = OP.T
return SpLuInv(OP.tocsc()).matvec
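# A minimal usage sketch for get_OPinv_matvec: with M=None the returned
# callable applies [A - sigma*I]^-1, the shift-invert operator used by
# eigs/eigsh below. _demo_get_opinv_matvec is a hypothetical name.
def _demo_get_opinv_matvec():
    import numpy as np
    A = np.diag([1.0, 2.0, 3.0])
    opinv = get_OPinv_matvec(A, None, sigma=0.5)
    b = np.ones(3)
    assert np.allclose((A - 0.5 * np.eye(3)).dot(opinv(b)), b)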
def _eigs(A, k=6, M=None, sigma=None, which='LM', v0=None, ncv=None,
maxiter=None, tol=0, return_eigenvectors=True, Minv=None, OPinv=None,
OPpart=None):
"""
Find k eigenvalues and eigenvectors of the square matrix A.
Solves ``A * x[i] = w[i] * x[i]``, the standard eigenvalue problem
for w[i] eigenvalues with corresponding eigenvectors x[i].
If M is specified, solves ``A * x[i] = w[i] * M * x[i]``, the
generalized eigenvalue problem for w[i] eigenvalues
with corresponding eigenvectors x[i]
Parameters
----------
A : An N x N matrix, array, sparse matrix, or LinearOperator representing \
the operation A * x, where A is a real or complex square matrix.
k : int, default 6
The number of eigenvalues and eigenvectors desired.
`k` must be smaller than N. It is not possible to compute all
eigenvectors of a matrix.
return_eigenvectors : boolean, default True
Whether to return the eigenvectors along with the eigenvalues.
M : An N x N matrix, array, sparse matrix, or LinearOperator representing
the operation M*x for the generalized eigenvalue problem
``A * x = w * M * x``
M must represent a real symmetric matrix. For best results, M should
be of the same type as A. Additionally:
* If sigma==None, M is positive definite
* If sigma is specified, M is positive semi-definite
If sigma==None, eigs requires an operator to compute the solution
of the linear equation `M * x = b`. This is done internally via a
(sparse) LU decomposition for an explicit matrix M, or via an
iterative solver for a general linear operator. Alternatively,
the user can supply the matrix or operator Minv, which gives
x = Minv * b = M^-1 * b
sigma : real or complex
Find eigenvalues near sigma using shift-invert mode. This requires
an operator to compute the solution of the linear system
`[A - sigma * M] * x = b`, where M is the identity matrix if
unspecified. This is computed internally via a (sparse) LU
decomposition for explicit matrices A & M, or via an iterative
solver if either A or M is a general linear operator.
Alternatively, the user can supply the matrix or operator OPinv,
which gives x = OPinv * b = [A - sigma * M]^-1 * b.
For a real matrix A, shift-invert can either be done in imaginary
mode or real mode, specified by the parameter OPpart ('r' or 'i').
Note that when sigma is specified, the keyword 'which' (below)
refers to the shifted eigenvalues w'[i] where:
* If A is real and OPpart == 'r' (default),
w'[i] = 1/2 * [ 1/(w[i]-sigma) + 1/(w[i]-conj(sigma)) ]
* If A is real and OPpart == 'i',
w'[i] = 1/2i * [ 1/(w[i]-sigma) - 1/(w[i]-conj(sigma)) ]
* If A is complex,
w'[i] = 1/(w[i]-sigma)
v0 : array
Starting vector for iteration.
ncv : integer
The number of Lanczos vectors generated
`ncv` must be greater than `k`; it is recommended that ``ncv > 2*k``.
which : string ['LM' | 'SM' | 'LR' | 'SR' | 'LI' | 'SI']
Which `k` eigenvectors and eigenvalues to find:
- 'LM' : largest magnitude
- 'SM' : smallest magnitude
- 'LR' : largest real part
- 'SR' : smallest real part
- 'LI' : largest imaginary part
- 'SI' : smallest imaginary part
When sigma != None, 'which' refers to the shifted eigenvalues w'[i]
(see discussion in 'sigma', above). ARPACK is generally better
at finding large values than small values. If small eigenvalues are
desired, consider using shift-invert mode for better performance.
maxiter : integer
Maximum number of Arnoldi update iterations allowed
tol : float
Relative accuracy for eigenvalues (stopping criterion)
The default value of 0 implies machine precision.
Minv : N x N matrix, array, sparse matrix, or linear operator
See notes in M, above.
OPinv : N x N matrix, array, sparse matrix, or linear operator
See notes in sigma, above.
OPpart : 'r' or 'i'.
See notes in sigma, above
Returns
-------
w : array
Array of k eigenvalues.
v : array
An array of `k` eigenvectors.
``v[:, i]`` is the eigenvector corresponding to the eigenvalue w[i].
Raises
------
ArpackNoConvergence
When the requested convergence is not obtained.
The currently converged eigenvalues and eigenvectors can be found
as ``eigenvalues`` and ``eigenvectors`` attributes of the exception
object.
See Also
--------
eigsh : eigenvalues and eigenvectors for symmetric matrix A
svds : singular value decomposition for a matrix A
Examples
--------
Find 6 eigenvectors of the identity matrix:
>>> from sklearn.utils.arpack import eigs
>>> import numpy as np
>>> id = np.identity(13)
>>> vals, vecs = eigs(id, k=6)
>>> vals
array([ 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j])
>>> vecs.shape
(13, 6)
Notes
-----
This function is a wrapper to the ARPACK [1]_ SNEUPD, DNEUPD, CNEUPD,
and ZNEUPD functions, which use the Implicitly Restarted Arnoldi Method to
find the eigenvalues and eigenvectors [2]_.
References
----------
.. [1] ARPACK Software, http://www.caam.rice.edu/software/ARPACK/
.. [2] R. B. Lehoucq, D. C. Sorensen, and C. Yang, ARPACK USERS GUIDE:
Solution of Large Scale Eigenvalue Problems by Implicitly Restarted
Arnoldi Methods. SIAM, Philadelphia, PA, 1998.
"""
if A.shape[0] != A.shape[1]:
raise ValueError('expected square matrix (shape=%s)' % (A.shape,))
if M is not None:
if M.shape != A.shape:
raise ValueError('wrong M dimensions %s, should be %s'
% (M.shape, A.shape))
if np.dtype(M.dtype).char.lower() != np.dtype(A.dtype).char.lower():
warnings.warn('M does not have the same type precision as A. '
'This may adversely affect ARPACK convergence')
n = A.shape[0]
if k <= 0 or k >= n:
raise ValueError("k must be between 1 and rank(A)-1")
if sigma is None:
matvec = _aslinearoperator_with_dtype(A).matvec
if OPinv is not None:
raise ValueError("OPinv should not be specified "
"with sigma = None.")
if OPpart is not None:
raise ValueError("OPpart should not be specified with "
"sigma = None or complex A")
if M is None:
#standard eigenvalue problem
mode = 1
M_matvec = None
Minv_matvec = None
if Minv is not None:
raise ValueError("Minv should not be "
"specified with M = None.")
else:
#general eigenvalue problem
mode = 2
if Minv is None:
Minv_matvec = get_inv_matvec(M, symmetric=True, tol=tol)
else:
Minv = _aslinearoperator_with_dtype(Minv)
Minv_matvec = Minv.matvec
M_matvec = _aslinearoperator_with_dtype(M).matvec
else:
#sigma is not None: shift-invert mode
if np.issubdtype(A.dtype, np.complexfloating):
if OPpart is not None:
raise ValueError("OPpart should not be specified "
"with sigma=None or complex A")
mode = 3
elif OPpart is None or OPpart.lower() == 'r':
mode = 3
elif OPpart.lower() == 'i':
if np.imag(sigma) == 0:
raise ValueError("OPpart cannot be 'i' if sigma is real")
mode = 4
else:
raise ValueError("OPpart must be one of ('r','i')")
matvec = _aslinearoperator_with_dtype(A).matvec
if Minv is not None:
raise ValueError("Minv should not be specified when sigma is")
if OPinv is None:
Minv_matvec = get_OPinv_matvec(A, M, sigma,
symmetric=False, tol=tol)
else:
OPinv = _aslinearoperator_with_dtype(OPinv)
Minv_matvec = OPinv.matvec
if M is None:
M_matvec = None
else:
M_matvec = _aslinearoperator_with_dtype(M).matvec
params = _UnsymmetricArpackParams(n, k, A.dtype.char, matvec, mode,
M_matvec, Minv_matvec, sigma,
ncv, v0, maxiter, which, tol)
while not params.converged:
params.iterate()
return params.extract(return_eigenvectors)
def _eigsh(A, k=6, M=None, sigma=None, which='LM', v0=None, ncv=None,
maxiter=None, tol=0, return_eigenvectors=True, Minv=None,
OPinv=None, mode='normal'):
"""
Find k eigenvalues and eigenvectors of the real symmetric square matrix
or complex hermitian matrix A.
Solves ``A * x[i] = w[i] * x[i]``, the standard eigenvalue problem for
w[i] eigenvalues with corresponding eigenvectors x[i].
If M is specified, solves ``A * x[i] = w[i] * M * x[i]``, the
generalized eigenvalue problem for w[i] eigenvalues
with corresponding eigenvectors x[i]
Parameters
----------
A : An N x N matrix, array, sparse matrix, or LinearOperator representing
the operation A * x, where A is a real symmetric matrix
For buckling mode (see below) A must additionally be positive-definite
k : integer
The number of eigenvalues and eigenvectors desired.
`k` must be smaller than N. It is not possible to compute all
eigenvectors of a matrix.
M : An N x N matrix, array, sparse matrix, or linear operator representing
the operation M * x for the generalized eigenvalue problem
``A * x = w * M * x``.
M must represent a real, symmetric matrix. For best results, M should
be of the same type as A. Additionally:
* If sigma == None, M is symmetric positive definite
* If sigma is specified, M is symmetric positive semi-definite
* In buckling mode, M is symmetric indefinite.
If sigma == None, eigsh requires an operator to compute the solution
of the linear equation `M * x = b`. This is done internally via a
(sparse) LU decomposition for an explicit matrix M, or via an
iterative solver for a general linear operator. Alternatively,
the user can supply the matrix or operator Minv, which gives
x = Minv * b = M^-1 * b
sigma : real
Find eigenvalues near sigma using shift-invert mode. This requires
an operator to compute the solution of the linear system
`[A - sigma * M] x = b`, where M is the identity matrix if
unspecified. This is computed internally via a (sparse) LU
decomposition for explicit matrices A & M, or via an iterative
solver if either A or M is a general linear operator.
Alternatively, the user can supply the matrix or operator OPinv,
which gives x = OPinv * b = [A - sigma * M]^-1 * b.
Note that when sigma is specified, the keyword 'which' refers to
the shifted eigenvalues w'[i] where:
- if mode == 'normal',
w'[i] = 1 / (w[i] - sigma)
- if mode == 'cayley',
w'[i] = (w[i] + sigma) / (w[i] - sigma)
- if mode == 'buckling',
w'[i] = w[i] / (w[i] - sigma)
(see further discussion in 'mode' below)
v0 : array
Starting vector for iteration.
ncv : integer
The number of Lanczos vectors generated
ncv must be greater than k and smaller than n;
it is recommended that ncv > 2*k
which : string ['LM' | 'SM' | 'LA' | 'SA' | 'BE']
If A is a complex hermitian matrix, 'BE' is invalid.
Which `k` eigenvectors and eigenvalues to find
- 'LM' : Largest (in magnitude) eigenvalues
- 'SM' : Smallest (in magnitude) eigenvalues
- 'LA' : Largest (algebraic) eigenvalues
- 'SA' : Smallest (algebraic) eigenvalues
- 'BE' : Half (k/2) from each end of the spectrum
When k is odd, return one more (k/2+1) from the high end
When sigma != None, 'which' refers to the shifted eigenvalues w'[i]
(see discussion in 'sigma', above). ARPACK is generally better
at finding large values than small values. If small eigenvalues are
desired, consider using shift-invert mode for better performance.
maxiter : integer
Maximum number of Arnoldi update iterations allowed
tol : float
Relative accuracy for eigenvalues (stopping criterion).
The default value of 0 implies machine precision.
Minv : N x N matrix, array, sparse matrix, or LinearOperator
See notes in M, above
OPinv : N x N matrix, array, sparse matrix, or LinearOperator
See notes in sigma, above.
return_eigenvectors : boolean
Return eigenvectors (True) in addition to eigenvalues
mode : string ['normal' | 'buckling' | 'cayley']
Specify strategy to use for shift-invert mode. This argument applies
only for real-valued A and sigma != None. For shift-invert mode,
ARPACK internally solves the eigenvalue problem
``OP * x'[i] = w'[i] * B * x'[i]``
and transforms the resulting Ritz vectors x'[i] and Ritz values w'[i]
into the desired eigenvectors and eigenvalues of the problem
``A * x[i] = w[i] * M * x[i]``.
The modes are as follows:
- 'normal' : OP = [A - sigma * M]^-1 * M
B = M
w'[i] = 1 / (w[i] - sigma)
- 'buckling' : OP = [A - sigma * M]^-1 * A
B = A
w'[i] = w[i] / (w[i] - sigma)
- 'cayley' : OP = [A - sigma * M]^-1 * [A + sigma * M]
B = M
w'[i] = (w[i] + sigma) / (w[i] - sigma)
The choice of mode will affect which eigenvalues are selected by
the keyword 'which', and can also impact the stability of
convergence (see [2] for a discussion)
Returns
-------
w : array
Array of k eigenvalues
v : array
An array of k eigenvectors
``v[:, i]`` is the eigenvector corresponding to the eigenvalue w[i]
Raises
------
ArpackNoConvergence
When the requested convergence is not obtained.
The currently converged eigenvalues and eigenvectors can be found
as ``eigenvalues`` and ``eigenvectors`` attributes of the exception
object.
See Also
--------
eigs : eigenvalues and eigenvectors for a general (nonsymmetric) matrix A
svds : singular value decomposition for a matrix A
Notes
-----
This function is a wrapper to the ARPACK [1]_ SSEUPD and DSEUPD
functions, which use the Implicitly Restarted Lanczos Method to
find the eigenvalues and eigenvectors [2]_.
Examples
--------
>>> from sklearn.utils.arpack import eigsh
>>> import numpy as np
>>> id = np.identity(13)
>>> vals, vecs = eigsh(id, k=6)
>>> vals # doctest: +SKIP
array([ 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j, 1.+0.j])
>>> print(vecs.shape)
(13, 6)
References
----------
.. [1] ARPACK Software, http://www.caam.rice.edu/software/ARPACK/
.. [2] R. B. Lehoucq, D. C. Sorensen, and C. Yang, ARPACK USERS GUIDE:
Solution of Large Scale Eigenvalue Problems by Implicitly Restarted
Arnoldi Methods. SIAM, Philadelphia, PA, 1998.
"""
# complex hermitian matrices should be solved with eigs
if np.issubdtype(A.dtype, np.complexfloating):
if mode != 'normal':
raise ValueError("mode=%s cannot be used with "
"complex matrix A" % mode)
if which == 'BE':
raise ValueError("which='BE' cannot be used with complex matrix A")
elif which == 'LA':
which = 'LR'
elif which == 'SA':
which = 'SR'
ret = eigs(A, k, M=M, sigma=sigma, which=which, v0=v0,
ncv=ncv, maxiter=maxiter, tol=tol,
return_eigenvectors=return_eigenvectors, Minv=Minv,
OPinv=OPinv)
if return_eigenvectors:
return ret[0].real, ret[1]
else:
return ret.real
if A.shape[0] != A.shape[1]:
raise ValueError('expected square matrix (shape=%s)' % (A.shape,))
if M is not None:
if M.shape != A.shape:
raise ValueError('wrong M dimensions %s, should be %s'
% (M.shape, A.shape))
if np.dtype(M.dtype).char.lower() != np.dtype(A.dtype).char.lower():
warnings.warn('M does not have the same type precision as A. '
'This may adversely affect ARPACK convergence')
n = A.shape[0]
if k <= 0 or k >= n:
raise ValueError("k must be between 1 and rank(A)-1")
if sigma is None:
A = _aslinearoperator_with_dtype(A)
matvec = A.matvec
if OPinv is not None:
raise ValueError("OPinv should not be specified "
"with sigma = None.")
if M is None:
#standard eigenvalue problem
mode = 1
M_matvec = None
Minv_matvec = None
if Minv is not None:
raise ValueError("Minv should not be "
"specified with M = None.")
else:
#general eigenvalue problem
mode = 2
if Minv is None:
Minv_matvec = get_inv_matvec(M, symmetric=True, tol=tol)
else:
Minv = _aslinearoperator_with_dtype(Minv)
Minv_matvec = Minv.matvec
M_matvec = _aslinearoperator_with_dtype(M).matvec
else:
# sigma is not None: shift-invert mode
if Minv is not None:
raise ValueError("Minv should not be specified when sigma is")
# normal mode
if mode == 'normal':
mode = 3
matvec = None
if OPinv is None:
Minv_matvec = get_OPinv_matvec(A, M, sigma,
symmetric=True, tol=tol)
else:
OPinv = _aslinearoperator_with_dtype(OPinv)
Minv_matvec = OPinv.matvec
if M is None:
M_matvec = None
else:
M = _aslinearoperator_with_dtype(M)
M_matvec = M.matvec
# buckling mode
elif mode == 'buckling':
mode = 4
if OPinv is None:
Minv_matvec = get_OPinv_matvec(A, M, sigma,
symmetric=True, tol=tol)
else:
Minv_matvec = _aslinearoperator_with_dtype(OPinv).matvec
matvec = _aslinearoperator_with_dtype(A).matvec
M_matvec = None
# cayley-transform mode
elif mode == 'cayley':
mode = 5
matvec = _aslinearoperator_with_dtype(A).matvec
if OPinv is None:
Minv_matvec = get_OPinv_matvec(A, M, sigma,
symmetric=True, tol=tol)
else:
Minv_matvec = _aslinearoperator_with_dtype(OPinv).matvec
if M is None:
M_matvec = None
else:
M_matvec = _aslinearoperator_with_dtype(M).matvec
# unrecognized mode
else:
raise ValueError("unrecognized mode '%s'" % mode)
params = _SymmetricArpackParams(n, k, A.dtype.char, matvec, mode,
M_matvec, Minv_matvec, sigma,
ncv, v0, maxiter, which, tol)
while not params.converged:
params.iterate()
return params.extract(return_eigenvectors)
def _svds(A, k=6, ncv=None, tol=0):
"""Compute k singular values/vectors for a sparse matrix using ARPACK.
Parameters
----------
A : sparse matrix
Array to compute the SVD on
k : int, optional
Number of singular values and vectors to compute.
ncv : integer
The number of Lanczos vectors generated
ncv must be greater than k+1 and smaller than n;
it is recommended that ncv > 2*k
tol : float, optional
Tolerance for singular values. Zero (default) means machine precision.
Notes
-----
This is a naive implementation using an eigensolver on A.H * A or
A * A.H, depending on which one is more efficient.
"""
if not (isinstance(A, np.ndarray) or isspmatrix(A)):
A = np.asarray(A)
n, m = A.shape
if np.issubdtype(A.dtype, np.complexfloating):
herm = lambda x: x.T.conjugate()
eigensolver = eigs
else:
herm = lambda x: x.T
eigensolver = eigsh
if n > m:
X = A
XH = herm(A)
else:
XH = A
X = herm(A)
if hasattr(XH, 'dot'):
def matvec_XH_X(x):
return XH.dot(X.dot(x))
else:
def matvec_XH_X(x):
return np.dot(XH, np.dot(X, x))
XH_X = LinearOperator(matvec=matvec_XH_X, dtype=X.dtype,
shape=(X.shape[1], X.shape[1]))
# Ignore deprecation warnings here: dot on matrices is deprecated,
# but this code is a backport anyhow
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
eigvals, eigvec = eigensolver(XH_X, k=k, tol=tol ** 2)
s = np.sqrt(eigvals)
if n > m:
v = eigvec
if hasattr(X, 'dot'):
u = X.dot(v) / s
else:
u = np.dot(X, v) / s
vh = herm(v)
else:
u = eigvec
if hasattr(X, 'dot'):
vh = herm(X.dot(u) / s)
else:
vh = herm(np.dot(X, u) / s)
return u, s, vh
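# A minimal usage sketch for _svds: because it solves an eigenproblem on
# X.H * X (or X * X.H), the recovered singular values should agree with a
# dense SVD. _demo_svds is a hypothetical name.
def _demo_svds():
    import numpy as np
    rng = np.random.RandomState(0)
    A = rng.rand(9, 5)
    u, s, vh = _svds(A, k=3)
    top = np.linalg.svd(A, full_matrices=False)[1][:3]
    assert np.allclose(np.sort(s)[::-1], top, atol=1e-6)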
# check if backport is actually needed:
if scipy.version.version >= LooseVersion('0.10'):
from scipy.sparse.linalg import eigs, eigsh, svds
else:
eigs, eigsh, svds = _eigs, _eigsh, _svds
| bsd-3-clause |
slevenhagen/odoo-npg | addons/payment_ogone/controllers/main.py | 389 | 1179 | # -*- coding: utf-8 -*-
import logging
import pprint
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class OgoneController(http.Controller):
_accept_url = '/payment/ogone/test/accept'
_decline_url = '/payment/ogone/test/decline'
_exception_url = '/payment/ogone/test/exception'
_cancel_url = '/payment/ogone/test/cancel'
@http.route([
'/payment/ogone/accept', '/payment/ogone/test/accept',
'/payment/ogone/decline', '/payment/ogone/test/decline',
'/payment/ogone/exception', '/payment/ogone/test/exception',
'/payment/ogone/cancel', '/payment/ogone/test/cancel',
], type='http', auth='none')
def ogone_form_feedback(self, **post):
""" Ogone contacts using GET, at least for accept """
_logger.info('Ogone: entering form_feedback with post data %s', pprint.pformat(post)) # debug
cr, uid, context = request.cr, SUPERUSER_ID, request.context
request.registry['payment.transaction'].form_feedback(cr, uid, post, 'ogone', context=context)
return werkzeug.utils.redirect(post.pop('return_url', '/'))
| agpl-3.0 |
BMan-L/shadowsocks | shadowsocks/daemon.py | 386 | 5602 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import logging
import signal
import time
from shadowsocks import common, shell
# this module is ported from ShadowVPN daemon.c
def daemon_exec(config):
if 'daemon' in config:
if os.name != 'posix':
raise Exception('daemon mode is only supported on Unix')
command = config['daemon']
if not command:
command = 'start'
pid_file = config['pid-file']
log_file = config['log-file']
if command == 'start':
daemon_start(pid_file, log_file)
elif command == 'stop':
daemon_stop(pid_file)
# always exit after daemon_stop
sys.exit(0)
elif command == 'restart':
daemon_stop(pid_file)
daemon_start(pid_file, log_file)
else:
raise Exception('unsupported daemon command %s' % command)
def write_pid_file(pid_file, pid):
import fcntl
import stat
try:
fd = os.open(pid_file, os.O_RDWR | os.O_CREAT,
stat.S_IRUSR | stat.S_IWUSR)
except OSError as e:
shell.print_exception(e)
return -1
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
assert flags != -1
flags |= fcntl.FD_CLOEXEC
r = fcntl.fcntl(fd, fcntl.F_SETFD, flags)
assert r != -1
# There is no platform independent way to implement fcntl(fd, F_SETLK, &fl)
# via fcntl.fcntl. So use lockf instead
try:
fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 0, 0, os.SEEK_SET)
except IOError:
r = os.read(fd, 32)
if r:
logging.error('already started at pid %s' % common.to_str(r))
else:
logging.error('already started')
os.close(fd)
return -1
os.ftruncate(fd, 0)
os.write(fd, common.to_bytes(str(pid)))
return 0
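# A minimal usage sketch for write_pid_file: the pid is written under an
# exclusive, non-blocking fcntl lock, so a second daemon instance can detect
# that one is already running. POSIX-only; the temporary path and the demo
# name are made up for illustration.
def _demo_write_pid_file():
    import tempfile
    pid_file = os.path.join(tempfile.mkdtemp(), 'demo.pid')
    assert write_pid_file(pid_file, os.getpid()) == 0  # lock acquired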
def freopen(f, mode, stream):
oldf = open(f, mode)
oldfd = oldf.fileno()
newfd = stream.fileno()
os.close(newfd)
os.dup2(oldfd, newfd)
def daemon_start(pid_file, log_file):
def handle_exit(signum, _):
if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(1)
signal.signal(signal.SIGINT, handle_exit)
signal.signal(signal.SIGTERM, handle_exit)
# fork only once because we are sure parent will exit
pid = os.fork()
assert pid != -1
if pid > 0:
# parent waits for its child
time.sleep(5)
sys.exit(0)
# child signals its parent to exit
ppid = os.getppid()
pid = os.getpid()
if write_pid_file(pid_file, pid) != 0:
os.kill(ppid, signal.SIGINT)
sys.exit(1)
os.setsid()
signal.signal(signal.SIGHUP, signal.SIG_IGN)
print('started')
os.kill(ppid, signal.SIGTERM)
sys.stdin.close()
try:
freopen(log_file, 'a', sys.stdout)
freopen(log_file, 'a', sys.stderr)
except IOError as e:
shell.print_exception(e)
sys.exit(1)
def daemon_stop(pid_file):
import errno
try:
with open(pid_file) as f:
buf = f.read()
pid = common.to_str(buf)
if not buf:
    logging.error('not running')
    return
except IOError as e:
shell.print_exception(e)
if e.errno == errno.ENOENT:
# always exit 0 if we are sure daemon is not running
logging.error('not running')
return
sys.exit(1)
pid = int(pid)
if pid > 0:
try:
os.kill(pid, signal.SIGTERM)
except OSError as e:
if e.errno == errno.ESRCH:
logging.error('not running')
# always exit 0 if we are sure daemon is not running
return
shell.print_exception(e)
sys.exit(1)
else:
logging.error('pid is not positive: %d', pid)
# sleep for maximum 10s
for i in range(0, 200):
try:
# query for the pid
os.kill(pid, 0)
except OSError as e:
if e.errno == errno.ESRCH:
break
time.sleep(0.05)
else:
logging.error('timed out when stopping pid %d', pid)
sys.exit(1)
print('stopped')
os.unlink(pid_file)
def set_user(username):
if username is None:
return
import pwd
import grp
try:
pwrec = pwd.getpwnam(username)
except KeyError:
logging.error('user not found: %s' % username)
raise
user = pwrec[0]
uid = pwrec[2]
gid = pwrec[3]
cur_uid = os.getuid()
if uid == cur_uid:
return
if cur_uid != 0:
logging.error('can not set user as nonroot user')
# will raise later
# inspired by supervisor
if hasattr(os, 'setgroups'):
groups = [grprec[2] for grprec in grp.getgrall() if user in grprec[3]]
groups.insert(0, gid)
os.setgroups(groups)
os.setgid(gid)
os.setuid(uid)
| apache-2.0 |
willthames/ansible | lib/ansible/modules/network/a10/a10_server_axapi3.py | 54 | 9517 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage A10 Networks slb server objects
(c) 2014, Mischa Peters <[email protected]>, 2016, Eric Chou <[email protected]>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: a10_server_axapi3
version_added: 2.3
short_description: Manage A10 Networks AX/SoftAX/Thunder/vThunder devices
description:
- Manage SLB (Server Load Balancer) server objects on A10 Networks devices via aXAPIv3.
author: "Eric Chou (@ericchou) based on previous work by Mischa Peters (@mischapeters)"
extends_documentation_fragment: a10
options:
server_name:
description:
- The SLB (Server Load Balancer) server name.
required: true
aliases: ['server']
server_ip:
description:
- The SLB (Server Load Balancer) server IPv4 address.
required: true
aliases: ['ip', 'address']
server_status:
description:
- The SLB (Server Load Balancer) virtual server status.
required: false
default: enable
aliases: ['action']
choices: ['enable', 'disable']
server_ports:
description:
- A list of ports to create for the server. Each list item should be a dictionary which specifies the C(port:)
and C(protocol:).
required: false
default: null
operation:
description:
- Create, update, or delete the SLB server. For create and update operations, the IP address and server
name specified in the POST message are used. For delete operations, the server name in the request URI is used.
required: false
default: create
choices: ['create', 'update', 'delete']
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled devices using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
'''
RETURN = '''
#
'''
EXAMPLES = '''
# Create a new server
- a10_server:
host: a10.mydomain.com
username: myadmin
password: mypassword
server: test
server_ip: 1.1.1.100
validate_certs: false
server_status: enable
write_config: yes
operation: create
server_ports:
- port-number: 8080
protocol: tcp
action: enable
- port-number: 8443
protocol: TCP
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import url_argument_spec
from ansible.module_utils.a10 import axapi_call_v3, a10_argument_spec, axapi_authenticate_v3, axapi_failure
from ansible.module_utils.a10 import AXAPI_PORT_PROTOCOLS
VALID_PORT_FIELDS = ['port-number', 'protocol', 'action']
def validate_ports(module, ports):
for item in ports:
for key in item:
if key not in VALID_PORT_FIELDS:
module.fail_json(msg="invalid port field (%s), must be one of: %s" % (key, ','.join(VALID_PORT_FIELDS)))
# validate the port number is present and an integer
if 'port-number' in item:
try:
item['port-number'] = int(item['port-number'])
except:
module.fail_json(msg="port-number entries in the port definitions must be integers")
else:
module.fail_json(msg="port definitions must define the port-number field")
# validate the port protocol is present, no need to convert to the internal API integer value in v3
if 'protocol' in item:
protocol = item['protocol']
if not protocol:
module.fail_json(msg="invalid port protocol, must be one of: %s" % ','.join(AXAPI_PORT_PROTOCOLS))
else:
item['protocol'] = protocol
else:
module.fail_json(msg="port definitions must define the port protocol (%s)" % ','.join(AXAPI_PORT_PROTOCOLS))
# 'status' is 'action' in AXAPIv3
# no need to convert the status, a.k.a action, to the internal API integer value in v3
# action is either enabled or disabled
if 'action' in item:
action = item['action']
if action not in ['enable', 'disable']:
module.fail_json(msg="server action must be enable or disable")
else:
item['action'] = 'enable'
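# A minimal usage sketch for validate_ports: entries are normalized in place
# (port-number coerced to int, action defaulted to 'enable'). The _FakeModule
# stand-in below is made up for illustration; it only mimics fail_json.
def _demo_validate_ports():
    class _FakeModule(object):
        def fail_json(self, msg):
            raise SystemExit(msg)
    ports = [{'port-number': '8080', 'protocol': 'tcp'}]
    validate_ports(_FakeModule(), ports)
    assert ports[0] == {'port-number': 8080, 'protocol': 'tcp',
                        'action': 'enable'}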
def main():
argument_spec = a10_argument_spec()
argument_spec.update(url_argument_spec())
argument_spec.update(
dict(
operation=dict(type='str', default='create', choices=['create', 'update', 'delete']),
server_name=dict(type='str', aliases=['server'], required=True),
server_ip=dict(type='str', aliases=['ip', 'address'], required=True),
server_status=dict(type='str', default='enable', aliases=['action'], choices=['enable', 'disable']),
server_ports=dict(type='list', aliases=['port'], default=[]),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False
)
host = module.params['host']
username = module.params['username']
password = module.params['password']
operation = module.params['operation']
write_config = module.params['write_config']
slb_server = module.params['server_name']
slb_server_ip = module.params['server_ip']
slb_server_status = module.params['server_status']
slb_server_ports = module.params['server_ports']
axapi_base_url = 'https://{}/axapi/v3/'.format(host)
axapi_auth_url = axapi_base_url + 'auth/'
signature = axapi_authenticate_v3(module, axapi_auth_url, username, password)
# validate the ports data structure
validate_ports(module, slb_server_ports)
json_post = {
"server-list": [
{
"name": slb_server,
"host": slb_server_ip
}
]
}
# add optional module parameters
if slb_server_ports:
json_post['server-list'][0]['port-list'] = slb_server_ports
if slb_server_status:
json_post['server-list'][0]['action'] = slb_server_status
slb_server_data = axapi_call_v3(module, axapi_base_url+'slb/server/', method='GET', body='', signature=signature)
# for empty slb server list
if axapi_failure(slb_server_data):
slb_server_exists = False
else:
slb_server_list = [server['name'] for server in slb_server_data['server-list']]
if slb_server in slb_server_list:
slb_server_exists = True
else:
slb_server_exists = False
changed = False
if operation == 'create':
if slb_server_exists is False:
result = axapi_call_v3(module, axapi_base_url+'slb/server/', method='POST', body=json.dumps(json_post), signature=signature)
if axapi_failure(result):
module.fail_json(msg="failed to create the server: %s" % result['response']['err']['msg'])
changed = True
else:
module.fail_json(msg="server already exists, use state='update' instead")
changed = False
# if we changed things, get the full info regarding result
if changed:
result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='GET', body='', signature=signature)
else:
result = slb_server_data
elif operation == 'delete':
if slb_server_exists:
result = axapi_call_v3(module, axapi_base_url + 'slb/server/' + slb_server, method='DELETE', body='', signature=signature)
if axapi_failure(result):
module.fail_json(msg="failed to delete server: %s" % result['response']['err']['msg'])
changed = True
else:
result = dict(msg="the server was not present")
elif operation == 'update':
if slb_server_exists:
result = axapi_call_v3(module, axapi_base_url + 'slb/server/', method='PUT', body=json.dumps(json_post), signature=signature)
if axapi_failure(result):
module.fail_json(msg="failed to update server: %s" % result['response']['err']['msg'])
changed = True
else:
result = dict(msg="the server was not present")
# if the config has changed, save the config unless otherwise requested
if changed and write_config:
write_result = axapi_call_v3(module, axapi_base_url+'write/memory/', method='POST', body='', signature=signature)
if axapi_failure(write_result):
module.fail_json(msg="failed to save the configuration: %s" % write_result['response']['err']['msg'])
# log out gracefully and exit
axapi_call_v3(module, axapi_base_url + 'logoff/', method='POST', body='', signature=signature)
module.exit_json(changed=changed, content=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
aforalee/rally | tests/unit/plugins/openstack/scenarios/nova/test_keypairs.py | 3 | 2647 | # Copyright 2015: Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.plugins.openstack.scenarios.nova import keypairs
from tests.unit import test
class NovaKeypairTestCase(test.ScenarioTestCase):
def test_create_and_list_keypairs(self):
scenario = keypairs.NovaKeypair(self.context)
scenario._generate_random_name = mock.MagicMock(return_value="name")
scenario._create_keypair = mock.MagicMock(return_value="foo_keypair")
scenario._list_keypairs = mock.MagicMock()
scenario.create_and_list_keypairs(fakearg="fakearg")
scenario._create_keypair.assert_called_once_with(fakearg="fakearg")
scenario._list_keypairs.assert_called_once_with()
def test_create_and_delete_keypair(self):
scenario = keypairs.NovaKeypair(self.context)
scenario._generate_random_name = mock.MagicMock(return_value="name")
scenario._create_keypair = mock.MagicMock(return_value="foo_keypair")
scenario._delete_keypair = mock.MagicMock()
scenario.create_and_delete_keypair(fakearg="fakearg")
scenario._create_keypair.assert_called_once_with(fakearg="fakearg")
scenario._delete_keypair.assert_called_once_with("foo_keypair")
def test_boot_and_delete_server_with_keypair(self):
scenario = keypairs.NovaKeypair(self.context)
scenario._generate_random_name = mock.MagicMock(return_value="name")
scenario._create_keypair = mock.MagicMock(return_value="foo_keypair")
scenario._boot_server = mock.MagicMock(return_value="foo_server")
scenario._delete_server = mock.MagicMock()
scenario._delete_keypair = mock.MagicMock()
scenario.boot_and_delete_server_with_keypair("img", 1)
scenario._create_keypair.assert_called_once_with()
scenario._boot_server.assert_called_once_with(
"img", 1, key_name="foo_keypair")
scenario._delete_server.assert_called_once_with("foo_server")
scenario._delete_keypair.assert_called_once_with("foo_keypair")
| apache-2.0 |
ahmadshahwan/cohorte-runtime | python/src/lib/python/pelix/remote/transport/commons.py | 3 | 11987 | #!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Pelix remote services: Abstract RPC implementation
:author: Thomas Calmant
:copyright: Copyright 2014, isandlaTech
:license: Apache License 2.0
:version: 0.5.7
:status: Beta
..
Copyright 2014 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**TODO:**
* "system" methods (list, help, ...)
"""
# Module version
__version_info__ = (0, 5, 7)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# iPOPO decorators
from pelix.ipopo.decorators import Validate, Invalidate, Property, Provides
# Pelix constants
import pelix.constants as constants
import pelix.remote.beans
from pelix.remote import RemoteServiceError
# Standard library
import logging
import threading
import uuid
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
@Provides(pelix.remote.SERVICE_EXPORT_PROVIDER)
@Property('_kinds', pelix.remote.PROP_REMOTE_CONFIGS_SUPPORTED)
class AbstractRpcServiceExporter(object):
"""
Abstract Remote Services exporter
"""
def __init__(self):
"""
Sets up the exporter
"""
# Bundle context
self._context = None
# Framework UID
self._framework_uid = None
# Handled configurations
self._kinds = None
# Exported services: Name -> ExportEndpoint
self.__endpoints = {}
# Thread safety
self.__lock = threading.Lock()
def dispatch(self, method, params):
"""
Called by the servlet: calls the method of an exported service
"""
# Get the best matching name
matching = None
len_found = 0
for name in self.__endpoints:
if len(name) > len_found and method.startswith(name + "."):
# Better matching end point name (longer that previous one)
matching = name
len_found = len(matching)
if matching is None:
# No end point name match
raise RemoteServiceError("No end point found for: {0}"
.format(method))
# Extract the method name. (+1 for the trailing dot)
method_name = method[len_found + 1:]
# Get the service
try:
service = self.__endpoints[matching].instance
except KeyError:
raise RemoteServiceError("Unknown endpoint: {0}".format(matching))
# Get the method
method_ref = getattr(service, method_name, None)
if method_ref is None:
raise RemoteServiceError("Unknown method {0}".format(method))
# Call it (let the errors be propagated)
return method_ref(*params)
def handles(self, configurations):
"""
Checks if this provider handles the given configuration types
:param configurations: Configuration types
"""
if configurations is None or configurations == '*':
# 'Matches all'
return True
return bool(set(configurations).intersection(self._kinds))
def export_service(self, svc_ref, name, fw_uid):
"""
Prepares an export endpoint
:param svc_ref: Service reference
:param name: Endpoint name
:param fw_uid: Framework UID
:return: An ExportEndpoint bean
:raise NameError: Already known name
:raise BundleException: Error getting the service
"""
with self.__lock:
# Prepare extra properties
extra_props = self.make_endpoint_properties(svc_ref, name, fw_uid)
try:
# Check if the name has been changed by the exporter
name = extra_props[pelix.remote.PROP_ENDPOINT_NAME]
except KeyError:
# Name not updated
pass
if name in self.__endpoints:
# Already known end point
raise NameError("Already known end point {0} for kinds {1}"
.format(name, ','.join(self._kinds)))
# Get the service (let it raise a BundleException if any)
service = self._context.get_service(svc_ref)
# Prepare the export endpoint
try:
endpoint = pelix.remote.beans.ExportEndpoint(str(uuid.uuid4()),
fw_uid,
self._kinds,
name,
svc_ref,
service,
extra_props)
except ValueError:
# No specification to export (specifications filtered, ...)
return None
# Store information
self.__endpoints[name] = endpoint
# Return the endpoint bean
return endpoint
def update_export(self, endpoint, new_name, old_properties):
"""
Updates an export endpoint
:param endpoint: An ExportEndpoint bean
:param new_name: Future endpoint name
:param old_properties: Previous properties
:raise NameError: Rename refused
"""
with self.__lock:
try:
if self.__endpoints[new_name] is not endpoint:
# Reject the new name, as an endpoint uses it
raise NameError("New name of {0} already used: {1}"
.format(endpoint.name, new_name))
else:
# Name hasn't changed
pass
except KeyError:
# Update the name of the endpoint
old_name = endpoint.name
endpoint.rename(new_name)
# No endpoint matches the new name: update the storage
self.__endpoints[new_name] = self.__endpoints.pop(old_name)
def unexport_service(self, endpoint):
"""
Deletes an export endpoint
:param endpoint: An ExportEndpoint bean
"""
with self.__lock:
# Clean up storage
del self.__endpoints[endpoint.name]
# Release the service
svc_ref = endpoint.reference
self._context.unget_service(svc_ref)
def make_endpoint_properties(self, svc_ref, name, fw_uid):
"""
Prepare properties for the ExportEndpoint to be created
:param svc_ref: Service reference
:param name: Endpoint name
:param fw_uid: Framework UID
:return: A dictionary of extra endpoint properties
"""
raise NotImplementedError("make_endpoint_properties() not "
"implemented by class {0}"
.format(type(self).__name__))
@Validate
def validate(self, context):
"""
Component validated
"""
# Store the context
self._context = context
# Store the framework UID
self._framework_uid = context.get_property(constants.FRAMEWORK_UID)
@Invalidate
def invalidate(self, context):
"""
Component invalidated
"""
# Clean up the storage
self.__endpoints.clear()
# Clean up members
self._context = None
self._framework_uid = None
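# A standalone sketch of the longest-prefix rule used by dispatch() above:
# among all exported endpoint names, the longest one that prefixes the dotted
# method name wins, so "math.adder.add" resolves to the "math.adder" endpoint
# rather than the shorter "math". The names and helper below are illustrative
# only and not part of the exporter API.
def _demo_longest_prefix_match(method, endpoint_names):
    matching, len_found = None, 0
    for name in endpoint_names:
        if len(name) > len_found and method.startswith(name + "."):
            matching, len_found = name, len(name)
    return matching, method[len_found + 1:]
# _demo_longest_prefix_match("math.adder.add", ["math", "math.adder"])
# returns ("math.adder", "add")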
# ------------------------------------------------------------------------------
@Provides(pelix.remote.SERVICE_IMPORT_ENDPOINT_LISTENER)
@Property('_kinds', pelix.remote.PROP_REMOTE_CONFIGS_SUPPORTED)
class AbstractRpcServiceImporter(object):
"""
Abstract Remote Services importer
"""
def __init__(self):
"""
Sets up the exporter
"""
# Bundle context
self._context = None
# Framework UID
self._framework_uid = None
# Component properties
self._kinds = None
# Registered services (endpoint UID -> ServiceReference)
self.__registrations = {}
self.__lock = threading.Lock()
def endpoint_added(self, endpoint):
"""
An end point has been imported
"""
configs = set(endpoint.configurations)
if '*' not in configs and not configs.intersection(self._kinds):
# Not for us
return
with self.__lock:
if endpoint.uid in self.__registrations:
# Already known endpoint
return
# Prepare a proxy
svc = self.make_service_proxy(endpoint)
if svc is None:
return
# Register it as a service
svc_reg = self._context.register_service(endpoint.specifications,
svc, endpoint.properties)
# Store references
self.__registrations[endpoint.uid] = svc_reg
def endpoint_updated(self, endpoint, old_properties):
"""
An end point has been updated
"""
with self.__lock:
try:
# Update service registration properties
self.__registrations[endpoint.uid].set_properties(
endpoint.properties)
except KeyError:
# Unknown end point
return
def endpoint_removed(self, endpoint):
"""
An end point has been removed
"""
with self.__lock:
try:
# Pop reference and unregister the service
self.__registrations.pop(endpoint.uid).unregister()
except KeyError:
# Unknown end point
return
else:
# Clear the proxy
self.clear_service_proxy(endpoint)
def make_service_proxy(self, endpoint):
"""
Creates the proxy for the given ImportEndpoint
:param endpoint: An ImportEndpoint bean
:return: A service proxy
"""
raise NotImplementedError("make_service_proxy() not implemented by "
"class {0}".format(type(self).__name__))
def clear_service_proxy(self, endpoint):
"""
Destroys the proxy made for the given ImportEndpoint
:param endpoint: An ImportEndpoint bean
"""
raise NotImplementedError("clear_service_proxy() not implemented by "
"class {0}".format(type(self).__name__))
@Validate
def validate(self, context):
"""
Component validated
"""
# Store the bundle context and the framework UID
self._context = context
self._framework_uid = context.get_property(constants.FRAMEWORK_UID)
@Invalidate
def invalidate(self, context):
"""
Component invalidated
"""
# Unregister all of our services
for svc_reg in self.__registrations.values():
svc_reg.unregister()
# Clean up members
self.__registrations.clear()
self._context = None
self._framework_uid = None
| apache-2.0 |
map222/spark | examples/src/main/python/mllib/gradient_boosting_regression_example.py | 106 | 2443 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Gradient Boosted Trees Regression Example.
"""
from __future__ import print_function
from pyspark import SparkContext
# $example on$
from pyspark.mllib.tree import GradientBoostedTrees, GradientBoostedTreesModel
from pyspark.mllib.util import MLUtils
# $example off$
if __name__ == "__main__":
sc = SparkContext(appName="PythonGradientBoostedTreesRegressionExample")
# $example on$
# Load and parse the data file.
data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")
# Split the data into training and test sets (30% held out for testing)
(trainingData, testData) = data.randomSplit([0.7, 0.3])
# Train a GradientBoostedTrees model.
# Notes: (a) Empty categoricalFeaturesInfo indicates all features are continuous.
# (b) Use more iterations in practice.
model = GradientBoostedTrees.trainRegressor(trainingData,
categoricalFeaturesInfo={}, numIterations=3)
# Evaluate model on test instances and compute test error
predictions = model.predict(testData.map(lambda x: x.features))
labelsAndPredictions = testData.map(lambda lp: lp.label).zip(predictions)
testMSE = labelsAndPredictions.map(lambda lp: (lp[0] - lp[1]) * (lp[0] - lp[1])).sum() /\
float(testData.count())
print('Test Mean Squared Error = ' + str(testMSE))
print('Learned regression GBT model:')
print(model.toDebugString())
# Save and load model
model.save(sc, "target/tmp/myGradientBoostingRegressionModel")
sameModel = GradientBoostedTreesModel.load(sc, "target/tmp/myGradientBoostingRegressionModel")
# $example off$
| apache-2.0 |
BackupGGCode/python-for-android | python-modules/twisted/twisted/conch/test/test_cftp.py | 56 | 33079 | # -*- test-case-name: twisted.conch.test.test_cftp -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE file for details.
"""
Tests for L{twisted.conch.scripts.cftp}.
"""
import time, sys, os, operator, getpass, struct
from StringIO import StringIO
from twisted.conch.test.test_ssh import Crypto, pyasn1
_reason = None
if Crypto and pyasn1:
try:
from twisted.conch import unix
from twisted.conch.scripts import cftp
from twisted.conch.test.test_filetransfer import FileTransferForTestAvatar
except ImportError, e:
# Python 2.3 compatibility fix
sys.modules.pop("twisted.conch.unix", None)
unix = None
_reason = str(e)
del e
else:
unix = None
from twisted.python.fakepwd import UserDatabase
from twisted.trial.unittest import TestCase
from twisted.cred import portal
from twisted.internet import reactor, protocol, interfaces, defer, error
from twisted.internet.utils import getProcessOutputAndValue
from twisted.python import log
from twisted.conch import ls
from twisted.test.proto_helpers import StringTransport
from twisted.internet.task import Clock
from twisted.conch.test import test_ssh, test_conch
from twisted.conch.test.test_filetransfer import SFTPTestBase
from twisted.conch.test.test_filetransfer import FileTransferTestAvatar
class ListingTests(TestCase):
"""
Tests for L{lsLine}, the function which generates an entry for a file or
directory in an SFTP I{ls} command's output.
"""
if getattr(time, 'tzset', None) is None:
skip = "Cannot test timestamp formatting code without time.tzset"
def setUp(self):
"""
Patch the L{ls} module's time function so the results of L{lsLine} are
deterministic.
"""
self.now = 123456789
def fakeTime():
return self.now
self.patch(ls, 'time', fakeTime)
# Make sure that the timezone ends up the same after these tests as
# it was before.
if 'TZ' in os.environ:
self.addCleanup(operator.setitem, os.environ, 'TZ', os.environ['TZ'])
self.addCleanup(time.tzset)
else:
def cleanup():
# os.environ.pop is broken! Don't use it! Ever! Or die!
try:
del os.environ['TZ']
except KeyError:
pass
time.tzset()
self.addCleanup(cleanup)
def _lsInTimezone(self, timezone, stat):
"""
Call L{ls.lsLine} after setting the timezone to C{timezone} and return
the result.
"""
# Set the timezone to a well-known value so the timestamps are
# predictable.
os.environ['TZ'] = timezone
time.tzset()
return ls.lsLine('foo', stat)
def test_oldFile(self):
"""
A file with an mtime six months (approximately) or more in the past has
a listing including a low-resolution timestamp.
"""
# Go with 7 months. That's more than 6 months.
then = self.now - (60 * 60 * 24 * 31 * 7)
stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
self.assertEqual(
self._lsInTimezone('America/New_York', stat),
'!--------- 0 0 0 0 Apr 26 1973 foo')
self.assertEqual(
self._lsInTimezone('Pacific/Auckland', stat),
'!--------- 0 0 0 0 Apr 27 1973 foo')
def test_oldSingleDigitDayOfMonth(self):
"""
A file with a high-resolution timestamp which falls on a day of the
month which can be represented by one decimal digit is formatted with
one padding 0 to preserve the columns which come after it.
"""
# A point about 7 months in the past, tweaked to fall on the first of a
# month so we test the case we want to test.
then = self.now - (60 * 60 * 24 * 31 * 7) + (60 * 60 * 24 * 5)
stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
self.assertEqual(
self._lsInTimezone('America/New_York', stat),
'!--------- 0 0 0 0 May 01 1973 foo')
self.assertEqual(
self._lsInTimezone('Pacific/Auckland', stat),
'!--------- 0 0 0 0 May 02 1973 foo')
def test_newFile(self):
"""
A file with an mtime fewer than six months (approximately) in the past
has a listing including a high-resolution timestamp excluding the year.
"""
# A point about three months in the past.
then = self.now - (60 * 60 * 24 * 31 * 3)
stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
self.assertEqual(
self._lsInTimezone('America/New_York', stat),
'!--------- 0 0 0 0 Aug 28 17:33 foo')
self.assertEqual(
self._lsInTimezone('Pacific/Auckland', stat),
'!--------- 0 0 0 0 Aug 29 09:33 foo')
def test_newSingleDigitDayOfMonth(self):
"""
A file with a high-resolution timestamp which falls on a day of the
month which can be represented by one decimal digit is formatted with
one padding 0 to preserve the columns which come after it.
"""
# A point about three months in the past, tweaked to fall on the first
# of a month so we test the case we want to test.
then = self.now - (60 * 60 * 24 * 31 * 3) + (60 * 60 * 24 * 4)
stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
self.assertEqual(
self._lsInTimezone('America/New_York', stat),
'!--------- 0 0 0 0 Sep 01 17:33 foo')
self.assertEqual(
self._lsInTimezone('Pacific/Auckland', stat),
'!--------- 0 0 0 0 Sep 02 09:33 foo')
class StdioClientTests(TestCase):
"""
Tests for L{cftp.StdioClient}.
"""
def setUp(self):
"""
Create a L{cftp.StdioClient} hooked up to dummy transport and a fake
user database.
"""
class Connection:
pass
conn = Connection()
conn.transport = StringTransport()
conn.transport.localClosed = False
self.client = cftp.StdioClient(conn)
self.database = self.client._pwd = UserDatabase()
# Intentionally bypassing makeConnection - that triggers some code
# which uses features not provided by our dumb Connection fake.
self.client.transport = StringTransport()
def test_exec(self):
"""
The I{exec} command runs its arguments locally in a child process
using the user's shell.
"""
self.database.addUser(
getpass.getuser(), 'secret', os.getuid(), 1234, 'foo', 'bar',
sys.executable)
d = self.client._dispatchCommand("exec print 1 + 2")
d.addCallback(self.assertEquals, "3\n")
return d
def test_execWithoutShell(self):
"""
If the local user has no shell, the I{exec} command runs its arguments
using I{/bin/sh}.
"""
self.database.addUser(
getpass.getuser(), 'secret', os.getuid(), 1234, 'foo', 'bar', '')
d = self.client._dispatchCommand("exec echo hello")
d.addCallback(self.assertEquals, "hello\n")
return d
def test_bang(self):
"""
The I{exec} command is run for lines which start with C{"!"}.
"""
self.database.addUser(
getpass.getuser(), 'secret', os.getuid(), 1234, 'foo', 'bar',
'/bin/sh')
d = self.client._dispatchCommand("!echo hello")
d.addCallback(self.assertEquals, "hello\n")
return d
def setKnownConsoleSize(self, width, height):
"""
For the duration of this test, patch C{cftp}'s C{fcntl} module to return
a fixed width and height.
@param width: the width in characters
@type width: C{int}
@param height: the height in characters
@type height: C{int}
"""
import tty # local import to avoid win32 issues
class FakeFcntl(object):
def ioctl(self, fd, opt, mutate):
if opt != tty.TIOCGWINSZ:
self.fail("Only window-size queries supported.")
return struct.pack("4H", height, width, 0, 0)
self.patch(cftp, "fcntl", FakeFcntl())
def test_progressReporting(self):
"""
L{StdioClient._printProgressBar} prints a progress description,
including percent done, amount transferred, transfer rate, and time
remaining, all based the given start time, the given L{FileWrapper}'s
progress information and the reactor's current time.
"""
# Use a short, known console width because this simple test doesn't need
# to test the console padding.
self.setKnownConsoleSize(10, 34)
clock = self.client.reactor = Clock()
wrapped = StringIO("x")
wrapped.name = "sample"
wrapper = cftp.FileWrapper(wrapped)
wrapper.size = 1024 * 10
startTime = clock.seconds()
clock.advance(2.0)
wrapper.total += 4096
self.client._printProgressBar(wrapper, startTime)
self.assertEquals(self.client.transport.value(),
"\rsample 40% 4.0kB 2.0kBps 00:03 ")
def test_reportNoProgress(self):
"""
L{StdioClient._printProgressBar} prints a progress description that
indicates 0 bytes transferred if no bytes have been transferred and no
time has passed.
"""
self.setKnownConsoleSize(10, 34)
clock = self.client.reactor = Clock()
wrapped = StringIO("x")
wrapped.name = "sample"
wrapper = cftp.FileWrapper(wrapped)
startTime = clock.seconds()
self.client._printProgressBar(wrapper, startTime)
self.assertEquals(self.client.transport.value(),
"\rsample 0% 0.0B 0.0Bps 00:00 ")
class FileTransferTestRealm:
def __init__(self, testDir):
self.testDir = testDir
def requestAvatar(self, avatarID, mind, *interfaces):
a = FileTransferTestAvatar(self.testDir)
return interfaces[0], a, lambda: None
class SFTPTestProcess(protocol.ProcessProtocol):
"""
Protocol for testing cftp. Provides an interface between Python (where all
the tests are) and the cftp client process (which does the work that is
being tested).
"""
def __init__(self, onOutReceived):
"""
@param onOutReceived: A L{Deferred} to be fired as soon as data is
received from stdout.
"""
self.clearBuffer()
self.onOutReceived = onOutReceived
self.onProcessEnd = None
self._expectingCommand = None
self._processEnded = False
def clearBuffer(self):
"""
Clear any buffered data received from stdout. Should be private.
"""
self.buffer = ''
self._linesReceived = []
self._lineBuffer = ''
def outReceived(self, data):
"""
Called by Twisted when the cftp client prints data to stdout.
"""
log.msg('got %s' % data)
lines = (self._lineBuffer + data).split('\n')
self._lineBuffer = lines.pop(-1)
self._linesReceived.extend(lines)
# XXX - not strictly correct.
# We really want onOutReceived to fire after the first 'cftp>' prompt
# has been received. (See use in TestOurServerCmdLineClient.setUp)
if self.onOutReceived is not None:
d, self.onOutReceived = self.onOutReceived, None
d.callback(data)
self.buffer += data
self._checkForCommand()
def _checkForCommand(self):
prompt = 'cftp> '
if self._expectingCommand and self._lineBuffer == prompt:
buf = '\n'.join(self._linesReceived)
if buf.startswith(prompt):
buf = buf[len(prompt):]
self.clearBuffer()
d, self._expectingCommand = self._expectingCommand, None
d.callback(buf)
def errReceived(self, data):
"""
Called by Twisted when the cftp client prints data to stderr.
"""
log.msg('err: %s' % data)
def getBuffer(self):
"""
Return the contents of the buffer of data received from stdout.
"""
return self.buffer
def runCommand(self, command):
"""
Issue the given command via the cftp client. Return a C{Deferred} that
fires when the server returns a result. Note that the C{Deferred} will
callback even if the server returns some kind of error.
@param command: A string containing an sftp command.
@return: A C{Deferred} that fires when the sftp server returns a
result. The payload is the server's response string.
"""
self._expectingCommand = defer.Deferred()
self.clearBuffer()
self.transport.write(command + '\n')
return self._expectingCommand
def runScript(self, commands):
"""
Run each command in sequence and return a Deferred that fires when all
commands are completed.
@param commands: A list of strings containing sftp commands.
@return: A C{Deferred} that fires when all commands are completed. The
payload is a list of response strings from the server, in the same
order as the commands.
"""
sem = defer.DeferredSemaphore(1)
dl = [sem.run(self.runCommand, command) for command in commands]
return defer.gatherResults(dl)
def killProcess(self):
"""
Kill the process if it is still running.
If the process is still running, sends a KILL signal to the transport
and returns a C{Deferred} which fires when L{processEnded} is called.
@return: a C{Deferred}.
"""
if self._processEnded:
return defer.succeed(None)
self.onProcessEnd = defer.Deferred()
self.transport.signalProcess('KILL')
return self.onProcessEnd
def processEnded(self, reason):
"""
Called by Twisted when the cftp client process ends.
"""
self._processEnded = True
if self.onProcessEnd:
d, self.onProcessEnd = self.onProcessEnd, None
d.callback(None)
class CFTPClientTestBase(SFTPTestBase):
def setUp(self):
f = open('dsa_test.pub','w')
f.write(test_ssh.publicDSA_openssh)
f.close()
f = open('dsa_test','w')
f.write(test_ssh.privateDSA_openssh)
f.close()
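        # 33152 is 0100600 in octal: a regular file readable and writable only
        # by its owner, the permissions SSH requires for a private key.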
os.chmod('dsa_test', 33152)
f = open('kh_test','w')
f.write('127.0.0.1 ' + test_ssh.publicRSA_openssh)
f.close()
return SFTPTestBase.setUp(self)
def startServer(self):
realm = FileTransferTestRealm(self.testDir)
p = portal.Portal(realm)
p.registerChecker(test_ssh.ConchTestPublicKeyChecker())
fac = test_ssh.ConchTestServerFactory()
fac.portal = p
self.server = reactor.listenTCP(0, fac, interface="127.0.0.1")
def stopServer(self):
if not hasattr(self.server.factory, 'proto'):
return self._cbStopServer(None)
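        # Mark the connection loss as expected so the test server protocol
        # does not treat it as a failure during cleanup.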
self.server.factory.proto.expectedLoseConnection = 1
d = defer.maybeDeferred(
self.server.factory.proto.transport.loseConnection)
d.addCallback(self._cbStopServer)
return d
def _cbStopServer(self, ignored):
return defer.maybeDeferred(self.server.stopListening)
def tearDown(self):
for f in ['dsa_test.pub', 'dsa_test', 'kh_test']:
try:
os.remove(f)
except:
pass
return SFTPTestBase.tearDown(self)
class TestOurServerCmdLineClient(CFTPClientTestBase):
def setUp(self):
CFTPClientTestBase.setUp(self)
self.startServer()
cmds = ('-p %i -l testuser '
'--known-hosts kh_test '
'--user-authentications publickey '
'--host-key-algorithms ssh-rsa '
'-i dsa_test '
'-a '
'-v '
'127.0.0.1')
port = self.server.getHost().port
cmds = test_conch._makeArgs((cmds % port).split(), mod='cftp')
log.msg('running %s %s' % (sys.executable, cmds))
d = defer.Deferred()
self.processProtocol = SFTPTestProcess(d)
d.addCallback(lambda _: self.processProtocol.clearBuffer())
env = os.environ.copy()
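        # Propagate this process's sys.path so the spawned cftp client imports
        # the same copy of Twisted that is under test.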
env['PYTHONPATH'] = os.pathsep.join(sys.path)
reactor.spawnProcess(self.processProtocol, sys.executable, cmds,
env=env)
return d
def tearDown(self):
d = self.stopServer()
d.addCallback(lambda _: self.processProtocol.killProcess())
return d
def _killProcess(self, ignored):
try:
self.processProtocol.transport.signalProcess('KILL')
except error.ProcessExitedAlready:
pass
def runCommand(self, command):
"""
Run the given command with the cftp client. Return a C{Deferred} that
fires when the command is complete. Payload is the server's output for
that command.
"""
return self.processProtocol.runCommand(command)
def runScript(self, *commands):
"""
Run the given commands with the cftp client. Returns a C{Deferred}
that fires when the commands are all complete. The C{Deferred}'s
payload is a list of output for each command.
"""
return self.processProtocol.runScript(commands)
def testCdPwd(self):
"""
Test that 'pwd' reports the current remote directory, that 'lpwd'
reports the current local directory, and that changing to a
subdirectory then changing to its parent leaves you in the original
remote directory.
"""
# XXX - not actually a unit test, see docstring.
homeDir = os.path.join(os.getcwd(), self.testDir)
d = self.runScript('pwd', 'lpwd', 'cd testDirectory', 'cd ..', 'pwd')
d.addCallback(lambda xs: xs[:3] + xs[4:])
d.addCallback(self.assertEqual,
[homeDir, os.getcwd(), '', homeDir])
return d
def testChAttrs(self):
"""
Check that 'ls -l' output includes the access permissions and that
this output changes appropriately with 'chmod'.
"""
def _check(results):
self.flushLoggedErrors()
self.assertTrue(results[0].startswith('-rw-r--r--'))
self.assertEqual(results[1], '')
self.assertTrue(results[2].startswith('----------'), results[2])
self.assertEqual(results[3], '')
d = self.runScript('ls -l testfile1', 'chmod 0 testfile1',
'ls -l testfile1', 'chmod 644 testfile1')
return d.addCallback(_check)
# XXX test chgrp/own
def testList(self):
"""
        Check that 'ls' works as expected. Checks for wildcards, hidden files,
        listing directories and listing empty directories.
"""
def _check(results):
self.assertEqual(results[0], ['testDirectory', 'testRemoveFile',
'testRenameFile', 'testfile1'])
self.assertEqual(results[1], ['testDirectory', 'testRemoveFile',
'testRenameFile', 'testfile1'])
self.assertEqual(results[2], ['testRemoveFile', 'testRenameFile'])
self.assertEqual(results[3], ['.testHiddenFile', 'testRemoveFile',
'testRenameFile'])
self.assertEqual(results[4], [''])
d = self.runScript('ls', 'ls ../' + os.path.basename(self.testDir),
'ls *File', 'ls -a *File', 'ls -l testDirectory')
d.addCallback(lambda xs: [x.split('\n') for x in xs])
return d.addCallback(_check)
def testHelp(self):
"""
Check that running the '?' command returns help.
"""
d = self.runCommand('?')
d.addCallback(self.assertEqual,
cftp.StdioClient(None).cmd_HELP('').strip())
return d
def assertFilesEqual(self, name1, name2, msg=None):
"""
Assert that the files at C{name1} and C{name2} contain exactly the
same data.
"""
f1 = file(name1).read()
f2 = file(name2).read()
self.failUnlessEqual(f1, f2, msg)
def testGet(self):
"""
Test that 'get' saves the remote file to the correct local location,
that the output of 'get' is correct and that 'rm' actually removes
the file.
"""
# XXX - not actually a unit test
expectedOutput = ("Transferred %s/%s/testfile1 to %s/test file2"
% (os.getcwd(), self.testDir, self.testDir))
def _checkGet(result):
self.assertTrue(result.endswith(expectedOutput))
self.assertFilesEqual(self.testDir + '/testfile1',
self.testDir + '/test file2',
"get failed")
return self.runCommand('rm "test file2"')
d = self.runCommand('get testfile1 "%s/test file2"' % (self.testDir,))
d.addCallback(_checkGet)
d.addCallback(lambda _: self.failIf(
os.path.exists(self.testDir + '/test file2')))
return d
def testWildcardGet(self):
"""
Test that 'get' works correctly when given wildcard parameters.
"""
def _check(ignored):
self.assertFilesEqual(self.testDir + '/testRemoveFile',
'testRemoveFile',
'testRemoveFile get failed')
self.assertFilesEqual(self.testDir + '/testRenameFile',
'testRenameFile',
'testRenameFile get failed')
d = self.runCommand('get testR*')
return d.addCallback(_check)
def testPut(self):
"""
Check that 'put' uploads files correctly and that they can be
successfully removed. Also check the output of the put command.
"""
# XXX - not actually a unit test
expectedOutput = ('Transferred %s/testfile1 to %s/%s/test"file2'
% (self.testDir, os.getcwd(), self.testDir))
def _checkPut(result):
self.assertFilesEqual(self.testDir + '/testfile1',
self.testDir + '/test"file2')
self.failUnless(result.endswith(expectedOutput))
return self.runCommand('rm "test\\"file2"')
d = self.runCommand('put %s/testfile1 "test\\"file2"'
% (self.testDir,))
d.addCallback(_checkPut)
d.addCallback(lambda _: self.failIf(
os.path.exists(self.testDir + '/test"file2')))
return d
def test_putOverLongerFile(self):
"""
Check that 'put' uploads files correctly when overwriting a longer
file.
"""
# XXX - not actually a unit test
f = file(os.path.join(self.testDir, 'shorterFile'), 'w')
f.write("a")
f.close()
f = file(os.path.join(self.testDir, 'longerFile'), 'w')
f.write("bb")
f.close()
def _checkPut(result):
self.assertFilesEqual(self.testDir + '/shorterFile',
self.testDir + '/longerFile')
d = self.runCommand('put %s/shorterFile longerFile'
% (self.testDir,))
d.addCallback(_checkPut)
return d
def test_putMultipleOverLongerFile(self):
"""
Check that 'put' uploads files correctly when overwriting a longer
file and you use a wildcard to specify the files to upload.
"""
# XXX - not actually a unit test
os.mkdir(os.path.join(self.testDir, 'dir'))
f = file(os.path.join(self.testDir, 'dir', 'file'), 'w')
f.write("a")
f.close()
f = file(os.path.join(self.testDir, 'file'), 'w')
f.write("bb")
f.close()
def _checkPut(result):
self.assertFilesEqual(self.testDir + '/dir/file',
self.testDir + '/file')
d = self.runCommand('put %s/dir/*'
% (self.testDir,))
d.addCallback(_checkPut)
return d
def testWildcardPut(self):
"""
        What happens if you issue a 'put' command and include a wildcard (i.e.
        '*') in the parameter? Check that all files matching the wildcard are
        uploaded to the correct directory.
"""
def check(results):
self.assertEqual(results[0], '')
self.assertEqual(results[2], '')
self.assertFilesEqual(self.testDir + '/testRemoveFile',
self.testDir + '/../testRemoveFile',
'testRemoveFile get failed')
self.assertFilesEqual(self.testDir + '/testRenameFile',
self.testDir + '/../testRenameFile',
'testRenameFile get failed')
d = self.runScript('cd ..',
'put %s/testR*' % (self.testDir,),
'cd %s' % os.path.basename(self.testDir))
d.addCallback(check)
return d
def testLink(self):
"""
Test that 'ln' creates a file which appears as a link in the output of
'ls'. Check that removing the new file succeeds without output.
"""
def _check(results):
self.flushLoggedErrors()
self.assertEqual(results[0], '')
self.assertTrue(results[1].startswith('l'), 'link failed')
return self.runCommand('rm testLink')
d = self.runScript('ln testLink testfile1', 'ls -l testLink')
d.addCallback(_check)
d.addCallback(self.assertEqual, '')
return d
def testRemoteDirectory(self):
"""
Test that we can create and remove directories with the cftp client.
"""
def _check(results):
self.assertEqual(results[0], '')
self.assertTrue(results[1].startswith('d'))
return self.runCommand('rmdir testMakeDirectory')
d = self.runScript('mkdir testMakeDirectory',
'ls -l testMakeDirector?')
d.addCallback(_check)
d.addCallback(self.assertEqual, '')
return d
def test_existingRemoteDirectory(self):
"""
Test that a C{mkdir} on an existing directory fails with the
        appropriate error, and doesn't log a useless error server-side.
"""
def _check(results):
self.assertEquals(results[0], '')
self.assertEquals(results[1],
'remote error 11: mkdir failed')
d = self.runScript('mkdir testMakeDirectory',
'mkdir testMakeDirectory')
d.addCallback(_check)
return d
def testLocalDirectory(self):
"""
Test that we can create a directory locally and remove it with the
cftp client. This test works because the 'remote' server is running
out of a local directory.
"""
d = self.runCommand('lmkdir %s/testLocalDirectory' % (self.testDir,))
d.addCallback(self.assertEqual, '')
d.addCallback(lambda _: self.runCommand('rmdir testLocalDirectory'))
d.addCallback(self.assertEqual, '')
return d
def testRename(self):
"""
Test that we can rename a file.
"""
def _check(results):
self.assertEqual(results[0], '')
self.assertEqual(results[1], 'testfile2')
return self.runCommand('rename testfile2 testfile1')
d = self.runScript('rename testfile1 testfile2', 'ls testfile?')
d.addCallback(_check)
d.addCallback(self.assertEqual, '')
return d
class TestOurServerBatchFile(CFTPClientTestBase):
def setUp(self):
CFTPClientTestBase.setUp(self)
self.startServer()
def tearDown(self):
CFTPClientTestBase.tearDown(self)
return self.stopServer()
def _getBatchOutput(self, f):
fn = self.mktemp()
open(fn, 'w').write(f)
port = self.server.getHost().port
cmds = ('-p %i -l testuser '
'--known-hosts kh_test '
'--user-authentications publickey '
'--host-key-algorithms ssh-rsa '
'-i dsa_test '
'-a '
'-v -b %s 127.0.0.1') % (port, fn)
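        # _makeArgs returns a full argv; drop argv[0] because
        # getProcessOutputAndValue prepends the executable itself.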
cmds = test_conch._makeArgs(cmds.split(), mod='cftp')[1:]
log.msg('running %s %s' % (sys.executable, cmds))
env = os.environ.copy()
env['PYTHONPATH'] = os.pathsep.join(sys.path)
self.server.factory.expectedLoseConnection = 1
d = getProcessOutputAndValue(sys.executable, cmds, env=env)
def _cleanup(res):
os.remove(fn)
return res
d.addCallback(lambda res: res[0])
d.addBoth(_cleanup)
return d
def testBatchFile(self):
"""Test whether batch file function of cftp ('cftp -b batchfile').
This works by treating the file as a list of commands to be run.
"""
cmds = """pwd
ls
exit
"""
def _cbCheckResult(res):
res = res.split('\n')
log.msg('RES %s' % str(res))
self.failUnless(res[1].find(self.testDir) != -1, repr(res))
self.failUnlessEqual(res[3:-2], ['testDirectory', 'testRemoveFile',
'testRenameFile', 'testfile1'])
d = self._getBatchOutput(cmds)
d.addCallback(_cbCheckResult)
return d
def testError(self):
"""Test that an error in the batch file stops running the batch.
"""
cmds = """chown 0 missingFile
pwd
exit
"""
def _cbCheckResult(res):
self.failIf(res.find(self.testDir) != -1)
d = self._getBatchOutput(cmds)
d.addCallback(_cbCheckResult)
return d
def testIgnoredError(self):
"""Test that a minus sign '-' at the front of a line ignores
any errors.
"""
cmds = """-chown 0 missingFile
pwd
exit
"""
def _cbCheckResult(res):
self.failIf(res.find(self.testDir) == -1)
d = self._getBatchOutput(cmds)
d.addCallback(_cbCheckResult)
return d
class TestOurServerSftpClient(CFTPClientTestBase):
"""
    Test the sftp server against the sftp command-line client.
"""
def setUp(self):
CFTPClientTestBase.setUp(self)
return self.startServer()
def tearDown(self):
return self.stopServer()
def test_extendedAttributes(self):
"""
Test the return of extended attributes by the server: the sftp client
should ignore them, but still be able to parse the response correctly.
This test is mainly here to check that
L{filetransfer.FILEXFER_ATTR_EXTENDED} has the correct value.
"""
fn = self.mktemp()
open(fn, 'w').write("ls .\nexit")
port = self.server.getHost().port
oldGetAttr = FileTransferForTestAvatar._getAttrs
def _getAttrs(self, s):
attrs = oldGetAttr(self, s)
attrs["ext_foo"] = "bar"
return attrs
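        # Patch the server-side attribute getter so every stat result carries
        # an extended attribute that the client must parse and ignore.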
self.patch(FileTransferForTestAvatar, "_getAttrs", _getAttrs)
self.server.factory.expectedLoseConnection = True
cmds = ('-o', 'IdentityFile=dsa_test',
'-o', 'UserKnownHostsFile=kh_test',
'-o', 'HostKeyAlgorithms=ssh-rsa',
'-o', 'Port=%i' % (port,), '-b', fn, '[email protected]')
d = getProcessOutputAndValue("sftp", cmds)
def check(result):
self.assertEquals(result[2], 0)
for i in ['testDirectory', 'testRemoveFile',
'testRenameFile', 'testfile1']:
self.assertIn(i, result[0])
return d.addCallback(check)
if unix is None or Crypto is None or pyasn1 is None or interfaces.IReactorProcess(reactor, None) is None:
if _reason is None:
_reason = "don't run w/o spawnProcess or PyCrypto or pyasn1"
TestOurServerCmdLineClient.skip = _reason
TestOurServerBatchFile.skip = _reason
TestOurServerSftpClient.skip = _reason
StdioClientTests.skip = _reason
else:
from twisted.python.procutils import which
if not which('sftp'):
TestOurServerSftpClient.skip = "no sftp command-line client available"
| apache-2.0 |
geerlingguy/ansible-modules-extras | cloud/webfaction/webfaction_site.py | 76 | 6889 | #!/usr/bin/python
#
# Create Webfaction website using Ansible and the Webfaction API
#
# ------------------------------------------
#
# (c) Quentin Stafford-Fraser 2015
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: webfaction_site
short_description: Add or remove a website on a Webfaction host
description:
- Add or remove a website on a Webfaction host. Further documentation at http://github.com/quentinsf/ansible-webfaction.
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
- Sadly, you I(do) need to know your webfaction hostname for the C(host) parameter. But at least, unlike the API, you don't need to know the IP address - you can use a DNS name.
- If a site of the same name exists in the account but on a different host, the operation will exit.
- "You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API - the location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as your host, you may want to add C(serial: 1) to the plays."
- See `the webfaction API <http://docs.webfaction.com/xmlrpc-api/>`_ for more info.
options:
name:
description:
- The name of the website
required: true
state:
description:
- Whether the website should exist
required: false
choices: ['present', 'absent']
default: "present"
host:
description:
- The webfaction host on which the site should be created.
required: true
https:
description:
- Whether or not to use HTTPS
required: false
choices: BOOLEANS
default: 'false'
site_apps:
description:
- A mapping of URLs to apps
required: false
subdomains:
description:
- A list of subdomains associated with this site.
required: false
default: null
login_name:
description:
- The webfaction account to use
required: true
login_password:
description:
- The webfaction password to use
required: true
'''
EXAMPLES = '''
- name: create website
webfaction_site:
name: testsite1
state: present
host: myhost.webfaction.com
subdomains:
- 'testsite1.my_domain.org'
site_apps:
- ['testapp1', '/']
https: no
login_name: "{{webfaction_user}}"
login_password: "{{webfaction_passwd}}"
'''
import socket
import xmlrpclib
webfaction = xmlrpclib.ServerProxy('https://api.webfaction.com/')
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
state = dict(required=False, choices=['present', 'absent'], default='present'),
# You can specify an IP address or hostname.
host = dict(required=True),
https = dict(required=False, choices=BOOLEANS, default=False),
subdomains = dict(required=False, default=[]),
site_apps = dict(required=False, default=[]),
login_name = dict(required=True),
login_password = dict(required=True),
),
supports_check_mode=True
)
site_name = module.params['name']
site_state = module.params['state']
site_host = module.params['host']
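    # The Webfaction API identifies hosts by IP address; resolving here is
    # what lets the 'host' parameter be a DNS name instead.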
site_ip = socket.gethostbyname(site_host)
session_id, account = webfaction.login(
module.params['login_name'],
module.params['login_password']
)
site_list = webfaction.list_websites(session_id)
site_map = dict([(i['name'], i) for i in site_list])
existing_site = site_map.get(site_name)
result = {}
# Here's where the real stuff happens
if site_state == 'present':
# Does a site with this name already exist?
if existing_site:
# If yes, but it's on a different IP address, then fail.
# If we wanted to allow relocation, we could add a 'relocate=true' option
# which would get the existing IP address, delete the site there, and create it
# at the new address. A bit dangerous, perhaps, so for now we'll require manual
# deletion if it's on another host.
if existing_site['ip'] != site_ip:
module.fail_json(msg="Website already exists with a different IP address. Please fix by hand.")
# If it's on this host and the key parameters are the same, nothing needs to be done.
if (existing_site['https'] == module.boolean(module.params['https'])) and \
(set(existing_site['subdomains']) == set(module.params['subdomains'])) and \
(dict(existing_site['website_apps']) == dict(module.params['site_apps'])):
module.exit_json(
changed = False
)
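        # Arguments shared by create_website and update_website; each
        # site_apps entry is passed to the API as an (app, URL path) pair.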
positional_args = [
session_id, site_name, site_ip,
module.boolean(module.params['https']),
module.params['subdomains'],
]
for a in module.params['site_apps']:
positional_args.append( (a[0], a[1]) )
if not module.check_mode:
# If this isn't a dry run, create or modify the site
result.update(
webfaction.create_website(
*positional_args
                ) if not existing_site else webfaction.update_website(
*positional_args
)
)
elif site_state == 'absent':
# If the site's already not there, nothing changed.
if not existing_site:
module.exit_json(
changed = False,
)
if not module.check_mode:
# If this isn't a dry run, delete the site
result.update(
webfaction.delete_website(session_id, site_name, site_ip)
)
else:
module.fail_json(msg="Unknown state specified: {}".format(site_state))
module.exit_json(
changed = True,
result = result
)
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
NL66278/OCB | addons/stock_landed_costs/__openerp__.py | 49 | 1932 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'WMS Landed Costs',
'version': '1.1',
'author': 'OpenERP SA',
'summary': 'Landed Costs',
'description': """
Landed Costs Management
=======================
This module allows you to easily add extra costs to pickings and decide how to split these costs among their stock moves so that they are taken into account in your stock valuation.
""",
'website': 'https://www.odoo.com/page/warehouse',
'images': [],
'depends': ['stock_account'],
'category': 'Warehouse Management',
'sequence': 16,
'demo': [
],
'data': [
'security/ir.model.access.csv',
'stock_landed_costs_sequence.xml',
'product_view.xml',
'stock_landed_costs_view.xml',
'stock_landed_costs_data.xml',
],
'test': [
'test/stock_landed_costs.yml'
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ppiotr/Invenio | modules/websearch/lib/websearchadminlib.py | 3 | 163797 | ## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0301
"""Invenio WebSearch Administrator Interface."""
__revision__ = "$Id$"
import cgi
import random
import time
import sys
from invenio.dateutils import strftime
if sys.hexversion < 0x2040000:
# pylint: disable=W0622
from sets import Set as set
# pylint: enable=W0622
from invenio.config import \
CFG_CACHEDIR, \
CFG_SITE_LANG, \
CFG_SITE_NAME, \
CFG_SITE_URL,\
CFG_WEBCOMMENT_ALLOW_COMMENTS, \
CFG_WEBSEARCH_SHOW_COMMENT_COUNT, \
CFG_WEBCOMMENT_ALLOW_REVIEWS, \
CFG_WEBSEARCH_SHOW_REVIEW_COUNT, \
CFG_BIBRANK_SHOW_CITATION_LINKS, \
CFG_INSPIRE_SITE, \
CFG_CERN_SITE
from invenio.bibrankadminlib import \
write_outcome, \
modify_translations, \
get_def_name, \
get_name, \
get_languages, \
addadminbox, \
tupletotable, \
createhiddenform
from invenio.dbquery import \
run_sql, \
get_table_update_time
from invenio.websearch_external_collections import \
external_collections_dictionary, \
external_collection_sort_engine_by_name, \
external_collection_get_state, \
external_collection_get_update_state_list, \
external_collection_apply_changes
from invenio.websearch_external_collections_utils import \
get_collection_descendants
from invenio.websearch_external_collections_config import CFG_EXTERNAL_COLLECTION_STATES_NAME
#from invenio.bibformat_elements import bfe_references
#from invenio.bibformat_engine import BibFormatObject
from invenio.bibdocfile import BibRecDocs
from invenio.messages import gettext_set_language
#from invenio.bibrank_citation_searcher import get_cited_by
from invenio.access_control_admin import acc_get_action_id
from invenio.access_control_config import VIEWRESTRCOLL
from invenio.errorlib import register_exception
from invenio.intbitset import intbitset
from invenio.bibrank_citation_searcher import get_cited_by_count
from invenio.bibrecord import record_get_field_instances
def getnavtrail(previous = ''):
"""Get the navtrail"""
navtrail = """<a class="navtrail" href="%s/help/admin">Admin Area</a> """ % (CFG_SITE_URL,)
navtrail = navtrail + previous
return navtrail
def fix_collection_scores():
"""
    Re-calculate and re-normalize the scores of the collection relationship.
"""
for id_dad in intbitset(run_sql("SELECT id_dad FROM collection_collection")):
for index, id_son in enumerate(run_sql("SELECT id_son FROM collection_collection WHERE id_dad=%s ORDER BY score DESC", (id_dad, ))):
run_sql("UPDATE collection_collection SET score=%s WHERE id_dad=%s AND id_son=%s", (index * 10 + 10, id_dad, id_son[0]))
def perform_modifytranslations(colID, ln, sel_type='', trans=[], confirm=-1, callback='yes'):
"""Modify the translations of a collection
sel_type - the nametype to modify
trans - the translations in the same order as the languages from get_languages()"""
output = ''
subtitle = ''
sitelangs = get_languages()
if type(trans) is str:
trans = [trans]
if confirm in ["2", 2] and colID:
finresult = modify_translations(colID, sitelangs, sel_type, trans, "collection")
col_dict = dict(get_def_name('', "collection"))
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="3">3. Modify translations for collection '%s'</a> <small>[<a href="%s/help/admin/websearch-admin-guide#3.3">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
if sel_type == '':
sel_type = get_col_nametypes()[0][0]
header = ['Language', 'Translation']
actions = []
types = get_col_nametypes()
if len(types) > 1:
text = """
<span class="adminlabel">Name type</span>
<select name="sel_type" class="admin_w200">
"""
for (key, value) in types:
text += """<option value="%s" %s>%s""" % (key, key == sel_type and 'selected="selected"' or '', value)
trans_names = get_name(colID, ln, key, "collection")
if trans_names and trans_names[0][0]:
text += ": %s" % trans_names[0][0]
text += "</option>"
text += """</select>"""
output += createhiddenform(action="modifytranslations#3",
text=text,
button="Select",
colID=colID,
ln=ln,
confirm=0)
if confirm in [-1, "-1", 0, "0"]:
trans = []
for (key, value) in sitelangs:
try:
trans_names = get_name(colID, key, sel_type, "collection")
trans.append(trans_names[0][0])
except StandardError, e:
trans.append('')
for nr in range(0, len(sitelangs)):
actions.append(["%s" % (sitelangs[nr][1],)])
actions[-1].append('<input type="text" name="trans" size="30" value="%s"/>' % trans[nr])
text = tupletotable(header=header, tuple=actions)
output += createhiddenform(action="modifytranslations#3",
text=text,
button="Modify",
colID=colID,
sel_type=sel_type,
ln=ln,
confirm=2)
if sel_type and len(trans) and confirm in ["2", 2]:
output += write_outcome(finresult)
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifytranslations", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_modifyrankmethods(colID, ln, func='', rnkID='', confirm=0, callback='yes'):
"""Modify which rank methods is visible to the collection
func - remove or add rank method
rnkID - the id of the rank method."""
output = ""
subtitle = ""
col_dict = dict(get_def_name('', "collection"))
rnk_dict = dict(get_def_name('', "rnkMETHOD"))
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
if func in ["0", 0] and confirm in ["1", 1]:
finresult = attach_rnk_col(colID, rnkID)
elif func in ["1", 1] and confirm in ["1", 1]:
finresult = detach_rnk_col(colID, rnkID)
subtitle = """<a name="9">9. Modify rank options for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.9">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """
<dl>
    <dt>The rank methods enabled for the collection '%s' are:</dt>
""" % col_dict[colID]
rnkmethods = get_col_rnk(colID, ln)
output += """<dd>"""
if not rnkmethods:
output += """No rank methods"""
else:
for id, name in rnkmethods:
output += """%s, """ % name
output += """</dd>
</dl>
"""
rnk_list = get_def_name('', "rnkMETHOD")
rnk_dict_in_col = dict(get_col_rnk(colID, ln))
rnk_list = filter(lambda x: not rnk_dict_in_col.has_key(x[0]), rnk_list)
if rnk_list:
text = """
<span class="adminlabel">Enable:</span>
<select name="rnkID" class="admin_w200">
<option value="-1">- select rank method -</option>
"""
for (id, name) in rnk_list:
text += """<option value="%s" %s>%s</option>""" % (id, (func in ["0", 0] and confirm in ["0", 0] and int(rnkID) == int(id)) and 'selected="selected"' or '' , name)
text += """</select>"""
output += createhiddenform(action="modifyrankmethods#9",
text=text,
button="Enable",
colID=colID,
ln=ln,
func=0,
confirm=1)
if confirm in ["1", 1] and func in ["0", 0] and int(rnkID) != -1:
output += write_outcome(finresult)
elif confirm not in ["0", 0] and func in ["0", 0]:
output += """<b><span class="info">Please select a rank method.</span></b>"""
coll_list = get_col_rnk(colID, ln)
if coll_list:
text = """
<span class="adminlabel">Disable:</span>
<select name="rnkID" class="admin_w200">
<option value="-1">- select rank method-</option>
"""
for (id, name) in coll_list:
text += """<option value="%s" %s>%s</option>""" % (id, (func in ["1", 1] and confirm in ["0", 0] and int(rnkID) == int(id)) and 'selected="selected"' or '' , name)
text += """</select>"""
output += createhiddenform(action="modifyrankmethods#9",
text=text,
button="Disable",
colID=colID,
ln=ln,
func=1,
confirm=1)
if confirm in ["1", 1] and func in ["1", 1] and int(rnkID) != -1:
output += write_outcome(finresult)
elif confirm not in ["0", 0] and func in ["1", 1]:
output += """<b><span class="info">Please select a rank method.</span></b>"""
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifyrankmethods", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_addcollectiontotree(colID, ln, add_dad='', add_son='', rtype='', mtype='', callback='yes', confirm=-1):
"""Form to add a collection to the tree.
add_dad - the dad to add the collection to
add_son - the collection to add
rtype - add it as a regular or virtual
mtype - add it to the regular or virtual tree."""
output = ""
output2 = ""
subtitle = """Attach collection to tree <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#2.2">?</a>]</small>""" % (CFG_SITE_URL)
col_dict = dict(get_def_name('', "collection"))
if confirm not in [-1, "-1"] and not (add_son and add_dad and rtype):
output2 += """<b><span class="info">All fields must be filled.</span></b><br /><br />
"""
elif add_son and add_dad and rtype:
add_son = int(add_son)
add_dad = int(add_dad)
if confirm not in [-1, "-1"]:
if add_son == add_dad:
output2 += """<b><span class="info">Cannot add a collection as a pointer to itself.</span></b><br /><br />
"""
elif check_col(add_dad, add_son):
res = add_col_dad_son(add_dad, add_son, rtype)
output2 += write_outcome(res)
if res[0] == 1:
output2 += """<b><span class="info"><br /> The collection will appear on your website after the next webcoll run. You can either run it manually or wait until bibsched does it for you.</span></b><br /><br />
"""
else:
output2 += """<b><span class="info">Cannot add the collection '%s' as a %s subcollection of '%s' since it will either create a loop, or the association already exists.</span></b><br /><br />
""" % (col_dict[add_son], (rtype=="r" and 'regular' or 'virtual'), col_dict[add_dad])
add_son = ''
add_dad = ''
rtype = ''
tree = get_col_tree(colID)
col_list = col_dict.items()
col_list.sort(compare_on_val)
output = show_coll_not_in_tree(colID, ln, col_dict)
text = """
<span class="adminlabel">Attach collection:</span>
<select name="add_son" class="admin_w200">
<option value="">- select collection -</option>
"""
for (id, name) in col_list:
if id != colID:
text += """<option value="%s" %s>%s</option>""" % (id, str(id)==str(add_son) and 'selected="selected"' or '', name)
text += """
</select><br />
<span class="adminlabel">to parent collection:</span>
<select name="add_dad" class="admin_w200">
<option value="">- select parent collection -</option>
"""
for (id, name) in col_list:
text += """<option value="%s" %s>%s</option>
""" % (id, str(id)==add_dad and 'selected="selected"' or '', name)
text += """</select><br />
"""
text += """
<span class="adminlabel">with relationship:</span>
<select name="rtype" class="admin_w200">
<option value="">- select relationship -</option>
<option value="r" %s>Regular (Narrow by...)</option>
<option value="v" %s>Virtual (Focus on...)</option>
</select>
""" % ((rtype=="r" and 'selected="selected"' or ''), (rtype=="v" and 'selected="selected"' or ''))
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/addcollectiontotree" % CFG_SITE_URL,
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
output += output2
#output += perform_showtree(colID, ln)
body = [output]
if callback:
return perform_index(colID, ln, mtype="perform_addcollectiontotree", content=addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_addcollection(colID, ln, colNAME='', dbquery='', callback="yes", confirm=-1):
"""form to add a new collection.
colNAME - the name of the new collection
dbquery - the dbquery of the new collection"""
output = ""
subtitle = """Create new collection <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#2.1">?</a>]</small>""" % (CFG_SITE_URL)
text = """
<span class="adminlabel">Default name</span>
<input class="admin_w200" type="text" name="colNAME" value="%s" /><br />
""" % colNAME
output = createhiddenform(action="%s/admin/websearch/websearchadmin.py/addcollection" % CFG_SITE_URL,
text=text,
colID=colID,
ln=ln,
button="Add collection",
confirm=1)
if colNAME and confirm in ["1", 1]:
res = add_col(colNAME, '')
output += write_outcome(res)
if res[0] == 1:
output += perform_addcollectiontotree(colID=colID, ln=ln, add_son=res[1], callback='')
elif confirm not in ["-1", -1]:
output += """<b><span class="info">Please give the collection a name.</span></b>"""
body = [output]
if callback:
return perform_index(colID, ln=ln, mtype="perform_addcollection", content=addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_modifydbquery(colID, ln, dbquery='', callback='yes', confirm=-1):
"""form to modify the dbquery of the collection.
dbquery - the dbquery of the collection."""
subtitle = ''
output = ""
col_dict = dict(get_def_name('', "collection"))
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="1">1. Modify collection query for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.1">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
if confirm == -1:
res = run_sql("SELECT dbquery FROM collection WHERE id=%s" % colID)
dbquery = res[0][0]
if not dbquery:
dbquery = ''
reg_sons = len(get_col_tree(colID, 'r'))
vir_sons = len(get_col_tree(colID, 'v'))
if reg_sons > 1:
if dbquery:
output += "Warning: This collection got subcollections, and should because of this not have a collection query, for further explanation, check the WebSearch Guide<br />"
elif reg_sons <= 1:
if not dbquery:
output += "Warning: This collection does not have any subcollections, and should because of this have a collection query, for further explanation, check the WebSearch Guide<br />"
text = """
<span class="adminlabel">Query</span>
<input class="admin_w200" type="text" name="dbquery" value="%s" /><br />
""" % cgi.escape(dbquery, 1)
output += createhiddenform(action="modifydbquery",
text=text,
button="Modify",
colID=colID,
ln=ln,
confirm=1)
if confirm in ["1", 1]:
res = modify_dbquery(colID, dbquery)
if res:
if dbquery == "":
text = """<b><span class="info">Query removed for this collection.</span></b>"""
else:
text = """<b><span class="info">Query set for this collection.</span></b>"""
else:
text = """<b><span class="info">Sorry, could not change query.</span></b>"""
output += text
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifydbquery", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_modifycollectiontree(colID, ln, move_up='', move_down='', move_from='', move_to='', delete='', rtype='', callback='yes', confirm=0):
"""to modify the collection tree: move a collection up and down, delete a collection, or change the father of the collection.
colID - the main collection of the tree, the root
move_up - move this collection up (is not the collection id, but the place in the tree)
    move_down - move this collection down (is not the collection id, but the place in the tree)
    move_from - move this collection from the current position (is not the collection id, but the place in the tree)
    move_to - move the move_from collection and set this as its father. (is not the collection id, but the place in the tree)
delete - delete this collection from the tree (is not the collection id, but the place in the tree)
rtype - the type of the collection in the tree, regular or virtual"""
colID = int(colID)
tree = get_col_tree(colID, rtype)
col_dict = dict(get_def_name('', "collection"))
subtitle = """Modify collection tree: %s <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#2.3">?</a>] <a href="%s/admin/websearch/websearchadmin.py/showtree?colID=%s&ln=%s">Printer friendly version</a></small>""" % (col_dict[colID], CFG_SITE_URL, CFG_SITE_URL, colID, ln)
fin_output = ""
output = ""
try:
if move_up:
move_up = int(move_up)
switch = find_last(tree, move_up)
if switch and switch_col_treescore(tree[move_up], tree[switch]):
output += """<b><span class="info">Moved the %s collection '%s' up and '%s' down.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_up][0]], col_dict[tree[switch][0]])
else:
output += """<b><span class="info">Could not move the %s collection '%s' up and '%s' down.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_up][0]], col_dict[tree[switch][0]])
elif move_down:
move_down = int(move_down)
switch = find_next(tree, move_down)
if switch and switch_col_treescore(tree[move_down], tree[switch]):
output += """<b><span class="info">Moved the %s collection '%s' down and '%s' up.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_down][0]], col_dict[tree[switch][0]])
else:
output += """<b><span class="info">Could not move the %s collection '%s' up and '%s' down.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_up][0]],col_dict[tree[switch][0]])
elif delete:
delete = int(delete)
if confirm in [0, "0"]:
if col_dict[tree[delete][0]] != col_dict[tree[delete][3]]:
text = """<b>Do you want to remove the %s collection '%s' and its subcollections in the %s collection '%s'.</b>
""" % ((tree[delete][4]=="r" and 'regular' or 'virtual'), col_dict[tree[delete][0]], (rtype=="r" and 'regular' or 'virtual'), col_dict[tree[delete][3]])
else:
text = """<b>Do you want to remove all subcollections of the %s collection '%s'.</b>
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[delete][3]])
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifycollectiontree#tree" % CFG_SITE_URL,
text=text,
button="Confirm",
colID=colID,
delete=delete,
rtype=rtype,
ln=ln,
confirm=1)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/index?mtype=perform_modifycollectiontree#tree" % CFG_SITE_URL,
text="<b>To cancel</b>",
button="Cancel",
colID=colID,
ln=ln)
else:
if remove_col_subcol(tree[delete][0], tree[delete][3], rtype):
if col_dict[tree[delete][0]] != col_dict[tree[delete][3]]:
output += """<b><span class="info">Removed the %s collection '%s' and its subcollections in subdirectory '%s'.</span></b><br /><br />
""" % ((tree[delete][4]=="r" and 'regular' or 'virtual'), col_dict[tree[delete][0]], col_dict[tree[delete][3]])
else:
output += """<b><span class="info">Removed the subcollections of the %s collection '%s'.</span></b><br /><br />
""" % ((rtype=="r" and 'regular' or 'virtual'), col_dict[tree[delete][3]])
else:
output += """<b><span class="info">Could not remove the collection from the tree.</span></b><br /><br />
"""
delete = ''
elif move_from and not move_to:
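            # move_from encodes the tree type as its first character ('r' or
            # 'v') followed by the node's position in that tree.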
move_from_rtype = move_from[0]
move_from_id = int(move_from[1:len(move_from)])
text = """<b>Select collection to place the %s collection '%s' under.</b><br /><br />
""" % ((move_from_rtype=="r" and 'regular' or 'virtual'), col_dict[tree[move_from_id][0]])
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/index?mtype=perform_modifycollectiontree#tree" % CFG_SITE_URL,
text=text,
button="Cancel",
colID=colID,
ln=ln)
elif move_from and move_to:
move_from_rtype = move_from[0]
move_from_id = int(move_from[1:len(move_from)])
move_to_rtype = move_to[0]
move_to_id = int(move_to[1:len(move_to)])
tree_from = get_col_tree(colID, move_from_rtype)
tree_to = get_col_tree(colID, move_to_rtype)
if confirm in [0, '0']:
if move_from_id == move_to_id and move_from_rtype == move_to_rtype:
output += """<b><span class="info">Cannot move to itself.</span></b><br /><br />
"""
elif tree_from[move_from_id][3] == tree_to[move_to_id][0] and move_from_rtype==move_to_rtype:
output += """<b><span class="info">The collection is already there.</span></b><br /><br />
"""
elif check_col(tree_to[move_to_id][0], tree_from[move_from_id][0]) or (tree_to[move_to_id][0] == 1 and tree_from[move_from_id][3] == tree_to[move_to_id][0] and move_from_rtype != move_to_rtype):
text = """<b>Move %s collection '%s' to the %s collection '%s'.</b>
""" % ((tree_from[move_from_id][4]=="r" and 'regular' or 'virtual'), col_dict[tree_from[move_from_id][0]], (tree_to[move_to_id][4]=="r" and 'regular' or 'virtual'), col_dict[tree_to[move_to_id][0]])
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifycollectiontree#tree" % CFG_SITE_URL,
text=text,
button="Confirm",
colID=colID,
move_from=move_from,
move_to=move_to,
ln=ln,
rtype=rtype,
confirm=1)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/index?mtype=perform_modifycollectiontree#tree" % CFG_SITE_URL,
text="""<b>To cancel</b>""",
button="Cancel",
colID=colID,
ln=ln)
else:
output += """<b><span class="info">Cannot move the collection '%s' and set it as a subcollection of '%s' since it will create a loop.</span></b><br /><br />
""" % (col_dict[tree_from[move_from_id][0]], col_dict[tree_to[move_to_id][0]])
else:
if (move_to_id != 0 and move_col_tree(tree_from[move_from_id], tree_to[move_to_id])) or (move_to_id == 0 and move_col_tree(tree_from[move_from_id], tree_to[move_to_id], move_to_rtype)):
output += """<b><span class="info">Moved %s collection '%s' to the %s collection '%s'.</span></b><br /><br />
""" % ((move_from_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_from[move_from_id][0]], (move_to_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_to[move_to_id][0]])
else:
output += """<b><span class="info">Could not move %s collection '%s' to the %s collection '%s'.</span></b><br /><br />
""" % ((move_from_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_from[move_from_id][0]], (move_to_rtype=="r" and 'regular' or 'virtual'), col_dict[tree_to[move_to_id][0]])
move_from = ''
move_to = ''
else:
output += """
"""
except StandardError, e:
register_exception()
return """<b><span class="info">An error occured.</span></b>
"""
output += """<table border ="0" width="100%">
<tr><td width="50%">
<b>Narrow by collection:</b>
</td><td width="50%">
<b>Focus on...:</b>
</td></tr><tr><td valign="top">
"""
tree = get_col_tree(colID, 'r')
output += create_colltree(tree, col_dict, colID, ln, move_from, move_to, 'r', "yes")
output += """</td><td valign="top">
"""
tree = get_col_tree(colID, 'v')
output += create_colltree(tree, col_dict, colID, ln, move_from, move_to, 'v', "yes")
output += """</td>
</tr>
</table>
"""
body = [output]
if callback:
return perform_index(colID, ln, mtype="perform_modifycollectiontree", content=addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_showtree(colID, ln):
"""create collection tree/hiarchy"""
col_dict = dict(get_def_name('', "collection"))
subtitle = "Collection tree: %s" % col_dict[int(colID)]
output = """<table border ="0" width="100%">
<tr><td width="50%">
<b>Narrow by collection:</b>
</td><td width="50%">
<b>Focus on...:</b>
</td></tr><tr><td valign="top">
"""
tree = get_col_tree(colID, 'r')
output += create_colltree(tree, col_dict, colID, ln, '', '', 'r', '')
output += """</td><td valign="top">
"""
tree = get_col_tree(colID, 'v')
output += create_colltree(tree, col_dict, colID, ln, '', '', 'v', '')
output += """</td>
</tr>
</table>
"""
body = [output]
return addadminbox(subtitle, body)
def perform_addportalbox(colID, ln, title='', body='', callback='yes', confirm=-1):
"""form to add a new portalbox
title - the title of the portalbox
body - the body of the portalbox"""
col_dict = dict(get_def_name('', "collection"))
colID = int(colID)
subtitle = """<a name="5.1"></a>Create new portalbox"""
text = """
<span class="adminlabel">Title</span>
<textarea cols="50" rows="1" class="admin_wvar" type="text" name="title">%s</textarea><br />
<span class="adminlabel">Body</span>
<textarea cols="50" rows="10" class="admin_wvar" type="text" name="body">%s</textarea><br />
""" % (cgi.escape(title), cgi.escape(body))
output = createhiddenform(action="addportalbox#5.1",
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
if body and confirm in [1, "1"]:
res = add_pbx(title, body)
output += write_outcome(res)
if res[1] == 1:
output += """<b><span class="info"><a href="addexistingportalbox?colID=%s&ln=%s&pbxID=%s#5">Add portalbox to collection</a></span></b>""" % (colID, ln, res[1])
elif confirm not in [-1, "-1"]:
output += """<b><span class="info">Body field must be filled.</span></b>
"""
body = [output]
return perform_showportalboxes(colID, ln, content=addadminbox(subtitle, body))
def perform_addexistingportalbox(colID, ln, pbxID=-1, score=0, position='', sel_ln='', callback='yes', confirm=-1):
"""form to add an existing portalbox to a collection.
colID - the collection to add the portalbox to
pbxID - the portalbox to add
score - the importance of the portalbox.
position - the position of the portalbox on the page
sel_ln - the language of the portalbox"""
subtitle = """<a name="5.2"></a>Add existing portalbox to collection"""
output = ""
colID = int(colID)
res = get_pbx()
pos = get_pbx_pos()
lang = dict(get_languages())
col_dict = dict(get_def_name('', "collection"))
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
col_pbx = get_col_pbx(colID)
col_pbx = dict(map(lambda x: (x[0], x[5]), col_pbx))
if len(res) > 0:
text = """
<span class="adminlabel">Portalbox</span>
<select name="pbxID" class="admin_w200">
<option value="-1">- Select portalbox -</option>
"""
for (id, t_title, t_body) in res:
text += """<option value="%s" %s>%s - %s...</option>\n""" % \
(id, id == int(pbxID) and 'selected="selected"' or '',
t_title[:40], cgi.escape(t_body[0:40 - min(40, len(t_title))]))
text += """</select><br />
<span class="adminlabel">Language</span>
<select name="sel_ln" class="admin_w200">
<option value="">- Select language -</option>
"""
listlang = lang.items()
listlang.sort()
for (key, name) in listlang:
text += """<option value="%s" %s>%s</option>
""" % (key, key == sel_ln and 'selected="selected"' or '', name)
text += """</select><br />
<span class="adminlabel">Position</span>
<select name="position" class="admin_w200">
<option value="">- Select position -</option>
"""
listpos = pos.items()
listpos.sort()
for (key, name) in listpos:
text += """<option value="%s" %s>%s</option>""" % (key, key==position and 'selected="selected"' or '', name)
text += "</select>"
output += createhiddenform(action="addexistingportalbox#5.2",
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
else:
output = """No existing portalboxes to add, please create a new one.
"""
if pbxID > -1 and position and sel_ln and confirm in [1, "1"]:
pbxID = int(pbxID)
res = add_col_pbx(colID, pbxID, sel_ln, position, '')
output += write_outcome(res)
elif pbxID > -1 and confirm not in [-1, "-1"]:
output += """<b><span class="info">All fields must be filled.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_deleteportalbox(colID, ln, pbxID=-1, callback='yes', confirm=-1):
"""form to delete a portalbox which is not in use.
colID - the current collection.
pbxID - the id of the portalbox"""
subtitle = """<a name="5.3"></a>Delete an unused portalbox"""
output = ""
colID = int(colID)
if pbxID not in [-1, "-1"] and confirm in [1, "1"]:
ares = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), ares))
if pbx_dict.has_key(int(pbxID)):
pname = pbx_dict[int(pbxID)]
ares = delete_pbx(int(pbxID))
else:
return """<b><span class="info">This portalbox does not exist</span></b>"""
res = get_pbx()
col_dict = dict(get_def_name('', "collection"))
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
col_pbx = get_col_pbx()
col_pbx = dict(map(lambda x: (x[0], x[5]), col_pbx))
if len(res) > 0:
text = """
<span class="adminlabel">Portalbox</span>
<select name="pbxID" class="admin_w200">
"""
text += """<option value="-1">- Select portalbox -"""
for (id, t_title, t_body) in res:
if not col_pbx.has_key(id):
text += """<option value="%s" %s>%s - %s...""" % (id, id == int(pbxID) and 'selected="selected"' or '', t_title, cgi.escape(t_body[0:10]))
text += "</option>"
text += """</select><br />"""
output += createhiddenform(action="deleteportalbox#5.3",
text=text,
button="Delete",
colID=colID,
ln=ln,
confirm=1)
if pbxID not in [-1, "-1"]:
pbxID = int(pbxID)
if confirm in [1, "1"]:
output += write_outcome(ares)
elif confirm not in [-1, "-1"]:
output += """<b><span class="info">Choose a portalbox to delete.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_modifyportalbox(colID, ln, pbxID=-1, score='', position='', sel_ln='', title='', body='', callback='yes', confirm=-1):
"""form to modify a portalbox in a collection, or change the portalbox itself.
colID - the id of the collection.
pbxID - the portalbox to change
score - the score of the portalbox connected to colID which should be changed.
position - the position of the portalbox in collection colID to change."""
subtitle = ""
output = ""
colID = int(colID)
res = get_pbx()
pos = get_pbx_pos()
lang = dict(get_languages())
col_dict = dict(get_def_name('', "collection"))
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
col_pbx = get_col_pbx(colID)
col_pbx = dict(map(lambda x: (x[0], x[5]), col_pbx))
if pbxID not in [-1, "-1"]:
pbxID = int(pbxID)
subtitle = """<a name="5.4"></a>Modify portalbox '%s' for this collection""" % pbx_dict[pbxID]
col_pbx = get_col_pbx(colID)
if not (score and position) and not (body and title):
for (id_pbx, id_collection, tln, score, position, title, body) in col_pbx:
if id_pbx == pbxID:
break
output += """Collection (presentation) specific values (Changes implies only to this collection.)<br />"""
text = """
<span class="adminlabel">Position</span>
<select name="position" class="admin_w200">
"""
listpos = pos.items()
listpos.sort()
for (key, name) in listpos:
text += """<option value="%s" %s>%s""" % (key, key==position and 'selected="selected"' or '', name)
text += "</option>"
text += """</select><br />"""
output += createhiddenform(action="modifyportalbox#5.4",
text=text,
button="Modify",
colID=colID,
pbxID=pbxID,
score=score,
title=title,
body=cgi.escape(body, 1),
sel_ln=sel_ln,
ln=ln,
confirm=3)
if pbxID > -1 and score and position and confirm in [3, "3"]:
pbxID = int(pbxID)
res = modify_pbx(colID, pbxID, sel_ln, score, position, '', '')
res2 = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), res2))
output += write_outcome(res)
output += """<br />Portalbox (content) specific values (any changes appears everywhere the portalbox is used.)"""
text = """
<span class="adminlabel">Title</span>
<textarea cols="50" rows="1" class="admin_wvar" type="text" name="title">%s</textarea><br />
""" % cgi.escape(title)
text += """
<span class="adminlabel">Body</span>
<textarea cols="50" rows="10" class="admin_wvar" type="text" name="body">%s</textarea><br />
""" % cgi.escape(body)
output += createhiddenform(action="modifyportalbox#5.4",
text=text,
button="Modify",
colID=colID,
pbxID=pbxID,
sel_ln=sel_ln,
score=score,
position=position,
ln=ln,
confirm=4)
if pbxID > -1 and confirm in [4, "4"]:
pbxID = int(pbxID)
res = modify_pbx(colID, pbxID, sel_ln, '', '', title, body)
output += write_outcome(res)
else:
output = """No portalbox to modify."""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_switchpbxscore(colID, id_1, id_2, sel_ln, ln):
"""Switch the score of id_1 and id_2 in collection_portalbox.
colID - the current collection
id_1/id_2 - the id's to change the score for.
sel_ln - the language of the portalbox"""
output = ""
res = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
res = switch_pbx_score(colID, id_1, id_2, sel_ln)
output += write_outcome(res)
return perform_showportalboxes(colID, ln, content=output)
def perform_showportalboxes(colID, ln, callback='yes', content='', confirm=-1):
"""show the portalboxes of this collection.
colID - the portalboxes to show the collection for."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
subtitle = """<a name="5">5. Modify portalboxes for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.5">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = ""
pos = get_pbx_pos()
output = """<dl>
<dt>Portalbox actions (not related to this collection)</dt>
<dd><a href="addportalbox?colID=%s&ln=%s#5.1">Create new portalbox</a></dd>
<dd><a href="deleteportalbox?colID=%s&ln=%s#5.3">Delete an unused portalbox</a></dd>
<dt>Collection specific actions</dt>
<dd><a href="addexistingportalbox?colID=%s&ln=%s#5.2">Add existing portalbox to collection</a></dd>
</dl>
""" % (colID, ln, colID, ln, colID, ln)
header = ['Position', 'Language', '', 'Title', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
pos_list = pos.items()
pos_list.sort()
if len(get_col_pbx(colID)) > 0:
for (key, value) in sitelangs:
for (pos_key, pos_value) in pos_list:
res = get_col_pbx(colID, key, pos_key)
i = 0
for (pbxID, colID_pbx, tln, score, position, title, body) in res:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchpbxscore?colID=%s&ln=%s&id_1=%s&id_2=%s&sel_ln=%s&rand=%s#5"><img border="0" src="%s/img/smallup.gif" title="Move portalbox up" alt="up" /></a>""" % (CFG_SITE_URL, colID, ln, pbxID, res[i - 1][0], tln, random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(res):
move += """<a href="%s/admin/websearch/websearchadmin.py/switchpbxscore?colID=%s&ln=%s&id_1=%s&id_2=%s&sel_ln=%s&rand=%s#5"><img border="0" src="%s/img/smalldown.gif" title="Move portalbox down" alt="down" /></a>""" % (CFG_SITE_URL, colID, ln, pbxID, res[i][0], tln, random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append(["%s" % (i==1 and pos[position] or ''), "%s" % (i==1 and lang[tln] or ''), move, "%s" % title])
for col in [(('Modify', 'modifyportalbox'), ('Remove', 'removeportalbox'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&pbxID=%s&sel_ln=%s#5.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, pbxID, tln, col[0][0]))
                        for (label, function) in col[1:]:
                            actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&pbxID=%s&sel_ln=%s#5.5">%s</a>' % (CFG_SITE_URL, function, colID, ln, pbxID, tln, label)
output += tupletotable(header=header, tuple=actions)
else:
output += """No portalboxes exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showportalboxes", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_removeportalbox(colID, ln, pbxID='', sel_ln='', callback='yes', confirm=0):
"""form to remove a portalbox from a collection.
colID - the current collection, remove the portalbox from this collection.
sel_ln - remove the portalbox with this language
pbxID - remove the portalbox with this id"""
subtitle = """<a name="5.5"></a>Remove portalbox"""
output = ""
col_dict = dict(get_def_name('', "collection"))
res = get_pbx()
pbx_dict = dict(map(lambda x: (x[0], x[1]), res))
if colID and pbxID and sel_ln:
colID = int(colID)
pbxID = int(pbxID)
if confirm in ["0", 0]:
text = """Do you want to remove the portalbox '%s' from the collection '%s'.""" % (pbx_dict[pbxID], col_dict[colID])
output += createhiddenform(action="removeportalbox#5.5",
text=text,
button="Confirm",
colID=colID,
pbxID=pbxID,
sel_ln=sel_ln,
confirm=1)
elif confirm in ["1", 1]:
res = remove_pbx(colID, pbxID, sel_ln)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showportalboxes(colID, ln, content=output)
def perform_switchfmtscore(colID, type, id_1, id_2, ln):
"""Switch the score of id_1 and id_2 in the table type.
colID - the current collection
    id_1/id_2 - the ids to change the score for.
type - like "format" """
fmt_dict = dict(get_def_name('', "format"))
res = switch_score(colID, id_1, id_2, type)
output = write_outcome(res)
return perform_showoutputformats(colID, ln, content=output)
def perform_switchfldscore(colID, id_1, id_2, fmeth, ln):
"""Switch the score of id_1 and id_2 in collection_field_fieldvalue.
colID - the current collection
    id_1/id_2 - the ids to change the score for.
fld_dict = dict(get_def_name('', "field"))
res = switch_fld_score(colID, id_1, id_2)
output = write_outcome(res)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
def perform_switchfldvaluescore(colID, id_1, id_fldvalue_1, id_fldvalue_2, ln):
"""Switch the score of id_1 and id_2 in collection_field_fieldvalue.
colID - the current collection
id_1/id_2 - the id's to change the score for."""
name_1 = run_sql("SELECT name from fieldvalue where id=%s", (id_fldvalue_1, ))[0][0]
name_2 = run_sql("SELECT name from fieldvalue where id=%s", (id_fldvalue_2, ))[0][0]
res = switch_fld_value_score(colID, id_1, id_fldvalue_1, id_fldvalue_2)
output = write_outcome(res)
return perform_modifyfield(colID, fldID=id_1, ln=ln, content=output)
def perform_addnewfieldvalue(colID, fldID, ln, name='', value='', callback="yes", confirm=-1):
"""form to add a new fieldvalue.
name - the name of the new fieldvalue
value - the value of the new fieldvalue
"""
output = ""
subtitle = """<a name="7.4"></a>Add new value"""
text = """
<span class="adminlabel">Display name</span>
<input class="admin_w200" type="text" name="name" value="%s" /><br />
<span class="adminlabel">Search value</span>
<input class="admin_w200" type="text" name="value" value="%s" /><br />
""" % (name, value)
output = createhiddenform(action="%s/admin/websearch/websearchadmin.py/addnewfieldvalue" % CFG_SITE_URL,
text=text,
colID=colID,
fldID=fldID,
ln=ln,
button="Add",
confirm=1)
if name and value and confirm in ["1", 1]:
res = add_fldv(name, value)
output += write_outcome(res)
if res[0] == 1:
res = add_col_fld(colID, fldID, 'seo', res[1])
if res[0] == 0:
output += "<br />" + write_outcome(res)
elif confirm not in ["-1", -1]:
output += """<b><span class="info">Please fill in name and value.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID=fldID, ln=ln, content=output)
def perform_modifyfieldvalue(colID, fldID, fldvID, ln, name='', value='', callback="yes", confirm=-1):
"""form to modify a fieldvalue.
name - the name of the fieldvalue
value - the value of the fieldvalue
"""
if confirm in [-1, "-1"]:
res = get_fld_value(fldvID)
(id, name, value) = res[0]
output = ""
subtitle = """<a name="7.4"></a>Modify existing value"""
output = """<dl>
<dt><b><span class="info">Warning: Modifications done below will also inflict on all places the modified data is used.</span></b></dt>
</dl>"""
text = """
<span class="adminlabel">Display name</span>
<input class="admin_w200" type="text" name="name" value="%s" /><br />
<span class="adminlabel">Search value</span>
<input class="admin_w200" type="text" name="value" value="%s" /><br />
""" % (name, value)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifyfieldvalue" % CFG_SITE_URL,
text=text,
colID=colID,
fldID=fldID,
fldvID=fldvID,
ln=ln,
button="Update",
confirm=1)
output += createhiddenform(action="%s/admin/websearch/websearchadmin.py/modifyfieldvalue" % CFG_SITE_URL,
text="Delete value and all associations",
colID=colID,
fldID=fldID,
fldvID=fldvID,
ln=ln,
button="Delete",
confirm=2)
if name and value and confirm in ["1", 1]:
res = update_fldv(fldvID, name, value)
output += write_outcome(res)
elif confirm in ["2", 2]:
res = delete_fldv(fldvID)
output += write_outcome(res)
elif confirm not in ["-1", -1]:
output += """<b><span class="info">Please fill in name and value.</span></b>"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID=fldID, ln=ln, content=output)
def perform_removefield(colID, ln, fldID='', fldvID='', fmeth='', callback='yes', confirm=0):
"""form to remove a field from a collection.
colID - the current collection, remove the field from this collection.
sel_ln - remove the field with this language
fldID - remove the field with this id"""
if fmeth == "soo":
field = "sort option"
elif fmeth == "sew":
field = "search field"
elif fmeth == "seo":
field = "search option"
else:
field = "field"
subtitle = """<a name="6.4"><a name="7.4"><a name="8.4"></a>Remove %s""" % field
output = ""
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
res = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), res))
if colID and fldID:
colID = int(colID)
fldID = int(fldID)
if fldvID and fldvID != "None":
fldvID = int(fldvID)
if confirm in ["0", 0]:
text = """Do you want to remove the %s '%s' %s from the collection '%s'.""" % (field, fld_dict[fldID], (fldvID not in["", "None"] and "with value '%s'" % fldv_dict[fldvID] or ''), col_dict[colID])
output += createhiddenform(action="removefield#6.5",
text=text,
button="Confirm",
colID=colID,
fldID=fldID,
fldvID=fldvID,
fmeth=fmeth,
confirm=1)
elif confirm in ["1", 1]:
res = remove_fld(colID, fldID, fldvID)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
def perform_removefieldvalue(colID, ln, fldID='', fldvID='', fmeth='', callback='yes', confirm=0):
"""form to remove a field from a collection.
colID - the current collection, remove the field from this collection.
sel_ln - remove the field with this language
fldID - remove the field with this id"""
subtitle = """<a name="7.4"></a>Remove value"""
output = ""
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
res = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), res))
if colID and fldID:
colID = int(colID)
fldID = int(fldID)
if fldvID and fldvID != "None":
fldvID = int(fldvID)
if confirm in ["0", 0]:
text = """Do you want to remove the value '%s' from the search option '%s'.""" % (fldv_dict[fldvID], fld_dict[fldID])
output += createhiddenform(action="removefieldvalue#7.4",
text=text,
button="Confirm",
colID=colID,
fldID=fldID,
fldvID=fldvID,
fmeth=fmeth,
confirm=1)
elif confirm in ["1", 1]:
res = remove_fld(colID, fldID, fldvID)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID=fldID, ln=ln, content=output)
def perform_rearrangefieldvalue(colID, fldID, ln, callback='yes', confirm=-1):
"""rearrang the fieldvalues alphabetically
colID - the collection
fldID - the field to rearrange the fieldvalue for
"""
subtitle = "Order values alphabetically"
output = ""
col_fldv = get_col_fld(colID, 'seo', fldID)
col_fldv = dict(map(lambda x: (x[1], x[0]), col_fldv))
fldv_names = get_fld_value()
fldv_names = map(lambda x: (x[0], x[1]), fldv_names)
if not col_fldv.has_key(None):
vscore = len(col_fldv)
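        # Walk the value names alphabetically while handing out descending
        # scores, so the alphabetically first value gets the highest score and
        # is shown first.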
for (fldvID, name) in fldv_names:
if col_fldv.has_key(fldvID):
run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=%s WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (vscore, colID, fldID, fldvID))
vscore -= 1
output += write_outcome((1, ""))
else:
output += write_outcome((0, (0, "No values to order")))
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID, ln, content=output)
def perform_rearrangefield(colID, ln, fmeth, callback='yes', confirm=-1):
"""rearrang the fields alphabetically
colID - the collection
"""
subtitle = "Order fields alphabetically"
output = ""
col_fld = dict(map(lambda x: (x[0], x[1]), get_col_fld(colID, fmeth)))
fld_names = get_def_name('', "field")
if len(col_fld) > 0:
score = len(col_fld)
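        # Same descending-score scheme as in perform_rearrangefieldvalue above.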
for (fldID, name) in fld_names:
if col_fld.has_key(fldID):
run_sql("UPDATE collection_field_fieldvalue SET score=%s WHERE id_collection=%s and id_field=%s", (score, colID, fldID))
score -= 1
output += write_outcome((1, ""))
else:
output += write_outcome((0, (0, "No fields to order")))
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
def perform_addexistingfieldvalue(colID, fldID, fldvID=-1, ln=CFG_SITE_LANG, callback='yes', confirm=-1):
"""form to add an existing fieldvalue to a field.
colID - the collection
fldID - the field to add the fieldvalue to
fldvID - the fieldvalue to add"""
subtitle = """</a><a name="7.4"></a>Add existing value to search option"""
output = ""
if fldvID not in [-1, "-1"] and confirm in [1, "1"]:
fldvID = int(fldvID)
ares = add_col_fld(colID, fldID, 'seo', fldvID)
colID = int(colID)
fldID = int(fldID)
lang = dict(get_languages())
res = get_def_name('', "field")
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(res)
col_fld = dict(map(lambda x: (x[0], x[1]), get_col_fld(colID, 'seo')))
fld_value = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), fld_value))
text = """
<span class="adminlabel">Value</span>
<select name="fldvID" class="admin_w200">
<option value="-1">- Select value -</option>
"""
res = run_sql("SELECT id,name,value FROM fieldvalue ORDER BY name")
for (id, name, value) in res:
text += """<option value="%s" %s>%s - %s</option>
""" % (id, id == int(fldvID) and 'selected="selected"' or '', name, value)
text += """</select><br />"""
output += createhiddenform(action="addexistingfieldvalue#7.4",
text=text,
button="Add",
colID=colID,
fldID=fldID,
ln=ln,
confirm=1)
if fldvID not in [-1, "-1"] and confirm in [1, "1"]:
output += write_outcome(ares)
elif confirm in [1, "1"]:
output += """<b><span class="info">Select a value to add and try again.</span></b>"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_modifyfield(colID, fldID, ln, content=output)
def perform_addexistingfield(colID, ln, fldID=-1, fldvID=-1, fmeth='', callback='yes', confirm=-1):
"""form to add an existing field to a collection.
colID - the collection to add the field to
fldID - the field to add
sel_ln - the language of the field"""
subtitle = """<a name="6.2"></a><a name="7.2"></a><a name="8.2"></a>Add existing field to collection"""
output = ""
if fldID not in [-1, "-1"] and confirm in [1, "1"]:
fldID = int(fldID)
ares = add_col_fld(colID, fldID, fmeth, fldvID)
colID = int(colID)
lang = dict(get_languages())
res = get_def_name('', "field")
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(res)
col_fld = dict(map(lambda x: (x[0], x[1]), get_col_fld(colID, fmeth)))
fld_value = get_fld_value()
fldv_dict = dict(map(lambda x: (x[0], x[1]), fld_value))
if fldvID:
fldvID = int(fldvID)
text = """
<span class="adminlabel">Field</span>
<select name="fldID" class="admin_w200">
<option value="-1">- Select field -</option>
"""
for (id, var) in res:
        if fmeth == 'seo' or not col_fld.has_key(id):
            text += """<option value="%s">%s</option>
            """ % (id, fld_dict[id])
text += """</select><br />"""
output += createhiddenform(action="addexistingfield#6.2",
text=text,
button="Add",
colID=colID,
fmeth=fmeth,
ln=ln,
confirm=1)
if fldID not in [-1, "-1"] and confirm in [1, "1"]:
output += write_outcome(ares)
elif fldID in [-1, "-1"] and confirm not in [-1, "-1"]:
output += """<b><span class="info">Select a field.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if fmeth == "soo":
return perform_showsortoptions(colID, ln, content=output)
elif fmeth == "sew":
return perform_showsearchfields(colID, ln, content=output)
elif fmeth == "seo":
return perform_showsearchoptions(colID, ln, content=output)
def perform_showsortoptions(colID, ln, callback='yes', content='', confirm=-1):
"""show the sort fields of this collection.."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
subtitle = """<a name="8">8. Modify sort options for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.8">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<dl>
<dt>Field actions (not related to this collection)</dt>
<dd>Go to the BibIndex interface to modify the available sort options</dd>
    <dt>Collection specific actions</dt>
<dd><a href="addexistingfield?colID=%s&ln=%s&fmeth=soo#8.2">Add sort option to collection</a></dd>
<dd><a href="rearrangefield?colID=%s&ln=%s&fmeth=soo#8.2">Order sort options alphabetically</a></dd>
</dl>
""" % (colID, ln, colID, ln)
header = ['', 'Sort option', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
if len(get_col_fld(colID, 'soo')) > 0:
res = get_col_fld(colID, 'soo')
i = 0
for (fldID, fldvID, stype, score, score_fieldvalue) in res:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=soo&rand=%s#8"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, res[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(res):
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=soo&rand=%s#8"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>""" % (CFG_SITE_URL, colID, ln, fldID, res[i][0], random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append([move, fld_dict[int(fldID)]])
for col in [(('Remove sort option', 'removefield'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=soo#8.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, col[0][0]))
                for (label, function) in col[1:]:
                    actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=soo#8.5">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, label)
output += tupletotable(header=header, tuple=actions)
else:
output += """No sort options exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showsortoptions", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_showsearchfields(colID, ln, callback='yes', content='', confirm=-1):
"""show the search fields of this collection.."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
subtitle = """<a name="6">6. Modify search fields for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.6">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<dl>
<dt>Field actions (not related to this collection)</dt>
<dd>Go to the BibIndex interface to modify the available search fields</dd>
    <dt>Collection specific actions</dt>
<dd><a href="addexistingfield?colID=%s&ln=%s&fmeth=sew#6.2">Add search field to collection</a></dd>
<dd><a href="rearrangefield?colID=%s&ln=%s&fmeth=sew#6.2">Order search fields alphabetically</a></dd>
</dl>
""" % (colID, ln, colID, ln)
header = ['', 'Search field', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
if len(get_col_fld(colID, 'sew')) > 0:
res = get_col_fld(colID, 'sew')
i = 0
for (fldID, fldvID, stype, score, score_fieldvalue) in res:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=sew&rand=%s#6"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, res[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(res):
move += '<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=sew&rand=%s#6"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>' % (CFG_SITE_URL, colID, ln, fldID, res[i][0], random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append([move, fld_dict[int(fldID)]])
for col in [(('Remove search field', 'removefield'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=sew#6.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, col[0][0]))
                for (label, function) in col[1:]:
                    actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s#6.5">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, label)
output += tupletotable(header=header, tuple=actions)
else:
output += """No search fields exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showsearchfields", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_showsearchoptions(colID, ln, callback='yes', content='', confirm=-1):
"""show the sort and search options of this collection.."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
subtitle = """<a name="7">7. Modify search options for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.7">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<dl>
<dt>Field actions (not related to this collection)</dt>
<dd>Go to the BibIndex interface to modify the available search options</dd>
    <dt>Collection specific actions</dt>
<dd><a href="addexistingfield?colID=%s&ln=%s&fmeth=seo#7.2">Add search option to collection</a></dd>
<dd><a href="rearrangefield?colID=%s&ln=%s&fmeth=seo#7.2">Order search options alphabetically</a></dd>
</dl>
""" % (colID, ln, colID, ln)
header = ['', 'Search option', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
fld_distinct = run_sql("SELECT distinct(id_field) FROM collection_field_fieldvalue WHERE type='seo' AND id_collection=%s ORDER by score desc", (colID, ))
if len(fld_distinct) > 0:
i = 0
        for id in fld_distinct:
fldID = id[0]
col_fld = get_col_fld(colID, 'seo', fldID)
move = ""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=seo&rand=%s#7"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, fld_distinct[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
i += 1
if i != len(fld_distinct):
move += '<a href="%s/admin/websearch/websearchadmin.py/switchfldscore?colID=%s&ln=%s&id_1=%s&id_2=%s&fmeth=seo&rand=%s#7"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>' % (CFG_SITE_URL, colID, ln, fldID, fld_distinct[i][0], random.randint(0, 1000), CFG_SITE_URL)
actions.append([move, "%s" % fld_dict[fldID]])
for col in [(('Modify values', 'modifyfield'), ('Remove search option', 'removefield'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s#7.3">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, col[0][0]))
                for (label, function) in col[1:]:
                    actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fmeth=seo#7.3">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, label)
output += tupletotable(header=header, tuple=actions)
else:
output += """No search options exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showsearchoptions", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_modifyfield(colID, fldID, fldvID='', ln=CFG_SITE_LANG, content='', callback='yes', confirm=0):
"""Modify the fieldvalues for a field"""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
fld_dict = dict(get_def_name('', "field"))
fld_type = get_sort_nametypes()
fldID = int(fldID)
subtitle = """<a name="7.3">Modify values for field '%s'</a>""" % (fld_dict[fldID])
output = """<dl>
    <dt>Value specific actions</dt>
<dd><a href="addexistingfieldvalue?colID=%s&ln=%s&fldID=%s#7.4">Add existing value to search option</a></dd>
<dd><a href="addnewfieldvalue?colID=%s&ln=%s&fldID=%s#7.4">Add new value to search option</a></dd>
<dd><a href="rearrangefieldvalue?colID=%s&ln=%s&fldID=%s#7.4">Order values alphabetically</a></dd>
</dl>
""" % (colID, ln, fldID, colID, ln, fldID, colID, ln, fldID)
header = ['', 'Value name', 'Actions']
actions = []
sitelangs = get_languages()
lang = dict(sitelangs)
fld_type_list = fld_type.items()
col_fld = list(get_col_fld(colID, 'seo', fldID))
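    # get_col_fld returns a single row with a NULL field value when the search
    # option has no values attached to it yet, hence the check below.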
if len(col_fld) == 1 and col_fld[0][1] is None:
output += """<b><span class="info">No values added for this search option yet</span></b>"""
else:
j = 0
for (fldID, fldvID, stype, score, score_fieldvalue) in col_fld:
fieldvalue = get_fld_value(fldvID)
move = ""
if j != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldvaluescore?colID=%s&ln=%s&id_1=%s&id_fldvalue_1=%s&id_fldvalue_2=%s&rand=%s#7.3"><img border="0" src="%s/img/smallup.gif" title="Move up"></a>""" % (CFG_SITE_URL, colID, ln, fldID, fldvID, col_fld[j - 1][1], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
j += 1
if j != len(col_fld):
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfldvaluescore?colID=%s&ln=%s&id_1=%s&id_fldvalue_1=%s&id_fldvalue_2=%s&rand=%s#7.3"><img border="0" src="%s/img/smalldown.gif" title="Move down"></a>""" % (CFG_SITE_URL, colID, ln, fldID, fldvID, col_fld[j][1], random.randint(0, 1000), CFG_SITE_URL)
if fieldvalue[0][1] != fieldvalue[0][2] and fldvID is not None:
actions.append([move, "%s - %s" % (fieldvalue[0][1], fieldvalue[0][2])])
elif fldvID is not None:
actions.append([move, "%s" % fieldvalue[0][1]])
move = ''
for col in [(('Modify value', 'modifyfieldvalue'), ('Remove value', 'removefieldvalue'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fldvID=%s&fmeth=seo#7.4">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, fldID, fldvID, col[0][0]))
                for (label, function) in col[1:]:
                    actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fldID=%s&fldvID=%s#7.4">%s</a>' % (CFG_SITE_URL, function, colID, ln, fldID, fldvID, label)
output += tupletotable(header=header, tuple=actions)
output += content
body = [output]
output = "<br />" + addadminbox(subtitle, body)
if len(col_fld) == 0:
output = content
return perform_showsearchoptions(colID, ln, content=output)
def perform_showoutputformats(colID, ln, callback='yes', content='', confirm=-1):
"""shows the outputformats of the current collection
colID - the collection id."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
subtitle = """<a name="10">10. Modify output formats for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.10">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """
<dl>
    <dt>Output format actions (not specific to the chosen collection)</dt>
<dd>Go to the BibFormat interface to modify</dd>
    <dt>Collection specific actions</dt>
<dd><a href="addexistingoutputformat?colID=%s&ln=%s#10.2">Add existing output format to collection</a></dd>
</dl>
""" % (colID, ln)
header = ['', 'Code', 'Output format', 'Actions']
actions = []
col_fmt = get_col_fmt(colID)
fmt_dict = dict(get_def_name('', "format"))
i = 0
if len(col_fmt) > 0:
for (id_format, colID_fld, code, score) in col_fmt:
move = """<table cellspacing="1" cellpadding="0" border="0"><tr><td>"""
if i != 0:
move += """<a href="%s/admin/websearch/websearchadmin.py/switchfmtscore?colID=%s&ln=%s&type=format&id_1=%s&id_2=%s&rand=%s#10"><img border="0" src="%s/img/smallup.gif" title="Move format up"></a>""" % (CFG_SITE_URL, colID, ln, id_format, col_fmt[i - 1][0], random.randint(0, 1000), CFG_SITE_URL)
else:
move += " "
move += "</td><td>"
i += 1
if i != len(col_fmt):
move += '<a href="%s/admin/websearch/websearchadmin.py/switchfmtscore?colID=%s&ln=%s&type=format&id_1=%s&id_2=%s&rand=%s#10"><img border="0" src="%s/img/smalldown.gif" title="Move format down"></a>' % (CFG_SITE_URL, colID, ln, id_format, col_fmt[i][0], random.randint(0, 1000), CFG_SITE_URL)
move += """</td></tr></table>"""
actions.append([move, code, fmt_dict[int(id_format)]])
for col in [(('Remove', 'removeoutputformat'),)]:
actions[-1].append('<a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fmtID=%s#10">%s</a>' % (CFG_SITE_URL, col[0][1], colID, ln, id_format, col[0][0]))
                for (label, function) in col[1:]:
                    actions[-1][-1] += ' / <a href="%s/admin/websearch/websearchadmin.py/%s?colID=%s&ln=%s&fmtID=%s#10">%s</a>' % (CFG_SITE_URL, function, colID, ln, id_format, label)
output += tupletotable(header=header, tuple=actions)
else:
output += """No output formats exists for this collection"""
output += content
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_showoutputformats", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def external_collections_build_select(colID, external_collection):
output = '<select name="state" class="admin_w200">'
if external_collection.parser:
max_state = 4
else:
max_state = 2
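    # Parser-capable engines expose more states than parserless ones; the
    # human-readable state names come from CFG_EXTERNAL_COLLECTION_STATES_NAME.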
num_selected = external_collection_get_state(external_collection, colID)
for num in range(max_state):
state_name = CFG_EXTERNAL_COLLECTION_STATES_NAME[num]
if num == num_selected:
selected = ' selected'
else:
selected = ''
output += '<option value="%(num)d"%(selected)s>%(state_name)s</option>' % {'num': num, 'selected': selected, 'state_name': state_name}
output += '</select>\n'
return output
def perform_manage_external_collections(colID, ln, callback='yes', content='', confirm=-1):
"""Show the interface to configure external collections to the user."""
colID = int(colID)
subtitle = """<a name="11">11. Configuration of related external collections</a>
<small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.11">?</a>]</small>""" % CFG_SITE_URL
output = '<form action="update_external_collections" method="POST"><input type="hidden" name="colID" value="%(colID)d">' % {'colID': colID}
table_header = ['External collection', 'Mode', 'Apply also to daughter collections?']
table_content = []
external_collections = external_collection_sort_engine_by_name(external_collections_dictionary.values())
for external_collection in external_collections:
collection_name = external_collection.name
select = external_collections_build_select(colID, external_collection)
        recurse = '<input type="checkbox" name="recurse" value="%(collection_name)s">' % {'collection_name': collection_name}
table_content.append([collection_name, select, recurse])
output += tupletotable(header=table_header, tuple=table_content)
output += '<input class="adminbutton" type="submit" value="Modify"/>'
output += '</form>'
return addadminbox(subtitle, [output])
def perform_update_external_collections(colID, ln, state_list, recurse_list):
colID = int(colID)
changes = []
output = ""
if not state_list:
        return 'Warning: No state found.<br />' + perform_manage_external_collections(colID, ln)
external_collections = external_collection_sort_engine_by_name(external_collections_dictionary.values())
if len(external_collections) != len(state_list):
        return 'Warning: Size of state_list different from external_collections!<br />' + perform_manage_external_collections(colID, ln)
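    # state_list is positionally aligned with the alphabetically sorted engine
    # list, hence the strict length check above before zipping them together.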
for (external_collection, state) in zip(external_collections, state_list):
state = int(state)
collection_name = external_collection.name
recurse = recurse_list and collection_name in recurse_list
oldstate = external_collection_get_state(external_collection, colID)
if oldstate != state or recurse:
changes += external_collection_get_update_state_list(external_collection, colID, state, recurse)
external_collection_apply_changes(changes)
return output + '<br /><br />' + perform_manage_external_collections(colID, ln)
def perform_showdetailedrecordoptions(colID, ln, callback='yes', content='', confirm=-1):
"""Show the interface to configure detailed record page to the user."""
colID = int(colID)
subtitle = """<a name="12">12. Configuration of detailed record page</a>
<small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.12">?</a>]</small>""" % CFG_SITE_URL
output = '''<form action="update_detailed_record_options" method="post">
<table><tr><td>
<input type="hidden" name="colID" value="%(colID)d">
<dl>
<dt><b>Show tabs:</b></dt>
<dd>
''' % {'colID': colID}
for (tab_id, tab_info) in get_detailed_page_tabs(colID).iteritems():
if tab_id == 'comments' and \
not CFG_WEBCOMMENT_ALLOW_REVIEWS and \
not CFG_WEBCOMMENT_ALLOW_COMMENTS:
continue
check = ''
output += '''<input type="checkbox" id="id%(tabid)s" name="tabs" value="%(tabid)s" %(check)s />
<label for="id%(tabid)s"> %(label)s</label><br />
''' % {'tabid':tab_id,
'check':((tab_info['visible'] and 'checked="checked"') or ''),
'label':tab_info['label']}
output += '</dd></dl></td><td>'
output += '</td></tr></table><input class="adminbutton" type="submit" value="Modify"/>'
output += '''<input type="checkbox" id="recurse" name="recurse" value="1" />
<label for="recurse"> Also apply to subcollections</label>'''
output += '</form>'
return addadminbox(subtitle, [output])
def perform_update_detailed_record_options(colID, ln, tabs, recurse):
"""Update the preferences for the tab to show/hide in the detailed record page."""
colID = int(colID)
changes = []
output = '<b><span class="info">Operation successfully completed.</span></b>'
if '' in tabs:
tabs.remove('')
tabs.append('metadata')
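        # Replace the empty submitted value with the 'metadata' tab name.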
def update_settings(colID, tabs, recurse):
run_sql("DELETE FROM collectiondetailedrecordpagetabs WHERE id_collection=%s", (colID, ))
run_sql("REPLACE INTO collectiondetailedrecordpagetabs" + \
" SET id_collection=%s, tabs=%s", (colID, ';'.join(tabs)))
if recurse:
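            # Apply the same tab configuration to every descendant collection.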
for descendant_id in get_collection_descendants(colID):
update_settings(descendant_id, tabs, recurse)
update_settings(colID, tabs, recurse)
return perform_editcollection(colID, ln, "perform_modifytranslations",
'<br /><br />' + output + '<br /><br />' + \
perform_showdetailedrecordoptions(colID, ln))
def perform_addexistingoutputformat(colID, ln, fmtID=-1, callback='yes', confirm=-1):
"""form to add an existing output format to a collection.
colID - the collection the format should be added to
fmtID - the format to add."""
subtitle = """<a name="10.2"></a>Add existing output format to collection"""
output = ""
if fmtID not in [-1, "-1"] and confirm in [1, "1"]:
ares = add_col_fmt(colID, fmtID)
colID = int(colID)
res = get_def_name('', "format")
fmt_dict = dict(res)
col_dict = dict(get_def_name('', "collection"))
col_fmt = get_col_fmt(colID)
col_fmt = dict(map(lambda x: (x[0], x[2]), col_fmt))
if len(res) > 0:
text = """
<span class="adminlabel">Output format</span>
<select name="fmtID" class="admin_w200">
<option value="-1">- Select output format -</option>
"""
for (id, name) in res:
if not col_fmt.has_key(id):
text += """<option value="%s" %s>%s</option>
""" % (id, id == int(fmtID) and 'selected="selected"' or '', name)
text += """</select><br />
"""
output += createhiddenform(action="addexistingoutputformat#10.2",
text=text,
button="Add",
colID=colID,
ln=ln,
confirm=1)
else:
output = """No existing output formats to add, please create a new one."""
if fmtID not in [-1, "-1"] and confirm in [1, "1"]:
output += write_outcome(ares)
elif fmtID in [-1, "-1"] and confirm not in [-1, "-1"]:
output += """<b><span class="info">Please select output format.</span></b>"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showoutputformats(colID, ln, content=output)
def perform_deleteoutputformat(colID, ln, fmtID=-1, callback='yes', confirm=-1):
"""form to delete an output format not in use.
colID - the collection id of the current collection.
fmtID - the format id to delete."""
subtitle = """<a name="10.3"></a>Delete an unused output format"""
output = """
<dl>
    <dd>Deleting an output format will also delete its associated translations.</dd>
</dl>
"""
colID = int(colID)
if fmtID not in [-1, "-1"] and confirm in [1, "1"]:
fmt_dict = dict(get_def_name('', "format"))
old_colNAME = fmt_dict[int(fmtID)]
ares = delete_fmt(int(fmtID))
res = get_def_name('', "format")
fmt_dict = dict(res)
col_dict = dict(get_def_name('', "collection"))
col_fmt = get_col_fmt()
col_fmt = dict(map(lambda x: (x[0], x[2]), col_fmt))
if len(res) > 0:
text = """
<span class="adminlabel">Output format</span>
<select name="fmtID" class="admin_w200">
"""
text += """<option value="-1">- Select output format -"""
for (id, name) in res:
if not col_fmt.has_key(id):
text += """<option value="%s" %s>%s""" % (id, id == int(fmtID) and 'selected="selected"' or '', name)
text += "</option>"
text += """</select><br />"""
output += createhiddenform(action="deleteoutputformat#10.3",
text=text,
button="Delete",
colID=colID,
ln=ln,
confirm=0)
if fmtID not in [-1, "-1"]:
fmtID = int(fmtID)
if confirm in [0, "0"]:
text = """<b>Do you want to delete the output format '%s'.</b>
""" % fmt_dict[fmtID]
output += createhiddenform(action="deleteoutputformat#10.3",
text=text,
button="Confirm",
colID=colID,
fmtID=fmtID,
ln=ln,
confirm=1)
elif confirm in [1, "1"]:
output += write_outcome(ares)
elif confirm not in [-1, "-1"]:
output += """<b><span class="info">Choose a output format to delete.</span></b>
"""
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showoutputformats(colID, ln, content=output)
def perform_removeoutputformat(colID, ln, fmtID='', callback='yes', confirm=0):
"""form to remove an output format from a collection.
colID - the collection id of the current collection.
fmtID - the format id.
"""
subtitle = """<a name="10.5"></a>Remove output format"""
output = ""
col_dict = dict(get_def_name('', "collection"))
fmt_dict = dict(get_def_name('', "format"))
if colID and fmtID:
colID = int(colID)
fmtID = int(fmtID)
if confirm in ["0", 0]:
text = """Do you want to remove the output format '%s' from the collection '%s'.""" % (fmt_dict[fmtID], col_dict[colID])
output += createhiddenform(action="removeoutputformat#10.5",
text=text,
button="Confirm",
colID=colID,
fmtID=fmtID,
confirm=1)
elif confirm in ["1", 1]:
res = remove_fmt(colID, fmtID)
output += write_outcome(res)
body = [output]
output = "<br />" + addadminbox(subtitle, body)
return perform_showoutputformats(colID, ln, content=output)
def perform_index(colID=1, ln=CFG_SITE_LANG, mtype='', content='', confirm=0):
"""The index method, calling methods to show the collection tree, create new collections and add collections to tree.
"""
subtitle = "Overview"
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
output = ""
fin_output = ""
if not col_dict.has_key(1):
res = add_col(CFG_SITE_NAME, '')
if res:
fin_output += """<b><span class="info">Created root collection.</span></b><br />"""
else:
return "Cannot create root collection, please check database."
if CFG_SITE_NAME != run_sql("SELECT name from collection WHERE id=1")[0][0]:
res = run_sql("update collection set name=%s where id=1", (CFG_SITE_NAME, ))
if res:
fin_output += """<b><span class="info">The name of the root collection has been modified to be the same as the %(sitename)s installation name given prior to installing %(sitename)s.</span><b><br />""" % {'sitename' : CFG_SITE_NAME}
else:
return "Error renaming root collection."
fin_output += """
<table>
<tr>
<td>0. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_showall">Show all</a></small></td>
<td>1. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_addcollection">Create new collection</a></small></td>
<td>2. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_addcollectiontotree">Attach collection to tree</a></small></td>
<td>3. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_modifycollectiontree">Modify collection tree</a></small></td>
<td>4. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_checkwebcollstatus">Webcoll Status</a></small></td>
</tr><tr>
<td>5. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_checkcollectionstatus">Collection Status</a></small></td>
<td>6. <small><a href="%s/admin/websearch/websearchadmin.py?colID=%s&ln=%s&mtype=perform_checkexternalcollections">Check external collections</a></small></td>
<td>7. <small><a href="%s/help/admin/websearch-admin-guide?ln=%s">Guide</a></small></td>
</tr>
</table>
""" % (CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, colID, ln, CFG_SITE_URL, ln)
if mtype == "":
fin_output += """<br /><br /><b><span class="info">To manage the collections, select an item from the menu.</span><b><br />"""
if mtype == "perform_addcollection" and content:
fin_output += content
elif mtype == "perform_addcollection" or mtype == "perform_showall":
fin_output += perform_addcollection(colID=colID, ln=ln, callback='')
fin_output += "<br />"
if mtype == "perform_addcollectiontotree" and content:
fin_output += content
elif mtype == "perform_addcollectiontotree" or mtype == "perform_showall":
fin_output += perform_addcollectiontotree(colID=colID, ln=ln, callback='')
fin_output += "<br />"
if mtype == "perform_modifycollectiontree" and content:
fin_output += content
elif mtype == "perform_modifycollectiontree" or mtype == "perform_showall":
fin_output += perform_modifycollectiontree(colID=colID, ln=ln, callback='')
fin_output += "<br />"
if mtype == "perform_checkwebcollstatus" and content:
fin_output += content
elif mtype == "perform_checkwebcollstatus" or mtype == "perform_showall":
fin_output += perform_checkwebcollstatus(colID, ln, callback='')
if mtype == "perform_checkcollectionstatus" and content:
fin_output += content
elif mtype == "perform_checkcollectionstatus" or mtype == "perform_showall":
fin_output += perform_checkcollectionstatus(colID, ln, callback='')
if mtype == "perform_checkexternalcollections" and content:
fin_output += content
elif mtype == "perform_checkexternalcollections" or mtype == "perform_showall":
fin_output += perform_checkexternalcollections(colID, ln, callback='')
    body = [fin_output]
return addadminbox('<b>Menu</b>', body)
def show_coll_not_in_tree(colID, ln, col_dict):
"""Returns collections not in tree"""
tree = get_col_tree(colID)
in_tree = {}
output = "These collections are not in the tree, and should be added:<br />"
for (id, up, down, dad, reltype) in tree:
in_tree[id] = 1
in_tree[dad] = 1
res = run_sql("SELECT id from collection")
if len(res) != len(in_tree):
for id in res:
if not in_tree.has_key(id[0]):
output += """<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&ln=%s" title="Edit collection">%s</a> ,
""" % (CFG_SITE_URL, id[0], ln, col_dict[id[0]])
output += "<br /><br />"
else:
output = ""
return output
def create_colltree(tree, col_dict, colID, ln, move_from='', move_to='', rtype='', edit=''):
"""Creates the presentation of the collection tree, with the buttons for modifying it.
tree - the tree to present, from get_tree()
col_dict - the name of the collections in a dictionary
colID - the collection id to start with
move_from - if a collection to be moved has been chosen
move_to - the collection which should be set as father of move_from
rtype - the type of the tree, regular or virtual
edit - if the method should output the edit buttons."""
if move_from:
move_from_rtype = move_from[0]
move_from_id = int(move_from[1:len(move_from)])
tree_from = get_col_tree(colID, move_from_rtype)
tree_to = get_col_tree(colID, rtype)
tables = 0
tstack = []
i = 0
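    # tstack records (collection id, parent id, table-nesting depth) tuples so
    # that, when a row's parent is found higher up the stack, any more deeply
    # nested HTML tables opened in between can be closed again.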
text = """
<table border ="0" cellspacing="0" cellpadding="0">"""
for i in range(0, len(tree)):
id_son = tree[i][0]
up = tree[i][1]
down = tree[i][2]
dad = tree[i][3]
reltype = tree[i][4]
tmove_from = ""
j = i
while j > 0:
j = j - 1
try:
if tstack[j][1] == dad:
table = tstack[j][2]
for k in range(0, tables - table):
tables = tables - 1
text += """</table></td></tr>
"""
break
            except StandardError:
                pass
text += """<tr><td>
"""
if i > 0 and tree[i][1] == 0:
tables = tables + 1
text += """</td><td></td><td></td><td></td><td><table border="0" cellspacing="0" cellpadding="0"><tr><td>
"""
if i == 0:
tstack.append((id_son, dad, 1))
else:
tstack.append((id_son, dad, tables))
if up == 1 and edit:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_up=%s&rtype=%s#%s"><img border="0" src="%s/img/smallup.gif" title="Move collection up"></a>""" % (CFG_SITE_URL, colID, ln, i, rtype, tree[i][0], CFG_SITE_URL)
else:
text += """ """
text += "</td><td>"
if down == 1 and edit:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_down=%s&rtype=%s#%s"><img border="0" src="%s/img/smalldown.gif" title="Move collection down"></a>""" % (CFG_SITE_URL, colID, ln, i, rtype, tree[i][0], CFG_SITE_URL)
else:
text += """ """
text += "</td><td>"
if edit:
if move_from and move_to:
tmove_from = move_from
move_from = ''
if not (move_from == "" and i == 0) and not (move_from != "" and int(move_from[1:len(move_from)]) == i and rtype == move_from[0]):
check = "true"
if move_from:
#if tree_from[move_from_id][0] == tree_to[i][0] or not check_col(tree_to[i][0], tree_from[move_from_id][0]):
# check = ''
#elif not check_col(tree_to[i][0], tree_from[move_from_id][0]):
# check = ''
#if not check and (tree_to[i][0] == 1 and tree_from[move_from_id][3] == tree_to[i][0] and move_from_rtype != rtype):
# check = "true"
if check:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_from=%s&move_to=%s%s&rtype=%s#tree"><img border="0" src="%s/img/move_to.gif" title="Move '%s' to '%s'"></a>
""" % (CFG_SITE_URL, colID, ln, move_from, rtype, i, rtype, CFG_SITE_URL, col_dict[tree_from[int(move_from[1:len(move_from)])][0]], col_dict[tree_to[i][0]])
else:
try:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&move_from=%s%s&rtype=%s#%s"><img border="0" src="%s/img/move_from.gif" title="Move '%s' from this location."></a>""" % (CFG_SITE_URL, colID, ln, rtype, i, rtype, tree[i][0], CFG_SITE_URL, col_dict[tree[i][0]])
except KeyError:
pass
else:
text += """<img border="0" src="%s/img/white_field.gif">
""" % CFG_SITE_URL
else:
text += """<img border="0" src="%s/img/white_field.gif">
""" % CFG_SITE_URL
text += """
</td>
<td>"""
if edit:
try:
text += """<a href="%s/admin/websearch/websearchadmin.py/modifycollectiontree?colID=%s&ln=%s&delete=%s&rtype=%s#%s"><img border="0" src="%s/img/iconcross.gif" title="Remove colletion from tree"></a>""" % (CFG_SITE_URL, colID, ln, i, rtype, tree[i][0], CFG_SITE_URL)
except KeyError:
pass
elif i != 0:
text += """<img border="0" src="%s/img/white_field.gif">
""" % CFG_SITE_URL
text += """</td><td>
"""
if tmove_from:
move_from = tmove_from
try:
text += """<a name="%s"></a>%s<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&ln=%s" title="Edit collection">%s</a>%s%s%s""" % (tree[i][0], (reltype=="v" and '<i>' or ''), CFG_SITE_URL, tree[i][0], ln, col_dict[id_son], (move_to=="%s%s" %(rtype, i) and ' <img border="0" src="%s/img/move_to.gif">' % CFG_SITE_URL or ''), (move_from=="%s%s" % (rtype, i) and ' <img border="0" src="%s/img/move_from.gif">' % CFG_SITE_URL or ''), (reltype=="v" and '</i>' or ''))
except KeyError:
pass
text += """</td></tr>
"""
while tables > 0:
text += """</table></td></tr>
"""
tables = tables - 1
text += """</table>
"""
return text
def perform_deletecollection(colID, ln, confirm=-1, callback='yes'):
"""form to delete a collection
colID - id of collection
"""
    subtitle = ''
output = """
<span class="warning">
<strong>
<dl>
<dt>WARNING:</dt>
    <dd>When deleting a collection, you also delete all data related to the collection, like translations, relations to other collections and information about which rank methods to use.
<br />For more information, please go to the <a title="See guide" href="%s/help/admin/websearch-admin-guide">WebSearch guide</a> and read the section regarding deleting a collection.</dd>
</dl>
</strong>
</span>
""" % CFG_SITE_URL
col_dict = dict(get_def_name('', "collection"))
if colID != 1 and colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="4">4. Delete collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.4">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
res = run_sql("SELECT id_dad,id_son,type,score from collection_collection WHERE id_dad=%s", (colID, ))
res2 = run_sql("SELECT id_dad,id_son,type,score from collection_collection WHERE id_son=%s", (colID, ))
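        # A collection may only be deleted once it has neither children
        # (id_dad = colID) nor parents (id_son = colID) left in the tree.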
if not res and not res2:
if confirm in ["-1", -1]:
text = """Do you want to delete this collection."""
output += createhiddenform(action="deletecollection#4",
text=text,
colID=colID,
button="Delete",
confirm=0)
elif confirm in ["0", 0]:
text = """Are you sure you want to delete this collection."""
output += createhiddenform(action="deletecollection#4",
text=text,
colID=colID,
button="Confirm",
confirm=1)
elif confirm in ["1", 1]:
result = delete_col(colID)
                if not result:
                    raise Exception("Failed to delete collection %s" % colID)
else:
output = """<b><span class="info">Can not delete a collection that is a part of the collection tree, remove collection from the tree and try again.</span></b>"""
else:
subtitle = """4. Delete collection"""
output = """<b><span class="info">Not possible to delete the root collection</span></b>"""
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_deletecollection", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_editcollection(colID=1, ln=CFG_SITE_LANG, mtype='', content=''):
"""interface to modify a collection. this method is calling other methods which again is calling this and sending back the output of the method.
if callback, the method will call perform_editcollection, if not, it will just return its output.
colID - id of the collection
mtype - the method that called this method.
content - the output from that method."""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
if not col_dict.has_key(colID):
return """<b><span class="info">Collection deleted.</span></b>
"""
fin_output = """
<table>
<tr>
<td><b>Menu</b></td>
</tr>
<tr>
<td>0. <small><a href="editcollection?colID=%s&ln=%s">Show all</a></small></td>
<td>1. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifydbquery">Modify collection query</a></small></td>
<td>2. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifyrestricted">Modify access restrictions</a></small></td>
<td>3. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifytranslations">Modify translations</a></small></td>
<td>4. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_deletecollection">Delete collection</a></small></td>
</tr><tr>
<td>5. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showportalboxes">Modify portalboxes</a></small></td>
<td>6. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showsearchfields#6">Modify search fields</a></small></td>
<td>7. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showsearchoptions#7">Modify search options</a></small></td>
<td>8. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showsortoptions#8">Modify sort options</a></small></td>
<td>9. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_modifyrankmethods#9">Modify rank options</a></small></td>
</tr><tr>
<td>10. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showoutputformats#10">Modify output formats</a></small></td>
<td>11. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_manage_external_collections#11">Configuration of related external collections</a></small></td>
<td>12. <small><a href="editcollection?colID=%s&ln=%s&mtype=perform_showdetailedrecordoptions#12">Detailed record page options</a></small></td>
</tr>
</table>
""" % (colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln, colID, ln)
if mtype == "perform_modifydbquery" and content:
fin_output += content
elif mtype == "perform_modifydbquery" or not mtype:
fin_output += perform_modifydbquery(colID, ln, callback='')
if mtype == "perform_modifyrestricted" and content:
fin_output += content
elif mtype == "perform_modifyrestricted" or not mtype:
fin_output += perform_modifyrestricted(colID, ln, callback='')
if mtype == "perform_modifytranslations" and content:
fin_output += content
elif mtype == "perform_modifytranslations" or not mtype:
fin_output += perform_modifytranslations(colID, ln, callback='')
if mtype == "perform_deletecollection" and content:
fin_output += content
elif mtype == "perform_deletecollection" or not mtype:
fin_output += perform_deletecollection(colID, ln, callback='')
if mtype == "perform_showportalboxes" and content:
fin_output += content
elif mtype == "perform_showportalboxes" or not mtype:
fin_output += perform_showportalboxes(colID, ln, callback='')
if mtype == "perform_showsearchfields" and content:
fin_output += content
elif mtype == "perform_showsearchfields" or not mtype:
fin_output += perform_showsearchfields(colID, ln, callback='')
if mtype == "perform_showsearchoptions" and content:
fin_output += content
elif mtype == "perform_showsearchoptions" or not mtype:
fin_output += perform_showsearchoptions(colID, ln, callback='')
if mtype == "perform_showsortoptions" and content:
fin_output += content
elif mtype == "perform_showsortoptions" or not mtype:
fin_output += perform_showsortoptions(colID, ln, callback='')
if mtype == "perform_modifyrankmethods" and content:
fin_output += content
elif mtype == "perform_modifyrankmethods" or not mtype:
fin_output += perform_modifyrankmethods(colID, ln, callback='')
if mtype == "perform_showoutputformats" and content:
fin_output += content
elif mtype == "perform_showoutputformats" or not mtype:
fin_output += perform_showoutputformats(colID, ln, callback='')
if mtype == "perform_manage_external_collections" and content:
fin_output += content
elif mtype == "perform_manage_external_collections" or not mtype:
fin_output += perform_manage_external_collections(colID, ln, callback='')
if mtype == "perform_showdetailedrecordoptions" and content:
fin_output += content
elif mtype == "perform_showdetailedrecordoptions" or not mtype:
fin_output += perform_showdetailedrecordoptions(colID, ln, callback='')
return addadminbox("Overview of edit options for collection '%s'" % col_dict[colID], [fin_output])
def perform_checkwebcollstatus(colID, ln, confirm=0, callback='yes'):
"""Check status of the collection tables with respect to the webcoll cache."""
subtitle = """<a name="11"></a>Webcoll Status [<a href="%s/help/admin/websearch-admin-guide#5">?</a>]""" % CFG_SITE_URL
output = ""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
output += """<br /><b>Last updates:</b><br />"""
collection_table_update_time = ""
collection_web_update_time = ""
collection_table_update_time = get_table_update_time('collection')
output += "Collection table last updated: %s<br />" % collection_table_update_time
    try:
        fd = open("%s/collections/last_updated" % CFG_CACHEDIR)
        collection_web_update_time = fd.readline().strip()
        output += "Collection cache last updated: %s<br />" % collection_web_update_time
        fd.close()
    except IOError:
        pass
# reformat collection_web_update_time to the format suitable for comparisons
try:
collection_web_update_time = strftime("%Y-%m-%d %H:%M:%S",
time.strptime(collection_web_update_time, "%d %b %Y %H:%M:%S"))
    except ValueError:
        pass
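    # After the reformat above both timestamps are "YYYY-MM-DD HH:MM:SS"
    # strings, so plain string comparison orders them chronologically.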
if collection_table_update_time > collection_web_update_time:
output += """<br /><b><span class="info">Warning: The collections have been modified since last time Webcoll was executed, to process the changes, Webcoll must be executed.</span></b><br />"""
header = ['ID', 'Name', 'Time', 'Status', 'Progress']
actions = []
output += """<br /><b>Last BibSched tasks:</b><br />"""
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='webcoll' and runtime< now() ORDER by runtime")
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[len(res) - 1]
        webcoll_update_time = runtime
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
else:
actions.append(['', 'webcoll', '', '', 'Not executed yet'])
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='bibindex' and runtime< now() ORDER by runtime")
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[len(res) - 1]
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
else:
actions.append(['', 'bibindex', '', '', 'Not executed yet'])
output += tupletotable(header=header, tuple=actions)
output += """<br /><b>Next scheduled BibSched run:</b><br />"""
actions = []
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='webcoll' and runtime > now() ORDER by runtime")
webcoll_future = ""
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[0]
        webcoll_update_time = runtime
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
webcoll_future = "yes"
else:
actions.append(['', 'webcoll', '', '', 'Not scheduled'])
res = run_sql("select id, proc, host, user, runtime, sleeptime, arguments, status, progress from schTASK where proc='bibindex' and runtime > now() ORDER by runtime")
bibindex_future = ""
if len(res) > 0:
(id, proc, host, user, runtime, sleeptime, arguments, status, progress) = res[0]
actions.append([id, proc, runtime, (status !="" and status or ''), (progress !="" and progress or '')])
bibindex_future = "yes"
else:
actions.append(['', 'bibindex', '', '', 'Not scheduled'])
output += tupletotable(header=header, tuple=actions)
if webcoll_future == "":
output += """<br /><b><span class="info">Warning: Webcoll is not scheduled for a future run by bibsched, any updates to the collection will not be processed.</span></b><br />"""
if bibindex_future == "":
output += """<br /><b><span class="info">Warning: Bibindex is not scheduled for a future run by bibsched, any updates to the records will not be processed.</span></b><br />"""
body = [output]
if callback:
return perform_index(colID, ln, "perform_checkwebcollstatus", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
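# A minimal usage sketch (hedged; the collection id and language are made up).
# Passing callback='' returns the bare admin box instead of wrapping it in the
# full admin index page:
#
#     html = perform_checkwebcollstatus(1, 'en', callback='')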
def perform_modifyrestricted(colID, ln, rest='', callback='yes', confirm=-1):
"""modify which apache group is allowed to access the collection.
rest - the groupname"""
subtitle = ''
output = ""
col_dict = dict(get_def_name('', "collection"))
action_id = acc_get_action_id(VIEWRESTRCOLL)
if colID and col_dict.has_key(int(colID)):
colID = int(colID)
subtitle = """<a name="2">2. Modify access restrictions for collection '%s'</a> <small>[<a title="See guide" href="%s/help/admin/websearch-admin-guide#3.2">?</a>]</small>""" % (col_dict[colID], CFG_SITE_URL)
output = """<p>Please note that Invenio versions greater than <em>0.92.1</em> manage collection restriction via the standard
<strong><a href="/admin/webaccess/webaccessadmin.py/showactiondetails?id_action=%i">WebAccess Admin Interface</a></strong> (action '%s').</p>
""" % (action_id, VIEWRESTRCOLL)
body = [output]
if callback:
return perform_editcollection(colID, ln, "perform_modifyrestricted", addadminbox(subtitle, body))
else:
return addadminbox(subtitle, body)
def perform_checkcollectionstatus(colID, ln, confirm=0, callback='yes'):
"""Check the configuration of the collections."""
from invenio.search_engine import collection_restricted_p, restricted_collection_cache
subtitle = """<a name="11"></a>Collection Status [<a href="%s/help/admin/websearch-admin-guide#6">?</a>]""" % CFG_SITE_URL
output = ""
colID = int(colID)
col_dict = dict(get_def_name('', "collection"))
collections = run_sql("SELECT id, name, dbquery, nbrecs FROM collection "
"ORDER BY id")
header = ['ID', 'Name','Query', 'Subcollections', 'Restricted', 'Hosted',
'I18N', 'Status', 'Number of records']
rnk_list = get_def_name('', "rnkMETHOD")
actions = []
restricted_collection_cache.recreate_cache_if_needed()
for (id, name, dbquery, nbrecs) in collections:
reg_sons = col_has_son(id, 'r')
vir_sons = col_has_son(id, 'v')
status = ""
hosted = ""
        if str(dbquery).startswith("hostedcollection:"):
            hosted = """<b><span class="info">Yes</span></b>"""
        else:
            hosted = """<b><span class="info">No</span></b>"""
langs = run_sql("SELECT ln from collectionname where id_collection=%s", (id, ))
i8n = ""
if len(langs) > 0:
for lang in langs:
i8n += "%s, " % lang
else:
i8n = """<b><span class="info">None</span></b>"""
if reg_sons and dbquery:
status = """<b><span class="warning">1:Conflict</span></b>"""
elif not dbquery and not reg_sons:
status = """<b><span class="warning">2:Empty</span></b>"""
if (reg_sons or vir_sons):
subs = """<b><span class="info">Yes</span></b>"""
else:
subs = """<b><span class="info">No</span></b>"""
if dbquery is None:
dbquery = """<b><span class="info">No</span></b>"""
restricted = collection_restricted_p(name, recreate_cache_if_needed=False)
if restricted:
restricted = """<b><span class="warning">Yes</span></b>"""
if status:
status += """<b><span class="warning">,3:Restricted</span></b>"""
else:
status += """<b><span class="warning">3:Restricted</span></b>"""
else:
restricted = """<b><span class="info">No</span></b>"""
if status == "":
status = """<b><span class="info">OK</span></b>"""
actions.append([id, """<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&ln=%s">%s</a>""" % (CFG_SITE_URL, id, ln, name), dbquery, subs, restricted, hosted, i8n, status, nbrecs])
output += tupletotable(header=header, tuple=actions)
body = [output]
    if callback:
        return perform_index(colID, ln, "perform_checkcollectionstatus", addadminbox(subtitle, body))
    else:
        return addadminbox(subtitle, body)
def perform_checkexternalcollections(colID, ln, icl=None, update="", confirm=0, callback='yes'):
"""Check the external collections for inconsistencies."""
subtitle = """<a name="7"></a>Check external collections [<a href="%s/help/admin/websearch-admin-guide#7">?</a>]""" % CFG_SITE_URL
output = ""
colID = int(colID)
if icl:
if update == "add":
# icl : the "inconsistent list" comes as a string, it has to be converted back into a list
icl = eval(icl)
#icl = icl[1:-1].split(',')
for collection in icl:
#collection = str(collection[1:-1])
query_select = "SELECT name FROM externalcollection WHERE name like '%(name)s';" % {'name': collection}
results_select = run_sql(query_select)
if not results_select:
query_insert = "INSERT INTO externalcollection (name) VALUES ('%(name)s');" % {'name': collection}
run_sql(query_insert)
output += """<br /><span class=info>New collection \"%s\" has been added to the database table \"externalcollection\".</span><br />""" % (collection)
else:
output += """<br /><span class=info>Collection \"%s\" has already been added to the database table \"externalcollection\" or was already there.</span><br />""" % (collection)
elif update == "del":
# icl : the "inconsistent list" comes as a string, it has to be converted back into a list
icl = eval(icl)
#icl = icl[1:-1].split(',')
for collection in icl:
#collection = str(collection[1:-1])
query_select = "SELECT id FROM externalcollection WHERE name like '%(name)s';" % {'name': collection}
results_select = run_sql(query_select)
if results_select:
query_delete = "DELETE FROM externalcollection WHERE id like '%(id)s';" % {'id': results_select[0][0]}
query_delete_states = "DELETE FROM collection_externalcollection WHERE id_externalcollection like '%(id)s';" % {'id': results_select[0][0]}
run_sql(query_delete)
run_sql(query_delete_states)
output += """<br /><span class=info>Collection \"%s\" has been deleted from the database table \"externalcollection\".</span><br />""" % (collection)
else:
output += """<br /><span class=info>Collection \"%s\" has already been delete from the database table \"externalcollection\" or was never there.</span><br />""" % (collection)
external_collections_file = []
external_collections_db = []
for coll in external_collections_dictionary.values():
external_collections_file.append(coll.name)
external_collections_file.sort()
query = """SELECT name from externalcollection"""
results = run_sql(query)
for result in results:
external_collections_db.append(result[0])
external_collections_db.sort()
number_file = len(external_collections_file)
number_db = len(external_collections_db)
if external_collections_file == external_collections_db:
output += """<br /><span class="info">External collections are consistent.</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections""" % {
"number_db" : number_db,
"number_file" : number_file}
elif len(external_collections_file) > len(external_collections_db):
external_collections_diff = list(set(external_collections_file) - set(external_collections_db))
external_collections_db.extend(external_collections_diff)
external_collections_db.sort()
if external_collections_file == external_collections_db:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections
(<span class="warning">missing: %(diff)s</span>)<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><a href="%(site_url)s/admin/websearch/websearchadmin.py/checkexternalcollections?colID=%(colID)s&icl=%(diff)s&update=add&ln=%(ln)s">
Click here</a> to update your database adding the missing collections. If the problem persists please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file,
"diff" : external_collections_diff,
"site_url" : CFG_SITE_URL,
"colID" : colID,
"ln" : ln}
else:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><span class="warning">The external collections do not match.</span>
<br />To fix the problem please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file}
elif len(external_collections_file) < len(external_collections_db):
external_collections_diff = list(set(external_collections_db) - set(external_collections_file))
external_collections_file.extend(external_collections_diff)
external_collections_file.sort()
if external_collections_file == external_collections_db:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections
(<span class="warning">extra: %(diff)s</span>)<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><a href="%(site_url)s/admin/websearch/websearchadmin.py/checkexternalcollections?colID=%(colID)s&icl=%(diff)s&update=del&ln=%(ln)s">
Click here</a> to force remove the extra collections from your database (warning: use with caution!). If the problem persists please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file,
"diff" : external_collections_diff,
"site_url" : CFG_SITE_URL,
"colID" : colID,
"ln" : ln}
else:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><span class="warning">The external collections do not match.</span>
<br />To fix the problem please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file}
else:
output += """<br /><span class="warning">There is an inconsistency:</span><br /><br />
- database table \"externalcollection\" has %(number_db)s collections<br />
- configuration file \"websearch_external_collections_config.py\" has %(number_file)s collections
<br /><br /><span class="warning">The number of external collections is the same but the collections do not match.</span>
<br />To fix the problem please check your configuration manually.""" % {
"number_db" : number_db,
"number_file" : number_file}
body = [output]
    if callback:
        return perform_index(colID, ln, "perform_checkexternalcollections", addadminbox(subtitle, body))
    else:
        return addadminbox(subtitle, body)
def col_has_son(colID, rtype='r'):
"""Return True if the collection has at least one son."""
return run_sql("SELECT id_son FROM collection_collection WHERE id_dad=%s and type=%s LIMIT 1", (colID, rtype)) != ()
def get_col_tree(colID, rtype=''):
"""Returns a presentation of the tree as a list. TODO: Add loop detection
colID - startpoint for the tree
rtype - get regular or virtual part of the tree"""
try:
colID = int(colID)
stack = [colID]
ssize = 0
tree = [(colID, 0, 0, colID, 'r')]
while len(stack) > 0:
ccolID = stack.pop()
if ccolID == colID and rtype:
res = run_sql("SELECT id_son, score, type FROM collection_collection WHERE id_dad=%s AND type=%s ORDER BY score ASC,id_son", (ccolID, rtype))
else:
res = run_sql("SELECT id_son, score, type FROM collection_collection WHERE id_dad=%s ORDER BY score ASC,id_son", (ccolID, ))
ssize += 1
ntree = []
            for i in range(0, len(res)):
                id_son = res[i][0]
                score = res[i][1]
                # use a local name for the row's type so the rtype argument
                # used for filtering above is not clobbered mid-walk
                son_rtype = res[i][2]
                stack.append(id_son)
                if i == (len(res) - 1):
                    up = 0
                else:
                    up = 1
                if i == 0:
                    down = 0
                else:
                    down = 1
                ntree.insert(0, (id_son, up, down, ccolID, son_rtype))
tree = tree[0:ssize] + ntree + tree[ssize:len(tree)]
return tree
except StandardError, e:
register_exception()
return ()
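# Example (hedged) of walking the tree returned above.  Each entry is a
# (colID, up, down, id_dad, rtype) tuple, where up/down flag whether the node
# can be moved among its siblings; the collection id below is made up:
#
#     for (cid, up, down, dad, rtype) in get_col_tree(1, 'r'):
#         print cid, 'is a child of', dad, '(%s)' % rtype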
def add_col_dad_son(add_dad, add_son, rtype):
"""Add a son to a collection (dad)
add_dad - add to this collection id
add_son - add this collection id
rtype - either regular or virtual"""
try:
res = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s ORDER BY score ASC", (add_dad, ))
highscore = 0
for score in res:
if int(score[0]) > highscore:
highscore = int(score[0])
highscore += 1
res = run_sql("INSERT INTO collection_collection(id_dad,id_son,score,type) values(%s,%s,%s,%s)", (add_dad, add_son, highscore, rtype))
return (1, highscore)
except StandardError, e:
register_exception()
return (0, e)
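# Example (hedged; ids are hypothetical): attach collection 5 as a regular
# son of collection 1.  The new son is appended last, i.e. it receives the
# highest existing score + 1:
#
#     (ok, score_or_err) = add_col_dad_son(1, 5, 'r')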
def compare_on_val(first, second):
"""Compare the two values"""
return cmp(first[1], second[1])
def get_col_fld(colID=-1, type = '', id_field=''):
"""Returns either all portalboxes associated with a collection, or based on either colID or language or both.
colID - collection id
ln - language id"""
sql = "SELECT id_field,id_fieldvalue,type,score,score_fieldvalue FROM collection_field_fieldvalue, field WHERE id_field=field.id"
params = []
if colID > -1:
sql += " AND id_collection=%s"
params.append(colID)
if id_field:
sql += " AND id_field=%s"
params.append(id_field)
if type:
sql += " AND type=%s"
params.append(type)
sql += " ORDER BY type, score desc, score_fieldvalue desc"
res = run_sql(sql, tuple(params))
return res
def get_col_pbx(colID=-1, ln='', position = ''):
"""Returns either all portalboxes associated with a collection, or based on either colID or language or both.
colID - collection id
ln - language id"""
sql = "SELECT id_portalbox, id_collection, ln, score, position, title, body FROM collection_portalbox, portalbox WHERE id_portalbox = portalbox.id"
params = []
if colID > -1:
sql += " AND id_collection=%s"
params.append(colID)
if ln:
sql += " AND ln=%s"
params.append(ln)
if position:
sql += " AND position=%s"
params.append(position)
sql += " ORDER BY position, ln, score desc"
res = run_sql(sql, tuple(params))
return res
def get_col_fmt(colID=-1):
"""Returns all formats currently associated with a collection, or for one specific collection
colID - the id of the collection"""
if colID not in [-1, "-1"]:
res = run_sql("SELECT id_format, id_collection, code, score FROM collection_format, format WHERE id_format = format.id AND id_collection=%s ORDER BY score desc", (colID, ))
else:
res = run_sql("SELECT id_format, id_collection, code, score FROM collection_format, format WHERE id_format = format.id ORDER BY score desc")
return res
def get_col_rnk(colID, ln):
""" Returns a list of the rank methods the given collection is attached to
colID - id from collection"""
try:
res1 = dict(run_sql("SELECT id_rnkMETHOD, '' FROM collection_rnkMETHOD WHERE id_collection=%s", (colID, )))
res2 = get_def_name('', "rnkMETHOD")
result = filter(lambda x: res1.has_key(x[0]), res2)
return result
except StandardError, e:
return ()
def get_pbx():
"""Returns all portalboxes"""
res = run_sql("SELECT id, title, body FROM portalbox ORDER by title,body")
return res
def get_fld_value(fldvID = ''):
"""Returns fieldvalue"""
sql = "SELECT id, name, value FROM fieldvalue"
params = []
if fldvID:
sql += " WHERE id=%s"
params.append(fldvID)
sql += " ORDER BY name"
res = run_sql(sql, tuple(params))
return res
def get_pbx_pos():
"""Returns a list of all the positions for a portalbox"""
position = {}
position["rt"] = "Right Top"
position["lt"] = "Left Top"
position["te"] = "Title Epilog"
position["tp"] = "Title Prolog"
position["ne"] = "Narrow by coll epilog"
position["np"] = "Narrow by coll prolog"
return position
def get_sort_nametypes():
"""Return a list of the various translationnames for the fields"""
type = {}
type['soo'] = 'Sort options'
type['seo'] = 'Search options'
type['sew'] = 'Search within'
return type
def get_fmt_nametypes():
"""Return a list of the various translationnames for the output formats"""
type = []
type.append(('ln', 'Long name'))
return type
def get_fld_nametypes():
"""Return a list of the various translationnames for the fields"""
type = []
type.append(('ln', 'Long name'))
return type
def get_col_nametypes():
"""Return a list of the various translationnames for the collections"""
type = []
type.append(('ln', 'Long name'))
return type
def find_last(tree, start_son):
"""Find the previous collection in the tree with the same father as start_son"""
id_dad = tree[start_son][3]
while start_son > 0:
start_son -= 1
if tree[start_son][3] == id_dad:
return start_son
def find_next(tree, start_son):
"""Find the next collection in the tree with the same father as start_son"""
id_dad = tree[start_son][3]
    while start_son < len(tree) - 1:
start_son += 1
if tree[start_son][3] == id_dad:
return start_son
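# Example (hedged): locate the siblings surrounding a tree position so their
# scores can be swapped with switch_col_treescore() defined further below:
#
#     tree = get_col_tree(1)
#     prev_pos = find_last(tree, 3)
#     next_pos = find_next(tree, 3)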
def remove_col_subcol(id_son, id_dad, type):
"""Remove a collection as a son of another collection in the tree, if collection isn't used elsewhere in the tree, remove all registered sons of the id_son.
id_son - collection id of son to remove
id_dad - the id of the dad"""
try:
if id_son != id_dad:
tree = get_col_tree(id_son)
run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s", (id_son, id_dad))
else:
tree = get_col_tree(id_son, type)
run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s and type=%s", (id_son, id_dad, type))
if not run_sql("SELECT id_dad,id_son,type,score from collection_collection WHERE id_son=%s and type=%s", (id_son, type)):
for (id, up, down, dad, rtype) in tree:
run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s", (id, dad))
return (1, "")
except StandardError, e:
return (0, e)
def check_col(add_dad, add_son):
"""Check if the collection can be placed as a son of the dad without causing loops.
add_dad - collection id
add_son - collection id"""
try:
stack = [add_dad]
res = run_sql("SELECT id_dad FROM collection_collection WHERE id_dad=%s AND id_son=%s", (add_dad, add_son))
if res:
raise StandardError
while len(stack) > 0:
colID = stack.pop()
res = run_sql("SELECT id_dad FROM collection_collection WHERE id_son=%s", (colID, ))
for id in res:
if int(id[0]) == int(add_son):
                    # raising StandardError here did not work as intended, so
                    # signal failure with a falsy return value instead
                    return 0
else:
stack.append(id[0])
return (1, "")
except StandardError, e:
return (0, e)
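# Example (hedged; ids are hypothetical): guard an attach operation against
# cycles.  Attaching a collection under one of its own descendants would make
# the tree walk loop forever, so the check must pass first:
#
#     if check_col(dad_id, son_id) == (1, ""):
#         add_col_dad_son(dad_id, son_id, 'r')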
def attach_rnk_col(colID, rnkID):
"""attach rank method to collection
rnkID - id from rnkMETHOD table
colID - id of collection, as in collection table """
try:
res = run_sql("INSERT INTO collection_rnkMETHOD(id_collection, id_rnkMETHOD) values (%s,%s)", (colID, rnkID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def detach_rnk_col(colID, rnkID):
"""detach rank method from collection
rnkID - id from rnkMETHOD table
colID - id of collection, as in collection table """
try:
res = run_sql("DELETE FROM collection_rnkMETHOD WHERE id_collection=%s AND id_rnkMETHOD=%s", (colID, rnkID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def switch_col_treescore(col_1, col_2):
    """Switch the tree scores of two sibling collections, swapping their display order"""
    try:
res1 = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s and id_son=%s", (col_1[3], col_1[0]))
res2 = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s and id_son=%s", (col_2[3], col_2[0]))
res = run_sql("UPDATE collection_collection SET score=%s WHERE id_dad=%s and id_son=%s", (res2[0][0], col_1[3], col_1[0]))
res = run_sql("UPDATE collection_collection SET score=%s WHERE id_dad=%s and id_son=%s", (res1[0][0], col_2[3], col_2[0]))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def move_col_tree(col_from, col_to, move_to_rtype=''):
"""Move a collection from one point in the tree to another. becomes a son of the endpoint.
col_from - move this collection from current point
col_to - and set it as a son of this collection.
move_to_rtype - either virtual or regular collection"""
try:
res = run_sql("SELECT score FROM collection_collection WHERE id_dad=%s ORDER BY score asc", (col_to[0], ))
highscore = 0
for score in res:
if int(score[0]) > highscore:
highscore = int(score[0])
highscore += 1
if not move_to_rtype:
move_to_rtype = col_from[4]
res = run_sql("DELETE FROM collection_collection WHERE id_son=%s and id_dad=%s", (col_from[0], col_from[3]))
res = run_sql("INSERT INTO collection_collection(id_dad,id_son,score,type) values(%s,%s,%s,%s)", (col_to[0], col_from[0], highscore, move_to_rtype))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
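# Example (hedged): re-parent the node found at position 2 of a tree walk
# under the tree's root, keeping its original relationship type:
#
#     tree = get_col_tree(1)
#     move_col_tree(tree[2], tree[0])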
def remove_pbx(colID, pbxID, ln):
"""Removes a portalbox from the collection given.
colID - the collection the format is connected to
pbxID - the portalbox which should be removed from the collection.
ln - the language of the portalbox to be removed"""
try:
res = run_sql("DELETE FROM collection_portalbox WHERE id_collection=%s AND id_portalbox=%s AND ln=%s", (colID, pbxID, ln))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def remove_fmt(colID, fmtID):
"""Removes a format from the collection given.
colID - the collection the format is connected to
fmtID - the format which should be removed from the collection."""
try:
res = run_sql("DELETE FROM collection_format WHERE id_collection=%s AND id_format=%s", (colID, fmtID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def remove_fld(colID, fldID, fldvID=''):
"""Removes a field from the collection given.
colID - the collection the format is connected to
fldID - the field which should be removed from the collection."""
try:
sql = "DELETE FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s"
params = [colID, fldID]
if fldvID:
if fldvID != "None":
sql += " AND id_fieldvalue=%s"
params.append(fldvID)
else:
sql += " AND id_fieldvalue is NULL"
res = run_sql(sql, tuple(params))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_fldv(fldvID):
"""Deletes all data for the given fieldvalue
fldvID - delete all data in the tables associated with fieldvalue and this id"""
try:
res = run_sql("DELETE FROM collection_field_fieldvalue WHERE id_fieldvalue=%s", (fldvID, ))
res = run_sql("DELETE FROM fieldvalue WHERE id=%s", (fldvID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_pbx(pbxID):
"""Deletes all data for the given portalbox
pbxID - delete all data in the tables associated with portalbox and this id """
try:
res = run_sql("DELETE FROM collection_portalbox WHERE id_portalbox=%s", (pbxID, ))
res = run_sql("DELETE FROM portalbox WHERE id=%s", (pbxID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_fmt(fmtID):
"""Deletes all data for the given format
fmtID - delete all data in the tables associated with format and this id """
try:
res = run_sql("DELETE FROM format WHERE id=%s", (fmtID, ))
res = run_sql("DELETE FROM collection_format WHERE id_format=%s", (fmtID, ))
res = run_sql("DELETE FROM formatname WHERE id_format=%s", (fmtID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def delete_col(colID):
"""Deletes all data for the given collection
colID - delete all data in the tables associated with collection and this id """
try:
res = run_sql("DELETE FROM collection WHERE id=%s", (colID, ))
res = run_sql("DELETE FROM collectionname WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_rnkMETHOD WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_collection WHERE id_dad=%s", (colID, ))
res = run_sql("DELETE FROM collection_collection WHERE id_son=%s", (colID, ))
res = run_sql("DELETE FROM collection_portalbox WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_format WHERE id_collection=%s", (colID, ))
res = run_sql("DELETE FROM collection_field_fieldvalue WHERE id_collection=%s", (colID, ))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def add_fmt(code, name, rtype):
"""Add a new output format. Returns the id of the format.
code - the code for the format, max 6 chars.
name - the default name for the default language of the format.
rtype - the default nametype"""
try:
res = run_sql("INSERT INTO format (code, name) values (%s,%s)", (code, name))
fmtID = run_sql("SELECT id FROM format WHERE code=%s", (code,))
res = run_sql("INSERT INTO formatname(id_format, type, ln, value) VALUES (%s,%s,%s,%s)",
(fmtID[0][0], rtype, CFG_SITE_LANG, name))
return (1, fmtID)
except StandardError, e:
register_exception()
return (0, e)
def update_fldv(fldvID, name, value):
"""Modify existing fieldvalue
fldvID - id of fieldvalue to modify
value - the value of the fieldvalue
name - the name of the fieldvalue."""
try:
res = run_sql("UPDATE fieldvalue set name=%s where id=%s", (name, fldvID))
res = run_sql("UPDATE fieldvalue set value=%s where id=%s", (value, fldvID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def add_fldv(name, value):
"""Add a new fieldvalue, returns id of fieldvalue
value - the value of the fieldvalue
name - the name of the fieldvalue."""
try:
res = run_sql("SELECT id FROM fieldvalue WHERE name=%s and value=%s", (name, value))
if not res:
res = run_sql("INSERT INTO fieldvalue (name, value) values (%s,%s)", (name, value))
res = run_sql("SELECT id FROM fieldvalue WHERE name=%s and value=%s", (name, value))
if res:
return (1, res[0][0])
else:
raise StandardError
except StandardError, e:
register_exception()
return (0, e)
def add_pbx(title, body):
    """Add a new portalbox with the given title and body, returns its id"""
    try:
res = run_sql("INSERT INTO portalbox (title, body) values (%s,%s)", (title, body))
res = run_sql("SELECT id FROM portalbox WHERE title=%s AND body=%s", (title, body))
if res:
return (1, res[0][0])
else:
raise StandardError
except StandardError, e:
register_exception()
return (0, e)
def add_col(colNAME, dbquery=None):
"""Adds a new collection to collection table
colNAME - the default name for the collection, saved to collection and collectionname
dbquery - query related to the collection"""
    # Sometimes '' is passed instead of None, so normalize it to None
if not dbquery:
dbquery = None
try:
rtype = get_col_nametypes()[0][0]
colID = run_sql("SELECT id FROM collection WHERE id=1")
if colID:
res = run_sql("INSERT INTO collection (name,dbquery) VALUES (%s,%s)",
(colNAME,dbquery))
else:
res = run_sql("INSERT INTO collection (id,name,dbquery) VALUES (1,%s,%s)",
(colNAME,dbquery))
colID = run_sql("SELECT id FROM collection WHERE name=%s", (colNAME,))
res = run_sql("INSERT INTO collectionname(id_collection, type, ln, value) VALUES (%s,%s,%s,%s)",
(colID[0][0], rtype, CFG_SITE_LANG, colNAME))
if colID:
return (1, colID[0][0])
else:
raise StandardError
except StandardError, e:
register_exception()
return (0, e)
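# Example (hedged; the name and query are made up): create a collection
# backed by a dbquery and attach it under the home collection:
#
#     (ok, new_colID) = add_col("Preprints", "collection:PREPRINT")
#     if ok:
#         add_col_dad_son(1, new_colID, 'r')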
def add_col_pbx(colID, pbxID, ln, position, score=''):
"""add a portalbox to the collection.
colID - the id of the collection involved
pbxID - the portalbox to add
ln - which language the portalbox is for
score - decides which portalbox is the most important
position - position on page the portalbox should appear."""
try:
if score:
res = run_sql("INSERT INTO collection_portalbox(id_portalbox, id_collection, ln, score, position) values (%s,%s,'%s',%s,%s)", (pbxID, colID, ln, score, position))
else:
res = run_sql("SELECT score FROM collection_portalbox WHERE id_collection=%s and ln=%s and position=%s ORDER BY score desc, ln, position", (colID, ln, position))
if res:
score = int(res[0][0])
else:
score = 0
res = run_sql("INSERT INTO collection_portalbox(id_portalbox, id_collection, ln, score, position) values (%s,%s,%s,%s,%s)", (pbxID, colID, ln, (score + 1), position))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def add_col_fmt(colID, fmtID, score=''):
"""Add a output format to the collection.
colID - the id of the collection involved
fmtID - the id of the format.
score - the score of the format, decides sorting, if not given, place the format on top"""
try:
if score:
res = run_sql("INSERT INTO collection_format(id_format, id_collection, score) values (%s,%s,%s)", (fmtID, colID, score))
else:
res = run_sql("SELECT score FROM collection_format WHERE id_collection=%s ORDER BY score desc", (colID, ))
if res:
score = int(res[0][0])
else:
score = 0
res = run_sql("INSERT INTO collection_format(id_format, id_collection, score) values (%s,%s,%s)", (fmtID, colID, (score + 1)))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def add_col_fld(colID, fldID, type, fldvID=''):
"""Add a sort/search/field to the collection.
colID - the id of the collection involved
fldID - the id of the field.
fldvID - the id of the fieldvalue.
type - which type, seo, sew...
score - the score of the format, decides sorting, if not given, place the format on top"""
try:
if fldvID and fldvID not in [-1, "-1"]:
run_sql("DELETE FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s and type=%s and id_fieldvalue is NULL", (colID, fldID, type))
res = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s and type=%s ORDER BY score desc", (colID, fldID, type))
if res:
score = int(res[0][0])
res = run_sql("SELECT score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s AND id_field=%s and type=%s ORDER BY score_fieldvalue desc", (colID, fldID, type))
else:
res = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s and type=%s ORDER BY score desc", (colID, type))
if res:
score = int(res[0][0]) + 1
else:
score = 1
res = run_sql("SELECT id_collection,id_field,id_fieldvalue,type,score,score_fieldvalue FROM collection_field_fieldvalue where id_field=%s and id_collection=%s and type=%s and id_fieldvalue=%s", (fldID, colID, type, fldvID))
if not res:
run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=score_fieldvalue+1 WHERE id_field=%s AND id_collection=%s and type=%s", (fldID, colID, type))
res = run_sql("INSERT INTO collection_field_fieldvalue(id_field, id_fieldvalue, id_collection, type, score, score_fieldvalue) values (%s,%s,%s,%s,%s,%s)", (fldID, fldvID, colID, type, score, 1))
else:
return (0, (1, "Already exists"))
else:
res = run_sql("SELECT id_collection,id_field,id_fieldvalue,type,score,score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s AND type=%s and id_field=%s and id_fieldvalue is NULL", (colID, type, fldID))
if res:
return (0, (1, "Already exists"))
else:
run_sql("UPDATE collection_field_fieldvalue SET score=score+1")
res = run_sql("INSERT INTO collection_field_fieldvalue(id_field, id_collection, type, score,score_fieldvalue) values (%s,%s,%s,%s, 0)", (fldID, colID, type, 1))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def modify_dbquery(colID, dbquery=None):
"""Modify the dbquery of an collection.
colID - the id of the collection involved
dbquery - the new dbquery"""
    # Sometimes '' is passed instead of None, so normalize it to None
if not dbquery:
dbquery = None
try:
res = run_sql("UPDATE collection SET dbquery=%s WHERE id=%s", (dbquery, colID))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def modify_pbx(colID, pbxID, sel_ln, score='', position='', title='', body=''):
"""Modify a portalbox
colID - the id of the collection involved
pbxID - the id of the portalbox that should be modified
sel_ln - the language of the portalbox that should be modified
title - the title
body - the content
score - if several portalboxes in one position, who should appear on top.
position - position on page"""
try:
if title:
res = run_sql("UPDATE portalbox SET title=%s WHERE id=%s", (title, pbxID))
if body:
res = run_sql("UPDATE portalbox SET body=%s WHERE id=%s", (body, pbxID))
if score:
res = run_sql("UPDATE collection_portalbox SET score=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (score, colID, pbxID, sel_ln))
if position:
res = run_sql("UPDATE collection_portalbox SET position=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (position, colID, pbxID, sel_ln))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
def switch_fld_score(colID, id_1, id_2):
"""Switch the scores of id_1 and id_2 in collection_field_fieldvalue
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s", (colID, id_1))
res2 = run_sql("SELECT score FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s", (colID, id_2))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
else:
res = run_sql("UPDATE collection_field_fieldvalue SET score=%s WHERE id_collection=%s and id_field=%s", (res2[0][0], colID, id_1))
res = run_sql("UPDATE collection_field_fieldvalue SET score=%s WHERE id_collection=%s and id_field=%s", (res1[0][0], colID, id_2))
return (1, "")
except StandardError, e:
register_exception()
return (0, e)
def switch_fld_value_score(colID, id_1, fldvID_1, fldvID_2):
"""Switch the scores of two field_value
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (colID, id_1, fldvID_1))
res2 = run_sql("SELECT score_fieldvalue FROM collection_field_fieldvalue WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (colID, id_1, fldvID_2))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
else:
res = run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=%s WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (res2[0][0], colID, id_1, fldvID_1))
res = run_sql("UPDATE collection_field_fieldvalue SET score_fieldvalue=%s WHERE id_collection=%s and id_field=%s and id_fieldvalue=%s", (res1[0][0], colID, id_1, fldvID_2))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
def switch_pbx_score(colID, id_1, id_2, sel_ln):
"""Switch the scores of id_1 and id_2 in the table given by the argument.
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score FROM collection_portalbox WHERE id_collection=%s and id_portalbox=%s and ln=%s", (colID, id_1, sel_ln))
res2 = run_sql("SELECT score FROM collection_portalbox WHERE id_collection=%s and id_portalbox=%s and ln=%s", (colID, id_2, sel_ln))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
res = run_sql("UPDATE collection_portalbox SET score=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (res2[0][0], colID, id_1, sel_ln))
res = run_sql("UPDATE collection_portalbox SET score=%s WHERE id_collection=%s and id_portalbox=%s and ln=%s", (res1[0][0], colID, id_2, sel_ln))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
def switch_score(colID, id_1, id_2, table):
"""Switch the scores of id_1 and id_2 in the table given by the argument.
colID - collection the id_1 or id_2 is connected to
id_1/id_2 - id field from tables like format..portalbox...
table - name of the table"""
try:
res1 = run_sql("SELECT score FROM collection_%s WHERE id_collection=%%s and id_%s=%%s" % (table, table), (colID, id_1))
res2 = run_sql("SELECT score FROM collection_%s WHERE id_collection=%%s and id_%s=%%s" % (table, table), (colID, id_2))
if res1[0][0] == res2[0][0]:
return (0, (1, "Cannot rearrange the selected fields, either rearrange by name or use the mySQL client to fix the problem."))
res = run_sql("UPDATE collection_%s SET score=%%s WHERE id_collection=%%s and id_%s=%%s" % (table, table), (res2[0][0], colID, id_1))
res = run_sql("UPDATE collection_%s SET score=%%s WHERE id_collection=%%s and id_%s=%%s" % (table, table), (res1[0][0], colID, id_2))
return (1, "")
except Exception, e:
register_exception()
return (0, e)
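# Note on the query construction above: table names cannot be bound as query
# parameters, so they are interpolated with %s while the real bind
# placeholders are escaped as %%s and filled in by run_sql.  Example call
# (hedged; ids are hypothetical) for the collection_format table:
#
#     switch_score(colID, fmt_id_1, fmt_id_2, "format")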
def get_detailed_page_tabs(colID=None, recID=None, ln=CFG_SITE_LANG):
"""
Returns the complete list of tabs to be displayed in the
detailed record pages.
    The returned structure is a dict with
    - key : last component of the url that leads to the detailed record tab: http://www.../CFG_SITE_RECORD/74/key
- values: a dictionary with the following keys:
- label: *string* label to be printed as tab (Not localized here)
- visible: *boolean* if False, tab should not be shown
           - enabled: *boolean* if False, tab should be displayed but disabled
- order: *int* position of the tab in the list of tabs
- ln: language of the tab labels
returns dict
"""
_ = gettext_set_language(ln)
tabs = {'metadata' : {'label': _('Information'), 'visible': False, 'enabled': True, 'order': 1},
'references': {'label': _('References'), 'visible': False, 'enabled': True, 'order': 2},
'citations' : {'label': _('Citations'), 'visible': False, 'enabled': True, 'order': 3},
'keywords' : {'label': _('Keywords'), 'visible': False, 'enabled': True, 'order': 4},
'comments' : {'label': _('Discussion'), 'visible': False, 'enabled': True, 'order': 5},
'usage' : {'label': _('Usage statistics'), 'visible': False, 'enabled': True, 'order': 6},
'files' : {'label': _('Files'), 'visible': False, 'enabled': True, 'order': 7},
'plots' : {'label': _('Plots'), 'visible': False, 'enabled': True, 'order': 8},
'holdings' : {'label': _('Holdings'), 'visible': False, 'enabled': True, 'order': 9},
'linkbacks' : {'label': _('Linkbacks'), 'visible': False, 'enabled': True, 'order': 10},
}
res = run_sql("SELECT tabs FROM collectiondetailedrecordpagetabs " + \
"WHERE id_collection=%s", (colID, ))
if len(res) > 0:
tabs_state = res[0][0].split(';')
for tab_state in tabs_state:
if tabs.has_key(tab_state):
                tabs[tab_state]['visible'] = True
else:
# no preference set for this collection.
# assume all tabs are displayed
for key in tabs.keys():
tabs[key]['visible'] = True
if not CFG_WEBCOMMENT_ALLOW_COMMENTS and \
not CFG_WEBCOMMENT_ALLOW_REVIEWS:
tabs['comments']['visible'] = False
tabs['comments']['enabled'] = False
if recID is not None:
# Disable references if no references found
#bfo = BibFormatObject(recID)
#if bfe_references.format_element(bfo, '', '') == '':
# tabs['references']['enabled'] = False
## FIXME: the above was commented out because bfe_references
## may be too slow. And we do not really need this anyway
## because we can disable tabs in WebSearch Admin on a
## collection-by-collection basis. If we need this, then we
## should probably call bfo.fields('999') here that should be
## much faster than calling bfe_references.
        # Disable citations if no citations found
#if len(get_cited_by(recID)) == 0:
# tabs['citations']['enabled'] = False
## FIXME: the above was commented out because get_cited_by()
## may be too slow. And we do not really need this anyway
## because we can disable tags in WebSearch Admin on a
## collection-by-collection basis.
# Disable Files tab if no file found except for Plots:
disable_files_tab_p = True
for abibdoc in BibRecDocs(recID).list_bibdocs():
abibdoc_type = abibdoc.get_type()
if abibdoc_type == 'Plot':
continue # ignore attached plots
else:
                if CFG_INSPIRE_SITE and abibdoc_type not in \
                   ('', 'INSPIRE-PUBLIC', 'Supplementary Material'):
                    # ignore non-empty, non-INSPIRE-PUBLIC, non-suppl doctypes for INSPIRE
                    continue
# okay, we found at least one non-Plot file:
disable_files_tab_p = False
break
if disable_files_tab_p:
tabs['files']['enabled'] = False
#Disable holdings tab if collection != Books
collection = run_sql("""select name from collection where id=%s""", (colID, ))
if collection[0][0] != 'Books':
tabs['holdings']['enabled'] = False
# Disable Plots tab if no docfile of doctype Plot found
brd = BibRecDocs(recID)
if len(brd.list_bibdocs('Plot')) == 0:
tabs['plots']['enabled'] = False
if CFG_CERN_SITE:
from invenio.search_engine import get_collection_reclist
if recID in get_collection_reclist("Books & Proceedings"):
tabs['holdings']['visible'] = True
tabs['holdings']['enabled'] = True
tabs[''] = tabs['metadata']
del tabs['metadata']
return tabs
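# Example (hedged) of consuming the tab dict returned above, e.g. when
# rendering navigation; note that the 'metadata' tab is re-keyed under the
# empty string:
#
#     tabs = get_detailed_page_tabs(colID=1, recID=42, ln='en')
#     shown = [key for (key, tab) in tabs.items()
#              if tab['visible'] and tab['enabled']]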
def get_detailed_page_tabs_counts(recID):
"""
Returns the number of citations, references and comments/reviews
that have to be shown on the corresponding tabs in the
detailed record pages
@param recID: record id
@return: dictionary with following keys
'Citations': number of citations to be shown in the "Citations" tab
'References': number of references to be shown in the "References" tab
'Discussions': number of comments and reviews to be shown in the "Discussion" tab
"""
    num_comments = 0  # number of comments
    num_reviews = 0  # number of reviews
tabs_counts = {'Citations' : 0,
'References' : -1,
'Discussions' : 0
}
from invenio.search_engine import get_field_tags, get_record
if CFG_BIBRANK_SHOW_CITATION_LINKS:
tabs_counts['Citations'] = get_cited_by_count(recID)
    if not CFG_CERN_SITE:  # FIXME: should be replaced by something like CFG_SHOW_REFERENCES
reftag = ""
reftags = get_field_tags("reference")
if reftags:
reftag = reftags[0]
tmprec = get_record(recID)
if reftag and len(reftag) > 4:
tabs_counts['References'] = len(record_get_field_instances(tmprec, reftag[0:3], reftag[3], reftag[4]))
# obtain number of comments/reviews
from invenio.webcommentadminlib import get_nb_reviews, get_nb_comments
if CFG_WEBCOMMENT_ALLOW_COMMENTS and CFG_WEBSEARCH_SHOW_COMMENT_COUNT:
num_comments = get_nb_comments(recID, count_deleted=False)
if CFG_WEBCOMMENT_ALLOW_REVIEWS and CFG_WEBSEARCH_SHOW_REVIEW_COUNT:
num_reviews = get_nb_reviews(recID, count_deleted=False)
if num_comments or num_reviews:
tabs_counts['Discussions'] = num_comments + num_reviews
return tabs_counts
| gpl-2.0 |
waytai/django | docs/_ext/djangodocs.py | 321 | 12049 | """
Sphinx plugins for Django documentation.
"""
import json
import os
import re
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx import __version__ as sphinx_ver, addnodes
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.util.compat import Directive
from sphinx.util.console import bold
from sphinx.util.nodes import set_source_info
from sphinx.writers.html import SmartyPantsHTMLTranslator
# RE for option descriptions without a '--' prefix
simple_option_desc_re = re.compile(
r'([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)')
def setup(app):
app.add_crossref_type(
directivename="setting",
rolename="setting",
indextemplate="pair: %s; setting",
)
app.add_crossref_type(
directivename="templatetag",
rolename="ttag",
indextemplate="pair: %s; template tag"
)
app.add_crossref_type(
directivename="templatefilter",
rolename="tfilter",
indextemplate="pair: %s; template filter"
)
app.add_crossref_type(
directivename="fieldlookup",
rolename="lookup",
indextemplate="pair: %s; field lookup type",
)
app.add_description_unit(
directivename="django-admin",
rolename="djadmin",
indextemplate="pair: %s; django-admin command",
parse_node=parse_django_admin_node,
)
app.add_description_unit(
directivename="django-admin-option",
rolename="djadminopt",
indextemplate="pair: %s; django-admin command-line option",
parse_node=parse_django_adminopt_node,
)
app.add_config_value('django_next_version', '0.0', True)
app.add_directive('versionadded', VersionDirective)
app.add_directive('versionchanged', VersionDirective)
app.add_builder(DjangoStandaloneHTMLBuilder)
# register the snippet directive
app.add_directive('snippet', SnippetWithFilename)
# register a node for snippet directive so that the xml parser
# knows how to handle the enter/exit parsing event
app.add_node(snippet_with_filename,
html=(visit_snippet, depart_snippet_literal),
latex=(visit_snippet_latex, depart_snippet_latex),
man=(visit_snippet_literal, depart_snippet_literal),
text=(visit_snippet_literal, depart_snippet_literal),
texinfo=(visit_snippet_literal, depart_snippet_literal))
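# A hedged sketch of how this extension is typically enabled from a Sphinx
# project's conf.py (the _ext path below is an assumption about the checkout
# layout):
#
#     import os, sys
#     sys.path.append(os.path.abspath(
#         os.path.join(os.path.dirname(__file__), "_ext")))
#     extensions = ["djangodocs"]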
class snippet_with_filename(nodes.literal_block):
"""
Subclass the literal_block to override the visit/depart event handlers
"""
pass
def visit_snippet_literal(self, node):
"""
default literal block handler
"""
self.visit_literal_block(node)
def depart_snippet_literal(self, node):
"""
default literal block handler
"""
self.depart_literal_block(node)
def visit_snippet(self, node):
"""
HTML document generator visit handler
"""
lang = self.highlightlang
linenos = node.rawsource.count('\n') >= self.highlightlinenothreshold - 1
fname = node['filename']
highlight_args = node.get('highlight_args', {})
if 'language' in node:
# code-block directives
lang = node['language']
highlight_args['force'] = True
if 'linenos' in node:
linenos = node['linenos']
def warner(msg):
self.builder.warn(msg, (self.builder.current_docname, node.line))
highlighted = self.highlighter.highlight_block(node.rawsource, lang,
warn=warner,
linenos=linenos,
**highlight_args)
starttag = self.starttag(node, 'div', suffix='',
CLASS='highlight-%s' % lang)
self.body.append(starttag)
self.body.append('<div class="snippet-filename">%s</div>\n''' % (fname,))
self.body.append(highlighted)
self.body.append('</div>\n')
raise nodes.SkipNode
def visit_snippet_latex(self, node):
"""
Latex document generator visit handler
"""
self.verbatim = ''
def depart_snippet_latex(self, node):
"""
Latex document generator depart handler.
"""
code = self.verbatim.rstrip('\n')
lang = self.hlsettingstack[-1][0]
linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
fname = node['filename']
highlight_args = node.get('highlight_args', {})
if 'language' in node:
# code-block directives
lang = node['language']
highlight_args['force'] = True
if 'linenos' in node:
linenos = node['linenos']
def warner(msg):
self.builder.warn(msg, (self.curfilestack[-1], node.line))
hlcode = self.highlighter.highlight_block(code, lang, warn=warner,
linenos=linenos,
**highlight_args)
self.body.append('\n{\\colorbox[rgb]{0.9,0.9,0.9}'
'{\\makebox[\\textwidth][l]'
'{\\small\\texttt{%s}}}}\n' % (fname,))
if self.table:
hlcode = hlcode.replace('\\begin{Verbatim}',
'\\begin{OriginalVerbatim}')
self.table.has_problematic = True
self.table.has_verbatim = True
hlcode = hlcode.rstrip()[:-14] # strip \end{Verbatim}
hlcode = hlcode.rstrip() + '\n'
self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' %
(self.table and 'Original' or ''))
self.verbatim = None
class SnippetWithFilename(Directive):
"""
    The 'snippet' directive that allows adding the (optional) filename
    of a code snippet to the document. This is modeled after CodeBlock.
"""
has_content = True
optional_arguments = 1
option_spec = {'filename': directives.unchanged_required}
def run(self):
code = '\n'.join(self.content)
literal = snippet_with_filename(code, code)
if self.arguments:
literal['language'] = self.arguments[0]
literal['filename'] = self.options['filename']
set_source_info(self, literal)
return [literal]
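# Example (hedged) reST usage of the directive defined above; the filename
# shown is made up:
#
#     .. snippet:: python
#        :filename: myapp/views.py
#
#        def index(request):
#            ...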
class VersionDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 1
final_argument_whitespace = True
option_spec = {}
def run(self):
if len(self.arguments) > 1:
msg = """Only one argument accepted for directive '{directive_name}::'.
Comments should be provided as content,
not as an extra argument.""".format(directive_name=self.name)
raise self.error(msg)
env = self.state.document.settings.env
ret = []
node = addnodes.versionmodified()
ret.append(node)
if self.arguments[0] == env.config.django_next_version:
node['version'] = "Development version"
else:
node['version'] = self.arguments[0]
node['type'] = self.name
if self.content:
self.state.nested_parse(self.content, self.content_offset, node)
env.note_versionchange(node['type'], node['version'], node, self.lineno)
return ret
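# Example (hedged) reST usage of the directives registered above.  The
# comment goes in the directive content, not as a second argument; if the
# version equals django_next_version from conf.py, the label is rendered as
# "Development version":
#
#     .. versionadded:: 1.9
#
#         The ``timeout`` argument was added.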
class DjangoHTMLTranslator(SmartyPantsHTMLTranslator):
"""
Django-specific reST to HTML tweaks.
"""
# Don't use border=1, which docutils does by default.
def visit_table(self, node):
self.context.append(self.compact_p)
self.compact_p = True
self._table_row_index = 0 # Needed by Sphinx
self.body.append(self.starttag(node, 'table', CLASS='docutils'))
def depart_table(self, node):
self.compact_p = self.context.pop()
self.body.append('</table>\n')
def visit_desc_parameterlist(self, node):
self.body.append('(') # by default sphinx puts <big> around the "("
self.first_param = 1
self.optional_param_level = 0
self.param_separator = node.child_text_separator
self.required_params_left = sum([isinstance(c, addnodes.desc_parameter)
for c in node.children])
def depart_desc_parameterlist(self, node):
self.body.append(')')
if sphinx_ver < '1.0.8':
#
# Don't apply smartypants to literal blocks
#
def visit_literal_block(self, node):
self.no_smarty += 1
SmartyPantsHTMLTranslator.visit_literal_block(self, node)
def depart_literal_block(self, node):
SmartyPantsHTMLTranslator.depart_literal_block(self, node)
self.no_smarty -= 1
#
# Turn the "new in version" stuff (versionadded/versionchanged) into a
# better callout -- the Sphinx default is just a little span,
    # which is a bit less obvious than I'd like.
#
# FIXME: these messages are all hardcoded in English. We need to change
# that to accommodate other language docs, but I can't work out how to make
# that work.
#
version_text = {
'versionchanged': 'Changed in Django %s',
'versionadded': 'New in Django %s',
}
def visit_versionmodified(self, node):
self.body.append(
self.starttag(node, 'div', CLASS=node['type'])
)
version_text = self.version_text.get(node['type'])
if version_text:
title = "%s%s" % (
version_text % node['version'],
":" if len(node) else "."
)
self.body.append('<span class="title">%s</span> ' % title)
def depart_versionmodified(self, node):
self.body.append("</div>\n")
# Give each section a unique ID -- nice for custom CSS hooks
def visit_section(self, node):
old_ids = node.get('ids', [])
node['ids'] = ['s-' + i for i in old_ids]
node['ids'].extend(old_ids)
SmartyPantsHTMLTranslator.visit_section(self, node)
node['ids'] = old_ids
def parse_django_admin_node(env, sig, signode):
command = sig.split(' ')[0]
env._django_curr_admin_command = command
title = "django-admin %s" % sig
signode += addnodes.desc_name(title, title)
return sig
def parse_django_adminopt_node(env, sig, signode):
"""A copy of sphinx.directives.CmdoptionDesc.parse_signature()"""
from sphinx.domains.std import option_desc_re
count = 0
firstname = ''
for m in option_desc_re.finditer(sig):
optname, args = m.groups()
if count:
signode += addnodes.desc_addname(', ', ', ')
signode += addnodes.desc_name(optname, optname)
signode += addnodes.desc_addname(args, args)
if not count:
firstname = optname
count += 1
if not count:
for m in simple_option_desc_re.finditer(sig):
optname, args = m.groups()
if count:
signode += addnodes.desc_addname(', ', ', ')
signode += addnodes.desc_name(optname, optname)
signode += addnodes.desc_addname(args, args)
if not count:
firstname = optname
count += 1
if not firstname:
raise ValueError
return firstname
class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder):
"""
Subclass to add some extra things we need.
"""
name = 'djangohtml'
def finish(self):
super(DjangoStandaloneHTMLBuilder, self).finish()
self.info(bold("writing templatebuiltins.js..."))
xrefs = self.env.domaindata["std"]["objects"]
templatebuiltins = {
"ttags": [n for ((t, n), (l, a)) in xrefs.items()
if t == "templatetag" and l == "ref/templates/builtins"],
"tfilters": [n for ((t, n), (l, a)) in xrefs.items()
if t == "templatefilter" and l == "ref/templates/builtins"],
}
outfilename = os.path.join(self.outdir, "templatebuiltins.js")
with open(outfilename, 'w') as fp:
fp.write('var django_template_builtins = ')
json.dump(templatebuiltins, fp)
fp.write(';\n')
| bsd-3-clause |
ondra-novak/chromium.src | tools/memory_inspector/classification_rules/default/mmap-android.py | 54 | 1887 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is a generic rule-tree for classifying memory maps on Android. It is a
# simple hierarchical python data structure (list of dictionaries). Some rules:
# - Order matters: what is caught by a node is not caught by its siblings.
# - Hierarchy matters: what is caught by a node is propagated to its children
# (if any). Only one of its children, though, will get the data.
# - Non leaf nodes have an extra implicit child called {node-name}-other: if
#   something is caught by a non leaf node, but none of its children, it is
#   appended to the implicit {node-name}-other catch-all child.
#
# See memory_inspector/classification/mmap_classifier.py for more docs.
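# Worked example (hedged): an ashmem mapping named "/dev/ashmem/dalvik-heap"
# is classified as Ashmem -> Dalvik -> "Java Heap"; an executable ashmem
# Dalvik mapping without that name falls through to its 'JIT' sibling; any
# other Dalvik mapping lands in the implicit 'Dalvik-other' bucket.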
[
{
'name': 'Anon',
'mmap_file': r'(^$)|(^\[)',
'children': [
{
'name': 'stack',
'mmap_file': r'\[stack',
},
{
'name': 'libc malloc',
'mmap_file': 'libc_malloc',
},
{
'name': 'JIT',
'mmap_prot': 'r.x',
},
],
},
{
'name': 'Ashmem',
'mmap_file': r'^/dev/ashmem',
'children': [
{
'name': 'Dalvik',
'mmap_file': r'^/dev/ashmem/dalvik',
'children': [
{
'name': 'Java Heap',
'mmap_file': r'dalvik-heap',
},
{
'name': 'JIT',
'mmap_prot': 'r.x',
},
],
},
],
},
{
'name': 'Libs',
'mmap_file': r'(\.so)|(\.apk)|(\.jar)',
'children': [
{
'name': 'Native',
'mmap_file': r'\.so',
},
{
'name': 'APKs',
'mmap_file': r'\.apk',
},
{
'name': 'JARs',
'mmap_file': r'\.jar',
},
],
},
{
'name': 'Devices',
'mmap_file': r'^/dev/',
'children': [
{
'name': 'GPU',
'mmap_file': r'(nv)|(mali)',
},
],
},
]
| bsd-3-clause |
jspargo/AneMo | thermo/flask/lib/python2.7/site-packages/setuptools/site-patch.py | 720 | 2389 | def __boot():
import sys
import os
PYTHONPATH = os.environ.get('PYTHONPATH')
if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
PYTHONPATH = []
else:
PYTHONPATH = PYTHONPATH.split(os.pathsep)
pic = getattr(sys,'path_importer_cache',{})
stdpath = sys.path[len(PYTHONPATH):]
mydir = os.path.dirname(__file__)
#print "searching",stdpath,sys.path
for item in stdpath:
if item==mydir or not item:
            continue  # skip the current dir (empty entry on Windows) or my own directory
importer = pic.get(item)
if importer is not None:
loader = importer.find_module('site')
if loader is not None:
# This should actually reload the current module
loader.load_module('site')
break
else:
try:
import imp # Avoid import loop in Python >= 3.3
stream, path, descr = imp.find_module('site',[item])
except ImportError:
continue
if stream is None:
continue
try:
# This should actually reload the current module
imp.load_module('site',stream,path,descr)
finally:
stream.close()
break
else:
raise ImportError("Couldn't find the real 'site' module")
#print "loaded", __file__
known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
oldpos = getattr(sys,'__egginsert',0) # save old insertion position
sys.__egginsert = 0 # and reset the current one
for item in PYTHONPATH:
addsitedir(item)
sys.__egginsert += oldpos # restore effective old position
d, nd = makepath(stdpath[0])
insert_at = None
new_path = []
for item in sys.path:
p, np = makepath(item)
if np==nd and insert_at is None:
# We've hit the first 'system' path entry, so added entries go here
insert_at = len(new_path)
if np in known_paths or insert_at is None:
new_path.append(item)
else:
# new path after the insert point, back-insert it
new_path.insert(insert_at, item)
insert_at += 1
sys.path[:] = new_path
if __name__=='site':
__boot()
del __boot
| gpl-2.0 |
indico/indico | indico/modules/auth/__init__.py | 4 | 6003 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import redirect, request, session
from flask_multipass import MultipassException
from werkzeug.exceptions import Forbidden
from indico.core import signals
from indico.core.auth import multipass
from indico.core.config import config
from indico.core.db import db
from indico.core.errors import NoReportError
from indico.core.logger import Logger
from indico.modules.auth.models.identities import Identity
from indico.modules.auth.models.registration_requests import RegistrationRequest
from indico.modules.auth.util import save_identity_info
from indico.modules.users import User
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
logger = Logger.get('auth')
@multipass.identity_handler
def process_identity(identity_info):
logger.info('Received identity info: %s', identity_info)
identity = Identity.query.filter_by(provider=identity_info.provider.name,
identifier=identity_info.identifier).first()
if identity is None:
logger.info('Identity does not exist in the database yet')
user = None
emails = {email.lower() for email in identity_info.data.getlist('email') if email}
if emails:
identity_info.data.setlist('email', emails)
users = User.query.filter(~User.is_deleted, User.all_emails.in_(list(emails))).all()
if len(users) == 1:
user = users[0]
elif len(users) > 1:
# TODO: handle this case somehow.. let the user select which user to log in to?
raise NotImplementedError('Multiple emails matching multiple users')
save_identity_info(identity_info, user if user and not user.is_pending else None)
if not user or user.is_pending:
if user and user.is_pending:
logger.info('Found pending user with matching email: %s', user)
else:
logger.info('Email search did not find an existing user')
return redirect(url_for('auth.register', provider=identity_info.provider.name))
else:
logger.info('Found user with matching email: %s', user)
return redirect(url_for('auth.link_account', provider=identity_info.provider.name))
elif identity.user.is_deleted:
raise MultipassException(_('Your Indico profile has been deleted.'))
else:
user = identity.user
if user.is_pending:
# This should never happen!
raise ValueError('Got identity for pending user')
logger.info('Found existing identity %s for user %s', identity, user)
# Update the identity with the latest information
if identity.multipass_data != identity_info.multipass_data:
logger.info('Updated multipass data of identity %s for user %s', identity, user)
identity.multipass_data = identity_info.multipass_data
if identity.data != identity_info.data:
logger.info('Updated data of identity %s for user %s', identity, user)
identity.data = identity_info.data
if user.is_blocked:
raise MultipassException(_('Your Indico profile has been blocked.'))
login_user(user, identity)
def login_user(user, identity=None, admin_impersonation=False):
"""Set the session user and performs on-login logic.
When specifying `identity`, the provider/identitifer information
is saved in the session so the identity management page can prevent
the user from removing the identity he used to login.
:param user: The :class:`~indico.modules.users.User` to log in to.
:param identity: The :class:`Identity` instance used to log in.
:param admin_impersonation: Whether the login is an admin
impersonating the user and thus should not
be considered a login by the user.
"""
if user.settings.get('force_timezone'):
session.timezone = user.settings.get('timezone', config.DEFAULT_TIMEZONE)
else:
session.timezone = 'LOCAL'
session.set_session_user(user)
session.lang = user.settings.get('lang')
if not admin_impersonation:
if identity:
identity.register_login(request.remote_addr)
session['login_identity'] = identity.id
else:
session.pop('login_identity', None)
user.synchronize_data()
signals.users.logged_in.send(user, identity=identity, admin_impersonation=admin_impersonation)
@signals.menu.items.connect_via('user-profile-sidemenu')
def _extend_profile_sidemenu(sender, user, **kwargs):
yield SideMenuItem('accounts', _('Accounts'), url_for('auth.accounts'), 50, disabled=user.is_system)
@signals.users.registered.connect
def _delete_requests(user, **kwargs):
for req in RegistrationRequest.query.filter(RegistrationRequest.email.in_(user.all_emails)):
logger.info('Deleting registration request %r due to registration of %r', req, user)
db.session.delete(req)
db.session.flush()
@signals.app_created.connect
def _handle_insecure_password_logins(app, **kwargs):
@app.before_request
def _redirect_if_insecure():
if not request.endpoint:
return
if (
request.blueprint == 'assets' or
request.endpoint.endswith('.static') or
request.endpoint in ('auth.logout', 'auth.accounts', 'core.contact', 'core.change_lang')
):
return
if 'insecure_password_error' not in session:
return
if request.method != 'GET':
raise NoReportError.wrap_exc(Forbidden(_('You need to change your password')))
if request.is_xhr or request.is_json:
return
return redirect(url_for('auth.accounts'))
| mit |
aracnoz/xbmc | addons/service.xbmc.versioncheck/lib/viewer.py | 82 | 3086 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011-2013 Martijn Kaijser
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#import modules
import os
import sys
import xbmc
import xbmcgui
import xbmcaddon
### get addon info
ADDON = xbmcaddon.Addon('service.xbmc.versioncheck')
ADDONVERSION = ADDON.getAddonInfo('version')
ADDONNAME = ADDON.getAddonInfo('name')
ADDONPATH = ADDON.getAddonInfo('path').decode('utf-8')
ADDONPROFILE = xbmc.translatePath( ADDON.getAddonInfo('profile') ).decode('utf-8')
ICON = ADDON.getAddonInfo('icon')
class Viewer:
# constants
WINDOW = 10147
CONTROL_LABEL = 1
CONTROL_TEXTBOX = 5
def __init__(self, *args, **kwargs):
# activate the text viewer window
xbmc.executebuiltin("ActivateWindow(%d)" % (self.WINDOW,))
# get window
self.window = xbmcgui.Window(self.WINDOW)
# give window time to initialize
xbmc.sleep(100)
# set controls
self.setControls()
def setControls(self):
#get header, text
heading, text = self.getText()
# set heading
self.window.getControl(self.CONTROL_LABEL).setLabel("%s : %s" % (ADDONNAME, heading, ))
# set text
self.window.getControl(self.CONTROL_TEXTBOX).setText(text)
xbmc.sleep(2000)
def getText(self):
try:
if sys.argv[ 1 ] == "gotham-alpha_notice":
return "Call to Gotham alpha users", self.readFile(os.path.join(ADDONPATH , "resources/gotham-alpha_notice.txt"))
except Exception, e:
xbmc.log(ADDONNAME + ': ' + str(e), xbmc.LOGERROR)
return "", ""
def readFile(self, filename):
return open(filename).read()
class WebBrowser:
""" Display url using the default browser. """
def __init__(self, *args, **kwargs):
try:
url = sys.argv[2]
# notify user
            xbmc.executebuiltin('Notification(%s,%s)' % (ADDONNAME, url))  # `notification` was undefined in this module; use the builtin directly
xbmc.sleep(100)
# launch url
self.launchUrl(url)
except Exception, e:
xbmc.log(ADDONNAME + ': ' + str(e), xbmc.LOGERROR)
def launchUrl(self, url):
import webbrowser
webbrowser.open(url)
def Main():
try:
if sys.argv[ 1 ] == "webbrowser":
WebBrowser()
else:
Viewer()
except Exception, e:
xbmc.log(ADDONNAME + ': ' + str(e), xbmc.LOGERROR)
if (__name__ == "__main__"):
Main()
| gpl-2.0 |
google/material-design-icons | update/venv/lib/python3.9/site-packages/pip/_internal/cli/main.py | 2 | 2483 | """Primary application entrypoint.
"""
import locale
import logging
import os
import sys
from typing import List, Optional
from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import create_command
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation
logger = logging.getLogger(__name__)
# Do not import and use main() directly! Using it directly is actively
# discouraged by pip's maintainers. The name, location and behavior of
# this function is subject to change, so calling it directly is not
# portable across different pip versions.
# In addition, running pip in-process is unsupported and unsafe. This is
# elaborated in detail at
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
# That document also provides suggestions that should work for nearly
# all users that are considering importing and using main() directly.
# However, we know that certain users will still want to invoke pip
# in-process. If you understand and accept the implications of using pip
# in an unsupported manner, the best approach is to use runpy to avoid
# depending on the exact location of this entry point.
# The following example shows how to use runpy to invoke pip in that
# case:
#
# sys.argv = ["pip", your, args, here]
# runpy.run_module("pip", run_name="__main__")
#
# Note that this will exit the process after running, unlike a direct
# call to main. As it is not safe to do any processing after calling
# main, this should not be an issue in practice.
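# A further sketch (not from pip itself): for most automation, pip's
# documentation recommends a subprocess rather than any in-process call:
#
#     import subprocess, sys
#     subprocess.check_call([sys.executable, "-m", "pip", "install", "requests"])
#
# This keeps pip's process-global state (logging, locale, sys.path
# mutations) out of the calling program.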
def main(args=None):
# type: (Optional[List[str]]) -> int
if args is None:
args = sys.argv[1:]
# Configure our deprecation warnings to be sent through loggers
deprecation.install_warning_logger()
autocomplete()
try:
cmd_name, cmd_args = parse_command(args)
except PipError as exc:
sys.stderr.write(f"ERROR: {exc}")
sys.stderr.write(os.linesep)
sys.exit(1)
# Needed for locale.getpreferredencoding(False) to work
# in pip._internal.utils.encoding.auto_decode
try:
locale.setlocale(locale.LC_ALL, "")
except locale.Error as e:
# setlocale can apparently crash if locale are uninitialized
logger.debug("Ignoring error %s when setting locale", e)
command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
return command.main(cmd_args)
| apache-2.0 |
pgum/emi1 | lib/python3.5/site-packages/pip/_vendor/requests/sessions.py | 149 | 24897 | # -*- coding: utf-8 -*-
"""
requests.session
~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
from collections import Mapping
from datetime import datetime
from .auth import _basic_auth_str
from .compat import cookielib, OrderedDict, urljoin, urlparse
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from .utils import to_key_val_list, default_headers, to_native_string
from .exceptions import (
TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
from .packages.urllib3._collections import RecentlyUsedContainer
from .structures import CaseInsensitiveDict
from .adapters import HTTPAdapter
from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
get_auth_from_url
)
from .status_codes import codes
# formerly defined here, reexposed here for backward compatibility
from .models import REDIRECT_STATI
REDIRECT_CACHE_SIZE = 1000
def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
"""
Determines appropriate setting for a given request, taking into account the
explicit setting on that request, and the setting in the session. If a
setting is a dictionary, they will be merged together using `dict_class`
"""
if session_setting is None:
return request_setting
if request_setting is None:
return session_setting
# Bypass if not a dictionary (e.g. verify)
if not (
isinstance(session_setting, Mapping) and
isinstance(request_setting, Mapping)
):
return request_setting
merged_setting = dict_class(to_key_val_list(session_setting))
merged_setting.update(to_key_val_list(request_setting))
# Remove keys that are set to None. Extract keys first to avoid altering
# the dictionary during iteration.
none_keys = [k for (k, v) in merged_setting.items() if v is None]
for key in none_keys:
del merged_setting[key]
return merged_setting
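# Illustrative example (not part of requests): request values win, and a
# None value removes the session's key entirely:
#
#     merge_setting({'a': 1, 'b': None}, {'b': 2, 'c': 3})
#     # -> {'a': 1, 'c': 3}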
def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
"""
Properly merges both requests and session hooks.
This is necessary because when request_hooks == {'response': []}, the
merge breaks Session hooks entirely.
"""
if session_hooks is None or session_hooks.get('response') == []:
return request_hooks
if request_hooks is None or request_hooks.get('response') == []:
return session_hooks
return merge_setting(request_hooks, session_hooks, dict_class)
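# Illustrative example (not part of requests): an empty 'response' hook
# list on the request falls back to the session's hooks instead of
# clobbering them; `log_response` is a hypothetical hook:
#
#     merge_hooks({'response': []}, {'response': [log_response]})
#     # -> {'response': [log_response]}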
class SessionRedirectMixin(object):
def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses."""
i = 0
hist = [] # keep track of history
while resp.is_redirect:
prepared_request = req.copy()
if i > 0:
# Update history and keep track of redirects.
hist.append(resp)
new_hist = list(hist)
resp.history = new_hist
try:
resp.content # Consume socket so it can be released
except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
resp.raw.read(decode_content=False)
if i >= self.max_redirects:
raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects, response=resp)
# Release the connection back into the pool.
resp.close()
url = resp.headers['location']
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(resp.url)
url = '%s:%s' % (parsed_rurl.scheme, url)
# The scheme should be lower case...
parsed = urlparse(url)
url = parsed.geturl()
# Facilitate relative 'location' headers, as allowed by RFC 7231.
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
# Compliant with RFC3986, we percent encode the url.
if not parsed.netloc:
url = urljoin(resp.url, requote_uri(url))
else:
url = requote_uri(url)
prepared_request.url = to_native_string(url)
# Cache the url, unless it redirects to itself.
if resp.is_permanent_redirect and req.url != prepared_request.url:
self.redirect_cache[req.url] = prepared_request.url
self.rebuild_method(prepared_request, resp)
# https://github.com/kennethreitz/requests/issues/1084
if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
if 'Content-Length' in prepared_request.headers:
del prepared_request.headers['Content-Length']
prepared_request.body = None
headers = prepared_request.headers
try:
del headers['Cookie']
except KeyError:
pass
# Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
# request, use the old one that we haven't yet touched.
extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
prepared_request._cookies.update(self.cookies)
prepared_request.prepare_cookies(prepared_request._cookies)
# Rebuild auth and proxy information.
proxies = self.rebuild_proxies(prepared_request, proxies)
self.rebuild_auth(prepared_request, resp)
# Override the original request.
req = prepared_request
resp = self.send(
req,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies,
allow_redirects=False,
**adapter_kwargs
)
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
i += 1
yield resp
def rebuild_auth(self, prepared_request, response):
"""
When being redirected we may want to strip authentication from the
request to avoid leaking credentials. This method intelligently removes
and reapplies authentication where possible to avoid credential loss.
"""
headers = prepared_request.headers
url = prepared_request.url
if 'Authorization' in headers:
# If we get redirected to a new host, we should strip out any
# authentication headers.
original_parsed = urlparse(response.request.url)
redirect_parsed = urlparse(url)
if (original_parsed.hostname != redirect_parsed.hostname):
del headers['Authorization']
# .netrc might have more auth for us on our new host.
new_auth = get_netrc_auth(url) if self.trust_env else None
if new_auth is not None:
prepared_request.prepare_auth(new_auth)
return
def rebuild_proxies(self, prepared_request, proxies):
"""
This method re-evaluates the proxy configuration by considering the
environment variables. If we are redirected to a URL covered by
NO_PROXY, we strip the proxy configuration. Otherwise, we set missing
proxy keys for this URL (in case they were stripped by a previous
redirect).
This method also replaces the Proxy-Authorization header where
necessary.
"""
headers = prepared_request.headers
url = prepared_request.url
scheme = urlparse(url).scheme
new_proxies = proxies.copy() if proxies is not None else {}
if self.trust_env and not should_bypass_proxies(url):
environ_proxies = get_environ_proxies(url)
proxy = environ_proxies.get(scheme)
if proxy:
new_proxies.setdefault(scheme, environ_proxies[scheme])
if 'Proxy-Authorization' in headers:
del headers['Proxy-Authorization']
try:
username, password = get_auth_from_url(new_proxies[scheme])
except KeyError:
username, password = None, None
if username and password:
headers['Proxy-Authorization'] = _basic_auth_str(username, password)
return new_proxies
def rebuild_method(self, prepared_request, response):
"""When being redirected we may want to change the method of the request
based on certain specs or browser behavior.
"""
method = prepared_request.method
# http://tools.ietf.org/html/rfc7231#section-6.4.4
if response.status_code == codes.see_other and method != 'HEAD':
method = 'GET'
# Do what the browsers do, despite standards...
# First, turn 302s into GETs.
if response.status_code == codes.found and method != 'HEAD':
method = 'GET'
# Second, if a POST is responded to with a 301, turn it into a GET.
# This bizarre behaviour is explained in Issue 1704.
if response.status_code == codes.moved and method == 'POST':
method = 'GET'
prepared_request.method = method
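    # Illustrative outcomes of the rules above (sketch):
    #
    #     303 See Other         + POST -> GET   (RFC 7231)
    #     302 Found             + POST -> GET   (browser behavior)
    #     301 Moved Permanently + POST -> GET   (Issue 1704)
    #     any of the above      + HEAD -> HEAD  (method preserved)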
class Session(SessionRedirectMixin):
"""A Requests session.
Provides cookie persistence, connection-pooling, and configuration.
Basic Usage::
>>> import requests
>>> s = requests.Session()
>>> s.get('http://httpbin.org/get')
<Response [200]>
Or as a context manager::
>>> with requests.Session() as s:
>>> s.get('http://httpbin.org/get')
<Response [200]>
"""
__attrs__ = [
'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
'cert', 'prefetch', 'adapters', 'stream', 'trust_env',
'max_redirects',
]
def __init__(self):
#: A case-insensitive dictionary of headers to be sent on each
#: :class:`Request <Request>` sent from this
#: :class:`Session <Session>`.
self.headers = default_headers()
#: Default Authentication tuple or object to attach to
#: :class:`Request <Request>`.
self.auth = None
#: Dictionary mapping protocol or protocol and host to the URL of the proxy
#: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
#: be used on each :class:`Request <Request>`.
self.proxies = {}
#: Event-handling hooks.
self.hooks = default_hooks()
#: Dictionary of querystring data to attach to each
#: :class:`Request <Request>`. The dictionary values may be lists for
#: representing multivalued query parameters.
self.params = {}
#: Stream response content default.
self.stream = False
#: SSL Verification default.
self.verify = True
#: SSL certificate default.
self.cert = None
#: Maximum number of redirects allowed. If the request exceeds this
#: limit, a :class:`TooManyRedirects` exception is raised.
self.max_redirects = DEFAULT_REDIRECT_LIMIT
#: Trust environment settings for proxy configuration, default
#: authentication and similar.
self.trust_env = True
#: A CookieJar containing all currently outstanding cookies set on this
#: session. By default it is a
#: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
#: may be any other ``cookielib.CookieJar`` compatible object.
self.cookies = cookiejar_from_dict({})
# Default connection adapters.
self.adapters = OrderedDict()
self.mount('https://', HTTPAdapter())
self.mount('http://', HTTPAdapter())
# Only store 1000 redirects to prevent using infinite memory
self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def prepare_request(self, request):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for
transmission and returns it. The :class:`PreparedRequest` has settings
merged from the :class:`Request <Request>` instance and those of the
:class:`Session`.
:param request: :class:`Request` instance to prepare with this
session's settings.
"""
cookies = request.cookies or {}
# Bootstrap CookieJar.
if not isinstance(cookies, cookielib.CookieJar):
cookies = cookiejar_from_dict(cookies)
# Merge with session cookies
merged_cookies = merge_cookies(
merge_cookies(RequestsCookieJar(), self.cookies), cookies)
# Set environment's basic authentication if not explicitly set.
auth = request.auth
if self.trust_env and not auth and not self.auth:
auth = get_netrc_auth(request.url)
p = PreparedRequest()
p.prepare(
method=request.method.upper(),
url=request.url,
files=request.files,
data=request.data,
json=request.json,
headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
params=merge_setting(request.params, self.params),
auth=merge_setting(auth, self.auth),
cookies=merged_cookies,
hooks=merge_hooks(request.hooks, self.hooks),
)
return p
def request(self, method, url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=None,
allow_redirects=True,
proxies=None,
hooks=None,
stream=None,
verify=None,
cert=None,
json=None):
"""Constructs a :class:`Request <Request>`, prepares it and sends it.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query
string for the :class:`Request`.
:param data: (optional) Dictionary, bytes, or file-like object to send
in the body of the :class:`Request`.
:param json: (optional) json to send in the body of the
:class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the
:class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the
:class:`Request`.
:param files: (optional) Dictionary of ``'filename': file-like-objects``
for multipart encoding upload.
:param auth: (optional) Auth tuple or callable to enable
Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Set to True by default.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol or protocol and
hostname to the URL of the proxy.
:param stream: (optional) whether to immediately download the response
content. Defaults to ``False``.
:param verify: (optional) whether the SSL cert will be verified.
A CA_BUNDLE path can also be provided. Defaults to ``True``.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
:rtype: requests.Response
"""
# Create the Request.
req = Request(
method = method.upper(),
url = url,
headers = headers,
files = files,
data = data or {},
json = json,
params = params or {},
auth = auth,
cookies = cookies,
hooks = hooks,
)
prep = self.prepare_request(req)
proxies = proxies or {}
settings = self.merge_environment_settings(
prep.url, proxies, stream, verify, cert
)
# Send the request.
send_kwargs = {
'timeout': timeout,
'allow_redirects': allow_redirects,
}
send_kwargs.update(settings)
resp = self.send(prep, **send_kwargs)
return resp
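    # Illustrative call (sketch; httpbin.org as in the class docstring):
    #
    #     s = Session()
    #     r = s.request('GET', 'http://httpbin.org/get',
    #                   params={'q': 'x'}, timeout=5)
    #     r.status_code  # -> 200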
def get(self, url, **kwargs):
"""Sends a GET request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('GET', url, **kwargs)
def options(self, url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', True)
return self.request('OPTIONS', url, **kwargs)
def head(self, url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
kwargs.setdefault('allow_redirects', False)
return self.request('HEAD', url, **kwargs)
def post(self, url, data=None, json=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('POST', url, data=data, json=json, **kwargs)
def put(self, url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PUT', url, data=data, **kwargs)
def patch(self, url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('PATCH', url, data=data, **kwargs)
def delete(self, url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
return self.request('DELETE', url, **kwargs)
def send(self, request, **kwargs):
"""Send a given PreparedRequest."""
# Set defaults that the hooks can utilize to ensure they always have
# the correct parameters to reproduce the previous request.
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.proxies)
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
if isinstance(request, Request):
raise ValueError('You can only send PreparedRequests.')
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
hooks = request.hooks
# Resolve URL in redirect cache, if available.
if allow_redirects:
checked_urls = set()
while request.url in self.redirect_cache:
checked_urls.add(request.url)
new_url = self.redirect_cache.get(request.url)
if new_url in checked_urls:
break
request.url = new_url
# Get the appropriate adapter to use
adapter = self.get_adapter(url=request.url)
# Start time (approximately) of the request
start = datetime.utcnow()
# Send the request
r = adapter.send(request, **kwargs)
# Total elapsed time of the request (approximately)
r.elapsed = datetime.utcnow() - start
# Response manipulation hooks
r = dispatch_hook('response', hooks, r, **kwargs)
# Persist cookies
if r.history:
# If the hooks create history then we want those cookies too
for resp in r.history:
extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
gen = self.resolve_redirects(r, request, **kwargs)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
# Shuffle things around if there's history.
if history:
# Insert the first (original) request at the start
history.insert(0, r)
# Get the last request made
r = history.pop()
r.history = history
if not stream:
r.content
return r
def merge_environment_settings(self, url, proxies, stream, verify, cert):
"""Check the environment and merge it with some settings."""
# Gather clues from the surrounding environment.
if self.trust_env:
# Set environment's proxies.
env_proxies = get_environ_proxies(url) or {}
for (k, v) in env_proxies.items():
proxies.setdefault(k, v)
# Look for requests environment configuration and be compatible
# with cURL.
if verify is True or verify is None:
verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
os.environ.get('CURL_CA_BUNDLE'))
# Merge all the kwargs.
proxies = merge_setting(proxies, self.proxies)
stream = merge_setting(stream, self.stream)
verify = merge_setting(verify, self.verify)
cert = merge_setting(cert, self.cert)
return {'verify': verify, 'proxies': proxies, 'stream': stream,
'cert': cert}
def get_adapter(self, url):
"""Returns the appropriate connection adapter for the given URL."""
for (prefix, adapter) in self.adapters.items():
if url.lower().startswith(prefix):
return adapter
# Nothing matches :-/
raise InvalidSchema("No connection adapters were found for '%s'" % url)
def close(self):
"""Closes all adapters and as such the session"""
for v in self.adapters.values():
v.close()
def mount(self, prefix, adapter):
"""Registers a connection adapter to a prefix.
Adapters are sorted in descending order by key length."""
self.adapters[prefix] = adapter
keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
for key in keys_to_move:
self.adapters[key] = self.adapters.pop(key)
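    # Illustrative usage (sketch): the longest matching prefix wins, so a
    # host-specific adapter overrides the default 'https://' one:
    #
    #     s = Session()
    #     s.mount('https://example.com', HTTPAdapter(max_retries=3))
    #     # requests to https://example.com/... now retry up to 3 times;
    #     # all other https:// URLs keep the default adapter.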
def __getstate__(self):
state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
state['redirect_cache'] = dict(self.redirect_cache)
return state
def __setstate__(self, state):
redirect_cache = state.pop('redirect_cache', {})
for attr, value in state.items():
setattr(self, attr, value)
self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE)
for redirect, to in redirect_cache.items():
self.redirect_cache[redirect] = to
def session():
"""Returns a :class:`Session` for context-management."""
return Session()
| gpl-3.0 |
knittledan/Location_Search_Prediction | thirdParty/requests/packages/chardet/codingstatemachine.py | 2931 | 2318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart
from .compat import wrap_ord
class CodingStateMachine:
def __init__(self, sm):
self._mModel = sm
self._mCurrentBytePos = 0
self._mCurrentCharLen = 0
self.reset()
def reset(self):
self._mCurrentState = eStart
def next_state(self, c):
# for each byte we get its class
# if it is first byte, we also get byte length
# PY3K: aBuf is a byte stream, so c is an int, not a byte
byteCls = self._mModel['classTable'][wrap_ord(c)]
if self._mCurrentState == eStart:
self._mCurrentBytePos = 0
self._mCurrentCharLen = self._mModel['charLenTable'][byteCls]
# from byte's class and stateTable, we get its next state
curr_state = (self._mCurrentState * self._mModel['classFactor']
+ byteCls)
self._mCurrentState = self._mModel['stateTable'][curr_state]
self._mCurrentBytePos += 1
return self._mCurrentState
def get_current_charlen(self):
return self._mCurrentCharLen
def get_coding_state_machine(self):
return self._mModel['name']
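# Illustrative usage (sketch; assumes one of chardet's bundled models,
# e.g. UTF8SMModel from the mbcssm module):
#
#     from .mbcssm import UTF8SMModel
#     machine = CodingStateMachine(UTF8SMModel)
#     for byte in data:
#         state = machine.next_state(byte)
#         # compare `state` with eStart/eError/eItsMe to drive detection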
| mit |
Rickyfox/SPR | SPR_Crawl/label.py | 1 | 4142 | __author__ = 'nick'
from pymongo import MongoClient
class DocLabeler:
"""
Class to manually label the instances in the database
"""
def __init__(self):
"""
Sole constructor. Creates database connection and auto executes the labeling process
:return:
"""
self.client = MongoClient()
self.docdb=self.client.documentdb
self.db_getunlabeled()
def db_getunlabeled(self):
"""
        Fetches documents from the database that are not yet labeled and asks for a relevance label for each of the three targets.
:return:
"""
try:
print 'Starting getunlabeled'
unlabeled=self.docdb.docs.find( { "relevantto" : { "$exists" : False } } )
print self.docdb.docs.find({"relevantto" : {"$exists" : False}}).count()
for item in unlabeled:
title=item['title']
inputcorrect=False
relevance=[]
print title
#print 'T1: Active Learning for Unequal Misclassification Costs on Imbalanced Data - relevant? y/n/q'
while not inputcorrect:
relinput=raw_input('T1: Active Learning for Unequal Misclassification Costs on Imbalanced Data - relevant? y/n/q\n')
                    if relinput in ('y', 'n'):  # `is 'y' or 'n'` was always true
                        inputcorrect=True
if relinput=='q':
self.client.close()
print 'program canceled'
                        return  # 'break' only left this prompt; quit the whole run
if relinput=='y':
#item['relevant']=1
relevance.append(1)
print 'item relevant'
#self.docdb.docs.save(item)
else:
#item['relevant']=0
relevance.append(0)
print 'item not relevant'
#self.docdb.docs.save(item)
inputcorrect=False
#print 'T1: Feature Selection to Improve Classification in Medical Data Mining Applications - relevant? y/n/q'
while not inputcorrect:
relinput=raw_input('T2: Feature Selection to Improve Classification in Medical Data Mining Applications - relevant? y/n/q\n')
                    if relinput in ('y', 'n'):  # `is 'y' or 'n'` was always true
                        inputcorrect=True
if relinput=='q':
self.client.close()
print 'program canceled'
                        return  # 'break' only left this prompt; quit the whole run
if relinput=='y':
#item['relevant']=1
relevance.append(1)
print 'item relevant'
#self.docdb.docs.save(item)
else:
#item['relevant']=0
relevance.append(0)
print 'item not relevant'
#self.docdb.docs.save(item)
inputcorrect=False
while not inputcorrect:
relinput=raw_input('T3: Recommending Scientific Literature to Students - relevant? y/n/q\n')
                    if relinput in ('y', 'n'):  # `is 'y' or 'n'` was always true
                        inputcorrect=True
if relinput=='q':
self.client.close()
print 'program canceled'
                        return  # 'break' only left this prompt; quit the whole run
if relinput=='y':
#item['relevant']=1
relevance.append(1)
print 'item relevant'
#self.docdb.docs.save(item)
else:
#item['relevant']=0
relevance.append(0)
print 'item not relevant'
#self.docdb.docs.save(item)
if len(relevance)==3:
print 'relevance length okay'
print relevance
item['relevantto']=relevance
self.docdb.docs.save(item)
print '\n'
else:
print 'relevance not okay\n'
except:
self.client.close()
print 'program canceled' | apache-2.0 |
ThiefMaster/sqlalchemy | lib/sqlalchemy/orm/persistence.py | 1 | 51586 | # orm/persistence.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""private module containing functions used to emit INSERT, UPDATE
and DELETE statements on behalf of a :class:`.Mapper` and its descending
mappers.
The functions here are called only by the unit of work functions
in unitofwork.py.
"""
import operator
from itertools import groupby, chain
from .. import sql, util, exc as sa_exc
from . import attributes, sync, exc as orm_exc, evaluator
from .base import state_str, _attr_as_key, _entity_descriptor
from ..sql import expression
from ..sql.base import _from_objects
from . import loading
def _bulk_insert(
mapper, mappings, session_transaction, isstates, return_defaults):
base_mapper = mapper.base_mapper
cached_connections = _cached_connection_dict(base_mapper)
if session_transaction.session.connection_callable:
raise NotImplementedError(
"connection_callable / per-instance sharding "
"not supported in bulk_insert()")
if isstates:
if return_defaults:
states = [(state, state.dict) for state in mappings]
mappings = [dict_ for (state, dict_) in states]
else:
mappings = [state.dict for state in mappings]
else:
mappings = list(mappings)
connection = session_transaction.connection(base_mapper)
for table, super_mapper in base_mapper._sorted_tables.items():
if not mapper.isa(super_mapper):
continue
records = (
(None, state_dict, params, mapper,
connection, value_params, has_all_pks, has_all_defaults)
for
state, state_dict, params, mp,
conn, value_params, has_all_pks,
has_all_defaults in _collect_insert_commands(table, (
(None, mapping, mapper, connection)
for mapping in mappings),
bulk=True, return_defaults=return_defaults
)
)
_emit_insert_statements(base_mapper, None,
cached_connections,
super_mapper, table, records,
bookkeeping=return_defaults)
if return_defaults and isstates:
identity_cls = mapper._identity_class
identity_props = [p.key for p in mapper._identity_key_props]
for state, dict_ in states:
state.key = (
identity_cls,
tuple([dict_[key] for key in identity_props])
)
def _bulk_update(mapper, mappings, session_transaction,
isstates, update_changed_only):
base_mapper = mapper.base_mapper
cached_connections = _cached_connection_dict(base_mapper)
def _changed_dict(mapper, state):
return dict(
(k, v)
for k, v in state.dict.items() if k in state.committed_state or k
in mapper._primary_key_propkeys
)
if isstates:
if update_changed_only:
mappings = [_changed_dict(mapper, state) for state in mappings]
else:
mappings = [state.dict for state in mappings]
else:
mappings = list(mappings)
if session_transaction.session.connection_callable:
raise NotImplementedError(
"connection_callable / per-instance sharding "
"not supported in bulk_update()")
connection = session_transaction.connection(base_mapper)
for table, super_mapper in base_mapper._sorted_tables.items():
if not mapper.isa(super_mapper):
continue
records = _collect_update_commands(None, table, (
(None, mapping, mapper, connection,
(mapping[mapper._version_id_prop.key]
if mapper._version_id_prop else None))
for mapping in mappings
), bulk=True)
_emit_update_statements(base_mapper, None,
cached_connections,
super_mapper, table, records,
bookkeeping=False)
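# Illustrative entry points (sketch): these private helpers back the
# public Session bulk API, e.g. for a hypothetical mapped class `User`:
#
#     session.bulk_insert_mappings(User, [{'name': 'a'}, {'name': 'b'}])
#     session.bulk_update_mappings(User, [{'id': 1, 'name': 'c'}])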
def save_obj(
base_mapper, states, uowtransaction, single=False):
"""Issue ``INSERT`` and/or ``UPDATE`` statements for a list
of objects.
This is called within the context of a UOWTransaction during a
flush operation, given a list of states to be flushed. The
base mapper in an inheritance hierarchy handles the inserts/
updates for all descendant mappers.
"""
# if batch=false, call _save_obj separately for each object
if not single and not base_mapper.batch:
for state in _sort_states(states):
save_obj(base_mapper, [state], uowtransaction, single=True)
return
states_to_update = []
states_to_insert = []
cached_connections = _cached_connection_dict(base_mapper)
for (state, dict_, mapper, connection,
has_identity,
row_switch, update_version_id) in _organize_states_for_save(
base_mapper, states, uowtransaction
):
if has_identity or row_switch:
states_to_update.append(
(state, dict_, mapper, connection, update_version_id)
)
else:
states_to_insert.append(
(state, dict_, mapper, connection)
)
for table, mapper in base_mapper._sorted_tables.items():
if table not in mapper._pks_by_table:
continue
insert = _collect_insert_commands(table, states_to_insert)
update = _collect_update_commands(
uowtransaction, table, states_to_update)
_emit_update_statements(base_mapper, uowtransaction,
cached_connections,
mapper, table, update)
_emit_insert_statements(base_mapper, uowtransaction,
cached_connections,
mapper, table, insert)
_finalize_insert_update_commands(
base_mapper, uowtransaction,
chain(
(
(state, state_dict, mapper, connection, False)
for state, state_dict, mapper, connection in states_to_insert
),
(
(state, state_dict, mapper, connection, True)
for state, state_dict, mapper, connection,
update_version_id in states_to_update
)
)
)
def post_update(base_mapper, states, uowtransaction, post_update_cols):
"""Issue UPDATE statements on behalf of a relationship() which
specifies post_update.
"""
cached_connections = _cached_connection_dict(base_mapper)
states_to_update = list(_organize_states_for_post_update(
base_mapper,
states, uowtransaction))
for table, mapper in base_mapper._sorted_tables.items():
if table not in mapper._pks_by_table:
continue
update = (
(state, state_dict, sub_mapper, connection)
for
state, state_dict, sub_mapper, connection in states_to_update
if table in sub_mapper._pks_by_table
)
update = _collect_post_update_commands(base_mapper, uowtransaction,
table, update,
post_update_cols)
_emit_post_update_statements(base_mapper, uowtransaction,
cached_connections,
mapper, table, update)
def delete_obj(base_mapper, states, uowtransaction):
"""Issue ``DELETE`` statements for a list of objects.
This is called within the context of a UOWTransaction during a
flush operation.
"""
cached_connections = _cached_connection_dict(base_mapper)
states_to_delete = list(_organize_states_for_delete(
base_mapper,
states,
uowtransaction))
table_to_mapper = base_mapper._sorted_tables
for table in reversed(list(table_to_mapper.keys())):
mapper = table_to_mapper[table]
if table not in mapper._pks_by_table:
continue
delete = _collect_delete_commands(base_mapper, uowtransaction,
table, states_to_delete)
_emit_delete_statements(base_mapper, uowtransaction,
cached_connections, mapper, table, delete)
for state, state_dict, mapper, connection, \
update_version_id in states_to_delete:
mapper.dispatch.after_delete(mapper, connection, state)
def _organize_states_for_save(base_mapper, states, uowtransaction):
"""Make an initial pass across a set of states for INSERT or
UPDATE.
This includes splitting out into distinct lists for
each, calling before_insert/before_update, obtaining
key information for each state including its dictionary,
mapper, the connection to use for the execution per state,
and the identity flag.
"""
for state, dict_, mapper, connection in _connections_for_states(
base_mapper, uowtransaction,
states):
has_identity = bool(state.key)
instance_key = state.key or mapper._identity_key_from_state(state)
row_switch = update_version_id = None
# call before_XXX extensions
if not has_identity:
mapper.dispatch.before_insert(mapper, connection, state)
else:
mapper.dispatch.before_update(mapper, connection, state)
if mapper._validate_polymorphic_identity:
mapper._validate_polymorphic_identity(mapper, state, dict_)
# detect if we have a "pending" instance (i.e. has
# no instance_key attached to it), and another instance
# with the same identity key already exists as persistent.
# convert to an UPDATE if so.
if not has_identity and \
instance_key in uowtransaction.session.identity_map:
instance = \
uowtransaction.session.identity_map[instance_key]
existing = attributes.instance_state(instance)
if not uowtransaction.is_deleted(existing):
raise orm_exc.FlushError(
"New instance %s with identity key %s conflicts "
"with persistent instance %s" %
(state_str(state), instance_key,
state_str(existing)))
base_mapper._log_debug(
"detected row switch for identity %s. "
"will update %s, remove %s from "
"transaction", instance_key,
state_str(state), state_str(existing))
# remove the "delete" flag from the existing element
uowtransaction.remove_state_actions(existing)
row_switch = existing
if (has_identity or row_switch) and mapper.version_id_col is not None:
update_version_id = mapper._get_committed_state_attr_by_column(
row_switch if row_switch else state,
row_switch.dict if row_switch else dict_,
mapper.version_id_col)
yield (state, dict_, mapper, connection,
has_identity, row_switch, update_version_id)
def _organize_states_for_post_update(base_mapper, states,
uowtransaction):
"""Make an initial pass across a set of states for UPDATE
corresponding to post_update.
This includes obtaining key information for each state
including its dictionary, mapper, the connection to use for
the execution per state.
"""
return _connections_for_states(base_mapper, uowtransaction, states)
def _organize_states_for_delete(base_mapper, states, uowtransaction):
"""Make an initial pass across a set of states for DELETE.
This includes calling out before_delete and obtaining
key information for each state including its dictionary,
mapper, the connection to use for the execution per state.
"""
for state, dict_, mapper, connection in _connections_for_states(
base_mapper, uowtransaction,
states):
mapper.dispatch.before_delete(mapper, connection, state)
if mapper.version_id_col is not None:
update_version_id = \
mapper._get_committed_state_attr_by_column(
state, dict_,
mapper.version_id_col)
else:
update_version_id = None
yield (
state, dict_, mapper, connection, update_version_id)
def _collect_insert_commands(
table, states_to_insert,
bulk=False, return_defaults=False):
"""Identify sets of values to use in INSERT statements for a
list of states.
"""
for state, state_dict, mapper, connection in states_to_insert:
if table not in mapper._pks_by_table:
continue
params = {}
value_params = {}
propkey_to_col = mapper._propkey_to_col[table]
eval_none = mapper._insert_cols_evaluating_none[table]
for propkey in set(propkey_to_col).intersection(state_dict):
value = state_dict[propkey]
col = propkey_to_col[propkey]
if value is None and propkey not in eval_none:
continue
elif not bulk and isinstance(value, sql.ClauseElement):
value_params[col.key] = value
else:
params[col.key] = value
if not bulk:
for colkey in mapper._insert_cols_as_none[table].\
difference(params).difference(value_params):
params[colkey] = None
if not bulk or return_defaults:
has_all_pks = mapper._pk_keys_by_table[table].issubset(params)
if mapper.base_mapper.eager_defaults:
has_all_defaults = mapper._server_default_cols[table].\
issubset(params)
else:
has_all_defaults = True
else:
has_all_defaults = has_all_pks = True
if mapper.version_id_generator is not False \
and mapper.version_id_col is not None and \
mapper.version_id_col in mapper._cols_by_table[table]:
params[mapper.version_id_col.key] = \
mapper.version_id_generator(None)
yield (
state, state_dict, params, mapper,
connection, value_params, has_all_pks,
has_all_defaults)
def _collect_update_commands(
uowtransaction, table, states_to_update,
bulk=False):
"""Identify sets of values to use in UPDATE statements for a
list of states.
This function works intricately with the history system
to determine exactly what values should be updated
as well as how the row should be matched within an UPDATE
statement. Includes some tricky scenarios where the primary
key of an object might have been changed.
"""
for state, state_dict, mapper, connection, \
update_version_id in states_to_update:
if table not in mapper._pks_by_table:
continue
pks = mapper._pks_by_table[table]
value_params = {}
propkey_to_col = mapper._propkey_to_col[table]
if bulk:
params = dict(
(propkey_to_col[propkey].key, state_dict[propkey])
for propkey in
set(propkey_to_col).intersection(state_dict).difference(
mapper._pk_keys_by_table[table])
)
has_all_defaults = True
else:
params = {}
for propkey in set(propkey_to_col).intersection(
state.committed_state):
value = state_dict[propkey]
col = propkey_to_col[propkey]
if isinstance(value, sql.ClauseElement):
value_params[col] = value
# guard against values that generate non-__nonzero__
# objects for __eq__()
elif state.manager[propkey].impl.is_equal(
value, state.committed_state[propkey]) is not True:
params[col.key] = value
if mapper.base_mapper.eager_defaults:
has_all_defaults = mapper._server_onupdate_default_cols[table].\
issubset(params)
else:
has_all_defaults = True
if update_version_id is not None and \
mapper.version_id_col in mapper._cols_by_table[table]:
if not bulk and not (params or value_params):
# HACK: check for history in other tables, in case the
# history is only in a different table than the one
# where the version_id_col is. This logic was lost
# from 0.9 -> 1.0.0 and restored in 1.0.6.
for prop in mapper._columntoproperty.values():
history = (
state.manager[prop.key].impl.get_history(
state, state_dict,
attributes.PASSIVE_NO_INITIALIZE))
if history.added:
break
else:
# no net change, break
continue
col = mapper.version_id_col
params[col._label] = update_version_id
if (bulk or col.key not in params) and \
mapper.version_id_generator is not False:
val = mapper.version_id_generator(update_version_id)
params[col.key] = val
elif not (params or value_params):
continue
if bulk:
pk_params = dict(
(propkey_to_col[propkey]._label, state_dict.get(propkey))
for propkey in
set(propkey_to_col).
intersection(mapper._pk_keys_by_table[table])
)
else:
pk_params = {}
for col in pks:
propkey = mapper._columntoproperty[col].key
history = state.manager[propkey].impl.get_history(
state, state_dict, attributes.PASSIVE_OFF)
if history.added:
if not history.deleted or \
("pk_cascaded", state, col) in \
uowtransaction.attributes:
pk_params[col._label] = history.added[0]
params.pop(col.key, None)
else:
# else, use the old value to locate the row
pk_params[col._label] = history.deleted[0]
params[col.key] = history.added[0]
else:
pk_params[col._label] = history.unchanged[0]
if pk_params[col._label] is None:
raise orm_exc.FlushError(
"Can't update table %s using NULL for primary "
"key value on column %s" % (table, col))
if params or value_params:
params.update(pk_params)
yield (
state, state_dict, params, mapper,
connection, value_params, has_all_defaults)
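# Illustrative shape of one yielded update record (sketch): SET values are
# keyed by column key, while the WHERE-clause primary key values use the
# column's ._label (e.g. 'user_id' for table 'user', column 'id'):
#
#     (state, state_dict,
#      {'name': 'newname', 'user_id': 5},
#      mapper, connection, {}, True)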
def _collect_post_update_commands(base_mapper, uowtransaction, table,
states_to_update, post_update_cols):
"""Identify sets of values to use in UPDATE statements for a
list of states within a post_update operation.
"""
for state, state_dict, mapper, connection in states_to_update:
# assert table in mapper._pks_by_table
pks = mapper._pks_by_table[table]
params = {}
hasdata = False
for col in mapper._cols_by_table[table]:
if col in pks:
params[col._label] = \
mapper._get_state_attr_by_column(
state,
state_dict, col, passive=attributes.PASSIVE_OFF)
elif col in post_update_cols:
prop = mapper._columntoproperty[col]
history = state.manager[prop.key].impl.get_history(
state, state_dict,
attributes.PASSIVE_NO_INITIALIZE)
if history.added:
value = history.added[0]
params[col.key] = value
hasdata = True
if hasdata:
yield params, connection
def _collect_delete_commands(base_mapper, uowtransaction, table,
states_to_delete):
"""Identify values to use in DELETE statements for a list of
states to be deleted."""
for state, state_dict, mapper, connection, \
update_version_id in states_to_delete:
if table not in mapper._pks_by_table:
continue
params = {}
for col in mapper._pks_by_table[table]:
params[col.key] = \
value = \
mapper._get_committed_state_attr_by_column(
state, state_dict, col)
if value is None:
raise orm_exc.FlushError(
"Can't delete from table %s "
"using NULL for primary "
"key value on column %s" % (table, col))
if update_version_id is not None and \
mapper.version_id_col in mapper._cols_by_table[table]:
params[mapper.version_id_col.key] = update_version_id
yield params, connection
def _emit_update_statements(base_mapper, uowtransaction,
cached_connections, mapper, table, update,
bookkeeping=True):
"""Emit UPDATE statements corresponding to value lists collected
by _collect_update_commands()."""
needs_version_id = mapper.version_id_col is not None and \
mapper.version_id_col in mapper._cols_by_table[table]
def update_stmt():
clause = sql.and_()
for col in mapper._pks_by_table[table]:
clause.clauses.append(col == sql.bindparam(col._label,
type_=col.type))
if needs_version_id:
clause.clauses.append(
mapper.version_id_col == sql.bindparam(
mapper.version_id_col._label,
type_=mapper.version_id_col.type))
stmt = table.update(clause)
return stmt
cached_stmt = base_mapper._memo(('update', table), update_stmt)
for (connection, paramkeys, hasvalue, has_all_defaults), \
records in groupby(
update,
lambda rec: (
rec[4], # connection
set(rec[2]), # set of parameter keys
bool(rec[5]), # whether or not we have "value" parameters
rec[6] # has_all_defaults
)
):
rows = 0
records = list(records)
statement = cached_stmt
# TODO: would be super-nice to not have to determine this boolean
# inside the loop here, in the 99.9999% of the time there's only
# one connection in use
assert_singlerow = connection.dialect.supports_sane_rowcount
assert_multirow = assert_singlerow and \
connection.dialect.supports_sane_multi_rowcount
allow_multirow = has_all_defaults and not needs_version_id
if bookkeeping and not has_all_defaults and \
mapper.base_mapper.eager_defaults:
statement = statement.return_defaults()
elif mapper.version_id_col is not None:
statement = statement.return_defaults(mapper.version_id_col)
if hasvalue:
for state, state_dict, params, mapper, \
connection, value_params, has_all_defaults in records:
c = connection.execute(
statement.values(value_params),
params)
if bookkeeping:
_postfetch(
mapper,
uowtransaction,
table,
state,
state_dict,
c,
c.context.compiled_parameters[0],
value_params)
rows += c.rowcount
check_rowcount = True
else:
if not allow_multirow:
check_rowcount = assert_singlerow
for state, state_dict, params, mapper, \
connection, value_params, has_all_defaults in records:
c = cached_connections[connection].\
execute(statement, params)
# TODO: why with bookkeeping=False?
if bookkeeping:
_postfetch(
mapper,
uowtransaction,
table,
state,
state_dict,
c,
c.context.compiled_parameters[0],
value_params)
rows += c.rowcount
else:
multiparams = [rec[2] for rec in records]
check_rowcount = assert_multirow or (
assert_singlerow and
len(multiparams) == 1
)
c = cached_connections[connection].\
execute(statement, multiparams)
rows += c.rowcount
for state, state_dict, params, mapper, \
connection, value_params, has_all_defaults in records:
if bookkeeping:
_postfetch(
mapper,
uowtransaction,
table,
state,
state_dict,
c,
c.context.compiled_parameters[0],
value_params)
if check_rowcount:
if rows != len(records):
raise orm_exc.StaleDataError(
"UPDATE statement on table '%s' expected to "
"update %d row(s); %d were matched." %
(table.description, len(records), rows))
elif needs_version_id:
util.warn("Dialect %s does not support updated rowcount "
"- versioning cannot be verified." %
c.dialect.dialect_description)
def _emit_insert_statements(base_mapper, uowtransaction,
cached_connections, mapper, table, insert,
bookkeeping=True):
"""Emit INSERT statements corresponding to value lists collected
by _collect_insert_commands()."""
cached_stmt = base_mapper._memo(('insert', table), table.insert)
for (connection, pkeys, hasvalue, has_all_pks, has_all_defaults), \
records in groupby(
insert,
lambda rec: (
rec[4], # connection
set(rec[2]), # parameter keys
bool(rec[5]), # whether we have "value" parameters
rec[6],
rec[7])):
statement = cached_stmt
if not bookkeeping or \
(
has_all_defaults
or not base_mapper.eager_defaults
or not connection.dialect.implicit_returning
) and has_all_pks and not hasvalue:
records = list(records)
multiparams = [rec[2] for rec in records]
c = cached_connections[connection].\
execute(statement, multiparams)
if bookkeeping:
for (state, state_dict, params, mapper_rec,
conn, value_params, has_all_pks, has_all_defaults), \
last_inserted_params in \
zip(records, c.context.compiled_parameters):
if state:
_postfetch(
mapper_rec,
uowtransaction,
table,
state,
state_dict,
c,
last_inserted_params,
value_params)
else:
_postfetch_bulk_save(mapper_rec, state_dict, table)
else:
if not has_all_defaults and base_mapper.eager_defaults:
statement = statement.return_defaults()
elif mapper.version_id_col is not None:
statement = statement.return_defaults(mapper.version_id_col)
for state, state_dict, params, mapper_rec, \
connection, value_params, \
has_all_pks, has_all_defaults in records:
if value_params:
result = connection.execute(
statement.values(value_params),
params)
else:
result = cached_connections[connection].\
execute(statement, params)
primary_key = result.context.inserted_primary_key
if primary_key is not None:
# set primary key attributes
for pk, col in zip(primary_key,
mapper._pks_by_table[table]):
prop = mapper_rec._columntoproperty[col]
if state_dict.get(prop.key) is None:
state_dict[prop.key] = pk
if bookkeeping:
if state:
_postfetch(
mapper_rec,
uowtransaction,
table,
state,
state_dict,
result,
result.context.compiled_parameters[0],
value_params)
else:
_postfetch_bulk_save(mapper_rec, state_dict, table)
def _emit_post_update_statements(base_mapper, uowtransaction,
cached_connections, mapper, table, update):
"""Emit UPDATE statements corresponding to value lists collected
by _collect_post_update_commands()."""
def update_stmt():
clause = sql.and_()
for col in mapper._pks_by_table[table]:
clause.clauses.append(col == sql.bindparam(col._label,
type_=col.type))
return table.update(clause)
statement = base_mapper._memo(('post_update', table), update_stmt)
# execute each UPDATE in the order according to the original
# list of states to guarantee row access order, but
# also group them into common (connection, cols) sets
# to support executemany().
for key, grouper in groupby(
update, lambda rec: (
rec[1], # connection
set(rec[0]) # parameter keys
)
):
connection = key[0]
multiparams = [params for params, conn in grouper]
cached_connections[connection].\
execute(statement, multiparams)
def _emit_delete_statements(base_mapper, uowtransaction, cached_connections,
mapper, table, delete):
"""Emit DELETE statements corresponding to value lists collected
by _collect_delete_commands()."""
need_version_id = mapper.version_id_col is not None and \
mapper.version_id_col in mapper._cols_by_table[table]
def delete_stmt():
clause = sql.and_()
for col in mapper._pks_by_table[table]:
clause.clauses.append(
col == sql.bindparam(col.key, type_=col.type))
if need_version_id:
clause.clauses.append(
mapper.version_id_col ==
sql.bindparam(
mapper.version_id_col.key,
type_=mapper.version_id_col.type
)
)
return table.delete(clause)
statement = base_mapper._memo(('delete', table), delete_stmt)
for connection, recs in groupby(
delete,
lambda rec: rec[1] # connection
):
del_objects = [params for params, connection in recs]
connection = cached_connections[connection]
expected = len(del_objects)
rows_matched = -1
only_warn = False
if connection.dialect.supports_sane_multi_rowcount:
c = connection.execute(statement, del_objects)
if not need_version_id:
only_warn = True
rows_matched = c.rowcount
elif need_version_id:
if connection.dialect.supports_sane_rowcount:
rows_matched = 0
# execute deletes individually so that versioned
# rows can be verified
for params in del_objects:
c = connection.execute(statement, params)
rows_matched += c.rowcount
else:
util.warn(
"Dialect %s does not support deleted rowcount "
"- versioning cannot be verified." %
connection.dialect.dialect_description,
stacklevel=12)
connection.execute(statement, del_objects)
else:
connection.execute(statement, del_objects)
if base_mapper.confirm_deleted_rows and \
rows_matched > -1 and expected != rows_matched:
if only_warn:
util.warn(
"DELETE statement on table '%s' expected to "
"delete %d row(s); %d were matched. Please set "
"confirm_deleted_rows=False within the mapper "
"configuration to prevent this warning." %
(table.description, expected, rows_matched)
)
else:
raise orm_exc.StaleDataError(
"DELETE statement on table '%s' expected to "
"delete %d row(s); %d were matched. Please set "
"confirm_deleted_rows=False within the mapper "
"configuration to prevent this warning." %
(table.description, expected, rows_matched)
)
def _finalize_insert_update_commands(base_mapper, uowtransaction, states):
"""finalize state on states that have been inserted or updated,
including calling after_insert/after_update events.
"""
for state, state_dict, mapper, connection, has_identity in states:
if mapper._readonly_props:
readonly = state.unmodified_intersection(
[p.key for p in mapper._readonly_props
if p.expire_on_flush or p.key not in state.dict]
)
if readonly:
state._expire_attributes(state.dict, readonly)
# if eager_defaults option is enabled, load
# all expired cols. Else if we have a version_id_col, make sure
# it isn't expired.
toload_now = []
if base_mapper.eager_defaults:
toload_now.extend(state._unloaded_non_object)
elif mapper.version_id_col is not None and \
mapper.version_id_generator is False:
if mapper._version_id_prop.key in state.unloaded:
toload_now.extend([mapper._version_id_prop.key])
if toload_now:
state.key = base_mapper._identity_key_from_state(state)
loading.load_on_ident(
uowtransaction.session.query(base_mapper),
state.key, refresh_state=state,
only_load_props=toload_now)
# call after_XXX extensions
if not has_identity:
mapper.dispatch.after_insert(mapper, connection, state)
else:
mapper.dispatch.after_update(mapper, connection, state)
def _postfetch(mapper, uowtransaction, table,
state, dict_, result, params, value_params):
"""Expire attributes in need of newly persisted database state,
after an INSERT or UPDATE statement has proceeded for that
state."""
prefetch_cols = result.context.compiled.prefetch
postfetch_cols = result.context.compiled.postfetch
returning_cols = result.context.compiled.returning
if mapper.version_id_col is not None and \
mapper.version_id_col in mapper._cols_by_table[table]:
prefetch_cols = list(prefetch_cols) + [mapper.version_id_col]
refresh_flush = bool(mapper.class_manager.dispatch.refresh_flush)
if refresh_flush:
load_evt_attrs = []
if returning_cols:
row = result.context.returned_defaults
if row is not None:
for col in returning_cols:
if col.primary_key:
continue
dict_[mapper._columntoproperty[col].key] = row[col]
if refresh_flush:
load_evt_attrs.append(mapper._columntoproperty[col].key)
for c in prefetch_cols:
if c.key in params and c in mapper._columntoproperty:
dict_[mapper._columntoproperty[c].key] = params[c.key]
if refresh_flush:
load_evt_attrs.append(mapper._columntoproperty[c].key)
if refresh_flush and load_evt_attrs:
mapper.class_manager.dispatch.refresh_flush(
state, uowtransaction, load_evt_attrs)
if postfetch_cols:
state._expire_attributes(state.dict,
[mapper._columntoproperty[c].key
for c in postfetch_cols if c in
mapper._columntoproperty]
)
# synchronize newly inserted ids from one table to the next
# TODO: this still goes a little too often. would be nice to
# have definitive list of "columns that changed" here
for m, equated_pairs in mapper._table_to_equated[table]:
sync.populate(state, m, state, m,
equated_pairs,
uowtransaction,
mapper.passive_updates)
def _postfetch_bulk_save(mapper, dict_, table):
for m, equated_pairs in mapper._table_to_equated[table]:
sync.bulk_populate_inherit_keys(dict_, m, equated_pairs)
def _connections_for_states(base_mapper, uowtransaction, states):
"""Return an iterator of (state, state.dict, mapper, connection).
The states are sorted according to _sort_states, then paired
with the connection they should be using for the given
unit of work transaction.
"""
# if session has a connection callable,
# organize individual states with the connection
# to use for update
if uowtransaction.session.connection_callable:
connection_callable = \
uowtransaction.session.connection_callable
else:
connection = uowtransaction.transaction.connection(base_mapper)
connection_callable = None
for state in _sort_states(states):
if connection_callable:
connection = connection_callable(base_mapper, state.obj())
mapper = state.manager.mapper
yield state, state.dict, mapper, connection
def _cached_connection_dict(base_mapper):
# dictionary of connection->connection_with_cache_options.
return util.PopulateDict(
lambda conn: conn.execution_options(
compiled_cache=base_mapper._compiled_cache
))
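# Illustrative sketch (not part of the original module): the returned
# PopulateDict builds the wrapped connection lazily on first access;
# some_mapper and conn below are placeholders, e.g.
#
#     cached = _cached_connection_dict(some_mapper)
#     cached[conn]  # == conn.execution_options(
#                   #        compiled_cache=some_mapper._compiled_cache)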
def _sort_states(states):
pending = set(states)
persistent = set(s for s in pending if s.key is not None)
pending.difference_update(persistent)
return sorted(pending, key=operator.attrgetter("insert_order")) + \
sorted(persistent, key=lambda q: q.key[1])
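# Illustrative sketch (not part of the original module): given pending
# states with insert_order 2 and 1 plus one persistent state, _sort_states()
# returns the pending states ordered by insert_order, followed by the
# persistent states ordered by primary key, i.e.
# [pending(insert_order=1), pending(insert_order=2), persistent(pk)].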
class BulkUD(object):
"""Handle bulk update and deletes via a :class:`.Query`."""
def __init__(self, query):
self.query = query.enable_eagerloads(False)
self.mapper = self.query._bind_mapper()
self._validate_query_state()
def _validate_query_state(self):
for attr, methname, notset, op in (
('_limit', 'limit()', None, operator.is_),
('_offset', 'offset()', None, operator.is_),
('_order_by', 'order_by()', False, operator.is_),
('_group_by', 'group_by()', False, operator.is_),
('_distinct', 'distinct()', False, operator.is_),
(
'_from_obj',
'join(), outerjoin(), select_from(), or from_self()',
(), operator.eq)
):
if not op(getattr(self.query, attr), notset):
raise sa_exc.InvalidRequestError(
"Can't call Query.update() or Query.delete() "
"when %s has been called" %
(methname, )
)
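        # Illustrative sketch (not part of the original module): with User a
        # placeholder mapped class, session.query(User).limit(5).update({...})
        # fails this check, since _limit is no longer None once limit() has
        # been called, and raises:
        #
        #     InvalidRequestError: Can't call Query.update() or Query.delete()
        #     when limit() has been called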
@property
def session(self):
return self.query.session
@classmethod
def _factory(cls, lookup, synchronize_session, *arg):
try:
klass = lookup[synchronize_session]
except KeyError:
raise sa_exc.ArgumentError(
"Valid strategies for session synchronization "
"are %s" % (", ".join(sorted(repr(x)
for x in lookup))))
else:
return klass(*arg)
def exec_(self):
self._do_pre()
self._do_pre_synchronize()
self._do_exec()
self._do_post_synchronize()
self._do_post()
@util.dependencies("sqlalchemy.orm.query")
def _do_pre(self, querylib):
query = self.query
self.context = querylib.QueryContext(query)
if isinstance(query._entities[0], querylib._ColumnEntity):
# check for special case of query(table)
tables = set()
for ent in query._entities:
if not isinstance(ent, querylib._ColumnEntity):
tables.clear()
break
else:
tables.update(_from_objects(ent.column))
if len(tables) != 1:
raise sa_exc.InvalidRequestError(
"This operation requires only one Table or "
"entity be specified as the target."
)
else:
self.primary_table = tables.pop()
else:
self.primary_table = query._only_entity_zero(
"This operation requires only one Table or "
"entity be specified as the target."
).mapper.local_table
session = query.session
if query._autoflush:
session._autoflush()
def _do_pre_synchronize(self):
pass
def _do_post_synchronize(self):
pass
class BulkEvaluate(BulkUD):
"""BulkUD which does the 'evaluate' method of session state resolution."""
def _additional_evaluators(self, evaluator_compiler):
pass
def _do_pre_synchronize(self):
query = self.query
target_cls = query._mapper_zero().class_
try:
evaluator_compiler = evaluator.EvaluatorCompiler(target_cls)
if query.whereclause is not None:
eval_condition = evaluator_compiler.process(
query.whereclause)
else:
def eval_condition(obj):
return True
self._additional_evaluators(evaluator_compiler)
except evaluator.UnevaluatableError:
raise sa_exc.InvalidRequestError(
"Could not evaluate current criteria in Python. "
"Specify 'fetch' or False for the "
"synchronize_session parameter.")
# TODO: detect when the where clause is a trivial primary key match
self.matched_objects = [
obj for (cls, pk), obj in
query.session.identity_map.items()
if issubclass(cls, target_cls) and
eval_condition(obj)]
class BulkFetch(BulkUD):
"""BulkUD which does the 'fetch' method of session state resolution."""
def _do_pre_synchronize(self):
query = self.query
session = query.session
context = query._compile_context()
select_stmt = context.statement.with_only_columns(
self.primary_table.primary_key)
self.matched_rows = session.execute(
select_stmt,
mapper=self.mapper,
params=query._params).fetchall()
class BulkUpdate(BulkUD):
"""BulkUD which handles UPDATEs."""
def __init__(self, query, values, update_kwargs):
super(BulkUpdate, self).__init__(query)
self.values = values
self.update_kwargs = update_kwargs
@classmethod
def factory(cls, query, synchronize_session, values, update_kwargs):
return BulkUD._factory({
"evaluate": BulkUpdateEvaluate,
"fetch": BulkUpdateFetch,
False: BulkUpdate
}, synchronize_session, query, values, update_kwargs)
def _resolve_string_to_expr(self, key):
if self.mapper and isinstance(key, util.string_types):
attr = _entity_descriptor(self.mapper, key)
return attr.__clause_element__()
else:
return key
def _resolve_key_to_attrname(self, key):
if self.mapper and isinstance(key, util.string_types):
attr = _entity_descriptor(self.mapper, key)
return attr.property.key
elif isinstance(key, attributes.InstrumentedAttribute):
return key.key
elif hasattr(key, '__clause_element__'):
key = key.__clause_element__()
if self.mapper and isinstance(key, expression.ColumnElement):
try:
attr = self.mapper._columntoproperty[key]
except orm_exc.UnmappedColumnError:
return None
else:
return attr.key
else:
raise sa_exc.InvalidRequestError(
"Invalid expression type: %r" % key)
def _do_exec(self):
values = [
(self._resolve_string_to_expr(k), v)
for k, v in (
self.values.items() if hasattr(self.values, 'items')
else self.values)
]
if not self.update_kwargs.get('preserve_parameter_order', False):
values = dict(values)
update_stmt = sql.update(self.primary_table,
self.context.whereclause, values,
**self.update_kwargs)
self.result = self.query.session.execute(
update_stmt, params=self.query._params,
mapper=self.mapper)
self.rowcount = self.result.rowcount
def _do_post(self):
session = self.query.session
session.dispatch.after_bulk_update(self)
class BulkDelete(BulkUD):
"""BulkUD which handles DELETEs."""
def __init__(self, query):
super(BulkDelete, self).__init__(query)
@classmethod
def factory(cls, query, synchronize_session):
return BulkUD._factory({
"evaluate": BulkDeleteEvaluate,
"fetch": BulkDeleteFetch,
False: BulkDelete
}, synchronize_session, query)
def _do_exec(self):
delete_stmt = sql.delete(self.primary_table,
self.context.whereclause)
self.result = self.query.session.execute(
delete_stmt,
params=self.query._params,
mapper=self.mapper)
self.rowcount = self.result.rowcount
def _do_post(self):
session = self.query.session
session.dispatch.after_bulk_delete(self)
class BulkUpdateEvaluate(BulkEvaluate, BulkUpdate):
"""BulkUD which handles UPDATEs using the "evaluate"
method of session resolution."""
def _additional_evaluators(self, evaluator_compiler):
self.value_evaluators = {}
values = (self.values.items() if hasattr(self.values, 'items')
else self.values)
for key, value in values:
key = self._resolve_key_to_attrname(key)
if key is not None:
self.value_evaluators[key] = evaluator_compiler.process(
expression._literal_as_binds(value))
def _do_post_synchronize(self):
session = self.query.session
states = set()
evaluated_keys = list(self.value_evaluators.keys())
for obj in self.matched_objects:
state, dict_ = attributes.instance_state(obj),\
attributes.instance_dict(obj)
# only evaluate unmodified attributes
to_evaluate = state.unmodified.intersection(
evaluated_keys)
for key in to_evaluate:
dict_[key] = self.value_evaluators[key](obj)
state._commit(dict_, list(to_evaluate))
# expire attributes with pending changes
# (there was no autoflush, so they are overwritten)
state._expire_attributes(dict_,
set(evaluated_keys).
difference(to_evaluate))
states.add(state)
session._register_altered(states)
class BulkDeleteEvaluate(BulkEvaluate, BulkDelete):
"""BulkUD which handles DELETEs using the "evaluate"
method of session resolution."""
def _do_post_synchronize(self):
self.query.session._remove_newly_deleted(
[attributes.instance_state(obj)
for obj in self.matched_objects])
class BulkUpdateFetch(BulkFetch, BulkUpdate):
"""BulkUD which handles UPDATEs using the "fetch"
method of session resolution."""
def _do_post_synchronize(self):
session = self.query.session
target_mapper = self.query._mapper_zero()
states = set([
attributes.instance_state(session.identity_map[identity_key])
for identity_key in [
target_mapper.identity_key_from_primary_key(
list(primary_key))
for primary_key in self.matched_rows
]
if identity_key in session.identity_map
])
attrib = [_attr_as_key(k) for k in self.values]
for state in states:
session._expire_state(state, attrib)
session._register_altered(states)
class BulkDeleteFetch(BulkFetch, BulkDelete):
"""BulkUD which handles DELETEs using the "fetch"
method of session resolution."""
def _do_post_synchronize(self):
session = self.query.session
target_mapper = self.query._mapper_zero()
for primary_key in self.matched_rows:
# TODO: inline this and call remove_newly_deleted
# once
identity_key = target_mapper.identity_key_from_primary_key(
list(primary_key))
if identity_key in session.identity_map:
session._remove_newly_deleted(
[attributes.instance_state(
session.identity_map[identity_key]
)]
)
| mit |
openstack/rally | tests/unit/test_ddt.py | 1 | 4367 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import os
from tests.unit import test
class DDTDecoratorChecker(ast.NodeVisitor):
"""Visit an AST tree looking for classes lacking the ddt.ddt decorator.
DDT uses decorators on test case functions to supply different
test data, but if the class that those functions are members of is
not decorated with @ddt.ddt, then the data expansion never happens
and the tests are incomplete. This is very easy to miss both when
writing and when reviewing code, so this visitor ensures that
every class that contains a function decorated with a @ddt.*
decorator is itself decorated with @ddt.ddt
"""
def __init__(self):
self.classes = []
self.errors = {}
@classmethod
def _get_name(cls, node):
if isinstance(node, ast.Name):
return node.id
if isinstance(node, ast.Attribute):
return cls._get_name(node.value) + "." + node.attr
return ""
def _is_ddt(self, cls_node):
return "ddt.ddt" in (self._get_name(d)
for d in cls_node.decorator_list)
def visit_ClassDef(self, node):
self.classes.append(node)
self.generic_visit(node)
self.classes.pop()
def visit_FunctionDef(self, node):
if not self.classes:
# NOTE(stpierre): we only care about functions that are
# defined inside of classes
return
cls = self.classes[-1]
if cls.name in self.errors:
# NOTE(stpierre): if this class already has been found to
# be in error, ignore the rest of its functions
return
for decorator in node.decorator_list:
if not isinstance(decorator, ast.Call):
continue
funcname = self._get_name(decorator.func)
if funcname.startswith("ddt."):
if not self._is_ddt(cls):
msg = ("Class %s has functions that use DDT, "
"but is not decorated with `ddt.ddt`" %
cls.name)
self.errors[cls.name] = {
"lineno": decorator.lineno,
"message": msg
}
class DDTDecoratorCheckerTestCase(test.TestCase):
tests_path = os.path.dirname(__file__)
def test_ddt_class_decorator(self):
"""Classes with DDT-decorated functions have ddt.ddt class decorator.
"""
errors = []
for dirname, dirnames, filenames in os.walk(self.tests_path):
for filename in filenames:
if not (filename.startswith("test_")
and filename.endswith(".py")):
continue
filename = os.path.relpath(os.path.join(dirname, filename))
with open(filename, "rb") as fh:
try:
tree = ast.parse(fh.read(), filename)
except TypeError as err:
errors.append({"message": str(err),
"filename": filename,
"lineno": -1})
visitor = DDTDecoratorChecker()
visitor.visit(tree)
errors.extend(
dict(filename=filename, **error)
for error in visitor.errors.values())
if errors:
msg = [""]
for error in errors:
msg.extend([
"Errors at %(filename)s line %(lineno)d: %(message)s" % {
"message": error["message"],
"filename": error["filename"],
"lineno": error["lineno"]},
""])
self.fail("\n".join(msg))
| apache-2.0 |
VincentHHL/otp | lib/asn1/test/asn1_SUITE_data/CommonDataTypes.py | 97 | 84084 | CommonDataTypes DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
-- @prop dataType
-- @descr This type's only purpose is to avoid the OSS compiler warning: Duplicate PDU tag
-- @
CommonDataTypeWrapper ::= CHOICE
{
wrapAddAnalysisRejectReason AddAnalysisRejectReason,
wrapAddServiceToServiceProfileRejectReason AddServiceToServiceProfileRejectReason,
wrapAddUserIdentifiersRejectReason AddUserIdentifiersRejectReason,
wrapAdmissionRejectReason AdmissionRejectReason,
wrapAlertingUUIE AlertingUUIE,
wrapAllocateTransmissionPathRejectReason AllocateTransmissionPathRejectReason,
wrapAnalyseRejectReason AnalyseRejectReason,
wrapAvailabilityOfEquipment AvailabilityOfEquipment,
wrapBandwidth Bandwidth,
wrapBandwidthReducedInformation BandwidthReducedInformation,
wrapBandwidthReducedReason BandwidthReducedReason,
wrapBandwidthRejectReason BandwidthRejectReason,
wrapBasicCallCategories BasicCallCategories,
wrapBearerCapability BearerCapability,
wrapCallInformation CallInformation,
wrapCallModel CallModel,
wrapCallProceedingUUIE CallProceedingUUIE,
wrapCallReference CallReference,
wrapCallServices CallServices,
wrapCallState CallState,
wrapCallType CallType,
wrapCause Cause,
wrapCauseValue CauseValue,
wrapChangeServiceAndStatusRejectReason ChangeServiceAndStatusRejectReason,
wrapCheckServiceRejectReason CheckServiceRejectReason,
wrapCoding Coding,
wrapConferenceGoal ConferenceGoal,
wrapConferenceIdentifier ConferenceIdentifier,
wrapConnectTransmissionPathRejectReason ConnectTransmissionPathRejectReason,
wrapConnectUUIE ConnectUUIE,
wrapConnectionData ConnectionData,
wrapConnectionIdentifier ConnectionIdentifier,
wrapConnectionInformation ConnectionInformation,
wrapConnectionInformationOriginatingSide ConnectionInformationOriginatingSide,
wrapConnectionInformationTerminatingSide ConnectionInformationTerminatingSide,
wrapConnectionType ConnectionType,
wrapCreateEquipmentRepresentationRejectReason CreateEquipmentRepresentationRejectReason,
wrapCreateServiceAndStatusRejectReason CreateServiceAndStatusRejectReason,
wrapCreateServiceIdentifierRejectReason CreateServiceIdentifierRejectReason,
wrapDeallocateTransmissionPathRejectReason DeallocateTransmissionPathRejectReason,
wrapDetailedReasonAtom DetailedReasonAtom,
wrapDiagnostics Diagnostics,
wrapDisconnectTransmissionPathRejectReason DisconnectTransmissionPathRejectReason,
wrapDisengageReason DisengageReason,
wrapDisengageRejectReason DisengageRejectReason,
wrapDisplay Display,
wrapE164Identifier E164Identifier,
wrapEndToEndEndpointInformationServiceCallAcknowledge EndToEndEndpointInformationServiceCallAcknowledge,
wrapEndToEndEndpointInformationServiceCallActive EndToEndEndpointInformationServiceCallActive,
wrapEndToEndEndpointInformationServiceCallProgress EndToEndEndpointInformationServiceCallProgress,
wrapEndToEndEndpointInformationServiceCallSetup EndToEndEndpointInformationServiceCallSetup,
wrapEndToEndEndpointInformationServiceCallTermination EndToEndEndpointInformationServiceCallTermination,
wrapEndpointIdentifier EndpointIdentifier,
wrapEndpointRegistrationCategories EndpointRegistrationCategories,
wrapEndpointRegistrationRejectReason EndpointRegistrationRejectReason,
wrapEndpointType EndpointType,
wrapEndpointUnregistrationCategories EndpointUnregistrationCategories,
wrapEndpointUnregistrationRejectReason EndpointUnregistrationRejectReason,
wrapEquipmentAddressAN EquipmentAddressAN,
wrapEquipmentAddressLAN EquipmentAddressLAN,
wrapEquipmentRelatedInformation EquipmentRelatedInformation,
wrapEquipmentRelatedInformationIdentifier EquipmentRelatedInformationIdentifier,
wrapFacilityReason FacilityReason,
wrapFacilityUUIE FacilityUUIE,
wrapGatekeeperIdentifier GatekeeperIdentifier,
wrapGatekeeperInformation GatekeeperInformation,
wrapGatekeeperRejectReason GatekeeperRejectReason,
wrapGatewayInformation GatewayInformation,
wrapGetAnalysisRejectReason GetAnalysisRejectReason,
wrapGetEquipmentInformationRejectReason GetEquipmentInformationRejectReason,
wrapGetLANDataRejectReason GetLANDataRejectReason,
wrapGetPartyInformationRejectReason GetPartyInformationRejectReason,
wrapGetRejectReasonUser GetRejectReasonUser,
wrapGetServiceFromServiceProfileRejectReason GetServiceFromServiceProfileRejectReason,
wrapGetServiceProfileRejectReason GetServiceProfileRejectReason,
wrapGetServicesAndStatusRejectReason GetServicesAndStatusRejectReason,
wrapGetUserServiceInformationAndStatusRejectReason GetUserServiceInformationAndStatusRejectReason,
wrapH221NonStandard H221NonStandard,
wrapH310Information H310Information,
wrapH320Information H320Information,
wrapH321Information H321Information,
wrapH322Information H322Information,
wrapH323Information H323Information,
wrapH323InterfaceAddCallReferenceRejectReason H323InterfaceAddCallReferenceRejectReason,
wrapH323InterfaceAddCallRelatedDataRejectReason H323InterfaceAddCallRelatedDataRejectReason,
wrapH323InterfaceAddFixedTransportAddressDataRejectReason H323InterfaceAddFixedTransportAddressDataRejectReason,
wrapH323InterfaceAddKeysAndSetAttributesRejectReason H323InterfaceAddKeysAndSetAttributesRejectReason,
wrapH323InterfaceAdditionalKeys H323InterfaceAdditionalKeys,
wrapH323InterfaceAllocateResourceRejectReason H323InterfaceAllocateResourceRejectReason,
wrapH323InterfaceChangeKeysAndRelationsToUsersReject H323InterfaceChangeKeysAndRelationsToUsersReject,
wrapH323InterfaceCommonAttribute H323InterfaceCommonAttribute,
wrapH323InterfaceCommonAttributeIdentifier H323InterfaceCommonAttributeIdentifier,
wrapH323InterfaceCreateCallReferenceRejectReason H323InterfaceCreateCallReferenceRejectReason,
wrapH323InterfaceCreateRejectReason H323InterfaceCreateRejectReason,
wrapH323InterfaceDeallocateResourceRejectReason H323InterfaceDeallocateResourceRejectReason,
wrapH323InterfaceGetFixedTransportAddressDataRejectReason H323InterfaceGetFixedTransportAddressDataRejectReason,
wrapH323InterfaceGetOrRemoveCallRelatedDataRejectReason H323InterfaceGetOrRemoveCallRelatedDataRejectReason,
wrapH323InterfaceGetOrSetCommonRejectReason H323InterfaceGetOrSetCommonRejectReason,
wrapH323InterfaceGetOrSetInstanceRejectReason H323InterfaceGetOrSetInstanceRejectReason,
wrapH323InterfaceInstanceAttribute H323InterfaceInstanceAttribute,
wrapH323InterfaceInstanceAttributeIdentifier H323InterfaceInstanceAttributeIdentifier,
wrapH323InterfaceKey H323InterfaceKey,
wrapH323InterfaceKeyEndpointIdentifier H323InterfaceKeyEndpointIdentifier,
wrapH323InterfaceReduceBandwidthRejectReason H323InterfaceReduceBandwidthRejectReason,
wrapH323InterfaceRemoveCallReferenceRejectReason H323InterfaceRemoveCallReferenceRejectReason,
wrapH323InterfaceRemoveFixedTransportAddressDataRejectReason H323InterfaceRemoveFixedTransportAddressDataRejectReason,
wrapH323InterfaceRemoveKeysAndSetAttributesRejectReason H323InterfaceRemoveKeysAndSetAttributesRejectReason,
wrapH323InterfaceRemoveRejectReason H323InterfaceRemoveRejectReason,
wrapH324Information H324Information,
wrapHighLayerCompatibility HighLayerCompatibility,
wrapInterfaceRegistrationInformation InterfaceRegistrationInformation,
wrapLANAttribute LANAttribute,
wrapLANAttributeIdentifier LANAttributeIdentifier,
wrapLayer1ProtUserInfo Layer1ProtUserInfo,
wrapLocation Location,
wrapLocationRejectReason LocationRejectReason,
wrapLogicalConnectionPointIdentifier LogicalConnectionPointIdentifier,
wrapLowLayerCompatibility LowLayerCompatibility,
wrapMaximumNumberOfAllowedConnections MaximumNumberOfAllowedConnections,
wrapMaximumTotalBandwidth MaximumTotalBandwidth,
wrapMcuInformation McuInformation,
wrapNonStandardIdentifier NonStandardIdentifier,
wrapNonStandardMessage NonStandardMessage,
wrapNonStandardParameter NonStandardParameter,
wrapNumber Number,
wrapNumberOfTimesLANWasCrowded NumberOfTimesLANWasCrowded,
wrapNumberType NumberType,
wrapNumberingPlan NumberingPlan,
wrapObjectIdentifier ObjectIdentifier,
wrapPhysicalConnectionPointIdentifier PhysicalConnectionPointIdentifier,
wrapPid Pid,
wrapPreStringToRemoveInDestinationAddress PreStringToRemoveInDestinationAddress,
wrapProgressIndicator ProgressIndicator,
wrapProtocolIdentifier ProtocolIdentifier,
wrapQ931Timer301Value Q931Timer301Value,
wrapQ931Timer303Value Q931Timer303Value,
wrapQ954Details Q954Details,
wrapQseriesOptions QseriesOptions,
wrapRASMessageTimerValue RASMessageTimerValue,
wrapRTPSession RTPSession,
wrapRegistrationRejectReason RegistrationRejectReason,
wrapRegistrationStatus RegistrationStatus,
wrapRelationToEquipment RelationToEquipment,
wrapRelationToUser RelationToUser,
wrapReleaseCompleteReason ReleaseCompleteReason,
wrapReleaseCompleteUUIE ReleaseCompleteUUIE,
wrapReleaseInformation ReleaseInformation,
wrapRemoveAnalysisRejectReason RemoveAnalysisRejectReason,
wrapRemoveEquipmentRepresentationRejectReason RemoveEquipmentRepresentationRejectReason,
wrapRemoveServiceAndStatusRejectReason RemoveServiceAndStatusRejectReason,
wrapRemoveServiceFromServiceProfileRejectReason RemoveServiceFromServiceProfileRejectReason,
wrapRemoveServiceIdentifierRejectReason RemoveServiceIdentifierRejectReason,
wrapRepeatIndicator RepeatIndicator,
wrapRequestSeqNum RequestSeqNum,
wrapRequestedUserAndLinkedUserAreIdentical RequestedUserAndLinkedUserAreIdentical,
wrapServiceAndStatus ServiceAndStatus,
wrapServiceCallSetupRejectionInformation ServiceCallSetupRejectionInformation,
wrapServiceCallSetupRejectionReason ServiceCallSetupRejectionReason,
wrapServiceCallTerminationInformation ServiceCallTerminationInformation,
wrapServiceCallTerminationReason ServiceCallTerminationReason,
wrapServiceData ServiceData,
wrapServiceIdentifier ServiceIdentifier,
wrapServiceProfile ServiceProfile,
wrapSetEquipmentStatusRejectReason SetEquipmentStatusRejectReason,
wrapSetLANDataRejectReason SetLANDataRejectReason,
wrapSetUserAttributeData SetUserAttributeData,
wrapSetupUUIE SetupUUIE,
wrapStateOfEquipment StateOfEquipment,
wrapStateOfUser StateOfUser,
wrapStatusOfService StatusOfService,
wrapSubaddress Subaddress,
wrapSubaddressInformation SubaddressInformation,
wrapSubaddressType SubaddressType,
wrapSupportedProtocols SupportedProtocols,
wrapT120Information T120Information,
wrapTerminalInformation TerminalInformation,
wrapTerminationInitiatior TerminationInitiatior,
wrapTimeSlot TimeSlot,
wrapTransferCapability TransferCapability,
wrapTransferRate TransferRate,
wrapTransportAddress TransportAddress,
wrapTransportAddressInformation TransportAddressInformation,
wrapTransportChannelInformation TransportChannelInformation,
wrapTypeOfEquipment TypeOfEquipment,
wrapTypeOfFlowControl TypeOfFlowControl,
wrapTypeOfLAN TypeOfLAN,
wrapTypeOfRegistration TypeOfRegistration,
wrapTypeOfService TypeOfService,
wrapTypeOfUser TypeOfUser,
wrapUnknownMessageResponse UnknownMessageResponse,
wrapUnregistrationRejectReason UnregistrationRejectReason,
wrapUserAllocateResourceRejectReason UserAllocateResourceRejectReason,
wrapUserAttributeData UserAttributeData,
wrapUserAttributeIdentifier UserAttributeIdentifier,
wrapUserCreateRejectReason UserCreateRejectReason,
wrapUserDeallocateResourceRejectReason UserDeallocateResourceRejectReason,
wrapUserIdentifier UserIdentifier,
wrapUserIdentifierInformation UserIdentifierInformation,
wrapUserInformation UserInformation,
wrapUserInformationUUIE UserInformationUUIE,
wrapUserKey UserKey,
wrapUserOrEquipmentRelatedInformation UserOrEquipmentRelatedInformation,
wrapUserOrEquipmentRelatedInformationIdentifier UserOrEquipmentRelatedInformationIdentifier,
wrapUserRelatedInformation UserRelatedInformation,
wrapUserRelatedInformationIdentifier UserRelatedInformationIdentifier,
wrapUserRemoveRejectReason UserRemoveRejectReason,
wrapUserSetRejectReason UserSetRejectReason,
wrapUserSpecificInformation UserSpecificInformation,
wrapVendorIdentifier VendorIdentifier,
wrapVoiceInformation VoiceInformation,
...
}
-- ---------------------------------
--
-- AddAnalysisRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AddAnalysisRejectReason ::= CHOICE
{
analysisTableEntryAlreadyExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AddServiceToServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AddServiceToServiceProfileRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceAlreadyExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AddUserIdentifiersRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AddUserIdentifiersRejectReason ::= CHOICE
{
userIdentifierExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AdmissionRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
AdmissionRejectReason ::= CHOICE
{
calledPartyNotRegistered NULL,
invalidPermission NULL,
requestDenied NULL,
undefinedReason NULL,
callerNotRegistered NULL,
routeCallToGatekeeper NULL,
invalidEndpointIdentifier NULL,
resourceUnavailable NULL,
...
}
-- ---------------------------------
--
-- AlertingUUIE
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
AlertingUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
destinationEndpointType EndpointType, -- destinationInfo
destinationH245Address TransportAddress OPTIONAL, -- h245Address
...
}
-- ---------------------------------
--
-- AllocateTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
AllocateTransmissionPathRejectReason ::= CHOICE
{
calledUserNotAvailable NULL,
calledUserUnknown NULL,
permissionDenied NULL,
resourcesNotAvailable NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AnalyseRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
AnalyseRejectReason ::= CHOICE
{
noMatchingEntryFound NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- AvailabilityOfEquipment
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
AvailabilityOfEquipment ::= CHOICE
{
available NULL,
notAvailable NULL,
...
}
-- ---------------------------------
--
-- Bandwidth
--
-- @prop dataType
--
-- @descr States the bandwidth to be used in 100 bps.
--
-- @
--
-- ---------------------------------
Bandwidth ::= INTEGER (1..4294967295)
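-- Illustrative example value (not part of the original module): a 64 kbps
-- connection expressed in units of 100 bps.
exampleBandwidth Bandwidth ::= 640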
-- ---------------------------------
--
-- BandwidthReducedInformation
--
-- @prop dataType
--
-- @descr States information related to the recuction of the bandwidth.
--
-- @
--
-- ---------------------------------
BandwidthReducedInformation ::= SEQUENCE
{
allocatedBandwidth Bandwidth,
bandwidthReducedReason BandwidthReducedReason,
...
}
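-- Illustrative example value (not part of the original module), reusing the
-- 64 kbps bandwidth above with a limited-bandwidth reduction reason:
exampleBandwidthReducedInformation BandwidthReducedInformation ::=
{
    allocatedBandwidth 640,
    bandwidthReducedReason bandwidthLimited : NULL
}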
-- ---------------------------------
--
-- BandwidthReducedReason
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
BandwidthReducedReason ::= CHOICE
{
bandwidthLimited NULL,
bandwidthAdaptedToOriginatingEndpoint NULL,
originBandwidthBarredDueToCategories NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- BandwidthRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
BandwidthRejectReason ::= CHOICE
{
notBound NULL,
invalidConferenceID NULL,
invalidPermission NULL,
insufficientResources NULL,
invalidRevision NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- BasicCallCategories
--
-- @prop dataType
--
-- @descr Categories for the service basic call.
--
-- @
-- ---------------------------------
BasicCallCategories ::= SEQUENCE
{
... -- So far, no specific categories identified
}
-- ---------------------------------
--
-- BearerCapability
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
BearerCapability ::= SEQUENCE
{
transferCapability TransferCapability,
transferRate TransferRate,
layer1ProtUserInfo Layer1ProtUserInfo,
rateMultiplier INTEGER (0..127),
...
}
-- ---------------------------------
--
-- CallInformation
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
callReference CallReference, -- callReferenceValue
conferenceID ConferenceIdentifier,
originator BOOLEAN OPTIONAL,
audio SEQUENCE OF RTPSession OPTIONAL,
video SEQUENCE OF RTPSession OPTIONAL,
data SEQUENCE OF TransportChannelInformation OPTIONAL,
h245 TransportChannelInformation,
callSignaling TransportChannelInformation,
callType CallType,
bandwidth Bandwidth, -- bandWidth
callModel CallModel,
...
}
-- ---------------------------------
--
-- CallModel
--
-- @prop dataType
--
-- @descr Type of call model used, i.e. routed via the gatekeeper or not
--
-- @
--
-- ---------------------------------
CallModel ::= CHOICE
{
gatekeeperRouted NULL,
direct NULL,
...
}
-- ---------------------------------
--
-- CallProceedingUUIE
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallProceedingUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
destinationEndpointType EndpointType, -- destinationInfo
destinationH245Address TransportAddress OPTIONAL, -- h245Address
...
}
-- ---------------------------------
--
-- CallReference
--
-- @prop dataType
--
-- @descr States the call reference that identifies a specific call.
-- Origin: H.225.0 CallReferenceValue.
--
-- @
--
-- ---------------------------------
CallReference ::= INTEGER (0..65535)
-- ---------------------------------
--
-- CallServices
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallServices ::= SEQUENCE
{
q932Full BOOLEAN,
q951Full BOOLEAN,
q952Full BOOLEAN,
q953Full BOOLEAN,
q955Full BOOLEAN,
q956Full BOOLEAN,
q957Full BOOLEAN,
q954Info Q954Details,
...
}
-- ---------------------------------
--
-- CallState
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallState ::= CHOICE
{
null NULL,
callInit NULL,
overlapSending NULL,
outgoingCallProceeding NULL,
callDelivered NULL,
callPresent NULL,
callReceived NULL,
connectRequest NULL,
incomingCallProceeding NULL,
active NULL,
disconnectRequest NULL,
disconnectIndication NULL,
releaseRequest NULL,
facilityRequest NULL,
overlapReceiving NULL,
restartRequest NULL,
restart NULL,
...
}
-- ---------------------------------
--
-- CallType
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
CallType ::= CHOICE
{
pointToPoint NULL,
oneToN NULL,
nToOne NULL,
nToN NULL,
...
}
-- ---------------------------------
--
-- Cause
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Cause ::= SEQUENCE
{
coding Coding,
location Location,
value CauseValue,
diagnostics Diagnostics,
...
}
-- ---------------------------------
--
-- CauseValue
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
CauseValue ::= CHOICE
{
unassignedNumber NULL, -- 1
noRouteToSpecifiedTransitNetwork NULL, -- 2
noRouteToDestination NULL, -- 3
channelUnacceptable NULL, -- 6
normalClearing NULL, -- 16
userBusy NULL, -- 17
noUserResponding NULL, -- 18
noAnswereFromUser NULL, -- 19
portableNotAvailable NULL, -- 20
callRejected NULL, -- 21
numberChanged NULL, -- 22
destinationOutOfOrder NULL, -- 27
invalidNumberFormat NULL, -- 28
facilityRequestRejected NULL, -- 29
responseToStatusEnquiry NULL, -- 30
normalUnspecified NULL, -- 31
noCircuitChannelAvailable NULL, -- 34
networkOutOfOrder NULL, -- 38
temporaryFailure NULL, -- 41
switchingEquipmentCongestion NULL, -- 42
accessInformationDiscarded NULL, -- 43
requestedCircuitChannelNotAvailable NULL, -- 44
resourceUnavailableUnspecified NULL, -- 47
qualityOfServiceUnavailable NULL, -- 49
notSubscribedToRequestedFacility NULL, -- 50
bearerCapabilityNotAuthorized NULL, -- 57
bearerCapabilityNotPresentlyAvailable NULL, -- 58
serviceOrOptionNotAvailableUnspecified NULL, -- 63, 79
bearerCapabilityNotImplemented NULL, -- 65
channelTypeNotImplemented NULL, -- 66
requestedFacilityNotImplemented NULL, -- 69
onlyRestrictedDigitalInformationBcIsAvailable NULL, -- 70
invalidCallReferenceValue NULL, -- 81
incompatibleDestination NULL, -- 88
invalidTransitNetworkSelection NULL, -- 91
invalidMessageUnspecified NULL, -- 95
mandatoryInformationElementIsMissing NULL, -- 96
messageTypeNonexistingOrNotimplemented NULL, -- 97
messageNotCompatibleOrImplemented NULL, -- 98
informationElementNonExisting NULL, -- 99
invalidInformationElementContents NULL, -- 100
messageNotCompatibleWithCallState NULL, -- 101
recoveryOnTimerExpiry NULL, -- 102
protocolErrorUnspecified NULL, -- 111
interworkingUnspecified NULL, -- 127
...
}
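-- Illustrative example value (not part of the original module): a busy
-- subscriber corresponds to Q.931 cause value 17.
exampleCauseValue CauseValue ::= userBusy : NULL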
-- ---------------------------------
--
-- ChangeServiceAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
ChangeServiceAndStatusRejectReason ::= CHOICE
{
identifierOfServiceNotKnown NULL,
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- CheckServiceRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
CheckServiceRejectReason ::= CHOICE
{
deniedDueToInteraction NULL,
deniedDueToCategories NULL,
undefined NULL,
userNotKnown NULL,
...
}
-- ---------------------------------
--
-- Coding
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Coding ::= CHOICE
{
ccitt NULL,
ecma NULL,
national NULL,
network NULL,
...
}
-- ---------------------------------
--
-- ConferenceGoal
--
-- @prop dataType
--
-- @descr Type of call setup desired
--
-- @
--
-- ---------------------------------
ConferenceGoal ::= CHOICE
{
create NULL,
join NULL,
invite NULL,
...
}
-- ---------------------------------
--
-- ConferenceIdentifier
--
-- @prop dataType
--
--
--
-- @
--
-- ---------------------------------
ConferenceIdentifier ::= OCTET STRING (SIZE (16))
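-- Illustrative example value (not part of the original module): a conference
-- identifier is an arbitrary 16 octet string, written here in hex notation.
exampleConferenceIdentifier ConferenceIdentifier ::=
    '00112233445566778899AABBCCDDEEFF'H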
-- ---------------------------------
--
-- ConnectTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
ConnectTransmissionPathRejectReason ::= CHOICE
{
resourcesNotAllocated NULL,
switchFailure NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- ConnectUUIE
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
ConnectUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
destinationH245Address TransportAddress OPTIONAL, -- h245Address
destinationEndpointType EndpointType, -- destinationInfo
conferenceIdentifier ConferenceIdentifier, -- conferenceID
...
}
-- ---------------------------------
--
-- ConnectionData
--
-- @prop dataType
--
-- @descr This parameter holds connection data that is specific to
-- certain types of equipment.
-- @
--
-- ---------------------------------
ConnectionData ::= CHOICE
{
timeSlotInformation SEQUENCE OF TimeSlot,
...
}
-- ---------------------------------
--
-- ConnectionIdentifier
--
-- @prop dataType
--
-- @descr Identifier to the connection handler instance.
--
-- @
--
-- ---------------------------------
ConnectionIdentifier ::= ObjectIdentifier
-- ---------------------------------
--
-- ConnectionInformation
--
-- @prop dataType
--
-- @descr This parameter specifies information that is of interest for
-- the functionality handled by the Connection Handler component.
-- @
--
-- ---------------------------------
ConnectionInformation ::= SEQUENCE
{
logicalConnectionPointIdentifier LogicalConnectionPointIdentifier,
connectionData ConnectionData OPTIONAL,
...
}
-- ---------------------------------
--
-- ConnectionInformationOriginatingSide
--
-- @prop dataType
--
-- @descr Contains connection information that shall be used for the originating side of the connection.
--
-- @
--
-- ---------------------------------
ConnectionInformationOriginatingSide ::= SEQUENCE
{
bandwidth Bandwidth,
callType CallType,
originatorConnectionInformation ConnectionInformation,
terminatorConnectionInformation ConnectionInformation,
...
}
-- ---------------------------------
--
-- ConnectionInformationTerminatingSide
--
-- @prop dataType
--
-- @descr Contains connection information that shall be used for the terminating side of the connection.
--
-- @
--
-- ---------------------------------
ConnectionInformationTerminatingSide ::= SEQUENCE
{
connectionIdentifier ConnectionIdentifier,
originatorConnectionInformation ConnectionInformation,
...
}
-- ---------------------------------
--
-- ConnectionType
--
-- @prop dataType
--
-- @descr States the type of connection.
--
-- @
--
-- ---------------------------------
ConnectionType ::= CHOICE
{
pointToPoint NULL,
oneToN NULL,
nToOne NULL,
nToN NULL,
...
}
-- ---------------------------------
--
-- CreateEquipmentRepresentationRejectReason
--
-- @prop dataType
--
-- @descr The reason for the rejection.
--
-- @
--
-- ---------------------------------
CreateEquipmentRepresentationRejectReason ::= CHOICE
{
equipmentRepresentationAlreadyExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- CreateServiceAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
CreateServiceAndStatusRejectReason ::= CHOICE
{
undefined NULL,
...
}
-- ---------------------------------
--
-- CreateServiceIdentifierRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
CreateServiceIdentifierRejectReason ::= CHOICE
{
keyNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- DeallocateTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
DeallocateTransmissionPathRejectReason ::= CHOICE
{
resourcesNotAllocated NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- DetailedReasonAtom
--
-- @prop dataType
-- @descr This data type indicates the release information of a forced drop
-- during a call.
-- @
--
-- ---------------------------------
DetailedReasonAtom ::= CHOICE
{
internalDataMissmatch NULL,
destinationUserIdentifierNotKnown NULL,
rejectedDueToCategories NULL,
rejectedDueToResources NULL,
failedToOpenDestinationCallSignallingPort NULL,
theRequestedServiceIsNotSupported NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- Diagnostics
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Diagnostics ::= INTEGER(1..127)
-- ---------------------------------
--
-- DisconnectTransmissionPathRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
DisconnectTransmissionPathRejectReason ::= CHOICE
{
resourcesNotAllocated NULL,
switchFailure NULL,
switchNotConnected NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- DisengageReason
--
-- @prop dataType
-- @descr The reason why a change was requested by the gatekeeper or the terminal.
-- @
-- ---------------------------------
DisengageReason ::= CHOICE
{
forcedDrop NULL,
normalDrop NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- DisengageRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
DisengageRejectReason ::= CHOICE
{
notRegistered NULL,
requestToDropOther NULL,
...
}
-- ---------------------------------
--
-- Display
--
-- @prop dataType
--
-- @descr Origin: Q931
--
-- @
--
-- ---------------------------------
Display ::= OCTET STRING (SIZE(1..82))
-- ---------------------------------
--
-- E164Identifier
--
-- @prop dataType
--
-- @descr Identifier for a user identifier of type E.164.
--
-- @
--
-- ---------------------------------
E164Identifier ::= IA5String (SIZE (1..128)) (FROM ("0123456789#*,"))
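-- Illustrative example value (not part of the original module): the digits
-- are drawn from the permitted alphabet "0123456789#*,".
exampleE164Identifier E164Identifier ::= "4687190000"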
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallAcknowledge
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallAcknowledge ::= SEQUENCE
{
bearerCapability BearerCapability OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323AcknowledgeInformation AlertingUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallActive
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallActive ::= SEQUENCE
{
bearerCapability BearerCapability OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
lowLayerCompatibility LowLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323ActiveInformation ConnectUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallProgress
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallProgress ::= SEQUENCE
{
cause Cause OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallSetup
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallSetup ::= SEQUENCE
{
bearerCapability BearerCapability OPTIONAL,
calledNumber Number OPTIONAL,
calledSubaddress Subaddress OPTIONAL,
callingNumber Number OPTIONAL,
callingSubaddress Subaddress OPTIONAL,
highLayerCompatibility HighLayerCompatibility OPTIONAL,
lowLayerCompatibility LowLayerCompatibility OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
repeatIndicator RepeatIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323SetupInformation SetupUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndToEndEndpointInformationServiceCallTermination
--
-- @prop dataType
--
-- @descr Information that shall be sent end to end.
--
-- @
--
-- ---------------------------------
EndToEndEndpointInformationServiceCallTermination ::= SEQUENCE
{
cause Cause OPTIONAL,
progressIndicator ProgressIndicator OPTIONAL,
userToUserQ931Information UserInformation OPTIONAL,
userToUserH323TerminationInformation ReleaseCompleteUUIE OPTIONAL,
...
}
-- ---------------------------------
--
-- EndpointIdentifier
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
EndpointIdentifier ::= BMPString (SIZE(1..128)) -- change from SIZE(128)
-- ---------------------------------
--
-- EndpointRegistrationCategories
--
-- @prop dataType
--
-- @descr Categories for the service endpoint registration.
--
-- @
-- ---------------------------------
EndpointRegistrationCategories ::= SEQUENCE
{
... -- So far, no specific categories identified
}
-- ---------------------------------
--
-- EndpointRegistrationRejectReason
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
EndpointRegistrationRejectReason ::= CHOICE
{
attemptToChangeEndpoint NULL,
requestedUserNotKnown NULL,
endpointTypeNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- EndpointType
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
EndpointType ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
vendor VendorIdentifier OPTIONAL,
gatekeeper GatekeeperInformation OPTIONAL,
gateway GatewayInformation OPTIONAL,
mcu McuInformation OPTIONAL,
terminal TerminalInformation OPTIONAL,
mc BOOLEAN,
undefinedNode BOOLEAN,
...
}
-- ---------------------------------
--
-- EndpointUnregistrationCategories
--
-- @prop dataType
--
-- @descr Categories for the service endpoint unregistration.
--
-- @
-- ---------------------------------
EndpointUnregistrationCategories ::= SEQUENCE
{
... -- So far, no specific categories identified
}
-- ---------------------------------
--
-- EndpointUnregistrationRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
EndpointUnregistrationRejectReason ::= CHOICE
{
permissionDenied NULL,
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- EquipmentAddressAN
--
-- @prop dataType
--
-- @descr States the address for a certain piece of equipment connected
-- to the Access Node.
-- @
--
-- ---------------------------------
EquipmentAddressAN ::= SEQUENCE
{
--TBD by SEA,
...
}
-- ---------------------------------
--
-- EquipmentAddressLAN
--
-- @prop dataType
--
-- @descr States the transport address for a certain piece of equipment
--
-- @
--
-- ---------------------------------
EquipmentAddressLAN ::= SEQUENCE
{
transportAddresses SEQUENCE OF TransportAddress,
...
}
-- ---------------------------------
--
-- EquipmentRelatedInformation
--
-- @prop dataType
--
-- @descr Contains the retrieved data.
--
-- @
--
-- ---------------------------------
EquipmentRelatedInformation ::= CHOICE
{
logicalConnectionPointIdentifier LogicalConnectionPointIdentifier,
registrationStatus RegistrationStatus,
stateOfEquipment StateOfEquipment,
typeOfEquipment TypeOfEquipment,
...
}
-- ---------------------------------
--
-- EquipmentRelatedInformationIdentifier
--
--
-- @prop dataType
--
-- @descr This parameter specifies different types of data
-- that are specific to a certain piece of equipment.
--
-- @
-- ---------------------------------
EquipmentRelatedInformationIdentifier ::= CHOICE
{
logicalConnectionPointIdentifier NULL,
registrationStatus NULL,
stateOfEquipment NULL,
typeOfEquipment NULL,
...
}
-- ---------------------------------
--
-- FacilityReason
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
FacilityReason ::= CHOICE
{
routeCallToGatekeeper NULL,
callForwarded NULL,
routeCallToMC NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- FacilityUUIE
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
FacilityUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
alternativeH245Address TransportAddress OPTIONAL, -- alternativeAddress
alternativeUserIdentifierInformation UserIdentifierInformation OPTIONAL, -- alternativeAliasAddress
conferenceIdentifier ConferenceIdentifier OPTIONAL, -- conferenceID
facilityReason FacilityReason, -- reason
...
}
-- ---------------------------------
--
-- GatekeeperIdentifier
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatekeeperIdentifier ::= BMPString (SIZE(1..128))
-- ---------------------------------
--
-- GatekeeperInformation
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatekeeperInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- GatekeeperRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatekeeperRejectReason ::= CHOICE
{
resourceUnavailable NULL,
terminalExcluded NULL,
invalidRevision NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- GatewayInformation
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GatewayInformation ::= SEQUENCE
{
protocol SEQUENCE OF SupportedProtocols OPTIONAL,
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- GetAnalysisRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
GetAnalysisRejectReason ::= CHOICE
{
noDataStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetEquipmentInformationRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetEquipmentInformationRejectReason ::= CHOICE
{
equipmentUnknown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetLANDataRejectReason
--
-- @prop dataType
--
-- @descr This reason for rejection.
--
-- @
--
-- ---------------------------------
GetLANDataRejectReason ::= CHOICE
{
noDataStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetPartyInformationRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetPartyInformationRejectReason ::= CHOICE
{
noEquipmentAvailable NULL,
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetRejectReasonUser
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
GetRejectReasonUser ::= CHOICE
{
keyNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetServiceFromServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
GetServiceFromServiceProfileRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceDoNotExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr
--
-- @
--
-- ---------------------------------
GetServiceProfileRejectReason ::= CHOICE
{
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetServicesAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetServicesAndStatusRejectReason ::= CHOICE
{
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- GetUserServiceInformationAndStatusRejectReason
--
-- @prop dataType
--
-- @descr Reason for the rejection.
--
-- @
--
-- ---------------------------------
GetUserServiceInformationAndStatusRejectReason ::= CHOICE
{
undefined NULL,
userNotKnown NULL,
...
}
-- ---------------------------------
--
-- H221NonStandard
-- @prop dataType
--
-- @descr Gives non-standard information about the standard protocol H.221.
-- @
--
-- ---------------------------------
H221NonStandard ::= SEQUENCE
{
    t35CountryCode INTEGER(0..255),
t35Extension INTEGER(0..255),
manufacturerCode INTEGER(0..65535),
...
}
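-- Illustrative value notation for H221NonStandard (non-normative; the
-- T.35 country/extension and manufacturer codes below are hypothetical
-- placeholders, not assigned values):
--
--   exampleH221 H221NonStandard ::=
--   {
--     t35CountryCode   181,
--     t35Extension     0,
--     manufacturerCode 4660
--   }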
-- ---------------------------------
--
-- H310Information
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.310.
-- @
--
-- ---------------------------------
H310Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H320Information
-- @prop dataType
--
-- @descr Gives detailed information about the standard protocol H.320.
-- @
--
-- ---------------------------------
H320Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H321Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.321.
-- @
--
-- ---------------------------------
H321Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H322Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.322.
-- @
--
-- ---------------------------------
H322Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H323Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol H.323.
-- @
--
-- ---------------------------------
H323Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddCallReferenceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAddCallReferenceRejectReason ::= CHOICE
{
keyNotValid NULL,
requestedCallReferenceAlreadyInUse NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddCallRelatedDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAddCallRelatedDataRejectReason ::= CHOICE
{
callReferenceNotValid NULL,
keyNotValid NULL,
callRelatedDataAlredyStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddFixedTransportAddressDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAddFixedTransportAddressDataRejectReason ::= CHOICE
{
fixedTransportAddressDataAlredyStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAddKeysAndSetAttributesRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceAddKeysAndSetAttributesRejectReason ::= CHOICE
{
existingKeyNotValid NULL,
newKeyAlreadyExists NULL,
newKeyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceAdditionalKeys
--
-- @prop dataType
-- @descr Additional keys for an instance of the type H.323Interface.
-- @
-- ---------------------------------
H323InterfaceAdditionalKeys ::= SEQUENCE
{
endpointCallSignallingAddresses SEQUENCE OF TransportAddress,
endpointRASAddresses SEQUENCE OF TransportAddress,
...
}
-- ---------------------------------
--
-- H323InterfaceAllocateResourceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceAllocateResourceRejectReason ::= CHOICE
{
callReferenceNotValid NULL,
keyNotValid NULL,
resourceNotAvailable NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceChangeKeysAndRelationsToUsersReject
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceChangeKeysAndRelationsToUsersReject ::= CHOICE
{
firstKeyNotValid NULL,
secondKeyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceCommonAttribute
--
-- @prop dataType
--
-- @descr This parameter contains the attributes which hold data
-- that is common for all objects of the type H.323Interface.
--
-- @
--
-- ---------------------------------
H323InterfaceCommonAttribute ::= CHOICE
{
gatekeeperCallSignallingAddressData CHOICE
{
gatekeeperCallSignallingAddresses SEQUENCE OF TransportAddress,
undefined NULL,
...
},
gatekeeperRASAddressInformation CHOICE
{
gatekeeperRASAddressData SEQUENCE
{
multicastRASAddress TransportAddress,
gatekeeperRASAddress TransportAddress,
...
},
undefined NULL,
...
},
q931Timer301Value Q931Timer301Value,
q931Timer303Value Q931Timer303Value,
rasMessageTimerValue RASMessageTimerValue,
...
}
-- ---------------------------------
--
-- H323InterfaceCommonAttributeIdentifier
--
-- @prop dataType
--
-- @descr This parameter contains the attribute identifiers of the
-- attributes which hold data that is common for all objects
-- of the type H.323Interface.
--
-- @
--
-- ---------------------------------
H323InterfaceCommonAttributeIdentifier ::= CHOICE
{
gatekeeperCallSignallingAddresses NULL,
gatekeeperRASAddress NULL,
q931Timer301Value NULL,
q931Timer303Value NULL,
rasMessageTimerValue NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceCreateCallReferenceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceCreateCallReferenceRejectReason ::= CHOICE
{
keyNotValid NULL,
noCallReferenceAvailable NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceCreateRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceCreateRejectReason ::= CHOICE
{
keyAlreadyInUse NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceDeallocateResourceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceDeallocateResourceRejectReason ::= CHOICE
{
resourceNotAllocated NULL,
callReferenceNotValid NULL,
keyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceGetFixedTransportAddressDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceGetFixedTransportAddressDataRejectReason ::= CHOICE
{
noDataStoredForThisTransportAddress NULL,
noFixedTransportAddressDataStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceGetOrRemoveCallRelatedDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceGetOrRemoveCallRelatedDataRejectReason ::= CHOICE
{
callReferenceNotValid NULL,
keyNotValid NULL,
noCallRelatedDataStored NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceGetOrSetCommonRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceGetOrSetCommonRejectReason ::= CHOICE
{
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceGetOrSetInstanceRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceGetOrSetInstanceRejectReason ::= CHOICE
{
keyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceInstanceAttribute
--
-- @prop dataType
--
-- @descr This parameter contains the attributes which hold data
-- that is specific to an h323Interface object.
--
-- @
--
-- ---------------------------------
H323InterfaceInstanceAttribute ::= CHOICE
{
endpointCallSignallingAddresses SEQUENCE OF TransportAddress,
endpointRasAddresses SEQUENCE OF TransportAddress,
registrationStatus RegistrationStatus,
gatekeeperCallSignallingAddress TransportAddress,
maximumTotalBandwidthForInterface Bandwidth,
preStringsToRemoveInDestinationAddress SEQUENCE OF PreStringToRemoveInDestinationAddress,
relationToH2250CallSignalling Pid,
relationToUser RelationToUser,
typeOfEquipment TypeOfEquipment,
...
}
-- ---------------------------------
--
-- H323InterfaceInstanceAttributeIdentifier
--
-- @prop dataType
--
-- @descr This parameter contains the attribute identifiers of the
-- attributes which hold data that is specific to an
-- h323Interface object.
--
-- @
--
-- ---------------------------------
H323InterfaceInstanceAttributeIdentifier ::= CHOICE
{
endpointCallSignallingAddresses NULL,
endpointRASAddresses NULL,
registrationStatus NULL,
gatekeeperCallSignallingAddress NULL,
maximumTotalBandwidthForInterface NULL,
preStringsToRemoveInDestinationAddress NULL,
relationToH2250CallSignalling NULL,
relationToUser NULL,
typeOfEquipment NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceKey
--
-- @prop dataType
-- @descr Allowed keys for an instance of the type H.323Interface.
-- @
-- ---------------------------------
H323InterfaceKey ::= CHOICE
{
endpointIdentifier EndpointIdentifier,
endpointCallSignallingAddresses SEQUENCE OF TransportAddress,
endpointRASAddresses SEQUENCE OF TransportAddress,
...
}
-- ---------------------------------
--
-- H323InterfaceKeyEndpointIdentifier
--
-- @descr Allowed keys for an instance of the type H.323Interface.
--
-- ---------------------------------
H323InterfaceKeyEndpointIdentifier ::= SEQUENCE
{
endpointIdentifier EndpointIdentifier,
...
}
-- ---------------------------------
--
-- H323InterfaceReduceBandwidthRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceReduceBandwidthRejectReason ::= CHOICE
{
bandwidthNotAllocated NULL,
callReferenceNotValid NULL,
keyNotValid NULL,
newBandwidthHigherThanAllocatedBandwidth NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceRemoveCallReferenceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
H323InterfaceRemoveCallReferenceRejectReason ::= CHOICE
{
callReferenceNotStored NULL,
keyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceRemoveFixedTransportAddressDataRejectReason
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
H323InterfaceRemoveFixedTransportAddressDataRejectReason ::= CHOICE
{
noDataStoredForThisTransportAddress NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceRemoveKeysAndSetAttributesRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceRemoveKeysAndSetAttributesRejectReason ::= CHOICE
{
keysNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H323InterfaceRemoveRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
H323InterfaceRemoveRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceInProgress NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- H324Information
-- @prop dataType
--
-- @descr Gives detailed information about the standard protocol H.324.
-- @
--
-- ---------------------------------
H324Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
HighLayerCompatibility ::= SEQUENCE
{
...
}
-- ---------------------------------
--
-- InterfaceRegistrationInformation
-- @prop dataType
--
-- @descr This parameter specifies the current registration status of an
-- endpoint's registration request.
-- @
--
-- ---------------------------------
InterfaceRegistrationInformation ::= SEQUENCE
{
isInterfaceRegistered BOOLEAN,
relationToH323User EndpointIdentifier OPTIONAL,
...
}
-- ---------------------------------
--
-- LANAttribute
--
-- @prop dataType
-- @descr This parameter contains a LAN attribute value.
-- @
--
-- ---------------------------------
LANAttribute ::= CHOICE
{
maximumTotalBandwidth MaximumTotalBandwidth,
maximumNumberOfAllowedConnections MaximumNumberOfAllowedConnections,
numberOfTimesLANWasCrowded NumberOfTimesLANWasCrowded,
typeOfFlowControl TypeOfFlowControl,
typeOfLAN TypeOfLAN,
...
}
-- ---------------------------------
--
-- LANAttributeIdentifier
--
-- @prop dataType
-- @descr This parameter contains a LAN attribute identifier.
-- @
--
-- ---------------------------------
LANAttributeIdentifier ::= CHOICE
{
maximumTotalBandwidth NULL,
maximumNumberOfAllowedConnections NULL,
numberOfTimesLANWasCrowded NULL,
typeOfFlowControl NULL,
typeOfLAN NULL,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Layer1ProtUserInfo ::= CHOICE
{
g711u-law NULL,
g711a-law NULL,
h323VidephoneCall NULL, -- the meaning of "5" in H323
h221Andh242 NULL, -- the meaning of "5" in Q931
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Location ::= CHOICE
{
user NULL,
localPrivateNetwork NULL,
localPublicNetwork NULL,
transitNetwork NULL,
remotePublicNetwork NULL,
remotePrivateNetwork NULL,
internationalNetwork NULL,
beyondInterworkingPoint NULL,
...
}
-- ---------------------------------
--
-- LocationRejectReason
-- @prop dataType
--
-- @descr
-- @
-- ---------------------------------
LocationRejectReason ::= CHOICE
{
notRegistered NULL,
invalidPermission NULL,
requestDenied NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- LogicalConnectionPointIdentifier
--
-- @prop dataType
-- @descr Identifier of the logical connection point.
-- @
--
-- ---------------------------------
LogicalConnectionPointIdentifier ::= INTEGER (0..65535)
-- @prop dataType
-- @descr origin Q931
-- @
LowLayerCompatibility ::= SEQUENCE
{
}
-- ---------------------------------
--
-- MaximumNumberOfAllowedConnections
--
-- @prop dataType
-- @descr States the maximum number of allowed connections.
-- @
--
-- ---------------------------------
MaximumNumberOfAllowedConnections ::= CHOICE
{
maximumNumberOfAllowedConnectionsValue INTEGER ( 0.. 999999999),
undefined NULL,
...
}
-- ---------------------------------
--
-- MaximumTotalBandwidth
-- @prop dataType
-- @descr States the maximum total bandwidth.
-- @
-- ---------------------------------
MaximumTotalBandwidth ::= CHOICE
{
maximumTotalBandwidthValue Bandwidth,
undefined NULL,
...
}
-- ---------------------------------
--
-- McuInformation
-- @prop dataType
--
-- @descr Gives detailed information about the endpoint type, MCU.
-- @
-- ---------------------------------
McuInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- NonStandardIdentifier
-- @prop dataType
--
-- @descr
-- @
--
-- ---------------------------------
NonStandardIdentifier ::= CHOICE
{
object OBJECT IDENTIFIER,
h221NonStandard H221NonStandard,
...
}
-- ---------------------------------
--
-- NonStandardMessage
-- @prop dataType
--
-- @descr
-- @
--
-- ---------------------------------
NonStandardMessage ::= SEQUENCE
{
requestSeqNum RequestSeqNum,
nonStandardData NonStandardParameter,
...
}
-- ---------------------------------
--
-- NonStandardParameter
--
-- @prop dataType
-- @
-- ---------------------------------
NonStandardParameter ::= SEQUENCE
{
nonStandardIdentifier NonStandardIdentifier,
data OCTET STRING,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Number ::= SEQUENCE
{
type NumberType,
indicator NumberingPlan,
number IA5String (FROM ("0123456789#*")),
...
}
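-- Illustrative value notation for Number (non-normative; the digits are
-- arbitrary):
--
--   exampleNumber Number ::=
--   {
--     type      national : NULL,
--     indicator e164 : NULL,
--     number    "0123456789"
--   }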
-- ---------------------------------
--
-- NumberOfTimesLANWasCrowded
-- @prop dataType
--
-- @descr States the number of times the LAN has been crowded,
-- i.e. the endpoint has released the initiated call due to
-- heavy load in the LAN.
-- @
--
-- ---------------------------------
NumberOfTimesLANWasCrowded ::= CHOICE
{
numberOfTimesLANWasCrowdedValue INTEGER ( 0.. 999999999),
undefined NULL,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
NumberType ::= CHOICE
{
unknown NULL,
international NULL,
national NULL,
network NULL,
local NULL,
abbreviated NULL,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
NumberingPlan ::= CHOICE
{
unknown NULL,
e164 NULL,
data NULL,
telex NULL,
national NULL,
private NULL,
...
}
-- ---------------------------------
--
-- ObjectIdentifier
--
-- @prop dataType
-- @descr An identifier of a certain instance of an object.
-- @
--
-- ---------------------------------
ObjectIdentifier ::= OCTET STRING
-- ---------------------------------
--
-- PhysicalConnectionPointIdentifier
--
-- @prop dataType
-- @descr Contains data that identifies a specific equipment instance.
-- @
--
-- ---------------------------------
PhysicalConnectionPointIdentifier ::= CHOICE
{
equipmentAN EquipmentAddressAN, -- Equipment connected to the Access Node.
equipmentLAN EquipmentAddressLAN, -- Equipment connected to the LAN.
...
}
-- ---------------------------------
--
-- Pid
-- @prop dataType
--
-- @descr A process identifier.
-- @
--
-- ---------------------------------
Pid ::= ObjectIdentifier
-- ---------------------------------
--
-- PreStringToRemoveInDestinationAddress
--
-- @prop dataType
--
-- @descr A pre-string that shall be removed when sending the destination address.
--
-- @
--
-- ---------------------------------
PreStringToRemoveInDestinationAddress ::= CHOICE
{
e164 IA5String (SIZE (1..128)) (FROM ("0123456789,")),
h323 BMPString (SIZE (1..256)),
-- h323 is Basic ISO/IEC 10646-1 (Unicode)
...
}
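-- Illustrative value notation (non-normative; the prefix below is an
-- arbitrary example of a string to strip from e164 destination addresses):
--
--   examplePreString PreStringToRemoveInDestinationAddress ::= e164 : "00,"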
-- @prop dataType
-- @descr Origin: Q931
-- @
ProgressIndicator ::= SEQUENCE
{
}
-- ---------------------------------
--
-- ProtocolIdentifier
--
-- @prop dataType
-- @descr
-- @
--
-- ---------------------------------
ProtocolIdentifier ::= OBJECT IDENTIFIER
-- ---------------------------------
--
-- Q931Timer301Value
-- @prop dataType
--
-- @descr States the Q931 timer 301 value to be used in milliseconds.
-- @
--
-- ---------------------------------
Q931Timer301Value ::= INTEGER ( 180000.. 360000 )
-- ---------------------------------
--
-- Q931Timer303Value
--
-- @prop dataType
-- @descr States the Q931 timer 303 value to be used in milliseconds.
-- @
--
-- ---------------------------------
Q931Timer303Value ::= INTEGER ( 1000.. 10000 )
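-- Note: both timer values are expressed in milliseconds, so the ranges
-- above correspond to 3..6 minutes for Q931 timer 301 and 1..10 seconds
-- for Q931 timer 303.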
-- ---------------------------------
--
-- Q954Details
--
-- @prop dataType
-- @descr
-- @
--
-- ---------------------------------
Q954Details ::= SEQUENCE
{
conferenceCalling BOOLEAN,
threePartyService BOOLEAN,
...
}
-- ---------------------------------
--
-- QseriesOptions
--
-- @prop dataType
-- @
--
-- ---------------------------------
QseriesOptions ::=SEQUENCE
{
q932Full BOOLEAN,
q951Full BOOLEAN,
q952Full BOOLEAN,
q953Full BOOLEAN,
q955Full BOOLEAN,
q956Full BOOLEAN,
q957Full BOOLEAN,
q954Info Q954Details,
...
}
-- ---------------------------------
--
-- RASMessageTimerValue
--
-- @prop dataType
-- @descr States the RAS message timer value to be used in milliseconds.
-- @
--
-- ---------------------------------
RASMessageTimerValue ::= INTEGER ( 1000.. 10000 )
-- ---------------------------------
--
-- RTPSession
--
-- @prop dataType
-- @
--
-- ---------------------------------
RTPSession ::= SEQUENCE
{
rtpAddress TransportChannelInformation,
rtcpAddress TransportChannelInformation,
cname PrintableString,
ssrc INTEGER (1.. 134217727), -- change from 4294967295 for erl 4.2
sessionId INTEGER (1..255),
associatedSessionIds SEQUENCE OF INTEGER (1..255),
...
}
-- ---------------------------------
--
-- RegistrationRejectReason
--
-- @prop dataType
-- @descr Specifies the registration reject reasons that are valid
-- in the H.225.0 message RegistrationReject.
-- @
--
-- ---------------------------------
RegistrationRejectReason ::= CHOICE
{
discoveryRequired NULL,
invalidRevision NULL,
invalidCallSignalAddress NULL,
invalidRasAddress NULL,
duplicateAlias UserIdentifierInformation,
invalidTerminalType NULL,
undefinedReason NULL,
transportNotSupported NULL,
...
}
-- ---------------------------------
--
-- RegistrationStatus
--
-- @prop dataType
-- @
--
-- ---------------------------------
RegistrationStatus ::= CHOICE
{
notRegistered NULL,
registered NULL,
...
}
-- ---------------------------------
--
-- RelationToEquipment
--
-- @prop dataType
-- @descr Relation to the architecture component Equipment.
-- @
--
-- ---------------------------------
RelationToEquipment ::= SEQUENCE
{
relationToUser RelationToUser,
typeOfEquipment TypeOfEquipment,
...
}
-- ---------------------------------
--
-- RelationToUser
--
-- @prop dataType
-- @descr Relation to the architecture component User.
-- @
--
-- ---------------------------------
RelationToUser ::= BMPString (SIZE(1..128))
-- ---------------------------------
--
-- ReleaseCompleteReason
--
-- @prop dataType
-- @descr
-- @
--
-- ---------------------------------
ReleaseCompleteReason ::= CHOICE
{
noBandwidth NULL,
gatekeeperResources NULL,
unreachableDestination NULL,
destinationRejection NULL,
invalidRevision NULL,
noPermission NULL,
unreachableGatekeeper NULL,
gatewayResources NULL,
badFormatAddress NULL,
adaptiveBusy NULL,
inConf NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- ReleaseCompleteUUIE
-- @prop dataType
--
-- @
-- ---------------------------------
ReleaseCompleteUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
releaseCompleteReason ReleaseCompleteReason OPTIONAL, -- reason
...
}
-- ---------------------------------
--
-- ReleaseInformation
--
-- @prop dataType
-- @descr This data type is used to transfer the reason for the
-- rejection or release.
-- @
--
-- ---------------------------------
ReleaseInformation ::= CHOICE
{
forcedDrop DetailedReasonAtom,
normalDrop NULL,
...
}
-- ---------------------------------
--
-- RemoveAnalysisRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
RemoveAnalysisRejectReason ::= CHOICE
{
analysisTableEntryNotFound NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- RemoveEquipmentRepresentationRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
RemoveEquipmentRepresentationRejectReason ::= CHOICE
{
invalidInputData NULL,
equipmentRepresentationDoesNotExist NULL,
other NULL,
...
}
-- ---------------------------------
--
-- RemoveServiceAndStatusRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
RemoveServiceAndStatusRejectReason ::= CHOICE
{
identifierOfServiceNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- RemoveServiceFromServiceProfileRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
RemoveServiceFromServiceProfileRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceDoNotExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- RemoveServiceIdentifierRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
-- ---------------------------------
RemoveServiceIdentifierRejectReason ::= CHOICE
{
keyNotKnown NULL,
serviceIdentifierDoNotExist NULL,
undefined NULL,
...
}
-- @prop dataType
-- @
RepeatIndicator ::= SEQUENCE
{
}
-- ---------------------------------
--
-- RequestSeqNum
--
-- @prop dataType
-- @descr
-- @
-- ---------------------------------
RequestSeqNum ::= INTEGER (1..65535)
-- ---------------------------------
--
-- RequestedUserAndLinkedUserAreIdentical
--
-- @prop dataType
-- @descr This parameter indicates if the requested user and the user
-- linked to the requested endpoint are identical, not identical
-- or if this is undefined.
-- @
--
-- ---------------------------------
RequestedUserAndLinkedUserAreIdentical ::= CHOICE
{
yes NULL,
no NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- ServiceAndStatus
-- @prop dataType
--
-- @descr Information of a service and its state.
-- @
--
-- ---------------------------------
ServiceAndStatus ::= SEQUENCE
{
typeOfService TypeOfService,
status StatusOfService,
...
}
-- ---------------------------------
--
-- ServiceCallSetupRejectionInformation
--
-- @prop dataType
-- @descr Information related to the call setup rejection.
-- @
--
-- ---------------------------------
ServiceCallSetupRejectionInformation ::= SEQUENCE
{
terminationInitiatior TerminationInitiatior,
terminationReason ServiceCallSetupRejectionReason,
...
}
-- ---------------------------------
--
-- ServiceCallSetupRejectionReason
--
-- @prop dataType
-- @descr Reason for rejection.
-- @
-- ---------------------------------
ServiceCallSetupRejectionReason ::= CHOICE
{
calledUserBusy NULL,
calledUserNotAvailable NULL,
destinationOutOfOrder NULL,
requestedServiceBarred NULL,
requestedServiceNotAvailable NULL,
requestedServiceNotSubscribed NULL,
resourceUnavailable NULL,
temporaryFailure NULL,
unassignedUserIdentifier NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- ServiceCallTerminationInformation
-- @prop dataType
--
-- @descr States information related to the termination.
-- @
--
-- ---------------------------------
ServiceCallTerminationInformation ::= SEQUENCE
{
terminationInitiation TerminationInitiatior,
terminationReason ServiceCallTerminationReason,
...
}
-- ---------------------------------
--
-- ServiceCallTerminationReason
--
-- @prop dataType
-- @descr Reason for termination.
-- @
--
-- ---------------------------------
ServiceCallTerminationReason ::= CHOICE
{
noAnswerFromCalledUser NULL,
normalTermination NULL,
resourceUnavailable NULL,
temporaryFailure NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- ServiceData
--
-- @prop dataType
-- @descr Contains the identified services and their categories
-- @
-- ---------------------------------
ServiceData ::= CHOICE
{
basicCall BasicCallCategories,
endpointRegistration EndpointRegistrationCategories,
endpointUnregistration EndpointUnregistrationCategories,
...
}
-- @prop dataType
-- @descr
-- @
--
ServiceIdentifier ::= INTEGER
-- ---------------------------------
--
-- ServiceProfile
--
-- @prop dataType
-- @descr Contains services and data related to the services.
-- @
-- ---------------------------------
ServiceProfile ::= SEQUENCE
{
serviceDataInformation SEQUENCE OF ServiceData OPTIONAL,
...
}
-- ---------------------------------
--
-- SetEquipmentStatusRejectReason
--
-- @prop dataType
--
--
-- @
--
-- ---------------------------------
SetEquipmentStatusRejectReason ::= CHOICE
{
userNotKnown NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- SetLANDataRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
SetLANDataRejectReason ::= CHOICE
{
invalidInputData NULL,
other NULL,
...
}
-- ---------------------------------
--
-- SetUserAttributeData
--
-- @prop dataType
--
-- @descr This parameter contains a User attribute value.
--
-- @
--
-- ---------------------------------
SetUserAttributeData ::= CHOICE
{
maximumTotalBandwidth Bandwidth,
maximumBandwidthPerService Bandwidth,
stateOfUser StateOfUser,
typeOfUser TypeOfUser,
...
}
-- ---------------------------------
--
-- SetupUUIE
-- @prop dataType
-- @
--
-- ---------------------------------
SetupUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
sourceH245Address TransportAddress OPTIONAL, -- h245Address
sourceUserIdentifierInformation UserIdentifierInformation OPTIONAL, -- sourceAddress
sourceEndpointType EndpointType, -- sourceInfo
destinationUserIdentifierInformation UserIdentifierInformation OPTIONAL, -- destinationAddress
destinationCallSignallingAddress TransportAddress OPTIONAL, -- destCallSignalAddress
destinationExtraUserIdentifierInformation UserIdentifierInformation OPTIONAL, -- destExtraCallInfo
destinationExtraCallReference SEQUENCE OF CallReference OPTIONAL, -- destExtraCRV
activeMC BOOLEAN,
conferenceIdentifier ConferenceIdentifier, -- conferenceID
conferenceGoal ConferenceGoal,
callServices CallServices OPTIONAL,
callType CallType,
...
}
-- ---------------------------------
--
-- StateOfEquipment
--
-- @prop dataType
-- @descr States the state of the equipment.
-- @
--
-- ---------------------------------
StateOfEquipment ::= CHOICE
{
blocked NULL, -- Equipment is blocked
busy NULL, -- Equipment is busy, no more calls possible for moment
available NULL, -- Equipment has reported itself as present and is ready for actions
unregistered NULL, -- Equipment is not present
...
}
-- ---------------------------------
--
-- StateOfUser
--
-- @prop dataType
-- @descr This parameter specifies the state of the user.
-- @
-- ---------------------------------
StateOfUser ::= CHOICE
{
absent NULL,
present NULL,
...
}
-- ---------------------------------
--
-- StatusOfService
--
-- @prop dataType
-- @descr States the state of the service.
-- @
--
-- ---------------------------------
StatusOfService ::= CHOICE
{
acknowledge NULL,
active NULL,
initiatied NULL,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
Subaddress ::= SEQUENCE
{
type SubaddressType,
indicator BOOLEAN,
address SubaddressInformation,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
SubaddressInformation ::= OCTET STRING (SIZE(1..23))
-- @prop dataType
-- @descr Origin: Q931
-- @
SubaddressType ::= CHOICE
{
nsap NULL,
user NULL,
...
}
-- ---------------------------------
--
-- SupportedProtocols
--
-- @prop dataType
-- @descr Gives detailed information about protocols that are
-- supported by the stated endpoint.
-- @
-- ---------------------------------
SupportedProtocols ::= CHOICE
{
nonStandardData NonStandardParameter,
h310 H310Information,
h320 H320Information,
h321 H321Information,
h322 H322Information,
h323 H323Information,
h324 H324Information,
voice VoiceInformation,
t120Only T120Information,
...
}
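-- Illustrative value notation (non-normative): an endpoint that supports
-- H.323 with no non-standard data could be described as
--
--   exampleProtocol SupportedProtocols ::= h323 : { }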
-- ---------------------------------
--
-- T120Information
--
-- @prop dataType
-- @descr Gives detailed information about the standard protocol T.120
-- @
-- ---------------------------------
T120Information ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- TerminalInformation
-- @prop dataType
--
-- @
--
-- ---------------------------------
TerminalInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
-- ---------------------------------
--
-- TerminationInitiatior
--
-- @prop dataType
-- @descr States who initiated the termination.
-- @
--
-- ---------------------------------
TerminationInitiatior ::= CHOICE
{
endpoint NULL,
serviceNode NULL,
...
}
-- ---------------------------------
--
-- TimeSlot
--
-- @prop dataType
-- @descr This parameter contains the identity of the time slot used
-- for the connection.
-- @
--
-- ---------------------------------
TimeSlot ::= INTEGER
-- @prop dataType
-- @descr Origin: Q931
-- @
TransferCapability ::= CHOICE
{
speech NULL,
unrestrictedDigital NULL,
restrictedDigital NULL,
audio3point1kHz NULL,
unrestrictedWithTonesAndAnnouncements NULL,
video NULL,
...
}
-- @prop dataType
-- @descr Origin: Q931
-- @
TransferRate ::= CHOICE
{
packedMode NULL,
r64kbps NULL,
r2x64kbps NULL,
r384kbps NULL,
r1536kbps NULL,
r1920kbps NULL,
multirate NULL,
...
}
-- ---------------------------------
--
-- TransportAddress
--
-- @prop dataType
-- @descr The transport address.
-- @
--
-- ---------------------------------
TransportAddress ::= CHOICE
{
ipV4Address SEQUENCE
{
ip OCTET STRING ( SIZE (4) ),
port INTEGER ( 0..65535 )
},
ipV6Address SEQUENCE
{
ip OCTET STRING ( SIZE (16) ),
port INTEGER ( 0..65535 ),
...
},
...
}
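-- Illustrative value notation (non-normative; the loopback address and
-- the port are arbitrary examples):
--
--   exampleAddress TransportAddress ::=
--     ipV4Address : { ip '7F000001'H, port 1720 }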
-- ---------------------------------
--
-- TransportAddressInformation
--
-- @prop dataType
-- @descr Sequence of TransportAddress.
-- @
-- ---------------------------------
TransportAddressInformation ::= SEQUENCE OF TransportAddress
-- ---------------------------------
--
-- TransportChannelInformation
--
-- @prop dataType
-- @
--
-- ---------------------------------
TransportChannelInformation ::= SEQUENCE
{
sendAddress TransportAddress OPTIONAL,
recvAddress TransportAddress OPTIONAL,
...
}
-- ---------------------------------
--
-- TypeOfEquipment
--
-- @prop dataType
-- @descr Type of equipment.
-- @
--
-- ---------------------------------
TypeOfEquipment ::= CHOICE
{
cordlessTerminal NULL,
h323Terminal NULL,
h323Gateway NULL,
isdnTerminal NULL,
...
}
-- ---------------------------------
--
-- TypeOfFlowControl
--
-- @prop dataType
-- @descr This parameter specifies the type of flow control used in the LAN.
-- @
--
-- ---------------------------------
TypeOfFlowControl ::= CHOICE
{
isa NULL,
priorityOutputRouting NULL,
other NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- TypeOfLAN
--
-- @prop dataType
-- @descr This parameter specifies the type of LAN.
-- @
--
-- ---------------------------------
TypeOfLAN ::= CHOICE
{
ethernet NULL,
tokenRing NULL,
other NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- TypeOfRegistration
--
-- @prop dataType
-- @descr Type of service.
-- @
--
-- ---------------------------------
TypeOfRegistration ::= CHOICE
{
changeOfUser NULL,
noChangeOfUser NULL,
...
}
-- ---------------------------------
--
-- TypeOfService
--
-- @prop dataType
-- @descr Type of service.
-- @
--
-- ---------------------------------
TypeOfService ::= CHOICE
{
basicCall NULL,
endpointRegistration NULL,
endpointUnregistration NULL,
...
}
-- ---------------------------------
--
-- TypeOfUser
--
-- @prop dataType
-- @descr Type of user.
-- @
--
-- ---------------------------------
TypeOfUser ::= CHOICE
{
human NULL,
network NULL,
...
}
-- ---------------------------------
--
-- UnknownMessageResponse
--
-- @prop dataType
-- @descr
-- @
--
-- ---------------------------------
UnknownMessageResponse ::= SEQUENCE
{
requestSeqNum RequestSeqNum,
...
}
-- ---------------------------------
--
-- UnregistrationRejectReason
--
-- @prop dataType
-- @descr
-- @
--
-- ---------------------------------
UnregistrationRejectReason ::= CHOICE
{
notCurrentlyRegistered NULL,
callInProgress NULL,
undefinedReason NULL,
...
}
-- ---------------------------------
--
-- UserAllocateResourceRejectReason
--
-- @prop dataType
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
UserAllocateResourceRejectReason ::= CHOICE
{
keyNotValid NULL,
resourceNotAvailable NULL,
serviceIdentifierExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- UserAttributeData
--
-- @prop dataType
--
-- @descr This parameter contains a User attribute value.
--
-- @
--
-- ---------------------------------
UserAttributeData ::= CHOICE
{
maximumTotalBandwidth Bandwidth,
maximumBandwidthPerService Bandwidth,
relationToEquipment SEQUENCE OF RelationToEquipment,
stateOfUser StateOfUser,
typeOfUser TypeOfUser,
userIdentifierInformation SEQUENCE OF UserIdentifier,
...
}
-- ---------------------------------
--
-- UserAttributeIdentifier
--
-- @prop dataType
--
-- @descr This parameter contains User attribute identifiers.
--
-- @
--
-- ---------------------------------
UserAttributeIdentifier ::= CHOICE
{
maximumTotalBandwidth NULL,
maximumBandwidthPerService NULL,
relationToEquipment NULL,
stateOfUser NULL,
typeOfUser NULL,
userIdentifierInformation NULL,
...
}
-- ---------------------------------
--
-- UserCreateRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
UserCreateRejectReason ::= CHOICE
{
userIdentifierAlreadyExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- UserDeallocateResourceRejectReason
-- @prop dataType
--
-- @descr Reason for the rejection.
-- @
--
-- ---------------------------------
UserDeallocateResourceRejectReason ::= CHOICE
{
resourceNotAllocated NULL,
serviceIdentifierNotValid NULL,
userNotExist NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- UserIdentifier
--
-- @prop dataType
-- @descr The identifier of the User.
-- @
--
-- ---------------------------------
UserIdentifier ::= CHOICE
{
e164 E164Identifier,
h323 BMPString (SIZE (1..256)),
-- h323 is Basic ISO/IEC 10646-1 (Unicode)
...
}
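-- Illustrative value notation (non-normative; the alias is an arbitrary
-- example):
--
--   exampleUser UserIdentifier ::= h323 : "alice"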
-- ---------------------------------
--
-- UserIdentifierInformation
--
-- @prop dataType
-- @descr Sequence of UserIdentifier.
-- @
--
-- ---------------------------------
UserIdentifierInformation ::= SEQUENCE OF UserIdentifier
-- @prop dataType
-- @
UserInformation ::= OCTET STRING (SIZE(1..131))
-- ---------------------------------
--
-- UserInformationUUIE
--
-- @prop dataType
-- @
--
-- ---------------------------------
UserInformationUUIE ::= SEQUENCE
{
protocolIdentifier ProtocolIdentifier,
...
}
-- ---------------------------------
--
-- UserKey
--
-- @prop dataType
-- @descr Unique key for a certain user.
-- @
-- ---------------------------------
UserKey ::= CHOICE
{
relationToUser RelationToUser,
userIdentifierInformation SEQUENCE OF UserIdentifier,
...
}
-- ---------------------------------
--
-- UserOrEquipmentRelatedInformation
--
-- @prop dataType
-- @descr This parameter specifies the type of information.
-- @
-- ---------------------------------
UserOrEquipmentRelatedInformation ::= CHOICE
{
userRelatedInformation SEQUENCE OF UserRelatedInformation,
equipmentRelatedInformation SEQUENCE OF EquipmentRelatedInformation,
...
}
-- ---------------------------------
--
-- UserOrEquipmentRelatedInformationIdentifier
--
-- @prop dataType
-- @descr This parameter specifies the type of information identifiers.
-- @
-- ---------------------------------
UserOrEquipmentRelatedInformationIdentifier ::= CHOICE
{
userRelatedInformationIdentifiers SEQUENCE OF UserRelatedInformationIdentifier,
equipmentRelatedInformationIdentifiers SEQUENCE OF EquipmentRelatedInformationIdentifier,
...
}
-- ---------------------------------
--
-- UserRelatedInformation
--
-- @prop dataType
-- @descr This parameter specifies different types of data
-- that are related to the user.
-- @
--
-- ---------------------------------
UserRelatedInformation ::= CHOICE
{
numberOfEquipments INTEGER,
stateOfUser StateOfUser,
typeOfUser TypeOfUser,
...
}
-- ---------------------------------
--
-- UserRelatedInformationIdentifier
--
--
-- @prop dataType
--
-- @descr This parameter specifies different types of data
-- that are specific to a certain user.
--
-- @
-- ---------------------------------
UserRelatedInformationIdentifier ::= CHOICE
{
numberOfEquipments NULL,
stateOfUser NULL,
typeOfUser NULL,
...
}
-- ---------------------------------
--
-- UserRemoveRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
UserRemoveRejectReason ::= CHOICE
{
keyNotValid NULL,
serviceInProgress NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- UserSetRejectReason
--
-- @prop dataType
--
-- @descr This parameter states the reason for the rejection.
--
-- @
--
-- ---------------------------------
UserSetRejectReason ::= CHOICE
{
keyNotValid NULL,
undefined NULL,
...
}
-- ---------------------------------
--
-- UserSpecificInformation
--
-- @descr This parameter specifies different types of data
-- that are specific to the user.
-- @
--
-- ---------------------------------
UserSpecificInformation ::= CHOICE
{
userRelatedInformation SEQUENCE OF UserRelatedInformation,
equipmentRelatedInformation SEQUENCE OF EquipmentRelatedInformation,
...
}
-- ---------------------------------
--
-- VendorIdentifier
--
-- @prop dataType
-- @
--
-- ---------------------------------
VendorIdentifier ::= SEQUENCE
{
vendor H221NonStandard,
productId OCTET STRING (SIZE(1..256)) OPTIONAL,
versionId OCTET STRING (SIZE(1..256)) OPTIONAL,
...
}
-- ---------------------------------
--
-- VoiceInformation
--
-- @prop dataType
-- @descr
-- @
--
-- ---------------------------------
VoiceInformation ::= SEQUENCE
{
nonStandardData NonStandardParameter OPTIONAL,
...
}
END
| apache-2.0 |
ctrlaltdel/neutrinator | vendor/dogpile/cache/plugins/mako_cache.py | 70 | 2941 | """
Mako Integration
----------------
dogpile.cache includes a `Mako <http://www.makotemplates.org>`_ plugin
that replaces `Beaker <http://beaker.groovie.org>`_
as the cache backend.
Setup a Mako template lookup using the "dogpile.cache" cache implementation
and a region dictionary::
from dogpile.cache import make_region
from mako.lookup import TemplateLookup
my_regions = {
"local":make_region().configure(
"dogpile.cache.dbm",
expiration_time=360,
arguments={"filename":"file.dbm"}
),
"memcached":make_region().configure(
"dogpile.cache.pylibmc",
expiration_time=3600,
arguments={"url":["127.0.0.1"]}
)
}
mako_lookup = TemplateLookup(
directories=["/myapp/templates"],
cache_impl="dogpile.cache",
cache_args={
'regions':my_regions
}
)
To use the above configuration in a template, use the ``cached=True``
argument on any Mako tag which accepts it, in conjunction with the
name of the desired region as the ``cache_region`` argument::
<%def name="mysection()" cached="True" cache_region="memcached">
some content that's cached
</%def>
"""
from mako.cache import CacheImpl
class MakoPlugin(CacheImpl):
"""A Mako ``CacheImpl`` which talks to dogpile.cache."""
def __init__(self, cache):
super(MakoPlugin, self).__init__(cache)
try:
self.regions = self.cache.template.cache_args['regions']
except KeyError:
raise KeyError(
"'cache_regions' argument is required on the "
"Mako Lookup or Template object for usage "
"with the dogpile.cache plugin.")
def _get_region(self, **kw):
try:
region = kw['region']
except KeyError:
raise KeyError(
"'cache_region' argument must be specified with 'cache=True'"
"within templates for usage with the dogpile.cache plugin.")
try:
return self.regions[region]
except KeyError:
raise KeyError("No such region '%s'" % region)
def get_and_replace(self, key, creation_function, **kw):
expiration_time = kw.pop("timeout", None)
return self._get_region(**kw).get_or_create(
key, creation_function,
expiration_time=expiration_time)
def get_or_create(self, key, creation_function, **kw):
return self.get_and_replace(key, creation_function, **kw)
def put(self, key, value, **kw):
self._get_region(**kw).put(key, value)
def get(self, key, **kw):
expiration_time = kw.pop("timeout", None)
return self._get_region(**kw).get(key, expiration_time=expiration_time)
def invalidate(self, key, **kw):
self._get_region(**kw).delete(key)
| gpl-3.0 |
canwe/NewsBlur | vendor/feedvalidator/iso639codes.py | 16 | 18360 | """$Id: iso639codes.py 699 2006-09-25 02:01:18Z rubys $"""
__author__ = "Sam Ruby <http://intertwingly.net/> and Mark Pilgrim <http://diveintomark.org/>"
__version__ = "$Revision: 699 $"
__date__ = "$Date: 2006-09-25 02:01:18 +0000 (Mon, 25 Sep 2006) $"
__copyright__ = "Copyright (c) 2002 Sam Ruby and Mark Pilgrim"
isoLang = \
{'aa': 'Afar',
'ab': 'Abkhazian',
'ae': 'Avestan',
'af': 'Afrikaans',
'ak': 'Akan',
'am': 'Amharic',
'an': 'Aragonese',
'ar': 'Arabic',
'as': 'Assamese',
'av': 'Avaric',
'ay': 'Aymara',
'az': 'Azerbaijani',
'ba': 'Bashkir',
'be': 'Byelorussian',
'bg': 'Bulgarian',
'bh': 'Bihari',
'bi': 'Bislama',
'bm': 'Bambara',
'bn': 'Bengali; Bangla',
'bo': 'Tibetan',
'br': 'Breton',
'bs': 'Bosnian',
'ca': 'Catalan',
'ce': 'Chechen',
'ch': 'Chamorro',
'co': 'Corsican',
'cr': 'Cree',
'cs': 'Czech',
'cu': 'Church Slavic',
'cv': 'Chuvash',
'cy': 'Welsh',
'da': 'Danish',
'de': 'German',
'dv': 'Divehi',
'dz': 'Dzongkha',
'ee': 'Ewe',
'el': 'Greek',
'en': 'English',
'eo': 'Esperanto',
'es': 'Spanish',
'et': 'Estonian',
'eu': 'Basque',
'fa': 'Persian (Farsi)',
'ff': 'Fulah',
'fi': 'Finnish',
'fj': 'Fiji',
'fo': 'Faroese',
'fr': 'French',
'fy': 'Frisian, Western',
'ga': 'Irish',
'gd': 'Scots Gaelic',
'gl': 'Galician',
'gn': 'Guarani',
'gu': 'Gujarati',
'gv': 'Manx',
'ha': 'Hausa',
'he': 'Hebrew',
'hi': 'Hindi',
'ho': 'Hiri Motu',
'hr': 'Croatian',
'ht': 'Haitian',
'hu': 'Hungarian',
'hy': 'Armenian',
'hz': 'Herero',
'ia': 'Interlingua',
'id': 'Indonesian',
'ie': 'Interlingue',
'ig': 'Igbo',
'ii': 'Sichuan Yi',
'ik': 'Inupiak',
'io': 'Ido',
'is': 'Icelandic',
'it': 'Italian',
'iu': 'Inuktitut',
'ja': 'Japanese',
'jv': 'Javanese',
'ka': 'Georgian',
'kg': 'Kongo',
'ki': 'Kikuyu; Gikuyu',
'kj': 'Kuanyama; Kwanyama',
'kk': 'Kazakh',
'kl': 'Greenlandic',
'km': 'Cambodian',
'kn': 'Kannada',
'ko': 'Korean',
'kr': 'Kanuri',
'ks': 'Kashmiri',
'ku': 'Kurdish',
'kv': 'Komi',
'kw': 'Cornish',
'ky': 'Kirghiz',
'la': 'Latin',
'lb': 'Letzeburgesch; Luxembourgish',
'lg': 'Ganda',
'li': 'Limburgan; Limburger, Limburgish',
'ln': 'Lingala',
'lo': 'Lao',
'lt': 'Lithuanian',
'lu': 'Luba-Katanga',
'lv': 'Latvian',
'mg': 'Malagasy',
'mh': 'Marshallese',
'mi': 'Maori',
'mk': 'Macedonian',
'ml': 'Malayalam',
'mn': 'Mongolian',
'mo': 'Moldavian',
'mr': 'Marathi',
'ms': 'Malay',
'mt': 'Maltese',
'my': 'Burmese',
'na': 'Nauru',
'nb': 'Norwegian Bokmal',
'nd': 'Ndebele, North',
'ne': 'Nepali',
'ng': 'Ndonga',
'nl': 'Dutch',
'nn': 'Norwegian Nynorsk',
'no': 'Norwegian',
'nr': 'Ndebele, South',
'nv': 'Navaho; Navajo',
'ny': 'Chewa; Chichewa; Nyanha',
'oc': 'Occitan',
'oj': 'Ojibwa',
'om': 'Afan (Oromo)',
'or': 'Oriya',
'os': 'Ossetian; Ossetic',
'pa': 'Punjabi',
'pi': 'Pali',
'pl': 'Polish',
'ps': 'Pushto',
'pt': 'Portuguese',
'qu': 'Quechua',
'rm': 'Rhaeto-Romance',
'rn': 'Kurundi',
'ro': 'Romanian',
'ru': 'Russian',
'rw': 'Kinyarwanda',
'sa': 'Sanskrit',
'sc': 'Sardinian',
'sd': 'Sindhi',
'se': 'Northern Sami',
'sg': 'Sangho',
'sh': 'Serbo-Croatian',
'si': 'Singhalese',
'sk': 'Slovak',
'sl': 'Slovenian',
'sm': 'Samoan',
'sn': 'Shona',
'so': 'Somali',
'sq': 'Albanian',
'sr': 'Serbian',
'ss': 'Swati',
'st': 'Sotho, Southern',
'su': 'Sundanese',
'sv': 'Swedish',
'sw': 'Swahili',
'ta': 'Tamil',
'te': 'Telugu',
'tg': 'Tajik',
'th': 'Thai',
'ti': 'Tigrinya',
'tk': 'Turkmen',
'tl': 'Tagalog',
'tn': 'Tswana',
'to': 'Tonga',
'tr': 'Turkish',
'ts': 'Tsonga',
'tt': 'Tatar',
'tw': 'Twi',
'ty': 'Tahitian',
'ug': 'Uigur',
'uk': 'Ukrainian',
'ur': 'Urdu',
'uz': 'Uzbek',
've': 'Venda',
'vi': 'Vietnamese',
'vo': 'Volapuk',
'wa': 'Walloon',
'wo': 'Wolof',
'xh': 'Xhosa',
'yi': 'Yiddish',
'yo': 'Yoruba',
'za': 'Zhuang',
'zh': 'Chinese',
'zu': 'Zulu',
'x' : 'a user-defined language',
'xx': 'a user-defined language',
'abk': 'Abkhazian',
'ace': 'Achinese',
'ach': 'Acoli',
'ada': 'Adangme',
'ady': 'Adygei; Adyghe',
'aar': 'Afar',
'afh': 'Afrihili',
'afr': 'Afrikaans',
'afa': 'Afro-Asiatic (Other)',
'ain': 'Ainu',
'aka': 'Akan',
'akk': 'Akkadian',
'alb': 'Albanian',
'sqi': 'Albanian',
'gws': 'Alemanic',
'ale': 'Aleut',
'alg': 'Algonquian languages',
'tut': 'Altaic (Other)',
'amh': 'Amharic',
'anp': 'Angika',
'apa': 'Apache languages',
'ara': 'Arabic',
'arg': 'Aragonese',
'arc': 'Aramaic',
'arp': 'Arapaho',
'arn': 'Araucanian',
'arw': 'Arawak',
'arm': 'Armenian',
'hye': 'Armenian',
'rup': 'Aromanian',
'art': 'Artificial (Other)',
'asm': 'Assamese',
'ast': 'Asturian',
'ath': 'Athapascan languages',
'aus': 'Australian languages',
'map': 'Austronesian (Other)',
'ava': 'Avaric',
'ave': 'Avestan',
'awa': 'Awadhi',
'aym': 'Aymara',
'aze': 'Azerbaijani',
'ast': 'Bable',
'ban': 'Balinese',
'bat': 'Baltic (Other)',
'bal': 'Baluchi',
'bam': 'Bambara',
'bai': 'Bamileke languages',
'bad': 'Banda',
'bnt': 'Bantu (Other)',
'bas': 'Basa',
'bak': 'Bashkir',
'baq': 'Basque',
'eus': 'Basque',
'btk': 'Batak (Indonesia)',
'bej': 'Beja',
'bel': 'Belarusian',
'bem': 'Bemba',
'ben': 'Bengali',
'ber': 'Berber (Other)',
'bho': 'Bhojpuri',
'bih': 'Bihari',
'bik': 'Bikol',
'byn': 'Bilin',
'bin': 'Bini',
'bis': 'Bislama',
'byn': 'Blin',
'nob': 'Bokmal, Norwegian',
'bos': 'Bosnian',
'bra': 'Braj',
'bre': 'Breton',
'bug': 'Buginese',
'bul': 'Bulgarian',
'bua': 'Buriat',
'bur': 'Burmese',
'mya': 'Burmese',
'cad': 'Caddo',
'car': 'Carib',
'spa': 'Castilian',
'cat': 'Catalan',
'cau': 'Caucasian (Other)',
'ceb': 'Cebuano',
'cel': 'Celtic (Other)',
'cai': 'Central American Indian (Other)',
'chg': 'Chagatai',
'cmc': 'Chamic languages',
'cha': 'Chamorro',
'che': 'Chechen',
'chr': 'Cherokee',
'nya': 'Chewa',
'chy': 'Cheyenne',
'chb': 'Chibcha',
'nya': 'Chichewa',
'chi': 'Chinese',
'zho': 'Chinese',
'chn': 'Chinook jargon',
'chp': 'Chipewyan',
'cho': 'Choctaw',
'zha': 'Chuang',
'chu': 'Church Slavic; Church Slavonic; Old Church Slavonic; Old Church Slavic; Old Bulgarian',
'chk': 'Chuukese',
'chv': 'Chuvash',
'nwc': 'Classical Nepal Bhasa; Classical Newari; Old Newari',
'cop': 'Coptic',
'cor': 'Cornish',
'cos': 'Corsican',
'cre': 'Cree',
'mus': 'Creek',
'crp': 'Creoles and pidgins(Other)',
'cpe': 'Creoles and pidgins, English-based (Other)',
'cpf': 'Creoles and pidgins, French-based (Other)',
'cpp': 'Creoles and pidgins, Portuguese-based (Other)',
'crh': 'Crimean Tatar; Crimean Turkish',
'scr': 'Croatian',
'hrv': 'Croatian',
'cus': 'Cushitic (Other)',
'cze': 'Czech',
'ces': 'Czech',
'dak': 'Dakota',
'dan': 'Danish',
'dar': 'Dargwa',
'day': 'Dayak',
'del': 'Delaware',
'din': 'Dinka',
'div': 'Divehi',
'doi': 'Dogri',
'dgr': 'Dogrib',
'dra': 'Dravidian (Other)',
'dua': 'Duala',
'dut': 'Dutch',
'nld': 'Dutch',
'dum': 'Dutch, Middle (ca. 1050-1350)',
'dyu': 'Dyula',
'dzo': 'Dzongkha',
'efi': 'Efik',
'egy': 'Egyptian (Ancient)',
'eka': 'Ekajuk',
'elx': 'Elamite',
'eng': 'English',
'enm': 'English, Middle (1100-1500)',
'ang': 'English, Old (ca.450-1100)',
'myv': 'Erzya',
'epo': 'Esperanto',
'est': 'Estonian',
'ewe': 'Ewe',
'ewo': 'Ewondo',
'fan': 'Fang',
'fat': 'Fanti',
'fao': 'Faroese',
'fij': 'Fijian',
'fil': 'Filipino; Pilipino',
'fin': 'Finnish',
'fiu': 'Finno-Ugrian (Other)',
'fon': 'Fon',
'fre': 'French',
'fra': 'French',
'frm': 'French, Middle (ca.1400-1600)',
'fro': 'French, Old (842-ca.1400)',
'frs': 'Frisian, Eastern',
'fry': 'Frisian, Western',
'fur': 'Friulian',
'ful': 'Fulah',
'gaa': 'Ga',
'gla': 'Gaelic',
'glg': 'Gallegan',
'lug': 'Ganda',
'gay': 'Gayo',
'gba': 'Gbaya',
'gez': 'Geez',
'geo': 'Georgian',
'kat': 'Georgian',
'ger': 'German',
'deu': 'German',
'nds': 'German, Low',
'gmh': 'German, Middle High (ca.1050-1500)',
'goh': 'German, Old High (ca.750-1050)',
'gem': 'Germanic (Other)',
'kik': 'Gikuyu',
'gil': 'Gilbertese',
'gon': 'Gondi',
'gor': 'Gorontalo',
'got': 'Gothic',
'grb': 'Grebo',
'grc': 'Greek, Ancient (to 1453)',
'gre': 'Greek, Modern (1453-)',
'ell': 'Greek, Modern (1453-)',
'kal': 'Greenlandic; Kalaallisut',
'grn': 'Guarani',
'guj': 'Gujarati',
'gwi': 'Gwich\'in',
'hai': 'Haida',
'hat': 'Haitian',
'hau': 'Hausa',
'haw': 'Hawaiian',
'heb': 'Hebrew',
'her': 'Herero',
'hil': 'Hiligaynon',
'him': 'Himachali',
'hin': 'Hindi',
'hmo': 'Hiri Motu',
'hit': 'Hittite',
'hmn': 'Hmong',
'hun': 'Hungarian',
'hup': 'Hupa',
'iba': 'Iban',
'ice': 'Icelandic',
'isl': 'Icelandic',
'ido': 'Ido',
'ibo': 'Igbo',
'ijo': 'Ijo',
'ilo': 'Iloko',
'smn': 'Inari Sami',
'inc': 'Indic (Other)',
'ine': 'Indo-European (Other)',
'ind': 'Indonesian',
'inh': 'Ingush',
'ina': 'Interlingua (International Auxiliary Language Association)',
'ile': 'Interlingue',
'iku': 'Inuktitut',
'ipk': 'Inupiaq',
'ira': 'Iranian (Other)',
'gle': 'Irish',
'mga': 'Irish, Middle (900-1200)',
'sga': 'Irish, Old (to 900)',
'iro': 'Iroquoian languages',
'ita': 'Italian',
'jpn': 'Japanese',
'jav': 'Javanese',
'jrb': 'Judeo-Arabic',
'jpr': 'Judeo-Persian',
'kbd': 'Kabardian',
'kab': 'Kabyle',
'kac': 'Kachin',
'kal': 'Kalaallisut',
'xal': 'Kalmyk',
'kam': 'Kamba',
'kan': 'Kannada',
'kau': 'Kanuri',
'krc': 'Karachay-Balkar',
'kaa': 'Kara-Kalpak',
'krl': 'Karelian',
'kar': 'Karen',
'kas': 'Kashmiri',
'csb': 'Kashubian',
'kaw': 'Kawi',
'kaz': 'Kazakh',
'kha': 'Khasi',
'khm': 'Khmer',
'khi': 'Khoisan (Other)',
'kho': 'Khotanese',
'kik': 'Kikuyu',
'kmb': 'Kimbundu',
'kin': 'Kinyarwanda',
'kir': 'Kirghiz',
'tlh': 'Klingon; tlhIngan-Hol',
'kom': 'Komi',
'kon': 'Kongo',
'kok': 'Konkani',
'kor': 'Korean',
'kos': 'Kosraean',
'kpe': 'Kpelle',
'kro': 'Kru',
'kua': 'Kuanyama',
'kum': 'Kumyk',
'kur': 'Kurdish',
'kru': 'Kurukh',
'kut': 'Kutenai',
'kua': 'Kwanyama',
'lad': 'Ladino',
'lah': 'Lahnda',
'lam': 'Lamba',
'lao': 'Lao',
'lat': 'Latin',
'lav': 'Latvian',
'ltz': 'Letzeburgesch',
'lez': 'Lezghian',
'lim': 'Limburgan',
'lin': 'Lingala',
'lit': 'Lithuanian',
'jbo': 'Lojban',
'nds': 'Low German',
'dsb': 'Lower Sorbian',
'loz': 'Lozi',
'lub': 'Luba-Katanga',
'lua': 'Luba-Lulua',
'lui': 'Luiseno',
'smj': 'Lule Sami',
'lun': 'Lunda',
'luo': 'Luo (Kenya and Tanzania)',
'lus': 'Lushai',
'ltz': 'Luxembourgish',
'mac': 'Macedonian',
'mkd': 'Macedonian',
'mad': 'Madurese',
'mag': 'Magahi',
'mai': 'Maithili',
'mak': 'Makasar',
'mlg': 'Malagasy',
'may': 'Malay',
'msa': 'Malay',
'mal': 'Malayalam',
'mlt': 'Maltese',
'mnc': 'Manchu',
'mdr': 'Mandar',
'man': 'Mandingo',
'mni': 'Manipuri',
'mno': 'Manobo languages',
'glv': 'Manx',
'mao': 'Maori',
'mri': 'Maori',
'mar': 'Marathi',
'chm': 'Mari',
'mah': 'Marshallese',
'mwr': 'Marwari',
'mas': 'Masai',
'myn': 'Mayan languages',
'men': 'Mende',
'mic': 'Micmac',
'min': 'Minangkabau',
'mwl': 'Mirandese',
'mis': 'Miscellaneous languages',
'moh': 'Mohawk',
'mdf': 'Moksha',
'mol': 'Moldavian',
'mkh': 'Mon-Khmer (Other)',
'lol': 'Mongo',
'mon': 'Mongolian',
'mos': 'Mossi',
'mul': 'Multiple languages',
'mun': 'Munda languages',
'nah': 'Nahuatl',
'nau': 'Nauru',
'nav': 'Navaho; Navajo',
'nde': 'Ndebele, North',
'nbl': 'Ndebele, South',
'ndo': 'Ndonga',
'nap': 'Neapolitan',
'nep': 'Nepali',
'new': 'Newari',
'nia': 'Nias',
'nic': 'Niger-Kordofanian (Other)',
'ssa': 'Nilo-Saharan (Other)',
'niu': 'Niuean',
'nog': 'Nogai',
'non': 'Norse, Old',
'nai': 'North American Indian (Other)',
'frr': 'Northern Frisian',
'sme': 'Northern Sami',
'nso': 'Northern Sotho; Pedi; Sepedi',
'nde': 'North Ndebele',
'nor': 'Norwegian',
'nob': 'Norwegian Bokmal',
'nno': 'Norwegian Nynorsk',
'nub': 'Nubian languages',
'nym': 'Nyamwezi',
'nya': 'Nyanja',
'nyn': 'Nyankole',
'nno': 'Nynorsk, Norwegian',
'nyo': 'Nyoro',
'nzi': 'Nzima',
'oci': 'Occitan (post 1500)',
'oji': 'Ojibwa',
'ori': 'Oriya',
'orm': 'Oromo',
'osa': 'Osage',
'oss': 'Ossetian; Ossetic',
'oto': 'Otomian languages',
'pal': 'Pahlavi',
'pau': 'Palauan',
'pli': 'Pali',
'pam': 'Pampanga',
'pag': 'Pangasinan',
'pan': 'Panjabi',
'pap': 'Papiamento',
'paa': 'Papuan (Other)',
'per': 'Persian',
'fas': 'Persian',
'peo': 'Persian, Old (ca.600-400)',
'phi': 'Philippine (Other)',
'phn': 'Phoenician',
'pon': 'Pohnpeian',
'pol': 'Polish',
'por': 'Portuguese',
'pra': 'Prakrit languages',
'oci': 'Provencal',
'pro': 'Provencal, Old (to 1500)',
'pan': 'Punjabi',
'pus': 'Pushto',
'que': 'Quechua',
'roh': 'Raeto-Romance',
'raj': 'Rajasthani',
'rap': 'Rapanui',
'rar': 'Rarotongan',
'qaa': 'Reserved for local use',
'qtz': 'Reserved for local use',
'roa': 'Romance (Other)',
'rum': 'Romanian',
'ron': 'Romanian',
'rom': 'Romany',
'run': 'Rundi',
'rus': 'Russian',
'sal': 'Salishan languages',
'sam': 'Samaritan Aramaic',
'smi': 'Sami languages (Other)',
'smo': 'Samoan',
'sad': 'Sandawe',
'sag': 'Sango',
'san': 'Sanskrit',
'sat': 'Santali',
'srd': 'Sardinian',
'sas': 'Sasak',
'nds': 'Saxon, Low',
'sco': 'Scots',
'gla': 'Scottish Gaelic',
'sel': 'Selkup',
'sem': 'Semitic (Other)',
'nso': 'Sepedi; Northern Sotho; Pedi',
'scc': 'Serbian',
'srp': 'Serbian',
'srr': 'Serer',
'shn': 'Shan',
'sna': 'Shona',
'iii': 'Sichuan Yi',
'scn': 'Sicilian',
'sid': 'Sidamo',
'sgn': 'Sign languages',
'bla': 'Siksika',
'snd': 'Sindhi',
'sin': 'Sinhalese',
'sit': 'Sino-Tibetan (Other)',
'sio': 'Siouan languages',
'sms': 'Skolt Sami',
'den': 'Slave (Athapascan)',
'sla': 'Slavic (Other)',
'slo': 'Slovak',
'slk': 'Slovak',
'slv': 'Slovenian',
'sog': 'Sogdian',
'som': 'Somali',
'son': 'Songhai',
'snk': 'Soninke',
'wen': 'Sorbian languages',
'nso': 'Sotho, Northern',
'sot': 'Sotho, Southern',
'sai': 'South American Indian (Other)',
'alt': 'Southern Altai',
'sma': 'Southern Sami',
'nbl': 'South Ndebele',
'spa': 'Spanish',
'srn': 'Sranan Tongo',
'suk': 'Sukuma',
'sux': 'Sumerian',
'sun': 'Sundanese',
'sus': 'Susu',
'swa': 'Swahili',
'ssw': 'Swati',
'swe': 'Swedish',
'gsw': 'Swiss German; Alemanic',
'syr': 'Syriac',
'tgl': 'Tagalog',
'tah': 'Tahitian',
'tai': 'Tai (Other)',
'tgk': 'Tajik',
'tmh': 'Tamashek',
'tam': 'Tamil',
'tat': 'Tatar',
'tel': 'Telugu',
'ter': 'Tereno',
'tet': 'Tetum',
'tha': 'Thai',
'tib': 'Tibetan',
'bod': 'Tibetan',
'tig': 'Tigre',
'tir': 'Tigrinya',
'tem': 'Timne',
'tiv': 'Tiv',
'tlh': 'tlhIngan-Hol; Klingon',
'tli': 'Tlingit',
'tpi': 'Tok Pisin',
'tkl': 'Tokelau',
'tog': 'Tonga (Nyasa)',
'ton': 'Tonga (Tonga Islands)',
'tsi': 'Tsimshian',
'tso': 'Tsonga',
'tsn': 'Tswana',
'tum': 'Tumbuka',
'tup': 'Tupi languages',
'tur': 'Turkish',
'ota': 'Turkish, Ottoman (1500-1928)',
'tuk': 'Turkmen',
'tvl': 'Tuvalu',
'tyv': 'Tuvinian',
'twi': 'Twi',
'udm': 'Udmurt',
'uga': 'Ugaritic',
'uig': 'Uighur',
'ukr': 'Ukrainian',
'umb': 'Umbundu',
'und': 'Undetermined',
'hsb': 'Upper Sorbian',
'urd': 'Urdu',
'uzb': 'Uzbek',
'vai': 'Vai',
'cat': 'Valencian',
'ven': 'Venda',
'vie': 'Vietnamese',
'vol': 'Volapuk',
'vot': 'Votic',
'wak': 'Wakashan languages',
'wal': 'Walamo',
'wln': 'Walloon',
'war': 'Waray',
'was': 'Washo',
'wel': 'Welsh',
'cym': 'Welsh',
'fry': 'Western Frisian',
'wol': 'Wolof',
'xho': 'Xhosa',
'sah': 'Yakut',
'yao': 'Yao',
'yap': 'Yapese',
'yid': 'Yiddish',
'yor': 'Yoruba',
'ypk': 'Yupik languages',
'znd': 'Zande',
'zap': 'Zapotec',
'zen': 'Zenaga',
'zha': 'Zhuang',
'zul': 'Zulu',
'zun': 'Zuni' }
| mit |
Carmezim/tensorflow | tensorflow/contrib/opt/python/training/moving_average_optimizer.py | 84 | 5839 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Moving average optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import moving_averages
from tensorflow.python.training import optimizer
from tensorflow.python.training import saver
class MovingAverageOptimizer(optimizer.Optimizer):
"""Optimizer that computes a moving average of the variables.
Empirically it has been found that using the moving average of the trained
parameters of a deep network is better than using its trained parameters
directly. This optimizer allows you to compute this moving average and swap
the variables at save time so that any code outside of the training loop will
use by default the averaged values instead of the original ones.
Example of usage:
```python
  # Encapsulate your favorite optimizer (here the momentum one)
  # inside the MovingAverageOptimizer.
  opt = tf.train.MomentumOptimizer(learning_rate, FLAGS.momentum)
  opt = tf.contrib.opt.MovingAverageOptimizer(opt)
  # Then create your model and all its variables.
  model = build_model()
  # Add the training op that optimizes using opt.
  # This needs to be called before swapping_saver().
  opt.minimize(cost, var_list)
  # Then create your saver like this:
  saver = opt.swapping_saver()
  # Pass it to your training loop.
slim.learning.train(
model,
...
saver=saver)
```
Note that for evaluation, the normal saver should be used instead of
swapping_saver().
"""
def __init__(self, opt, average_decay=0.9999, num_updates=None,
sequential_update=True):
"""Construct a new MovingAverageOptimizer.
Args:
opt: A tf.Optimizer that will be used to compute and apply gradients.
average_decay: Float. Decay to use to maintain the moving averages
of trained variables.
See tf.train.ExponentialMovingAverage for details.
num_updates: Optional count of number of updates applied to variables.
See tf.train.ExponentialMovingAverage for details.
sequential_update: Bool. If False, will compute the moving average at the
same time as the model is updated, potentially doing
benign data races.
If True, will update the moving average after gradient
updates.
"""
self._optimizer = opt
self._ema = moving_averages.ExponentialMovingAverage(
average_decay, num_updates=num_updates)
self._variable_map = None
self._sequential_update = sequential_update
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
train_op = self._optimizer.apply_gradients(
grads_and_vars, global_step=global_step, name=name)
var_list = [x[1] for x in grads_and_vars if x[0] is not None]
self._variable_map = {}
if self._sequential_update:
with ops.control_dependencies([train_op]):
ma_op = self._ema.apply(var_list)
else:
ma_op = self._ema.apply(var_list)
for v in var_list:
v_avg = self._ema.average(v)
self._variable_map[v.op.name] = v_avg
self._variable_map[v_avg.op.name] = v
return control_flow_ops.group(train_op, ma_op, name="train_with_avg")
def swapping_saver(self, var_list=None, name='swapping_saver', **kwargs):
"""Create a saver swapping moving averages and variables.
You should use this saver during training. It will save the moving averages
of the trained parameters under the original parameter names. For
evaluations or inference you should use a regular saver and it will
automatically use the moving averages for the trained variables.
You must call this function after all variables have been created and after
you have called Optimizer.minimize().
Args:
var_list: List of variables to save, as per `Saver()`.
If set to None, will save all the variables that have been
created before this call.
name: The name of the saver.
**kwargs: Keyword arguments of `Saver()`.
Returns:
A `tf.train.Saver` object.
Raises:
RuntimeError: If apply_gradients or minimize has not been called before.
"""
if self._variable_map is None:
raise RuntimeError('Must call apply_gradients or minimize before '
'creating the swapping_saver')
if var_list is None:
var_list = variables.global_variables()
if not isinstance(var_list, dict):
var_list = saver.BaseSaverBuilder.OpListToDict(var_list)
# Now swap variables and moving averages
swapped_var_list = {}
for k, v in six.iteritems(var_list):
v_swap = self._variable_map.get(v.op.name, None)
if v_swap:
swapped_var_list[k] = v_swap
else:
swapped_var_list[k] = v
# Build the swapping saver.
return saver.Saver(swapped_var_list, name=name, **kwargs)
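# Hedged usage sketch (editor's addition, not from the original file): at
# evaluation time, a checkpoint written by swapping_saver() is restored with
# a plain tf.train.Saver, since the averaged values were saved under the
# original variable names. `checkpoint_path` and `run_eval` are assumed
# placeholders, not APIs of this module.
#
#   eval_saver = tf.train.Saver()  # regular saver, not swapping_saver()
#   with tf.Session() as sess:
#       eval_saver.restore(sess, checkpoint_path)
#       run_eval(sess)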
| apache-2.0 |
apollo13/ansible | test/units/module_utils/common/parameters/test_list_deprecations.py | 37 | 1535 | # -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common.parameters import list_deprecations
@pytest.fixture
def params():
return {
'name': 'bob',
'dest': '/etc/hosts',
'state': 'present',
'value': 5,
}
def test_list_deprecations():
argument_spec = {
'old': {'type': 'str', 'removed_in_version': '2.5'},
'foo': {'type': 'dict', 'options': {'old': {'type': 'str', 'removed_in_version': 1.0}}},
'bar': {'type': 'list', 'elements': 'dict', 'options': {'old': {'type': 'str', 'removed_in_version': '2.10'}}},
}
params = {
'name': 'rod',
'old': 'option',
'foo': {'old': 'value'},
'bar': [{'old': 'value'}, {}],
}
result = list_deprecations(argument_spec, params)
assert len(result) == 3
result.sort(key=lambda entry: entry['msg'])
assert result[0]['msg'] == """Param 'bar["old"]' is deprecated. See the module docs for more information"""
assert result[0]['version'] == '2.10'
assert result[1]['msg'] == """Param 'foo["old"]' is deprecated. See the module docs for more information"""
assert result[1]['version'] == 1.0
assert result[2]['msg'] == "Param 'old' is deprecated. See the module docs for more information"
assert result[2]['version'] == '2.5'
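# Hedged usage note (editor's addition): this file is collected by pytest; a
# typical invocation from an Ansible source tree might look like
#
#   pytest test/units/module_utils/common/parameters/test_list_deprecations.py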
| gpl-3.0 |
matthiasdiener/spack | var/spack/repos/builtin/packages/gource/package.py | 5 | 2259 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gource(AutotoolsPackage):
"""Software version control visualization."""
homepage = "http://gource.io"
url = "https://github.com/acaudwell/Gource/releases/download/gource-0.44/gource-0.44.tar.gz"
version('0.44', '79cda1bfaad16027d59cce55455bfab88b57c69d')
depends_on('automake', type='build')
depends_on('autoconf', type='build')
depends_on('libtool', type='build')
depends_on('glm', type='build')
depends_on('pkgconfig', type='build')
depends_on('[email protected]:')
depends_on('pcre')
depends_on('[email protected]:+filesystem+system')
depends_on('glew')
depends_on('jpeg')
depends_on('libpng')
depends_on('sdl2')
depends_on('sdl2-image')
parallel = False
force_autoreconf = True
def configure_args(self):
spec = self.spec
return [
'--disable-dependency-tracking',
'--without-x',
'--with-boost=%s' % spec['boost'].prefix
]
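# Hedged usage note (editor's addition): with this package file placed in a
# Spack repository, an install would typically be requested from the CLI as,
# e.g.,
#
#   spack install gource@0.44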
| lgpl-2.1 |
jinghaomiao/apollo | cyber/python/cyber_py3/cyber.py | 2 | 13565 | #!/usr/bin/env python3
# ****************************************************************************
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ****************************************************************************
# -*- coding: utf-8 -*-
"""Module for init environment."""
import ctypes
import importlib
import os
import sys
import threading
import time
from google.protobuf.descriptor_pb2 import FileDescriptorProto
PY_CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
PY_CALLBACK_TYPE_T = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p)
# init vars
wrapper_lib_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../internal'))
sys.path.append(wrapper_lib_path)
_CYBER = importlib.import_module('_cyber_wrapper')
##
# @brief init cyber environment.
# @param module_name Used as the log file name.
#
# @return Success is True, otherwise False.
def init(module_name="cyber_py"):
"""
init cyber environment.
"""
return _CYBER.py_init(module_name)
def ok():
"""
    Check whether the cyber environment is OK.
"""
return _CYBER.py_ok()
def shutdown():
"""
    Shut down the cyber environment.
"""
return _CYBER.py_shutdown()
def is_shutdown():
"""
    Check whether cyber has been shut down.
"""
return _CYBER.py_is_shutdown()
def waitforshutdown():
"""
    Block until cyber is shut down.
"""
return _CYBER.py_waitforshutdown()
# //////////////////////////////class//////////////////////////////
class Writer(object):
"""
Class for cyber writer wrapper.
"""
def __init__(self, name, writer, data_type):
self.name = name
self.writer = writer
self.data_type = data_type
##
# @brief write message.
#
# @param data is a message type.
#
    # @return 0 on success; a non-zero value otherwise.
def write(self, data):
"""
        Serialize the given protobuf message and write it to the channel.
"""
return _CYBER.PyWriter_write(self.writer, data.SerializeToString())
class Reader(object):
"""
Class for cyber reader wrapper.
"""
def __init__(self, name, reader, data_type):
self.name = name
self.reader = reader
self.data_type = data_type
class Client(object):
"""
Class for cyber service client wrapper.
"""
def __init__(self, client, data_type):
self.client = client
self.data_type = data_type
##
# @brief send request message to service.
#
# @param data is a message type.
#
# @return None or response from service.
def send_request(self, data):
"""
send request to service
"""
response_str = _CYBER.PyClient_send_request(
self.client, data.SerializeToString())
if len(response_str) == 0:
return None
response = self.data_type()
response.ParseFromString(response_str)
return response
class Node(object):
"""
Class for cyber Node wrapper.
"""
def __init__(self, name):
self.node = _CYBER.new_PyNode(name)
self.list_writer = []
self.list_reader = []
self.subs = {}
self.pubs = {}
self.list_client = []
self.list_service = []
self.mutex = threading.Lock()
self.callbacks = {}
self.services = {}
def __del__(self):
# print("+++ node __del___")
for writer in self.list_writer:
_CYBER.delete_PyWriter(writer)
for reader in self.list_reader:
_CYBER.delete_PyReader(reader)
for c in self.list_client:
_CYBER.delete_PyClient(c)
for s in self.list_service:
_CYBER.delete_PyService(s)
_CYBER.delete_PyNode(self.node)
##
# @brief register proto message by proto descriptor file.
#
# @param file_desc object about datatype.DESCRIPTOR.file .
def register_message(self, file_desc):
"""
register proto message desc file.
"""
for dep in file_desc.dependencies:
self.register_message(dep)
proto = FileDescriptorProto()
file_desc.CopyToProto(proto)
proto.name = file_desc.name
desc_str = proto.SerializeToString()
_CYBER.PyNode_register_message(self.node, desc_str)
##
# @brief create a channel writer for send message to another channel.
#
# @param name is the channel name.
# @param data_type is message class for serialization
# @param qos_depth is a queue size, which defines the size of the cache.
#
# @return return the writer object.
def create_writer(self, name, data_type, qos_depth=1):
"""
create a channel writer for send message to another channel.
"""
self.register_message(data_type.DESCRIPTOR.file)
datatype = data_type.DESCRIPTOR.full_name
writer = _CYBER.PyNode_create_writer(self.node, name,
datatype, qos_depth)
self.list_writer.append(writer)
return Writer(name, writer, datatype)
def reader_callback(self, name):
sub = self.subs[name.decode('utf8')]
msg_str = _CYBER.PyReader_read(sub[0], False)
if len(msg_str) > 0:
if sub[3] != "RawData":
proto = sub[3]()
proto.ParseFromString(msg_str)
else:
# print "read rawdata-> ",sub[3]
proto = msg_str
if sub[2] is None:
sub[1](proto)
else:
sub[1](proto, sub[2])
return 0
##
# @brief create a channel reader for receive message from another channel.
#
# @param name the channel name to read.
# @param data_type message class for serialization
# @param callback function to call (fn(data)) when data is received. If
# args is set, the function must accept the args as a second argument,
# i.e. fn(data, args)
# @param args additional arguments to pass to the callback
#
# @return return the writer object.
def create_reader(self, name, data_type, callback, args=None):
"""
create a channel reader for receive message from another channel.
"""
self.mutex.acquire()
if name in self.subs.keys():
self.mutex.release()
return None
self.mutex.release()
# datatype = data_type.DESCRIPTOR.full_name
reader = _CYBER.PyNode_create_reader(
self.node, name, str(data_type))
if reader is None:
return None
self.list_reader.append(reader)
sub = (reader, callback, args, data_type, False)
self.mutex.acquire()
self.subs[name] = sub
self.mutex.release()
fun_reader_cb = PY_CALLBACK_TYPE(self.reader_callback)
self.callbacks[name] = fun_reader_cb
f_ptr = ctypes.cast(self.callbacks[name], ctypes.c_void_p).value
_CYBER.PyReader_register_func(reader, f_ptr)
return Reader(name, reader, data_type)
def create_rawdata_reader(self, name, callback, args=None):
"""
        Create a raw-data reader that listens for RawMessage payloads.
"""
return self.create_reader(name, "RawData", callback, args)
##
# @brief create client for the c/s.
#
# @param name the service name.
# @param request_data_type the request message type.
# @param response_data_type the response message type.
#
# @return the client object.
def create_client(self, name, request_data_type, response_data_type):
datatype = request_data_type.DESCRIPTOR.full_name
c = _CYBER.PyNode_create_client(self.node, name,
str(datatype))
self.list_client.append(c)
return Client(c, response_data_type)
def service_callback(self, name):
v = self.services[name]
msg_str = _CYBER.PyService_read(v[0])
if (len(msg_str) > 0):
proto = v[3]()
proto.ParseFromString(msg_str)
response = None
if v[2] is None:
response = v[1](proto)
else:
response = v[1](proto, v[2])
_CYBER.PyService_write(v[0], response.SerializeToString())
return 0
##
# @brief create client for the c/s.
#
# @param name the service name.
# @param req_data_type the request message type.
# @param res_data_type the response message type.
# @param callback function to call (fn(data)) when data is received. If
# args is set, the function must accept the args as a second argument,
# i.e. fn(data, args)
# @param args additional arguments to pass to the callback.
#
# @return return the service object.
def create_service(self, name, req_data_type, res_data_type, callback,
args=None):
self.mutex.acquire()
if name in self.services.keys():
self.mutex.release()
return None
self.mutex.release()
datatype = req_data_type.DESCRIPTOR.full_name
s = _CYBER.PyNode_create_service(self.node, name, str(datatype))
self.list_service.append(s)
v = (s, callback, args, req_data_type, False)
self.mutex.acquire()
self.services[name] = v
self.mutex.release()
f = PY_CALLBACK_TYPE(self.service_callback)
self.callbacks[name] = f
f_ptr = ctypes.cast(f, ctypes.c_void_p).value
_CYBER.PyService_register_func(s, f_ptr)
return s
def spin(self):
"""
        Spin until shutdown, sleeping 0.002 s per iteration.
"""
while not _CYBER.py_is_shutdown():
time.sleep(0.002)
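# Hedged usage sketch (editor's addition, not from the original module): a
# minimal talker/listener pair built on the Node API above. `Chatter` is an
# assumed protobuf message class; any generated protobuf type works the same
# way, and the import path for `cyber` may differ by deployment.
#
#   from cyber_py3 import cyber
#   from chatter_pb2 import Chatter  # hypothetical generated protobuf
#
#   cyber.init()
#   node = cyber.Node("demo_node")
#   writer = node.create_writer("/demo/chatter", Chatter, qos_depth=10)
#   node.create_reader("/demo/chatter", Chatter, lambda msg: print(msg))
#   writer.write(Chatter())
#   node.spin()
#   cyber.shutdown()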
class ChannelUtils(object):
@staticmethod
##
    # @brief Decode raw message data into a human-readable string, given its message type.
#
# @param msg_type message type.
# @param rawmsgdata rawmsg data.
#
# @return a human readable form of this message. For debugging and
# other purposes.
def get_debugstring_rawmsgdata(msg_type, rawmsgdata):
return _CYBER.PyChannelUtils_get_debugstring_by_msgtype_rawmsgdata(msg_type, rawmsgdata)
@staticmethod
##
    # @brief Look up the message type of a channel by its name.
#
# @param channel_name channel name.
# @param sleep_s wait time for topo discovery.
#
    # @return the message type of this channel.
def get_msgtype(channel_name, sleep_s=2):
return _CYBER.PyChannelUtils_get_msg_type(channel_name, sleep_s)
@staticmethod
##
# @brief Get all active channel names
#
# @param sleep_s wait time for topo discovery.
#
# @return all active channel names.
def get_channels(sleep_s=2):
return _CYBER.PyChannelUtils_get_active_channels(sleep_s)
@staticmethod
##
# @brief Get the active channel info.
#
# @param sleep_s wait time for topo discovery.
#
# @return all active channels info. {'channel1':[], 'channel2':[]} .
def get_channels_info(sleep_s=2):
return _CYBER.PyChannelUtils_get_channels_info(sleep_s)
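# Hedged usage sketch (editor's addition): enumerating active channels and
# their message types via ChannelUtils; assumes a running Cyber RT topology.
#
#   for channel in ChannelUtils.get_channels(sleep_s=2):
#       print(channel, ChannelUtils.get_msgtype(channel))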
class NodeUtils(object):
@staticmethod
##
# @brief Get all active node names.
#
# @param sleep_s wait time for topo discovery.
#
# @return all active node names.
def get_nodes(sleep_s=2):
return _CYBER.PyNodeUtils_get_active_nodes(sleep_s)
@staticmethod
##
# @brief Get node attribute by the node name.
#
# @param node_name node name.
# @param sleep_s wait time for topo discovery.
#
# @return the node's attribute.
def get_node_attr(node_name, sleep_s=2):
return _CYBER.PyNodeUtils_get_node_attr(node_name, sleep_s)
@staticmethod
##
# @brief Get node's reader channel names
#
# @param node_name the node name.
# @param sleep_s wait time for topo discovery.
#
# @return node's reader channel names.
def get_readersofnode(node_name, sleep_s=2):
return _CYBER.PyNodeUtils_get_readersofnode(node_name, sleep_s)
@staticmethod
##
# @brief Get node's writer channel names.
#
# @param node_name the node name.
# @param sleep_s wait time for topo discovery.
#
# @return node's writer channel names.
def get_writersofnode(node_name, sleep_s=2):
return _CYBER.PyNodeUtils_get_writersofnode(node_name, sleep_s)
class ServiceUtils(object):
@staticmethod
##
# @brief Get all active service names.
#
# @param sleep_s wait time for topo discovery.
#
# @return all active service names.
def get_services(sleep_s=2):
return _CYBER.PyServiceUtils_get_active_services(sleep_s)
@staticmethod
##
# @brief Get service attribute by the service name.
#
# @param service_name service name.
# @param sleep_s wait time for topo discovery.
#
# @return the service's attribute.
def get_service_attr(service_name, sleep_s=2):
return _CYBER.PyServiceUtils_get_service_attr(service_name, sleep_s)
| apache-2.0 |
futurulus/scipy | scipy/integrate/odepack.py | 47 | 7285 | # Author: Travis Oliphant
from __future__ import division, print_function, absolute_import
__all__ = ['odeint']
from . import _odepack
from copy import copy
import warnings
class ODEintWarning(Warning):
pass
_msgs = {2: "Integration successful.",
1: "Nothing was done; the integration time was 0.",
-1: "Excess work done on this call (perhaps wrong Dfun type).",
-2: "Excess accuracy requested (tolerances too small).",
-3: "Illegal input detected (internal error).",
-4: "Repeated error test failures (internal error).",
-5: "Repeated convergence failures (perhaps bad Jacobian or tolerances).",
-6: "Error weight became zero during problem.",
-7: "Internal workspace insufficient to finish (internal error)."
}
def odeint(func, y0, t, args=(), Dfun=None, col_deriv=0, full_output=0,
ml=None, mu=None, rtol=None, atol=None, tcrit=None, h0=0.0,
hmax=0.0, hmin=0.0, ixpr=0, mxstep=0, mxhnil=0, mxordn=12,
mxords=5, printmessg=0):
"""
Integrate a system of ordinary differential equations.
Solve a system of ordinary differential equations using lsoda from the
FORTRAN library odepack.
Solves the initial value problem for stiff or non-stiff systems
    of first-order ODEs::
dy/dt = func(y,t0,...)
where y can be a vector.
Parameters
----------
func : callable(y, t0, ...)
Computes the derivative of y at t0.
y0 : array
Initial condition on y (can be a vector).
t : array
A sequence of time points for which to solve for y. The initial
value point should be the first element of this sequence.
args : tuple, optional
Extra arguments to pass to function.
Dfun : callable(y, t0, ...)
Gradient (Jacobian) of `func`.
col_deriv : bool, optional
True if `Dfun` defines derivatives down columns (faster),
otherwise `Dfun` should define derivatives across rows.
full_output : bool, optional
        True if a dictionary of optional outputs should be returned as the second output
printmessg : bool, optional
Whether to print the convergence message
Returns
-------
y : array, shape (len(t), len(y0))
Array containing the value of y for each desired time in t,
with the initial value `y0` in the first row.
infodict : dict, only returned if full_output == True
Dictionary containing additional output information
======= ============================================================
key meaning
======= ============================================================
'hu' vector of step sizes successfully used for each time step.
'tcur' vector with the value of t reached for each time step.
(will always be at least as large as the input times).
'tolsf' vector of tolerance scale factors, greater than 1.0,
computed when a request for too much accuracy was detected.
'tsw' value of t at the time of the last method switch
(given for each time step)
'nst' cumulative number of time steps
'nfe' cumulative number of function evaluations for each time step
'nje' cumulative number of jacobian evaluations for each time step
'nqu' a vector of method orders for each successful step.
'imxer' index of the component of largest magnitude in the
weighted local error vector (e / ewt) on an error return, -1
otherwise.
'lenrw' the length of the double work array required.
'leniw' the length of integer work array required.
'mused' a vector of method indicators for each successful time step:
1: adams (nonstiff), 2: bdf (stiff)
======= ============================================================
Other Parameters
----------------
ml, mu : int, optional
If either of these are not None or non-negative, then the
Jacobian is assumed to be banded. These give the number of
lower and upper non-zero diagonals in this banded matrix.
For the banded case, `Dfun` should return a matrix whose
rows contain the non-zero bands (starting with the lowest diagonal).
Thus, the return matrix `jac` from `Dfun` should have shape
``(ml + mu + 1, len(y0))`` when ``ml >=0`` or ``mu >=0``.
The data in `jac` must be stored such that ``jac[i - j + mu, j]``
holds the derivative of the `i`th equation with respect to the `j`th
state variable. If `col_deriv` is True, the transpose of this
`jac` must be returned.
rtol, atol : float, optional
The input parameters `rtol` and `atol` determine the error
control performed by the solver. The solver will control the
vector, e, of estimated local errors in y, according to an
inequality of the form ``max-norm of (e / ewt) <= 1``,
where ewt is a vector of positive error weights computed as
``ewt = rtol * abs(y) + atol``.
rtol and atol can be either vectors the same length as y or scalars.
Defaults to 1.49012e-8.
tcrit : ndarray, optional
Vector of critical points (e.g. singularities) where integration
care should be taken.
h0 : float, (0: solver-determined), optional
The step size to be attempted on the first step.
hmax : float, (0: solver-determined), optional
The maximum absolute step size allowed.
hmin : float, (0: solver-determined), optional
The minimum absolute step size allowed.
ixpr : bool, optional
Whether to generate extra printing at method switches.
mxstep : int, (0: solver-determined), optional
Maximum number of (internally defined) steps allowed for each
integration point in t.
mxhnil : int, (0: solver-determined), optional
Maximum number of messages printed.
mxordn : int, (0: solver-determined), optional
Maximum order to be allowed for the non-stiff (Adams) method.
mxords : int, (0: solver-determined), optional
Maximum order to be allowed for the stiff (BDF) method.
See Also
--------
ode : a more object-oriented integrator based on VODE.
quad : for finding the area under a curve.
"""
if ml is None:
ml = -1 # changed to zero inside function call
if mu is None:
mu = -1 # changed to zero inside function call
t = copy(t)
y0 = copy(y0)
output = _odepack.odeint(func, y0, t, args, Dfun, col_deriv, ml, mu,
full_output, rtol, atol, tcrit, h0, hmax, hmin,
ixpr, mxstep, mxhnil, mxordn, mxords)
if output[-1] < 0:
warning_msg = _msgs[output[-1]] + " Run with full_output = 1 to get quantitative information."
warnings.warn(warning_msg, ODEintWarning)
elif printmessg:
warning_msg = _msgs[output[-1]]
warnings.warn(warning_msg, ODEintWarning)
if full_output:
output[1]['message'] = _msgs[output[-1]]
output = output[:-1]
if len(output) == 1:
return output[0]
else:
return output
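# Hedged usage sketch (editor's addition, not part of scipy): integrating the
# scalar ODE dy/dt = -2*y with y(0) = 1, whose exact solution is exp(-2*t),
# so the output of odeint can be checked directly.
#
#   import numpy as np
#   from scipy.integrate import odeint
#
#   def decay(y, t):
#       return -2.0 * y
#
#   t = np.linspace(0.0, 1.0, 11)
#   y = odeint(decay, 1.0, t)           # shape (11, 1)
#   assert np.allclose(y[:, 0], np.exp(-2.0 * t), atol=1e-6)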
| bsd-3-clause |
Mahdisadjadi/arxivscraper | arxivscraper/arxivscraper.py | 1 | 9520 | """
A Python program to retrieve records from arXiv.org in given
categories and a specific date range.
Author: Mahdi Sadjadi (sadjadi.seyedmahdi[AT]gmail[DOT]com).
"""
from __future__ import print_function
import xml.etree.ElementTree as ET
import datetime
import time
import sys
import numpy as np  # used by search_all() below
PYTHON3 = sys.version_info[0] == 3
if PYTHON3:
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError
else:
from urllib import urlencode
from urllib2 import HTTPError, urlopen
OAI = '{http://www.openarchives.org/OAI/2.0/}'
ARXIV = '{http://arxiv.org/OAI/arXiv/}'
BASE = 'http://export.arxiv.org/oai2?verb=ListRecords&'
class Record(object):
"""
    A class to hold a single record from arXiv.
    Each record exposes the properties parsed below; the constructor
    expects an xml.etree.ElementTree.Element.
"""
def __init__(self, xml_record):
"""if not isinstance(object,ET.Element):
raise TypeError("")"""
self.xml = xml_record
self.id = self._get_text(ARXIV, 'id')
self.url = 'https://arxiv.org/abs/' + self.id
self.title = self._get_text(ARXIV, 'title')
self.abstract = self._get_text(ARXIV, 'abstract')
self.cats = self._get_text(ARXIV, 'categories')
self.created = self._get_text(ARXIV, 'created')
self.updated = self._get_text(ARXIV, 'updated')
self.doi = self._get_text(ARXIV, 'doi')
self.authors = self._get_authors()
self.affiliation = self._get_affiliation()
def _get_text(self, namespace, tag):
"""Extracts text from an xml field"""
try:
return self.xml.find(namespace + tag).text.strip().lower().replace('\n', ' ')
except:
return ''
def _get_name(self, parent, attribute):
try:
return parent.find(ARXIV + attribute).text.lower()
except:
return "n/a"
def _get_authors(self):
authors_xml = self.xml.findall(ARXIV + 'authors/' + ARXIV + 'author')
last_names = [self._get_name(author, 'keyname') for author in authors_xml]
first_names = [self._get_name(author, 'forenames') for author in authors_xml]
full_names = [a+' '+b for a,b in zip(first_names, last_names)]
return full_names
def _get_affiliation(self):
authors = self.xml.findall(ARXIV + 'authors/' + ARXIV + 'author')
try:
affiliation = [author.find(ARXIV + 'affiliation').text.lower() for author in authors]
return affiliation
except:
return []
def output(self):
d = {
'title': self.title,
'id': self.id,
'abstract': self.abstract,
'categories': self.cats,
'doi': self.doi,
'created': self.created,
'updated': self.updated,
'authors': self.authors,
'affiliation': self.affiliation,
'url': self.url
}
return d
class Scraper(object):
"""
A class to hold info about attributes of scraping,
such as date range, categories, and number of returned
records. If `from` is not provided, the first day of
the current month will be used. If `until` is not provided,
the current day will be used.
    Parameters
    ----------
category: str
The category of scraped records
    date_from: str
starting date in format 'YYYY-MM-DD'. Updated eprints are included even if
they were created outside of the given date range. Default: first day of current month.
date_until: str
final date in format 'YYYY-MM-DD'. Updated eprints are included even if
they were created outside of the given date range. Default: today.
t: int
        Waiting time between subsequent calls to the API, triggered by Error 503.
timeout: int
Timeout in seconds after which the scraping stops. Default: 300s
    filters: dict
A dictionary where keys are used to limit the saved results. Possible keys:
subcats, author, title, abstract. See the example, below.
Example:
Returning all eprints from
```
import arxivscraper.arxivscraper as ax
scraper = ax.Scraper(category='stat',date_from='2017-12-23',date_until='2017-12-25',t=10,
filters={'affiliation':['facebook'],'abstract':['learning']})
output = scraper.scrape()
```
"""
def __init__(self, category, date_from=None, date_until=None, t=30, timeout=300, filters={}):
self.cat = str(category)
self.t = t
self.timeout = timeout
DateToday = datetime.date.today()
if date_from is None:
self.f = str(DateToday.replace(day=1))
else:
self.f = date_from
if date_until is None:
self.u = str(DateToday)
else:
self.u = date_until
self.url = BASE + 'from=' + self.f + '&until=' + self.u + '&metadataPrefix=arXiv&set=%s' % self.cat
self.filters = filters
if not self.filters:
self.append_all = True
else:
self.append_all = False
self.keys = filters.keys()
def scrape(self):
t0 = time.time()
tx = time.time()
elapsed = 0.0
url = self.url
ds = []
k = 1
while True:
print('fetching up to ', 1000 * k, 'records...')
try:
response = urlopen(url)
except HTTPError as e:
if e.code == 503:
                    to = int(e.hdrs.get('retry-after', 30))
                    print('Got 503. Retrying after {0:d} seconds.'.format(to))
                    time.sleep(to)
continue
else:
raise
k += 1
xml = response.read()
root = ET.fromstring(xml)
records = root.findall(OAI + 'ListRecords/' + OAI + 'record')
for record in records:
meta = record.find(OAI + 'metadata').find(ARXIV + 'arXiv')
record = Record(meta).output()
if self.append_all:
ds.append(record)
else:
save_record = False
for key in self.keys:
for word in self.filters[key]:
if word.lower() in record[key]:
save_record = True
if save_record:
ds.append(record)
try:
token = root.find(OAI + 'ListRecords').find(OAI + 'resumptionToken')
except:
return 1
if token is None or token.text is None:
break
else:
url = BASE + 'resumptionToken=%s' % token.text
ty = time.time()
elapsed += (ty-tx)
if elapsed >= self.timeout:
break
else:
tx = time.time()
t1 = time.time()
print('fetching is completed in {0:.1f} seconds.'.format(t1 - t0))
print ('Total number of records {:d}'.format(len(ds)))
return ds
def search_all(df, col, *words):
"""
    Return the sub-DataFrame of rows whose `col` column contains all the given words.
source: https://stackoverflow.com/a/22624079/3349443
"""
return df[np.logical_and.reduce([df[col].str.contains(word) for word in words])]
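# Hedged usage sketch (editor's addition): applying search_all to scraped
# records loaded into a pandas DataFrame. pandas is not imported by this
# module, so the import below is part of the sketch.
#
#   import pandas as pd
#   df = pd.DataFrame(output)  # `output` as returned by Scraper.scrape()
#   hits = search_all(df, 'abstract', 'machine', 'learning')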
cats = [
'astro-ph', 'cond-mat', 'gr-qc', 'hep-ex', 'hep-lat', 'hep-ph', 'hep-th',
'math-ph', 'nlin', 'nucl-ex', 'nucl-th', 'physics', 'quant-ph', 'math', 'CoRR', 'q-bio',
'q-fin', 'stat']
subcats = {'cond-mat': ['cond-mat.dis-nn', 'cond-mat.mtrl-sci', 'cond-mat.mes-hall',
'cond-mat.other', 'cond-mat.quant-gas', 'cond-mat.soft', 'cond-mat.stat-mech',
'cond-mat.str-el', 'cond-mat.supr-con'],
'hep-th': [],'hep-ex': [],'hep-ph': [],
'gr-qc': [],'quant-ph': [],'q-fin': ['q-fin.CP', 'q-fin.EC', 'q-fin.GN',
'q-fin.MF', 'q-fin.PM', 'q-fin.PR', 'q-fin.RM', 'q-fin.ST', 'q-fin.TR'],
'nucl-ex': [],'CoRR': [],'nlin': ['nlin.AO', 'nlin.CG', 'nlin.CD', 'nlin.SI',
'nlin.PS'],
'physics': ['physics.acc-ph', 'physics.app-ph', 'physics.ao-ph',
'physics.atom-ph', 'physics.atm-clus', 'physics.bio-ph', 'physics.chem-ph',
'physics.class-ph', 'physics.comp-ph', 'physics.data-an', 'physics.flu-dyn',
'physics.gen-ph', 'physics.geo-ph', 'physics.hist-ph', 'physics.ins-det',
'physics.med-ph', 'physics.optics', 'physics.ed-ph', 'physics.soc-ph',
'physics.plasm-ph', 'physics.pop-ph', 'physics.space-ph'],
'math-ph': [],
'math': ['math.AG', 'math.AT', 'math.AP', 'math.CT', 'math.CA', 'math.CO',
'math.AC', 'math.CV', 'math.DG', 'math.DS', 'math.FA', 'math.GM', 'math.GN',
'math.GT', 'math.GR', 'math.HO', 'math.IT', 'math.KT', 'math.LO', 'math.MP',
'math.MG', 'math.NT', 'math.NA', 'math.OA', 'math.OC', 'math.PR', 'math.QA',
'math.RT', 'math.RA', 'math.SP', 'math.ST', 'math.SG'],
'q-bio': ['q-bio.BM',
'q-bio.CB', 'q-bio.GN', 'q-bio.MN', 'q-bio.NC', 'q-bio.OT', 'q-bio.PE', 'q-bio.QM',
'q-bio.SC', 'q-bio.TO'],
'nucl-th': [],'stat': ['stat.AP', 'stat.CO', 'stat.ML',
'stat.ME', 'stat.OT', 'stat.TH'],
'hep-lat': [],'astro-ph': ['astro-ph.GA',
'astro-ph.CO', 'astro-ph.EP', 'astro-ph.HE', 'astro-ph.IM', 'astro-ph.SR']
}
| mit |