repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
xuewei4d/scikit-learn
examples/neighbors/plot_nca_dim_reduction.py
24
3839
""" ============================================================== Dimensionality Reduction with Neighborhood Components Analysis ============================================================== Sample usage of Neighborhood Components Analysis for dimensionality reduction. This example compares different (linear) dimensionality reduction methods applied on the Digits data set. The data set contains images of digits from 0 to 9 with approximately 180 samples of each class. Each image is of dimension 8x8 = 64, and is reduced to a two-dimensional data point. Principal Component Analysis (PCA) applied to this data identifies the combination of attributes (principal components, or directions in the feature space) that account for the most variance in the data. Here we plot the different samples on the 2 first principal components. Linear Discriminant Analysis (LDA) tries to identify attributes that account for the most variance *between classes*. In particular, LDA, in contrast to PCA, is a supervised method, using known class labels. Neighborhood Components Analysis (NCA) tries to find a feature space such that a stochastic nearest neighbor algorithm will give the best accuracy. Like LDA, it is a supervised method. One can see that NCA enforces a clustering of the data that is visually meaningful despite the large reduction in dimension. 
""" # License: BSD 3 clause import numpy as np import matplotlib.pyplot as plt from sklearn import datasets from sklearn.model_selection import train_test_split from sklearn.decomposition import PCA from sklearn.discriminant_analysis import LinearDiscriminantAnalysis from sklearn.neighbors import (KNeighborsClassifier, NeighborhoodComponentsAnalysis) from sklearn.pipeline import make_pipeline from sklearn.preprocessing import StandardScaler print(__doc__) n_neighbors = 3 random_state = 0 # Load Digits dataset X, y = datasets.load_digits(return_X_y=True) # Split into train/test X_train, X_test, y_train, y_test = \ train_test_split(X, y, test_size=0.5, stratify=y, random_state=random_state) dim = len(X[0]) n_classes = len(np.unique(y)) # Reduce dimension to 2 with PCA pca = make_pipeline(StandardScaler(), PCA(n_components=2, random_state=random_state)) # Reduce dimension to 2 with LinearDiscriminantAnalysis lda = make_pipeline(StandardScaler(), LinearDiscriminantAnalysis(n_components=2)) # Reduce dimension to 2 with NeighborhoodComponentAnalysis nca = make_pipeline(StandardScaler(), NeighborhoodComponentsAnalysis(n_components=2, random_state=random_state)) # Use a nearest neighbor classifier to evaluate the methods knn = KNeighborsClassifier(n_neighbors=n_neighbors) # Make a list of the methods to be compared dim_reduction_methods = [('PCA', pca), ('LDA', lda), ('NCA', nca)] # plt.figure() for i, (name, model) in enumerate(dim_reduction_methods): plt.figure() # plt.subplot(1, 3, i + 1, aspect=1) # Fit the method's model model.fit(X_train, y_train) # Fit a nearest neighbor classifier on the embedded training set knn.fit(model.transform(X_train), y_train) # Compute the nearest neighbor accuracy on the embedded test set acc_knn = knn.score(model.transform(X_test), y_test) # Embed the data set in 2 dimensions using the fitted model X_embedded = model.transform(X) # Plot the projected points and show the evaluation score plt.scatter(X_embedded[:, 0], X_embedded[:, 1], 
c=y, s=30, cmap='Set1') plt.title("{}, KNN (k={})\nTest accuracy = {:.2f}".format(name, n_neighbors, acc_knn)) plt.show()
bsd-3-clause
CiscoSystems/horizon
openstack_dashboard/dashboards/project/database_backups/views.py
33
4357
# Copyright 2013 Rackspace Hosting # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Views for displaying database backups. """ from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import tables as horizon_tables from horizon.utils import filters from horizon import views as horizon_views from horizon import workflows as horizon_workflows from openstack_dashboard import api from openstack_dashboard.dashboards.project.database_backups import tables from openstack_dashboard.dashboards.project.database_backups import workflows class IndexView(horizon_tables.DataTableView): table_class = tables.BackupsTable template_name = 'project/database_backups/index.html' page_title = _("Backups") def _get_extra_data(self, backup): """Apply extra info to the backup.""" instance_id = backup.instance_id # TODO(rdopieralski) It's not clear where this attribute is supposed # to come from. At first glance it looks like it will always be {}. 
if not hasattr(self, '_instances'): self._instances = {} instance = self._instances.get(instance_id) if instance is None: try: instance = api.trove.instance_get(self.request, instance_id) except Exception: instance = _('Not Found') backup.instance = instance return backup def get_data(self): # TODO(rmyers) Add pagination support after it is available # https://blueprints.launchpad.net/trove/+spec/paginate-backup-list try: backups = api.trove.backup_list(self.request) backups = map(self._get_extra_data, backups) except Exception: backups = [] msg = _('Error getting database backup list.') exceptions.handle(self.request, msg) return backups class BackupView(horizon_workflows.WorkflowView): workflow_class = workflows.CreateBackup template_name = "project/database_backups/backup.html" page_title = _("Backup Database") def get_context_data(self, **kwargs): context = super(BackupView, self).get_context_data(**kwargs) context["instance_id"] = kwargs.get("instance_id") self._instance = context['instance_id'] return context class DetailView(horizon_views.APIView): template_name = "project/database_backups/details.html" page_title = _("Backup Details: {{ backup.name }}") def get_data(self, request, context, *args, **kwargs): backup_id = kwargs.get("backup_id") try: backup = api.trove.backup_get(request, backup_id) created_at = filters.parse_isotime(backup.created) updated_at = filters.parse_isotime(backup.updated) backup.duration = updated_at - created_at except Exception: redirect = reverse('horizon:project:database_backups:index') msg = _('Unable to retrieve details for backup: %s') % backup_id exceptions.handle(self.request, msg, redirect=redirect) try: if(hasattr(backup, 'parent_id') and backup.parent_id is not None): backup.parent = api.trove.backup_get(request, backup.parent_id) except Exception: redirect = reverse('horizon:project:database_backups:index') msg = (_('Unable to retrieve details for parent backup: %s') % backup.parent_id) exceptions.handle(self.request, 
msg, redirect=redirect) try: instance = api.trove.instance_get(request, backup.instance_id) except Exception: instance = None context['backup'] = backup context['instance'] = instance return context
apache-2.0
FireWRT/OpenWrt-Firefly-Libraries
staging_dir/host/lib/python2.7/distutils/core.py
75
9019
"""distutils.core The only module that needs to be imported to use the Distutils; provides the 'setup' function (which is to be called from the setup script). Also indirectly provides the Distribution and Command classes, although they are really defined in distutils.dist and distutils.cmd. """ __revision__ = "$Id$" import sys import os from distutils.debug import DEBUG from distutils.errors import (DistutilsSetupError, DistutilsArgError, DistutilsError, CCompilerError) # Mainly import these so setup scripts can "from distutils.core import" them. from distutils.dist import Distribution from distutils.cmd import Command from distutils.config import PyPIRCCommand from distutils.extension import Extension # This is a barebones help message generated displayed when the user # runs the setup script with no arguments at all. More useful help # is generated with various --help options: global help, list commands, # and per-command help. USAGE = """\ usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] or: %(script)s --help [cmd1 cmd2 ...] or: %(script)s --help-commands or: %(script)s cmd --help """ def gen_usage(script_name): script = os.path.basename(script_name) return USAGE % {'script': script} # Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. 
_setup_stop_after = None _setup_distribution = None # Legal keyword arguments for the setup() function setup_keywords = ('distclass', 'script_name', 'script_args', 'options', 'name', 'version', 'author', 'author_email', 'maintainer', 'maintainer_email', 'url', 'license', 'description', 'long_description', 'keywords', 'platforms', 'classifiers', 'download_url', 'requires', 'provides', 'obsoletes', ) # Legal keyword arguments for the Extension constructor extension_keywords = ('name', 'sources', 'include_dirs', 'define_macros', 'undef_macros', 'library_dirs', 'libraries', 'runtime_library_dirs', 'extra_objects', 'extra_compile_args', 'extra_link_args', 'swig_opts', 'export_symbols', 'depends', 'language') def setup(**attrs): """The gateway to the Distutils: do everything your setup script needs to do, in a highly flexible and user-driven way. Briefly: create a Distribution instance; find and parse config files; parse the command line; run each Distutils command found there, customized by the options supplied to 'setup()' (as keyword arguments), in config files, and on the command line. The Distribution instance might be an instance of a class supplied via the 'distclass' keyword argument to 'setup'; if no such class is supplied, then the Distribution class (in dist.py) is instantiated. All other arguments to 'setup' (except for 'cmdclass') are used to set attributes of the Distribution instance. The 'cmdclass' argument, if supplied, is a dictionary mapping command names to command classes. Each command encountered on the command line will be turned into a command class, which is in turn instantiated; any class found in 'cmdclass' is used in place of the default, which is (for command 'foo_bar') class 'foo_bar' in module 'distutils.command.foo_bar'. The command class must provide a 'user_options' attribute which is a list of option specifiers for 'distutils.fancy_getopt'. 
Any command-line options between the current and the next command are used to set attributes of the current command object. When the entire command-line has been successfully parsed, calls the 'run()' method on each command object in turn. This method will be driven entirely by the Distribution object (which each command object has a reference to, thanks to its constructor), and the command-specific options that became attributes of each command object. """ global _setup_stop_after, _setup_distribution # Determine the distribution class -- either caller-supplied or # our Distribution (see below). klass = attrs.get('distclass') if klass: del attrs['distclass'] else: klass = Distribution if 'script_name' not in attrs: attrs['script_name'] = os.path.basename(sys.argv[0]) if 'script_args' not in attrs: attrs['script_args'] = sys.argv[1:] # Create the Distribution instance, using the remaining arguments # (ie. everything except distclass) to initialize it try: _setup_distribution = dist = klass(attrs) except DistutilsSetupError, msg: if 'name' in attrs: raise SystemExit, "error in %s setup command: %s" % \ (attrs['name'], msg) else: raise SystemExit, "error in setup command: %s" % msg if _setup_stop_after == "init": return dist # Find and parse the config file(s): they will override options from # the setup script, but be overridden by the command line. dist.parse_config_files() if DEBUG: print "options (after parsing config files):" dist.dump_option_dicts() if _setup_stop_after == "config": return dist # Parse the command line and override config files; any # command-line errors are the end user's fault, so turn them into # SystemExit to suppress tracebacks. 
try: ok = dist.parse_command_line() except DistutilsArgError, msg: raise SystemExit, gen_usage(dist.script_name) + "\nerror: %s" % msg if DEBUG: print "options (after parsing command line):" dist.dump_option_dicts() if _setup_stop_after == "commandline": return dist # And finally, run all the commands found on the command line. if ok: try: dist.run_commands() except KeyboardInterrupt: raise SystemExit, "interrupted" except (IOError, os.error), exc: if DEBUG: sys.stderr.write("error: %s\n" % (exc,)) raise else: raise SystemExit, "error: %s" % (exc,) except (DistutilsError, CCompilerError), msg: if DEBUG: raise else: raise SystemExit, "error: " + str(msg) return dist def run_setup(script_name, script_args=None, stop_after="run"): """Run a setup script in a somewhat controlled environment, and return the Distribution instance that drives things. This is useful if you need to find out the distribution meta-data (passed as keyword args from 'script' to 'setup()', or the contents of the config files or command-line. 'script_name' is a file that will be run with 'execfile()'; 'sys.argv[0]' will be replaced with 'script' for the duration of the call. 'script_args' is a list of strings; if supplied, 'sys.argv[1:]' will be replaced by 'script_args' for the duration of the call. 'stop_after' tells 'setup()' when to stop processing; possible values: init stop after the Distribution instance has been created and populated with the keyword arguments to 'setup()' config stop after config files have been parsed (and their data stored in the Distribution instance) commandline stop after the command-line ('sys.argv[1:]' or 'script_args') have been parsed (and the data stored in the Distribution) run [default] stop after all commands have been run (the same as if 'setup()' had been called in the usual way Returns the Distribution instance, which provides all information used to drive the Distutils. 
""" if stop_after not in ('init', 'config', 'commandline', 'run'): raise ValueError, "invalid value for 'stop_after': %r" % (stop_after,) global _setup_stop_after, _setup_distribution _setup_stop_after = stop_after save_argv = sys.argv g = {'__file__': script_name} l = {} try: try: sys.argv[0] = script_name if script_args is not None: sys.argv[1:] = script_args f = open(script_name) try: exec f.read() in g, l finally: f.close() finally: sys.argv = save_argv _setup_stop_after = None except SystemExit: # Hmm, should we do something if exiting with a non-zero code # (ie. error)? pass except: raise if _setup_distribution is None: raise RuntimeError, \ ("'distutils.core.setup()' was never called -- " "perhaps '%s' is not a Distutils setup script?") % \ script_name # I wonder if the setup script's namespace -- g and l -- would be of # any interest to callers? return _setup_distribution
gpl-2.0
noba3/KoTos
addons/plugin.video.tvondesizonexl/tvshows/views.py
3
3472
''' Created on Nov 30, 2013 @author: [email protected] This file is part of XOZE. XOZE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. XOZE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with XOZE. If not, see <http://www.gnu.org/licenses/>. ''' from xoze.context import AddonContext import logging def show_start_view(modelMap, window): logging.getLogger().debug('starting addon') def show_wish_view(modelMap, window): logging.getLogger().debug('starting wish window') window.getControl(100).setVisible(False) window.getControl(200).setVisible(False) window.getControl(300).setVisible(False) window.getControl(400).setVisible(False) window.getControl(500).setVisible(False) window.getControl(600).setVisible(False) window.getControl(800).setVisible(False) window.getControl(900).setVisible(False) window.getControl(700).setVisible(True) window.setFocusId(701) def handle_wish_closed(window, control_id): logging.getLogger().debug('closing wish and proceed') window.getControl(700).setVisible(False) def handle_init(window, control_id): window.getControl(100).setVisible(False) window.getControl(200).setVisible(False) window.getControl(300).setVisible(False) window.getControl(400).setVisible(False) window.getControl(500).setVisible(False) window.getControl(600).setVisible(False) window.getControl(700).setVisible(False) window.getControl(800).setVisible(False) window.getControl(900).setVisible(False) window.getControl(1000).setVisible(False) window.getControl(1100).setVisible(True) window.getControl(1200).setVisible(False) window.setFocusId(1100) displayBackControl = 
AddonContext().get_addon().getSetting('displayBackControl') if displayBackControl is not None and displayBackControl == 'true': window.getControl(10).setVisible(True) else: window.getControl(10).setVisible(False) def show_tv_source_view(modelMap, window): window.getControl(100).setVisible(False) window.getControl(200).setVisible(False) window.getControl(300).setVisible(False) window.getControl(400).setVisible(False) window.getControl(500).setVisible(False) window.getControl(600).setVisible(False) window.getControl(800).setVisible(False) window.getControl(1000).setVisible(False) window.getControl(1100).setVisible(False) window.getControl(1202).reset() window.getControl(1200).setVisible(True) window.getControl(1202).addItems(modelMap['tv_sources_items']) window.setFocusId(1202) def handle_tv_source_selected(window, control_id): logging.getLogger().debug('handle tv source selection... ') list_control = window.getControl(control_id) item = list_control.getSelectedItem() req_attrib_map = {} if item is not None: logging.getLogger().debug('handle tv source selected : %s ' % item.getProperty('source-name')) req_attrib_map['source-id'] = item.getProperty('source-id') return req_attrib_map
gpl-2.0
double12gzh/nova
nova/api/ec2/ec2utils.py
52
14370
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import re from oslo_log import log as logging from oslo_utils import timeutils from oslo_utils import uuidutils import six from nova import context from nova import exception from nova.i18n import _ from nova.i18n import _LI from nova.network import model as network_model from nova import objects from nova.objects import base as obj_base from nova.openstack.common import memorycache LOG = logging.getLogger(__name__) # NOTE(vish): cache mapping for one week _CACHE_TIME = 7 * 24 * 60 * 60 _CACHE = None def memoize(func): @functools.wraps(func) def memoizer(context, reqid): global _CACHE if not _CACHE: _CACHE = memorycache.get_client() key = "%s:%s" % (func.__name__, reqid) key = str(key) value = _CACHE.get(key) if value is None: value = func(context, reqid) _CACHE.set(key, value, time=_CACHE_TIME) return value return memoizer def reset_cache(): global _CACHE _CACHE = None def image_type(image_type): """Converts to a three letter image type. 
aki, kernel => aki ari, ramdisk => ari anything else => ami """ if image_type == 'kernel': return 'aki' if image_type == 'ramdisk': return 'ari' if image_type not in ['aki', 'ari']: return 'ami' return image_type def resource_type_from_id(context, resource_id): """Get resource type by ID Returns a string representation of the Amazon resource type, if known. Returns None on failure. :param context: context under which the method is called :param resource_id: resource_id to evaluate """ known_types = { 'i': 'instance', 'r': 'reservation', 'vol': 'volume', 'snap': 'snapshot', 'ami': 'image', 'aki': 'image', 'ari': 'image' } type_marker = resource_id.split('-')[0] return known_types.get(type_marker) @memoize def id_to_glance_id(context, image_id): """Convert an internal (db) id to a glance id.""" return objects.S3ImageMapping.get_by_id(context, image_id).uuid @memoize def glance_id_to_id(context, glance_id): """Convert a glance id to an internal (db) id.""" if not glance_id: return try: return objects.S3ImageMapping.get_by_uuid(context, glance_id).id except exception.NotFound: s3imap = objects.S3ImageMapping(context, uuid=glance_id) s3imap.create() return s3imap.id def ec2_id_to_glance_id(context, ec2_id): image_id = ec2_id_to_id(ec2_id) return id_to_glance_id(context, image_id) def glance_id_to_ec2_id(context, glance_id, image_type='ami'): image_id = glance_id_to_id(context, glance_id) if image_id is None: return return image_ec2_id(image_id, image_type=image_type) def ec2_id_to_id(ec2_id): """Convert an ec2 ID (i-[base 16 number]) to an instance id (int).""" try: return int(ec2_id.split('-')[-1], 16) except ValueError: raise exception.InvalidEc2Id(ec2_id=ec2_id) def image_ec2_id(image_id, image_type='ami'): """Returns image ec2_id using id and three letter type.""" template = image_type + '-%08x' return id_to_ec2_id(image_id, template=template) def get_ip_info_for_instance_from_nw_info(nw_info): if not isinstance(nw_info, network_model.NetworkInfo): nw_info = 
network_model.NetworkInfo.hydrate(nw_info) ip_info = {} fixed_ips = nw_info.fixed_ips() ip_info['fixed_ips'] = [ip['address'] for ip in fixed_ips if ip['version'] == 4] ip_info['fixed_ip6s'] = [ip['address'] for ip in fixed_ips if ip['version'] == 6] ip_info['floating_ips'] = [ip['address'] for ip in nw_info.floating_ips()] return ip_info def get_ip_info_for_instance(context, instance): """Return a dictionary of IP information for an instance.""" if isinstance(instance, obj_base.NovaObject): nw_info = instance.info_cache.network_info else: # FIXME(comstud): Temporary as we transition to objects. info_cache = instance.info_cache or {} nw_info = info_cache.get('network_info') # Make sure empty response is turned into the model if not nw_info: nw_info = [] return get_ip_info_for_instance_from_nw_info(nw_info) def id_to_ec2_id(instance_id, template='i-%08x'): """Convert an instance ID (int) to an ec2 ID (i-[base 16 number]).""" return template % int(instance_id) def id_to_ec2_inst_id(instance_id): """Get or create an ec2 instance ID (i-[base 16 number]) from uuid.""" if instance_id is None: return None elif uuidutils.is_uuid_like(instance_id): ctxt = context.get_admin_context() int_id = get_int_id_from_instance_uuid(ctxt, instance_id) return id_to_ec2_id(int_id) else: return id_to_ec2_id(instance_id) def ec2_inst_id_to_uuid(context, ec2_id): """"Convert an instance id to uuid.""" int_id = ec2_id_to_id(ec2_id) return get_instance_uuid_from_int_id(context, int_id) @memoize def get_instance_uuid_from_int_id(context, int_id): imap = objects.EC2InstanceMapping.get_by_id(context, int_id) return imap.uuid def id_to_ec2_snap_id(snapshot_id): """Get or create an ec2 volume ID (vol-[base 16 number]) from uuid.""" if uuidutils.is_uuid_like(snapshot_id): ctxt = context.get_admin_context() int_id = get_int_id_from_snapshot_uuid(ctxt, snapshot_id) return id_to_ec2_id(int_id, 'snap-%08x') else: return id_to_ec2_id(snapshot_id, 'snap-%08x') def id_to_ec2_vol_id(volume_id): """Get or 
create an ec2 volume ID (vol-[base 16 number]) from uuid.""" if uuidutils.is_uuid_like(volume_id): ctxt = context.get_admin_context() int_id = get_int_id_from_volume_uuid(ctxt, volume_id) return id_to_ec2_id(int_id, 'vol-%08x') else: return id_to_ec2_id(volume_id, 'vol-%08x') def ec2_vol_id_to_uuid(ec2_id): """Get the corresponding UUID for the given ec2-id.""" ctxt = context.get_admin_context() # NOTE(jgriffith) first strip prefix to get just the numeric int_id = ec2_id_to_id(ec2_id) return get_volume_uuid_from_int_id(ctxt, int_id) _ms_time_regex = re.compile('^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3,6}Z$') def status_to_ec2_attach_status(volume): """Get the corresponding EC2 attachment state. According to EC2 API, the valid attachment status in response is: attaching | attached | detaching | detached """ volume_status = volume.get('status') attach_status = volume.get('attach_status') if volume_status in ('attaching', 'detaching'): ec2_attach_status = volume_status elif attach_status in ('attached', 'detached'): ec2_attach_status = attach_status else: msg = _("Unacceptable attach status:%s for ec2 API.") % attach_status raise exception.Invalid(msg) return ec2_attach_status def is_ec2_timestamp_expired(request, expires=None): """Checks the timestamp or expiry time included in an EC2 request and returns true if the request is expired """ timestamp = request.get('Timestamp') expiry_time = request.get('Expires') def parse_strtime(strtime): if _ms_time_regex.match(strtime): # NOTE(MotoKen): time format for aws-sdk-java contains millisecond time_format = "%Y-%m-%dT%H:%M:%S.%fZ" else: time_format = "%Y-%m-%dT%H:%M:%SZ" return timeutils.parse_strtime(strtime, time_format) try: if timestamp and expiry_time: msg = _("Request must include either Timestamp or Expires," " but cannot contain both") LOG.error(msg) raise exception.InvalidRequest(msg) elif expiry_time: query_time = parse_strtime(expiry_time) return timeutils.is_older_than(query_time, -1) elif timestamp: 
query_time = parse_strtime(timestamp) # Check if the difference between the timestamp in the request # and the time on our servers is larger than 5 minutes, the # request is too old (or too new). if query_time and expires: return timeutils.is_older_than(query_time, expires) or \ timeutils.is_newer_than(query_time, expires) return False except ValueError: LOG.info(_LI("Timestamp is invalid.")) return True @memoize def get_int_id_from_instance_uuid(context, instance_uuid): if instance_uuid is None: return try: imap = objects.EC2InstanceMapping.get_by_uuid(context, instance_uuid) return imap.id except exception.NotFound: imap = objects.EC2InstanceMapping(context) imap.uuid = instance_uuid imap.create() return imap.id @memoize def get_int_id_from_volume_uuid(context, volume_uuid): if volume_uuid is None: return try: vmap = objects.EC2VolumeMapping.get_by_uuid(context, volume_uuid) return vmap.id except exception.NotFound: vmap = objects.EC2VolumeMapping(context) vmap.uuid = volume_uuid vmap.create() return vmap.id @memoize def get_volume_uuid_from_int_id(context, int_id): vmap = objects.EC2VolumeMapping.get_by_id(context, int_id) return vmap.uuid def ec2_snap_id_to_uuid(ec2_id): """Get the corresponding UUID for the given ec2-id.""" ctxt = context.get_admin_context() # NOTE(jgriffith) first strip prefix to get just the numeric int_id = ec2_id_to_id(ec2_id) return get_snapshot_uuid_from_int_id(ctxt, int_id) @memoize def get_int_id_from_snapshot_uuid(context, snapshot_uuid): if snapshot_uuid is None: return try: smap = objects.EC2SnapshotMapping.get_by_uuid(context, snapshot_uuid) return smap.id except exception.NotFound: smap = objects.EC2SnapshotMapping(context, uuid=snapshot_uuid) smap.create() return smap.id @memoize def get_snapshot_uuid_from_int_id(context, int_id): smap = objects.EC2SnapshotMapping.get_by_id(context, int_id) return smap.uuid _c2u = re.compile('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))') def camelcase_to_underscore(str): return _c2u.sub(r'_\1', 
str).lower().strip('_') def _try_convert(value): """Return a non-string from a string or unicode, if possible. ============= ===================================================== When value is returns ============= ===================================================== zero-length '' 'None' None 'True' True case insensitive 'False' False case insensitive '0', '-0' 0 0xN, -0xN int from hex (positive) (N is any number) 0bN, -0bN int from binary (positive) (N is any number) * try conversion to int, float, complex, fallback value """ def _negative_zero(value): epsilon = 1e-7 return 0 if abs(value) < epsilon else value if len(value) == 0: return '' if value == 'None': return None lowered_value = value.lower() if lowered_value == 'true': return True if lowered_value == 'false': return False for prefix, base in [('0x', 16), ('0b', 2), ('0', 8), ('', 10)]: try: if lowered_value.startswith((prefix, "-" + prefix)): return int(lowered_value, base) except ValueError: pass try: return _negative_zero(float(value)) except ValueError: return value def dict_from_dotted_str(items): """parse multi dot-separated argument into dict. 
EBS boot uses multi dot-separated arguments like BlockDeviceMapping.1.DeviceName=snap-id Convert the above into {'block_device_mapping': {'1': {'device_name': snap-id}}} """ args = {} for key, value in items: parts = key.split(".") key = str(camelcase_to_underscore(parts[0])) if isinstance(value, six.string_types): # NOTE(vish): Automatically convert strings back # into their respective values value = _try_convert(value) if len(parts) > 1: d = args.get(key, {}) args[key] = d for k in parts[1:-1]: k = camelcase_to_underscore(k) v = d.get(k, {}) d[k] = v d = v d[camelcase_to_underscore(parts[-1])] = value else: args[key] = value return args def search_opts_from_filters(filters): return {f['name'].replace('-', '_'): f['value']['1'] for f in filters if f['value']['1']} if filters else {} def regex_from_ec2_regex(ec2_re): """Converts an EC2-style regex to a python regex. Approach is based on python fnmatch. """ iter_ec2_re = iter(ec2_re) py_re = '' for char in iter_ec2_re: if char == '*': py_re += '.*' elif char == '?': py_re += '.' elif char == '\\': try: next_char = next(iter_ec2_re) except StopIteration: next_char = '' if next_char == '*' or next_char == '?': py_re += '[%s]' % next_char else: py_re += '\\\\' + next_char else: py_re += re.escape(char) return '\A%s\Z(?s)' % py_re
apache-2.0
HashGrowth/py-google-auth
py_google_auth/login.py
1
14354
import falcon import json import jsonpickle import os from . import utils from . import login_utils from . import step_two_utils from . import change_method_utils def verify_data_exist(req, resp, resource, params): ''' Verify if payload was sent with request. ''' body = req.stream.read() try: data = json.loads(body.decode('utf-8')) req.stream = data except ValueError: raise falcon.HTTPBadRequest('Empty payload', 'No valid json was supplied with request') def verify_credentials(req, resp, resource, params): ''' Decorator method to verify whether email and password are present in data and also the email is valid or not. ''' data = req.stream # extract required parameters from the data. try: email = data['email'] data['password'] except KeyError: msg = "Either email or password is not present or the email is invalid." raise falcon.HTTPBadRequest('Incomplete credentials', msg) if not login_utils.is_valid_email(email): msg = 'This email address does not exist.' raise falcon.HTTPUnauthorized('Invalid credentials', msg, False) def validate_request(req, resp, resource, params): ''' Decorator method to validate token before processing request. ''' # read request body and parse it into a json object. data = req.stream # token to grant access to API # this is set in the environment of the system where API is deployed. valid_token = os.environ.get('PY_GOOGLE_AUTH_TOKEN') if 'token' not in data: msg = 'Please send access token along with your request' raise falcon.HTTPBadRequest('Token Required', msg) else: # token received from the request data. req_token = data['token'] if req_token != valid_token: msg = 'Please supply a valid token.' raise falcon.HTTPBadRequest('Invalid Token', msg) # since stream is a file, it has been read once so won't be able to read it again in the end # point functions that are called afterwards, so setting it to the data that was already parsed # so that it is available in the functions that follows. 
req.stream = data @falcon.before(verify_data_exist) @falcon.before(validate_request) @falcon.before(verify_credentials) class NormalLogin(object): ''' Handles initial login request. ''' def on_post(self, req, resp): # set in the decorator method for request validation. data = req.stream email = data['email'] password = data['password'] # call the function to make initial login attempt. response, error, session = login_utils.login(email, password) # if two factor auth detected if error and error == 303: # find the default tfa method response_default, error_default = login_utils.get_default_method(response.text) # collect all enabled methods on a user's google account. response_alternate, error_alternate, session = login_utils.select_alternate_method( session, response.url) response_data = {} # if both default_method and available methods not fetched, that is some exception # occured in making requests or format of the response page has changed then respond # with a 500 to indicate that the request can't be fulfilled. Requires updates in API # implementation. if error_default and error_alternate: resp.status = falcon.HTTP_500 resp.body = json.dumps(response_default) # if available methods not fetched; return default_method only elif error_alternate: # from get_default_method response, we extract default method default_method = response_default['method'] # set variables in session and prepare response using a utility method response_data, session = utils.handle_default_method(default_method, response, session) # encode session as json; details in the function itself. 
session = utils.serialize_session(session) response_data['session'] = session resp.status = falcon.HTTP_502 resp.body = json.dumps(response_data) # if default method not available; return all enabled methods elif error_default: select_method_url = response_alternate['select_method_url'] methods = response_alternate['methods'] # save url to select methods, this is used to again get the form of method # selection which will in turn give appropriate payload for selected method session.select_method_url = select_method_url # encode session as json; details in the function itself. session = utils.serialize_session(session) response_data['methods'] = methods response_data['session'] = session resp.status = falcon.HTTP_503 resp.body = json.dumps(response_data) else: # if both default method and available methods fetched # from get_default_method response, we extract default method default_method = response_default['method'] select_method_url = response_alternate['select_method_url'] methods = response_alternate['methods'] response_data, session = utils.handle_default_method(default_method, response, session) # save url to select methods, this is used to again get the form of method # selection which will in turn give appropriate payload for selected method session.select_method_url = select_method_url # encode session as json; details in the function itself. session = utils.serialize_session(session) response_data['methods'] = methods response_data['session'] = session resp.status = falcon.HTTP_303 resp.body = json.dumps(response_data) elif error and error == 504: resp.status = falcon.HTTP_504 elif error and error == 401: resp.status = falcon.HTTP_401 # Too many login attempts can throw captcha, in this case we need to rout the request to # another server (if deployed in big scale where multiple servers are available to handle # this part else just try after some time). 
elif error and error == 429: resp.status = falcon.HTTP_429 # Any other error indicates that API needs update in its implementation. elif error: resp.status = falcon.HTTP_500 resp.body = json.dumps(response) else: # encode session as json; this is different from the encoding process used above when # two factor auth was detected, here no extra variables are stuffed so it is directly # encoded into json and sent back. session = jsonpickle.encode(session) # if no two factor auth detected resp.status = falcon.HTTP_200 resp.body = json.dumps({'session': session}) @falcon.before(verify_data_exist) @falcon.before(validate_request) class StepTwoLogin(object): ''' Handles two factor authentication. ''' def on_post(self, req, resp): # set in the decorator method for request validation. data = req.stream # extract required parameters from the data. method = data['method'] session = data['session'] # deserialize session into an object from the string. session = utils.deserialize_session(session) # if method is google prompt then no otp is avaiable in the request. if method != 1: otp = data['otp'] query_params = None # but query_params are present in the session object which were stuffed in previous call to # the API. else: otp = None query_params = session.query_params # extract other variables that were stuffed in previous call to the API. tfa_url = session.next_url payload = session.prev_payload # remove the variables from the session object so as to make it a normal requests.Session # object. session = utils.clean_session(session) # make the login attempt for second step of authentication response, error, session = step_two_utils.second_step_login(session, method, tfa_url, payload, query_params, otp) # since no further requests will be made in sequence after this request so no extra # variables are stuffed hence normal json encoding works here for the session object. 
response_data = {} if error: if error == 504: resp.status = falcon.HTTP_504 elif error == 400: msg = "Send a valid method code" raise falcon.HTTPBadRequest('Invalid Method', msg) elif error == 406: resp.status = falcon.HTTP_406 elif error == 412: resp.status = falcon.HTTP_412 elif error == 408: resp.status = falcon.HTTP_408 elif error == 503: url = response['url'] methods = response['methods'] # save the url from where list of methods was obtained, this will be used to # collect payload in next request when a method will be selected session.select_method_url = url session = utils.serialize_session(session) response_data['methods'] = methods resp.status = falcon.HTTP_503 elif error == 502: methods = utils.get_method_names() default_method = [m for m in methods if methods[m][1] in response.url][0] # set variables in session and prepare response using a utility method response_data, session = utils.handle_default_method(default_method, response, session) session = utils.serialize_session(session) response_data['default_method'] = default_method resp.status = falcon.HTTP_502 elif error == 506: # using this way because no falcon status codes suits the purpose. resp.status = "506" session = utils.serialize_session(session) else: resp.status = falcon.HTTP_500 response_data = response else: resp.status = falcon.HTTP_200 # 502 and 503 shows that too many attempts with wrong otp were made, so in this case we # either fall back to default method or provide a list of methods to select from (when # default is blocked) if error != 503 and error != 502: session = jsonpickle.encode(session) response_data['session'] = session resp.body = json.dumps(response_data) @falcon.before(verify_data_exist) @falcon.before(validate_request) class ChangeMethod(object): ''' Handle changing the two factor method. ''' def on_post(self, req, resp): # set in the decorator method for request validation. data = req.stream # extract required parameters from the data. 
method = data['method'] session = data['session'] # deserialize session into an object from the string. session = utils.deserialize_session(session) # extract other variables that were stuffed in previous call to the API. select_method_url = session.select_method_url # remove the variables from the session object so as to make it a normal requests.Session # object. session = utils.clean_session(session) # get response for url and payload for next request for the selected method; in this # function, a POST request is made to a url ( which is prepared according to the selected # method) and which in turn sends otp or prompt to user. response, error, session = change_method_utils.get_alternate_method(session, method, select_method_url) # data to send back response_data = {} if error: if error == 504: resp.status = falcon.HTTP_504 elif error == 400: msg = "Send a valid method" raise falcon.HTTPBadRequest("Invalid Method", msg) else: resp.status = falcon.HTTP_500 response_data = response else: # if method is text message, extract the phone number from it. if "text message" in method: phone_num = change_method_utils.extract_phone_num(method) response_data['number'] = phone_num # get the method code, this is done so that the api user can get the method code to # send back in the next call to step two end point. method = change_method_utils.get_method_for_selection(method) # payload for next request payload = utils.make_payload(response.text) # stuffing data for next request from the user; explained in detail in class `Login`. session.next_url = response.url session.prev_payload = payload response_data['method'] = method resp.status = falcon.HTTP_200 # encode session as json; need to call the serialize function because again extra variables # are being stuffed in the session. session = utils.serialize_session(session) response_data['session'] = session resp.body = json.dumps(response_data)
mit
halvertoluke/edx-platform
common/lib/capa/capa/xqueue_interface.py
179
4373
# # LMS Interface to external queueing system (xqueue) # import hashlib import json import logging import requests import dogstats_wrapper as dog_stats_api log = logging.getLogger(__name__) dateformat = '%Y%m%d%H%M%S' XQUEUE_METRIC_NAME = 'edxapp.xqueue' # Wait time for response from Xqueue. XQUEUE_TIMEOUT = 35 # seconds def make_hashkey(seed): """ Generate a string key by hashing """ h = hashlib.md5() h.update(str(seed)) return h.hexdigest() def make_xheader(lms_callback_url, lms_key, queue_name): """ Generate header for delivery and reply of queue request. Xqueue header is a JSON-serialized dict: { 'lms_callback_url': url to which xqueue will return the request (string), 'lms_key': secret key used by LMS to protect its state (string), 'queue_name': designate a specific queue within xqueue server, e.g. 'MITx-6.00x' (string) } """ return json.dumps({ 'lms_callback_url': lms_callback_url, 'lms_key': lms_key, 'queue_name': queue_name }) def parse_xreply(xreply): """ Parse the reply from xqueue. Messages are JSON-serialized dict: { 'return_code': 0 (success), 1 (fail) 'content': Message from xqueue (string) } """ try: xreply = json.loads(xreply) except ValueError, err: log.error(err) return (1, 'unexpected reply from server') return_code = xreply['return_code'] content = xreply['content'] return (return_code, content) class XQueueInterface(object): """ Interface to the external grading system """ def __init__(self, url, django_auth, requests_auth=None): self.url = unicode(url) self.auth = django_auth self.session = requests.Session() self.session.auth = requests_auth def send_to_queue(self, header, body, files_to_upload=None): """ Submit a request to xqueue. header: JSON-serialized dict in the format described in 'xqueue_interface.make_xheader' body: Serialized data for the receipient behind the queueing service. 
The operation of xqueue is agnostic to the contents of 'body' files_to_upload: List of file objects to be uploaded to xqueue along with queue request Returns (error_code, msg) where error_code != 0 indicates an error """ # log the send to xqueue header_info = json.loads(header) queue_name = header_info.get('queue_name', u'') dog_stats_api.increment(XQUEUE_METRIC_NAME, tags=[ u'action:send_to_queue', u'queue:{}'.format(queue_name) ]) # Attempt to send to queue (error, msg) = self._send_to_queue(header, body, files_to_upload) # Log in, then try again if error and (msg == 'login_required'): (error, content) = self._login() if error != 0: # when the login fails log.debug("Failed to login to queue: %s", content) return (error, content) if files_to_upload is not None: # Need to rewind file pointers for f in files_to_upload: f.seek(0) (error, msg) = self._send_to_queue(header, body, files_to_upload) return (error, msg) def _login(self): payload = { 'username': self.auth['username'], 'password': self.auth['password'] } return self._http_post(self.url + '/xqueue/login/', payload) def _send_to_queue(self, header, body, files_to_upload): payload = { 'xqueue_header': header, 'xqueue_body': body } files = {} if files_to_upload is not None: for f in files_to_upload: files.update({f.name: f}) return self._http_post(self.url + '/xqueue/submit/', payload, files=files) def _http_post(self, url, data, files=None): try: r = self.session.post(url, data=data, files=files) except requests.exceptions.ConnectionError, err: log.error(err) return (1, 'cannot connect to server') if r.status_code not in [200]: return (1, 'unexpected HTTP status code [%d]' % r.status_code) return parse_xreply(r.text)
agpl-3.0
sensysnetworks/uClinux
user/python/Lib/dos-8x3/test_cla.py
10
3126
"Test the functionality of Python classes implementing operators." testmeths = [ # Binary operations "add", "radd", "sub", "rsub", "mul", "rmul", "div", "rdiv", "mod", "rmod", "divmod", "rdivmod", "pow", "rpow", "rshift", "rrshift", "lshift", "rlshift", "and", "rand", "or", "ror", "xor", "rxor", # List/dict operations "contains", "getitem", "getslice", "setitem", "setslice", "delitem", "delslice", # Unary operations "neg", "pos", "abs", "int", "long", "float", "oct", "hex", # generic operations "init", "del", ] # These need to return something other than None # "coerce", # "hash", # "str", # "repr", # These are separate because they can influence the test of other methods. # "getattr", # "setattr", # "delattr", class AllTests: def __coerce__(self, *args): print "__coerce__:", args return (self,) + args def __hash__(self, *args): print "__hash__:", args return hash(id(self)) def __str__(self, *args): print "__str__:", args return "AllTests" def __repr__(self, *args): print "__repr__:", args return "AllTests" def __cmp__(self, *args): print "__cmp__:", args return 0 for method in testmeths: exec("""def __%(method)s__(self, *args): print "__%(method)s__:", args """%locals(), AllTests.__dict__); # this also tests __init__ of course. testme = AllTests() # Binary operations testme + 1 1 + testme testme - 1 1 - testme testme * 1 1 * testme testme / 1 1 / testme testme % 1 1 % testme divmod(testme,1) divmod(1, testme) testme ** 1 1 ** testme testme >> 1 1 >> testme testme << 1 1 << testme testme & 1 1 & testme testme | 1 1 | testme testme ^ 1 1 ^ testme # List/dict operations 1 in testme testme[1] testme[1] = 1 del testme[1] testme[:42] testme[:42] = "The Answer" del testme[:42] testme[2:1024:10] testme[2:1024:10] = "A lot" del testme[2:1024:10] testme[:42, ..., :24:, 24, 100] testme[:42, ..., :24:, 24, 100] = "Strange" del testme[:42, ..., :24:, 24, 100] # Now remove the slice hooks to see if converting normal slices to slice # object works. 
del AllTests.__getslice__ del AllTests.__setslice__ del AllTests.__delslice__ testme[:42] testme[:42] = "The Answer" del testme[:42] # Unary operations -testme +testme abs(testme) int(testme) long(testme) float(testme) oct(testme) hex(testme) # And the rest... hash(testme) repr(testme) str(testme) testme == 1 testme < 1 testme > 1 testme <> 1 testme != 1 1 == testme 1 < testme 1 > testme 1 <> testme 1 != testme # This test has to be last (duh.) del testme # Interfering tests class ExtraTests: def __getattr__(self, *args): print "__getattr__:", args return "SomeVal" def __setattr__(self, *args): print "__setattr__:", args def __delattr__(self, *args): print "__delattr__:", args testme = ExtraTests() testme.spam testme.eggs = "spam, spam, spam and ham" del testme.cardinal
gpl-2.0
allmende/synnefo
snf-pithos-backend/pithos/backends/lib/sqlalchemy/public.py
10
4501
# Copyright (C) 2010-2014 GRNET S.A. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from dbworker import DBWorker from sqlalchemy import Table, Column, String, Integer, Boolean, MetaData from sqlalchemy.sql import and_, select from sqlalchemy.schema import Index from sqlalchemy.exc import NoSuchTableError from pithos.backends.random_word import get_random_word from dbworker import ESCAPE_CHAR import logging logger = logging.getLogger(__name__) def create_tables(engine): metadata = MetaData() columns = [] columns.append(Column('public_id', Integer, primary_key=True)) columns.append(Column('path', String(2048), nullable=False)) columns.append(Column('active', Boolean, nullable=False, default=True)) columns.append(Column('url', String(2048), nullable=True)) public = Table('public', metadata, *columns, mysql_engine='InnoDB', sqlite_autoincrement=True) # place an index on path Index('idx_public_path', public.c.path, unique=True) # place an index on url Index('idx_public_url', public.c.url, unique=True) metadata.create_all(engine) return metadata.sorted_tables class Public(DBWorker): """Paths can be marked as public.""" def __init__(self, **params): DBWorker.__init__(self, **params) try: metadata = MetaData(self.engine) self.public = Table('public', metadata, autoload=True) except NoSuchTableError: tables = create_tables(self.engine) map(lambda t: self.__setattr__(t.name, t), tables) def 
get_unique_url(self, public_security, public_url_alphabet): l = public_security while 1: candidate = get_random_word(length=l, alphabet=public_url_alphabet) if self.public_path(candidate) is None: return candidate l += 1 def public_set(self, path, public_security, public_url_alphabet): s = select([self.public.c.public_id]) s = s.where(self.public.c.path == path) r = self.conn.execute(s) row = r.fetchone() r.close() if not row: url = self.get_unique_url( public_security, public_url_alphabet ) s = self.public.insert() s = s.values(path=path, active=True, url=url) r = self.conn.execute(s) r.close() logger.info('Public url set for path: %s' % path) def public_unset(self, path): s = self.public.delete() s = s.where(self.public.c.path == path) r = self.conn.execute(s) if r.rowcount != 0: logger.info('Public url unset for path: %s' % path) r.close() def public_unset_bulk(self, paths): if not paths: return s = self.public.delete() s = s.where(self.public.c.path.in_(paths)) self.conn.execute(s).close() def public_get(self, path): s = select([self.public.c.url]) s = s.where(and_(self.public.c.path == path, self.public.c.active == True)) r = self.conn.execute(s) row = r.fetchone() r.close() if row: return row[0] return None def public_list(self, prefix): s = select([self.public.c.path, self.public.c.url]) s = s.where(self.public.c.path.like( self.escape_like(prefix) + '%', escape=ESCAPE_CHAR)) s = s.where(self.public.c.active == True) r = self.conn.execute(s) rows = r.fetchall() r.close() return rows def public_path(self, public): s = select([self.public.c.path]) s = s.where(and_(self.public.c.url == public, self.public.c.active == True)) r = self.conn.execute(s) row = r.fetchone() r.close() if row: return row[0] return None
gpl-3.0
SublimeText/Pywin32
lib/x64/win32/lib/win32traceutil.py
4
1531
# This is a helper for the win32trace module # If imported from a normal Python program, it sets up sys.stdout and sys.stderr # so output goes to the collector. # If run from the command line, it creates a collector loop. # Eg: # C:>start win32traceutil.py (or python.exe win32traceutil.py) # will start a process with a (pretty much) blank screen. # # then, switch to a DOS prompt, and type: # C:>python.exe # Python 1.4 etc... # >>> import win32traceutil # Redirecting output to win32trace remote collector # >>> print "Hello" # >>> # And the output will appear in the first collector process. # Note - the client or the collector can be started first. # There is a 0x20000 byte buffer. If this gets full, it is reset, and new # output appended from the start. import win32trace def RunAsCollector(): import sys try: import win32api win32api.SetConsoleTitle("Python Trace Collector") except: pass # Oh well! win32trace.InitRead() print("Collecting Python Trace Output...") try: while 1: # a short timeout means ctrl+c works next time we wake... sys.stdout.write(win32trace.blockingread(500)) except KeyboardInterrupt: print("Ctrl+C") def SetupForPrint(): win32trace.InitWrite() try: # Under certain servers, sys.stdout may be invalid. print("Redirecting output to win32trace remote collector") except: pass win32trace.setprint() # this works in an rexec environment. if __name__=='__main__': RunAsCollector() else: SetupForPrint()
bsd-3-clause
aborrero/pkg-rpmlint
SCLCheck.py
1
16259
# -*- coding: utf-8 -*- ############################################################################# # File : SCLCheck.py # Package : rpmlint # Author : Miro Hrončok # Created on : Wed Jul 24 20:25 2013 # Purpose : Software Collections checks. ############################################################################# import os import re import AbstractCheck from Filter import addDetails, printError, printWarning import Pkg # Compile all regexes here allowed_etc = re.compile(r'^/etc/(cron|profile|logrotate)\.d/', re.M) allowed_var = re.compile(r'^/var/(log|lock)/', re.M) buildrequires = re.compile(r'^BuildRequires:\s*(.*)', re.M) global_scl_definition = re.compile(r'(^|\s)%(define|global)\s+scl\s+\S+\s*$', re.M) libdir = re.compile(r'%\{?\??_libdir\}?', re.M) name = re.compile(r'^Name:\s*(.*)', re.M) name_small = re.compile(r'^%\{?name\}?', re.M) noarch = re.compile(r'^BuildArch:\s*noarch\s*$', re.M) obsoletes_conflicts = re.compile(r'^(Obsoletes|(Build)?Conflicts):\s*(.*)', re.M) pkg_name = re.compile(r'(^|\s)%\{!\?scl:%(define|global)\s+pkg_name\s+%\{name\}\}\s*$', re.M) provides = re.compile(r'^Provides:\s*(.*)', re.M) requires = re.compile(r'(^|:)Requires:\s*(.*)', re.M) scl_files = re.compile(r'(^|\s)%\{?\??scl_files\}?\s*$', re.M) scl_install = re.compile(r'(^|\s)%\{?\??scl_install\}?\s*$', re.M) scl_macros = re.compile(r'(^|\s)%\{?\??_root_sysconfdir\}?/rpm/macros\.%\{?\??scl\}?-config\s*^', re.M) scl_package_definition = re.compile(r'(^|\s)%\{\?scl\s*:\s*%scl_package\s+\S+\s*\}\s*$', re.M) scl_prefix_noncond = re.compile(r'%\{?scl_prefix\}?', re.M) scl_prefix = re.compile(r'%\{?\??scl_prefix\}?', re.M) scl_prefix_start = re.compile(r'^%\{?\??scl_prefix\}?', re.M) scl_runtime = re.compile(r'%\{?\??scl\}?-runtime\}?', re.M) scl_use = re.compile(r'%\{?\??\!?\??scl') setup = re.compile(r'^%setup(.*)', re.M) startdir = re.compile(r'^/opt/[^/]+/', re.M) subpackage_alien = re.compile(r'(^|\s)%package\s+(-n\s+)?(?!(build|runtime))\S+\s*$', re.M) subpackage_any = 
re.compile(r'(^|\s)%package\s+(.*)', re.M) subpackage_build = re.compile(r'(^|\s)%package\s+build\s*$', re.M) subpackage_runtime = re.compile(r'(^|\s)%package\s+runtime\s*$', re.M) def index_or_sub(source, word, sub=0): """ Helper function that returns index of word in source or sub when not found. """ try: return source.index(word) except: return sub class SCLCheck(AbstractCheck.AbstractCheck): '''Software Collections checks''' def __init__(self): AbstractCheck.AbstractCheck.__init__(self, "SCLCheck") self._spec_file = None def check_source(self, pkg): # lookup spec file for fname, pkgfile in pkg.files().items(): if fname.endswith('.spec'): self._spec_file = pkgfile.path self.check_spec(pkg, self._spec_file) def check_spec(self, pkg, spec_file): '''SCL spec file checks''' spec = '\n'.join(Pkg.readlines(spec_file)) if global_scl_definition.search(spec): self.check_metapackage(pkg, spec) elif scl_package_definition.search(spec): self.check_scl_spec(pkg, spec) elif scl_use.search(spec): printError(pkg, 'undeclared-scl') def check_binary(self, pkg): '''SCL binary package checks''' # Assume that no dash in package name means no SCL splits = pkg.name.split('-') if len(splits) < 2: return scl_name = splits[0] # While we are here, check if it's a runtime/build package is_runtime = splits[-1] == 'runtime' is_build = splits[-1] == 'build' del splits # Now test if there is /opt/foo/ dir good = False for fname in pkg.files().keys(): if startdir.search(fname): good = True break if not good: return # Test if our dir is named the same way as scl good = True for fname in pkg.files().keys(): if not startdir.search(fname): if allowed_etc.search(fname) or allowed_var.search(fname) or \ fname.startswith('/usr/bin/'): continue if fname.startswith('/etc/rpm/'): if not is_build: printWarning(pkg, 'scl-rpm-macros-outside-of-build', fname) continue if is_runtime and \ fname == os.path.join('/etc/scl/prefixes', scl_name): continue printError(pkg, 'file-outside-of-scl-tree', fname) else: if 
fname.split('/')[3] != scl_name: good = False if not good: printError(pkg, 'scl-name-screwed-up') def check_metapackage(self, pkg, spec): '''SCL metapackage spec checks''' # Examine subpackages runtime = subpackage_runtime.search(spec) if not runtime: printError(pkg, 'no-runtime-in-scl-metapackage') build = subpackage_build.search(spec) if not build: printError(pkg, 'no-build-in-scl-metapackage') else: # Get (B)Rs section for build subpackage end = index_or_sub(spec[build.end():], '%package', -1) if 'scl-utils-build' not in \ ' '.join(self.get_requires(spec[build.end():end])): printWarning(pkg, 'scl-build-without-requiring-scl-utils-build') alien = subpackage_alien.search(spec) if alien: printError(pkg, 'weird-subpackage-in-scl-metapackage', alien.group()[9:]) # Get (B)Rs section for main package end = index_or_sub(spec, '%package', -1) if 'scl-utils-build' not in \ ' '.join(self.get_build_requires(spec[:end])): printError(pkg, 'scl-metapackage-without-scl-utils-build-br') # Enter %install section install_start = index_or_sub(spec, '%install') install_end = index_or_sub(spec, '%check') if not install_end: install_end = index_or_sub(spec, '%clean') if not install_end: install_end = index_or_sub(spec, '%files') if not install_end: install_end = index_or_sub(spec, '%changelog', -1) # Search %scl_install if not scl_install.search(spec[install_start:install_end]): printError(pkg, 'scl-metapackage-without-%scl_install') if noarch.search(spec[:install_start]) and \ libdir.search(spec[install_start:install_end]): printError(pkg, 'noarch-scl-metapackage-with-libdir') # Analyze %files files = self.get_files(spec) if files: printWarning(pkg, 'scl-main-metapackage-contains-files', ', '.join(files)) if runtime: if not scl_files.search( '\n'.join(self.get_files(spec, 'runtime'))): printError(pkg, 'scl-runtime-package-without-%scl_files') if build: if not scl_macros.search( '\n'.join(self.get_files(spec, 'build'))): printError(pkg, 'scl-build-package-without-rpm-macros') def 
check_scl_spec(self, pkg, spec): '''SCL ready spec checks''' # For the entire spec if not pkg_name.search(spec): printWarning(pkg, 'missing-pkg_name-definition') if scl_prefix_noncond.search(self.remove_scl_conds(spec)): printWarning(pkg, 'scl-prefix-without-condition') if not scl_prefix.search(self.get_name(spec)): printError(pkg, 'name-without-scl-prefix') for item in self.get_obsoletes_and_conflicts(spec): if not scl_prefix.search(item): printError(pkg, 'obsoletes-or-conflicts-without-scl-prefix') break for item in self.get_provides(spec): if not scl_prefix.search(item): printError(pkg, 'provides-without-scl-prefix') break setup_opts = setup.search(spec) if setup_opts: if '-n' not in setup_opts.groups()[0]: printError(pkg, 'scl-setup-without-n') # Examine main package and subpackages one by one borders = [] borders.append(0) # main package starts at the beginning while True: more = subpackage_any.search(spec[borders[-1]:]) if not more: break splits = more.groups()[1].split() if len(splits) > 1 and splits[0] == '-n': if not scl_prefix_start.search(splits[-1]): printError(pkg, 'subpackage-with-n-without-scl-prefix') # current end is counted only from last one borders.append(borders[-1] + more.end()) subpackages = [(borders[i], borders[i + 1]) for i in range(len(borders) - 1)] for subpackage in subpackages: ok = False for require in self.get_requires(spec[subpackage[0]:subpackage[1]]): # Remove flase entries if not require or require == ':': continue # If it starts with %{name}, it,s fine # If it starts with SCL prefix, it's fine # If it is scl-runtime, it's the best if name_small.search(require) or \ scl_prefix_start.search(require) or \ scl_runtime.match(require): ok = True break if not ok: printError(pkg, 'doesnt-require-scl-runtime-or-other-scl-package') break def get_requires(self, text, build=False): '''For given piece of spec, find Requires (or BuildRequires)''' if build: search = buildrequires else: search = requires res = [] while True: more = 
search.search(text) if not more: break res.extend(more.groups()) text = text[more.end():] return res def get_build_requires(self, text): '''Call get_requires() with build = True''' return self.get_requires(text, True) def get_name(self, text): '''For given piece of spec, get the Name of the main package''' sname = name.search(text) if not sname: return None return sname.groups()[0].strip() def get_obsoletes_and_conflicts(self, text): '''For given piece of spec, find Obsoletes and Conflicts''' res = [] while True: more = obsoletes_conflicts.search(text) if not more: break # 1st group is 'Obsoletes' or 'Conflicts', 2nd is Build or None res.extend(more.groups()[2:]) text = text[more.end():] return res def get_provides(self, text): '''For given piece of spec, find Provides''' res = [] while True: more = provides.search(text) if not more: break res.extend(more.groups()) text = text[more.end():] return res def get_files(self, text, subpackage=None): """ Return the list of files in %files section for given subpackage or main package. """ if subpackage: pattern = r'%%\{?\??files\}?(\s+-n)?\s+%s\s*$' % subpackage else: pattern = r'%\{?\??files\}?\s*$' search = re.search(pattern, text, re.M) if not search: return [] start = search.end() end = index_or_sub(text[start:], '%files') if not end: end = index_or_sub(text[start:], '%changelog', -1) return list(filter(None, text[start:start + end].strip().split('\n'))) def remove_scl_conds(self, text): '''Returns given text without %scl conds blocks''' while text.count('%{?scl:') > 0: spos = text.index('%{?scl:') pos = spos + 7 counter = 1 while counter: if text[pos] == '{': counter += 1 if text[pos] == '}': counter -= 1 pos += 1 text = text[:spos] + text[pos:] return text # Create an object to enable the auto registration of the test check = SCLCheck() # Add information about checks addDetails( 'undeclared-scl', '''Specfile contains %scl* macros, but was not recognized as SCL metapackage or SCL ready package. 
If this should be an SCL metapackage, don't forget to define the %scl macro. If this should be an SCL ready package, run %scl conditionalized %scl_package macro, e.g. %{?scl:%scl_package foo}.''', 'no-runtime-in-scl-metapackage', 'SCL metapackage must have runtime subpackage.', 'no-build-in-scl-metapackage', 'SCL metapackage must have build subpackage.', 'weird-subpackage-in-scl-metapackage', 'Only allowed subpackages in SCL metapackage are build and runtime.', 'scl-metapackage-without-scl-utils-build-br', 'SCL metapackage must BuildRequire scl-utils-build.', 'scl-build-without-requiring-scl-utils-build', 'SCL runtime package should Require scl-utils-build.', 'scl-metapackage-without-%scl_install', 'SCL metapackage must call %scl_install in the %install section.', 'noarch-scl-metapackage-with-libdir', '''If "enable" script of SCL metapackage contains %{_libdir}, the package must be arch specific, otherwise it may be noarch.''', 'scl-main-metapackage-contains-files', 'Main package of SCL metapackage should not contain any files.', 'scl-runtime-package-without-%scl_files', 'SCL runtime package must contain %scl_files in %files section.', 'scl-build-package-without-rpm-macros', '''SCL build package must contain %{_root_sysconfdir}/rpm/macros. %{scl}-config in %files section.''', 'missing-pkg_name-definition', '%{!?scl:%global pkg_name %{name}} is missing in the specfile.', 'name-without-scl-prefix', 'Name of SCL package must start with %{?scl_prefix}.', 'scl-prefix-without-condition', '''The SCL prefix is used without condition - this won't work if the package is build outside of SCL - use %{?scl_prefix} with questionmark.''', 'obsoletes-or-conflicts-without-scl-prefix', '''Obsoletes, Conflicts and Build Conflicts must always be prefixed with %{?scl_prefix}. 
This is extremely important, as the SCLs are often used for deploying new packages on older systems (that may contain old packages, now obsoleted by the new ones), but they shouldn't Obsolete or Conflict with the non-SCL RPMs installed on the system (that's the idea of SCL).''', 'provides-without-scl-prefix', 'Provides tag must always be prefixed with %{?scl_prefix}.', 'doesnt-require-scl-runtime-or-other-scl-package', '''The package must require %{scl}-runtime, unless it depends on another package that requires %{scl}-runtime. It's impossible to check what other packages require, so this simply checks if this package requires at least something from its collection.''', 'subpackage-with-n-without-scl-prefix', '''If (and only if) a package defines its name with -n, the name must be prefixed with %{?scl_prefix}.''', 'scl-setup-without-n', '''The %setup macro needs the -n argument for SCL builds, because the directory with source probably doesn't include SCL prefix in its name.''', 'scl-name-screwed-up', '''SCL package's name starts with SCL prefix. That prefix is used as a directory, where files are stored: If the prefix is foo, the directory is /opt/provides/foo. This package doesn't respect that. This means either the name of the package is wrong, or the directory.''', 'file-outside-of-scl-tree', '''SCL package should only contain files in /opt/provider/scl-name directory or in other allowed directories such as some directories in /etc or /var. Wrapper scripts in /usr/bin are also allowed.''', 'scl-rpm-macros-outside-of-build', '''RPM macros in SCL packages should belong to -build subpackage of the SCL metapackage.''', )
gpl-2.0
mensler/ansible
lib/ansible/modules/network/avi/avi_systemconfiguration.py
43
6203
#!/usr/bin/python # # Created on Aug 25, 2016 # @author: Gaurav Rastogi ([email protected]) # Eric Anderson ([email protected]) # module_check: supported # Avi Version: 17.1.1 # # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: avi_systemconfiguration author: Gaurav Rastogi ([email protected]) short_description: Module for setup of SystemConfiguration Avi RESTful Object description: - This module is used to configure SystemConfiguration object - more examples at U(https://github.com/avinetworks/devops) requirements: [ avisdk ] version_added: "2.3" options: state: description: - The state that should be applied on the entity. default: present choices: ["absent","present"] admin_auth_configuration: description: - Adminauthconfiguration settings for systemconfiguration. dns_configuration: description: - Dnsconfiguration settings for systemconfiguration. dns_virtualservice_refs: description: - Dns virtualservices hosting fqdn records for applications across avi vantage. - If no virtualservices are provided, avi vantage will provide dns services for configured applications. - Switching back to avi vantage from dns virtualservices is not allowed. - It is a reference to an object of type virtualservice. 
docker_mode: description: - Boolean flag to set docker_mode. - Default value when not specified in API or module is interpreted by Avi Controller as False. email_configuration: description: - Emailconfiguration settings for systemconfiguration. global_tenant_config: description: - Tenantconfiguration settings for systemconfiguration. linux_configuration: description: - Linuxconfiguration settings for systemconfiguration. mgmt_ip_access_control: description: - Configure ip access control for controller to restrict open access. ntp_configuration: description: - Ntpconfiguration settings for systemconfiguration. portal_configuration: description: - Portalconfiguration settings for systemconfiguration. proxy_configuration: description: - Proxyconfiguration settings for systemconfiguration. snmp_configuration: description: - Snmpconfiguration settings for systemconfiguration. ssh_ciphers: description: - Allowed ciphers list for ssh to the management interface on the controller and service engines. - If this is not specified, all the default ciphers are allowed. - Ssh -q cipher provides the list of default ciphers supported. ssh_hmacs: description: - Allowed hmac list for ssh to the management interface on the controller and service engines. - If this is not specified, all the default hmacs are allowed. - Ssh -q mac provides the list of default hmacs supported. tech_support_uploader_configuration: description: - Techsupportuploaderconfiguration settings for systemconfiguration. url: description: - Avi controller URL of the object. uuid: description: - Unique object identifier of the object. 
extends_documentation_fragment: - avi ''' EXAMPLES = """ - name: Example to create SystemConfiguration object avi_systemconfiguration: controller: 10.10.25.42 username: admin password: something state: present name: sample_systemconfiguration """ RETURN = ''' obj: description: SystemConfiguration (api/systemconfiguration) object returned: success, changed type: dict ''' from ansible.module_utils.basic import AnsibleModule try: from ansible.module_utils.avi import ( avi_common_argument_spec, HAS_AVI, avi_ansible_api) except ImportError: HAS_AVI = False def main(): argument_specs = dict( state=dict(default='present', choices=['absent', 'present']), admin_auth_configuration=dict(type='dict',), dns_configuration=dict(type='dict',), dns_virtualservice_refs=dict(type='list',), docker_mode=dict(type='bool',), email_configuration=dict(type='dict',), global_tenant_config=dict(type='dict',), linux_configuration=dict(type='dict',), mgmt_ip_access_control=dict(type='dict',), ntp_configuration=dict(type='dict',), portal_configuration=dict(type='dict',), proxy_configuration=dict(type='dict',), snmp_configuration=dict(type='dict',), ssh_ciphers=dict(type='list',), ssh_hmacs=dict(type='list',), tech_support_uploader_configuration=dict(type='dict',), url=dict(type='str',), uuid=dict(type='str',), ) argument_specs.update(avi_common_argument_spec()) module = AnsibleModule( argument_spec=argument_specs, supports_check_mode=True) if not HAS_AVI: return module.fail_json(msg=( 'Avi python API SDK (avisdk>=17.1) is not installed. ' 'For more details visit https://github.com/avinetworks/sdk.')) return avi_ansible_api(module, 'systemconfiguration', set([])) if __name__ == '__main__': main()
gpl-3.0
VishvajitP/python-social-auth
social/storage/base.py
52
8266
"""Models mixins for Social Auth""" import re import time import base64 import uuid import warnings from datetime import datetime, timedelta import six from openid.association import Association as OpenIdAssociation from social.backends.utils import get_backend from social.strategies.utils import get_current_strategy CLEAN_USERNAME_REGEX = re.compile(r'[^\w.@+_-]+', re.UNICODE) class UserMixin(object): user = '' provider = '' uid = None extra_data = None def get_backend(self, strategy=None): strategy = strategy or get_current_strategy() if strategy: return get_backend(strategy.get_backends(), self.provider) def get_backend_instance(self, strategy=None): strategy = strategy or get_current_strategy() Backend = self.get_backend(strategy) if Backend: return Backend(strategy=strategy) @property def access_token(self): """Return access_token stored in extra_data or None""" return self.extra_data.get('access_token') @property def tokens(self): warnings.warn('tokens is deprecated, use access_token instead') return self.access_token def refresh_token(self, strategy, *args, **kwargs): token = self.extra_data.get('refresh_token') or \ self.extra_data.get('access_token') backend = self.get_backend(strategy) if token and backend and hasattr(backend, 'refresh_token'): backend = backend(strategy=strategy) response = backend.refresh_token(token, *args, **kwargs) access_token = response.get('access_token') refresh_token = response.get('refresh_token') if access_token or refresh_token: if access_token: self.extra_data['access_token'] = access_token if refresh_token: self.extra_data['refresh_token'] = refresh_token self.save() def expiration_datetime(self): """Return provider session live seconds. Returns a timedelta ready to use with session.set_expiry(). If provider returns a timestamp instead of session seconds to live, the timedelta is inferred from current time (using UTC timezone). None is returned if there's no value stored or it's invalid. 
""" if self.extra_data and 'expires' in self.extra_data: try: expires = int(self.extra_data.get('expires')) except (ValueError, TypeError): return None now = datetime.utcnow() # Detect if expires is a timestamp if expires > time.mktime(now.timetuple()): # expires is a datetime return datetime.fromtimestamp(expires) - now else: # expires is a timedelta return timedelta(seconds=expires) def set_extra_data(self, extra_data=None): if extra_data and self.extra_data != extra_data: if self.extra_data: self.extra_data.update(extra_data) else: self.extra_data = extra_data return True @classmethod def clean_username(cls, value): """Clean username removing any unsupported character""" return CLEAN_USERNAME_REGEX.sub('', value) @classmethod def changed(cls, user): """The given user instance is ready to be saved""" raise NotImplementedError('Implement in subclass') @classmethod def get_username(cls, user): """Return the username for given user""" raise NotImplementedError('Implement in subclass') @classmethod def user_model(cls): """Return the user model""" raise NotImplementedError('Implement in subclass') @classmethod def username_max_length(cls): """Return the max length for username""" raise NotImplementedError('Implement in subclass') @classmethod def allowed_to_disconnect(cls, user, backend_name, association_id=None): """Return if it's safe to disconnect the social account for the given user""" raise NotImplementedError('Implement in subclass') @classmethod def disconnect(cls, entry): """Disconnect the social account for the given user""" raise NotImplementedError('Implement in subclass') @classmethod def user_exists(cls, *args, **kwargs): """ Return True/False if a User instance exists with the given arguments. Arguments are directly passed to filter() manager method. 
""" raise NotImplementedError('Implement in subclass') @classmethod def create_user(cls, *args, **kwargs): """Create a user instance""" raise NotImplementedError('Implement in subclass') @classmethod def get_user(cls, pk): """Return user instance for given id""" raise NotImplementedError('Implement in subclass') @classmethod def get_users_by_email(cls, email): """Return users instances for given email address""" raise NotImplementedError('Implement in subclass') @classmethod def get_social_auth(cls, provider, uid): """Return UserSocialAuth for given provider and uid""" raise NotImplementedError('Implement in subclass') @classmethod def get_social_auth_for_user(cls, user, provider=None, id=None): """Return all the UserSocialAuth instances for given user""" raise NotImplementedError('Implement in subclass') @classmethod def create_social_auth(cls, user, uid, provider): """Create a UserSocialAuth instance for given user""" raise NotImplementedError('Implement in subclass') class NonceMixin(object): """One use numbers""" server_url = '' timestamp = 0 salt = '' @classmethod def use(cls, server_url, timestamp, salt): """Create a Nonce instance""" raise NotImplementedError('Implement in subclass') class AssociationMixin(object): """OpenId account association""" server_url = '' handle = '' secret = '' issued = 0 lifetime = 0 assoc_type = '' @classmethod def oids(cls, server_url, handle=None): kwargs = {'server_url': server_url} if handle is not None: kwargs['handle'] = handle return sorted([(assoc.id, cls.openid_association(assoc)) for assoc in cls.get(**kwargs) ], key=lambda x: x[1].issued, reverse=True) @classmethod def openid_association(cls, assoc): secret = assoc.secret if not isinstance(secret, six.binary_type): secret = secret.encode() return OpenIdAssociation(assoc.handle, base64.decodestring(secret), assoc.issued, assoc.lifetime, assoc.assoc_type) @classmethod def store(cls, server_url, association): """Create an Association instance""" raise 
NotImplementedError('Implement in subclass') @classmethod def get(cls, *args, **kwargs): """Get an Association instance""" raise NotImplementedError('Implement in subclass') @classmethod def remove(cls, ids_to_delete): """Remove an Association instance""" raise NotImplementedError('Implement in subclass') class CodeMixin(object): email = '' code = '' verified = False def verify(self): self.verified = True self.save() @classmethod def generate_code(cls): return uuid.uuid4().hex @classmethod def make_code(cls, email): code = cls() code.email = email code.code = cls.generate_code() code.verified = False code.save() return code @classmethod def get_code(cls, code): raise NotImplementedError('Implement in subclass') class BaseStorage(object): user = UserMixin nonce = NonceMixin association = AssociationMixin code = CodeMixin @classmethod def is_integrity_error(cls, exception): """Check if given exception flags an integrity error in the DB""" raise NotImplementedError('Implement in subclass')
bsd-3-clause
mattcongy/itshop
docker-images/taigav2/taiga-back/taiga/projects/likes/services.py
2
3693
# -*- coding: utf-8 -*- # Copyright (C) 2014-2016 Andrey Antukh <[email protected]> # Copyright (C) 2014-2016 Jesús Espino <[email protected]> # Copyright (C) 2014-2016 David Barragán <[email protected]> # Copyright (C) 2014-2016 Alejandro Alonso <[email protected]> # Copyright (C) 2014-2016 Anler Hernández <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.db.models import F from django.db.transaction import atomic from django.apps import apps from django.contrib.auth import get_user_model from .models import Like def add_like(obj, user): """Add a like to an object. If the user has already liked the object nothing happends, so this function can be considered idempotent. :param obj: Any Django model instance. :param user: User adding the like. :class:`~taiga.users.models.User` instance. """ obj_type = apps.get_model("contenttypes", "ContentType").objects.get_for_model(obj) with atomic(): like, created = Like.objects.get_or_create(content_type=obj_type, object_id=obj.id, user=user) if like.project is not None: like.project.refresh_totals() return like def remove_like(obj, user): """Remove an user like from an object. If the user has not liked the object nothing happens so this function can be considered idempotent. :param obj: Any Django model instance. :param user: User removing her like. :class:`~taiga.users.models.User` instance. 
""" obj_type = apps.get_model("contenttypes", "ContentType").objects.get_for_model(obj) with atomic(): qs = Like.objects.filter(content_type=obj_type, object_id=obj.id, user=user) if not qs.exists(): return like = qs.first() project = like.project qs.delete() if project is not None: project.refresh_totals() def get_fans(obj): """Get the fans of an object. :param obj: Any Django model instance. :return: User queryset object representing the users that liked the object. """ obj_type = apps.get_model("contenttypes", "ContentType").objects.get_for_model(obj) return get_user_model().objects.filter(likes__content_type=obj_type, likes__object_id=obj.id) def get_liked(user_or_id, model): """Get the objects liked by an user. :param user_or_id: :class:`~taiga.users.models.User` instance or id. :param model: Show only objects of this kind. Can be any Django model class. :return: Queryset of objects representing the likes of the user. """ obj_type = apps.get_model("contenttypes", "ContentType").objects.get_for_model(model) conditions = ('likes_like.content_type_id = %s', '%s.id = likes_like.object_id' % model._meta.db_table, 'likes_like.user_id = %s') if isinstance(user_or_id, get_user_model()): user_id = user_or_id.id else: user_id = user_or_id return model.objects.extra(where=conditions, tables=('likes_like',), params=(obj_type.id, user_id))
mit
areski/django
tests/auth_tests/test_middleware.py
86
1251
from django.contrib.auth.middleware import AuthenticationMiddleware from django.contrib.auth.models import User from django.http import HttpRequest from django.test import TestCase class TestAuthenticationMiddleware(TestCase): def setUp(self): self.user = User.objects.create_user('test_user', '[email protected]', 'test_password') self.middleware = AuthenticationMiddleware() self.client.force_login(self.user) self.request = HttpRequest() self.request.session = self.client.session def test_no_password_change_doesnt_invalidate_session(self): self.request.session = self.client.session self.middleware.process_request(self.request) self.assertIsNotNone(self.request.user) self.assertFalse(self.request.user.is_anonymous()) def test_changed_password_invalidates_session(self): # After password change, user should be anonymous self.user.set_password('new_password') self.user.save() self.middleware.process_request(self.request) self.assertIsNotNone(self.request.user) self.assertTrue(self.request.user.is_anonymous()) # session should be flushed self.assertIsNone(self.request.session.session_key)
bsd-3-clause
noironetworks/neutron
neutron/services/trunk/drivers/linuxbridge/agent/driver.py
3
8707
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron_lib.callbacks import events as local_events from neutron_lib.callbacks import registry from neutron_lib.callbacks import resources as local_resources from oslo_log import log as logging import oslo_messaging from neutron.api.rpc.callbacks import events from neutron.api.rpc.handlers import resources_rpc from neutron.services.trunk import constants as t_const from neutron.services.trunk.drivers.linuxbridge.agent import trunk_plumber from neutron.services.trunk.rpc import agent as trunk_rpc LOG = logging.getLogger(__name__) def init_handler(resource, event, trigger, payload=None): """Handler for agent init event.""" LinuxBridgeTrunkDriver() @registry.has_registry_receivers class LinuxBridgeTrunkDriver(trunk_rpc.TrunkSkeleton): """Driver responsible for handling trunk/subport/port events. Receives data model events from the server and VIF events from the agent and uses these to drive a Plumber instance to wire up VLAN subinterfaces for any trunks. 
""" def __init__(self, plumber=None, trunk_api=None): self._plumber = plumber or trunk_plumber.Plumber() self._tapi = trunk_api or _TrunkAPI(trunk_rpc.TrunkStub()) super(LinuxBridgeTrunkDriver, self).__init__() def handle_trunks(self, context, resource_type, trunks, event_type): """Trunk data model change from the server.""" for trunk in trunks: if event_type in (events.UPDATED, events.CREATED): self._tapi.put_trunk(trunk.port_id, trunk) self.wire_trunk(context, trunk) elif event_type == events.DELETED: self._tapi.put_trunk(trunk.port_id, None) self._plumber.delete_trunk_subports(trunk) def handle_subports(self, context, resource_type, subports, event_type): """Subport data model change from the server.""" affected_trunks = set() if event_type == events.DELETED: method = self._tapi.delete_trunk_subport else: method = self._tapi.put_trunk_subport for s in subports: affected_trunks.add(s['trunk_id']) method(s['trunk_id'], s) for trunk_id in affected_trunks: trunk = self._tapi.get_trunk_by_id(context, trunk_id) if not trunk: continue self.wire_trunk(context, trunk) @registry.receives(local_resources.PORT_DEVICE, [local_events.AFTER_DELETE]) def agent_port_delete(self, resource, event, trigger, context, port_id, **kwargs): """Agent informed us a VIF was removed.""" # NOTE(kevinbenton): we don't need to do anything to cleanup VLAN # interfaces if a trunk was removed because the kernel will do that # for us. We also don't update the trunk status to DOWN because we # don't want to race with another agent that the trunk may have been # moved to. @registry.receives(local_resources.PORT_DEVICE, [local_events.AFTER_UPDATE]) def agent_port_change(self, resource, event, trigger, context, device_details, **kwargs): """The agent hath informed us thusly of a port update or create.""" trunk = self._tapi.get_trunk(context, device_details['port_id']) if trunk: # a wild trunk has appeared! 
make its children self.wire_trunk(context, trunk) return # clear any VLANs in case this was a trunk that changed status while # agent was offline. self._plumber.delete_subports_by_port_id(device_details['port_id']) def wire_trunk(self, context, trunk): """Wire up subports while keeping the server trunk status apprised.""" if not self._plumber.trunk_on_host(trunk): LOG.debug("Trunk %s not present on this host", trunk.port_id) return self._tapi.bind_subports_to_host(context, trunk) try: self._plumber.ensure_trunk_subports(trunk) self._tapi.set_trunk_status(context, trunk, t_const.ACTIVE_STATUS) except Exception: if not self._plumber.trunk_on_host(trunk): LOG.debug("Trunk %s removed during wiring", trunk.port_id) return # something broke LOG.exception("Failure setting up subports for %s", trunk.port_id) self._tapi.set_trunk_status(context, trunk, t_const.DEGRADED_STATUS) class _TrunkAPI(object): """Our secret stash of trunks stored by port ID. Tell no one.""" def __init__(self, trunk_stub): self.server_api = trunk_stub self._trunk_by_port_id = {} self._trunk_by_id = {} self._sub_port_id_to_trunk_port_id = {} def _fetch_trunk(self, context, port_id): try: t = self.server_api.get_trunk_details(context, port_id) LOG.debug("Found trunk %(t)s for port %(p)s", dict(p=port_id, t=t)) return t except resources_rpc.ResourceNotFound: return None except oslo_messaging.RemoteError as e: if e.exc_type != 'CallbackNotFound': raise LOG.debug("Trunk plugin disabled on server. 
Assuming port %s is " "not a trunk.", port_id) return None def set_trunk_status(self, context, trunk, status): self.server_api.update_trunk_status(context, trunk.id, status) def bind_subports_to_host(self, context, trunk): self.server_api.update_subport_bindings(context, trunk.sub_ports) def put_trunk_subport(self, trunk_id, subport): LOG.debug("Adding subport %(sub)s to trunk %(trunk)s", dict(sub=subport, trunk=trunk_id)) if trunk_id not in self._trunk_by_id: # not on this agent return trunk = self._trunk_by_id[trunk_id] trunk.sub_ports = [s for s in trunk.sub_ports if s.port_id != subport.port_id] + [subport] def delete_trunk_subport(self, trunk_id, subport): LOG.debug("Removing subport %(sub)s from trunk %(trunk)s", dict(sub=subport, trunk=trunk_id)) if trunk_id not in self._trunk_by_id: # not on this agent return trunk = self._trunk_by_id[trunk_id] trunk.sub_ports = [s for s in trunk.sub_ports if s.port_id != subport.port_id] def put_trunk(self, port_id, trunk): if port_id in self._trunk_by_port_id: # already existed. expunge sub_port cross ref self._sub_port_id_to_trunk_port_id = { s: p for s, p in self._sub_port_id_to_trunk_port_id.items() if p != port_id} self._trunk_by_port_id[port_id] = trunk if not trunk: return self._trunk_by_id[trunk.id] = trunk for sub in trunk.sub_ports: self._sub_port_id_to_trunk_port_id[sub.port_id] = trunk.port_id def get_trunk_by_id(self, context, trunk_id): """Gets trunk object based on trunk_id. None if not in cache.""" return self._trunk_by_id.get(trunk_id) def get_trunk(self, context, port_id): """Gets trunk object for port_id. 
None if not trunk.""" if port_id not in self._trunk_by_port_id: # TODO(kevinbenton): ask the server for *all* trunk port IDs on # start and eliminate asking the server if every port is a trunk # TODO(kevinbenton): clear this on AMQP reconnect LOG.debug("Cache miss for port %s, fetching from server", port_id) self.put_trunk(port_id, self._fetch_trunk(context, port_id)) return self.get_trunk(context, port_id) return self._trunk_by_port_id[port_id] def get_trunk_for_subport(self, context, port_id): """Returns trunk if port_id is a subport, else None.""" trunk_port = self._sub_port_id_to_trunk_port_id.get(port_id) if trunk_port: return self.get_trunk(context, trunk_port)
apache-2.0
tdyas/pants
src/python/pants/backend/native/tasks/conan_fetch.py
1
9173
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). import functools import os import re from pants.backend.native.targets.external_native_library import ExternalNativeLibrary from pants.backend.native.targets.packaged_native_library import PackagedNativeLibrary from pants.backend.native.tasks.conan_prep import ConanPrep from pants.base.build_environment import get_pants_cachedir from pants.base.exceptions import TaskError from pants.base.workunit import WorkUnitLabel from pants.engine.platform import Platform from pants.task.simple_codegen_task import SimpleCodegenTask from pants.util.contextutil import temporary_dir from pants.util.dirutil import mergetree, safe_file_dump, safe_mkdir from pants.util.enums import match from pants.util.memo import memoized_property class ConanFetch(SimpleCodegenTask): gentarget_type = ExternalNativeLibrary sources_globs = ( "include/**/*", "lib/*", ) @property def validate_sources_present(self): return False def synthetic_target_type(self, target): return PackagedNativeLibrary default_remotes = { "conan-center": "https://conan.bintray.com", } @classmethod def register_options(cls, register): super().register_options(register) register( "--conan-remotes", type=dict, default=cls.default_remotes, advanced=True, fingerprint=True, help="The conan remotes to download conan packages from.", ) @classmethod def implementation_version(cls): return super().implementation_version() + [("ConanFetch", 1)] @classmethod def prepare(cls, options, round_manager): super().prepare(options, round_manager) round_manager.require_data(ConanPrep.tool_instance_cls) class ConanConfigError(TaskError): pass class ConanFetchError(TaskError): pass @property def _remotes_txt_content(self): """Generate a file containing overrides for Conan remotes which get applied to registry.json.""" return "{}\n".format( "\n".join( "{name} {url} {is_ssl}".format( name=name, url=url, 
is_ssl=re.match(r"^https://", url) is not None ) for name, url in self.get_options().conan_remotes.items() ) ) def _conan_user_home(self, conan, in_workdir=False): """Create the CONAN_USER_HOME for this task fingerprint and initialize the Conan remotes. See https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install for docs on configuring remotes. """ # This argument is exposed so tests don't leak out of the workdir. if in_workdir: base_cache_dir = self.workdir else: base_cache_dir = get_pants_cachedir() user_home_base = os.path.join(base_cache_dir, "conan-support", "conan-user-home") # Locate the subdirectory of the pants shared cachedir specific to this task's option values. user_home = os.path.join(user_home_base, self.fingerprint) conan_install_base = os.path.join(user_home, ".conan") # Conan doesn't copy remotes.txt into the .conan subdir after the "config install" command, it # simply edits registry.json. However, it is valid to have this file there, and Conan won't # touch it, so we use its presence to detect whether we have appropriately initialized the # Conan installation. remotes_txt_sentinel = os.path.join(conan_install_base, "remotes.txt") if not os.path.isfile(remotes_txt_sentinel): safe_mkdir(conan_install_base) # Conan doesn't consume the remotes.txt file just by being in the conan directory -- we need # to create another directory containing any selection of files detailed in # https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install # and "install" from there to our desired conan directory. with temporary_dir() as remotes_install_dir: # Create an artificial conan configuration dir containing just remotes.txt. remotes_txt_for_install = os.path.join(remotes_install_dir, "remotes.txt") safe_file_dump(remotes_txt_for_install, self._remotes_txt_content) # Configure the desired user home from this artificial config dir. 
argv = ["config", "install", remotes_install_dir] workunit_factory = functools.partial( self.context.new_workunit, name="initial-conan-config", labels=[WorkUnitLabel.TOOL], ) env = { "CONAN_USER_HOME": user_home, } cmdline, exit_code = conan.run(workunit_factory, argv, env=env) if exit_code != 0: raise self.ConanConfigError( "Error configuring conan with argv {} and environment {}: exited non-zero ({}).".format( cmdline, env, exit_code ), exit_code=exit_code, ) # Generate the sentinel file so that we know the remotes have been successfully configured for # this particular task fingerprint in successive pants runs. safe_file_dump(remotes_txt_sentinel, self._remotes_txt_content) return user_home @memoized_property def _conan_os_name(self): return match(Platform.current, {Platform.darwin: "Macos", Platform.linux: "Linux"}) @property def _copy_target_attributes(self): basic_attributes = [a for a in super()._copy_target_attributes if a != "provides"] return basic_attributes + [ "include_relpath", "lib_relpath", "native_lib_names", ] def execute_codegen(self, target, target_workdir): """Invoke the conan pex to fetch conan packages specified by a `ExternalNativeLibrary` target. :param ExternalNativeLibrary target: a target containing conan package specifications. :param str target_workdir: where to copy the installed package contents to. """ conan = self.context.products.get_data(ConanPrep.tool_instance_cls) # TODO: we should really be able to download all of these in one go, and we should make an # upstream PR to allow that against Conan if not. for conan_requirement in target.packages: # See https://docs.conan.io/en/latest/reference/commands/consumer/install.html for # documentation on the 'install' command. 
argv = [ "install", conan_requirement.pkg_spec, "--settings", "os={}".format(self._conan_os_name), ] for remote in self.get_options().conan_remotes: argv.extend(["--remote", remote]) workunit_factory = functools.partial( self.context.new_workunit, name="install-conan-{}".format(conan_requirement.pkg_spec), labels=[WorkUnitLabel.TOOL], ) # CONAN_USER_HOME is somewhat documented at # https://docs.conan.io/en/latest/mastering/sharing_settings_and_config.html. user_home = self._conan_user_home(conan) env = { "CONAN_USER_HOME": user_home, } with conan.run_with(workunit_factory, argv, env=env) as (cmdline, exit_code, workunit): if exit_code != 0: raise self.ConanFetchError( "Error performing conan install with argv {} and environment {}: exited non-zero ({}).".format( cmdline, env, exit_code ), exit_code=exit_code, ) # Read the stdout from the read-write buffer, from the beginning of the output, and convert # to unicode. conan_install_stdout = workunit.output("stdout").read_from(0).decode() pkg_sha = conan_requirement.parse_conan_stdout_for_pkg_sha(conan_install_stdout) installed_data_dir = os.path.join( user_home, ".conan", "data", conan_requirement.directory_path, "package", pkg_sha ) # Copy over the contents of the installed package into the target output directory. These # paths are currently hardcoded -- see `ExternalNativeLibrary`. mergetree( os.path.join(installed_data_dir, conan_requirement.include_relpath), os.path.join(target_workdir, "include"), ) mergetree( os.path.join(installed_data_dir, conan_requirement.lib_relpath), os.path.join(target_workdir, "lib"), )
apache-2.0
quantopian/coal-mine
coal_mine/abstract_store.py
1
3427
# Copyright 2015 Quantopian, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you # may not use this file except in compliance with the License. You # may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. See the License for the specific language governing # permissions and limitations under the License. """ Abstract store for Coal Mine Subclass for a specific storage engine. """ from abc import ABCMeta, abstractmethod class AbstractStore(object, metaclass=ABCMeta): # pragma: no cover @abstractmethod def __init__(self, *args, **kwargs): """Args and behavior are dependent on the storage engine.""" raise NotImplementedError('__init__') @abstractmethod def create(self, canary): """Make sure you copy the data in `canary` rather than storing the dict internally.""" raise NotImplementedError('create') @abstractmethod def update(self, identifier, updates): raise NotImplementedError('update') @abstractmethod def get(self, identifier): """Should raise KeyError if not found, or return a dict with these keys: id, name, description, slug, periodicity, emails, late, paused, deadline, history. History should be a list of tuples, each of which contains a naive UTC timestamp and a possibly empty comment, sorted from most to least recent. Deadline should be a naive UTC timestamp. NOTE: The caller could modify the dict you return, so don't return anything you have a pointer to internally! If you need to return a dict which you're also using internally, then deepcopy it.""" raise NotImplementedError('get') @abstractmethod def list(self, *, verbose=False, paused=None, late=None, search=None): """Return an iterator which yields dicts (but see the note on get()). 
If verbose is False, then the dicts contain only name and id, otherwise, all fields (same as returned by get()) are returned. If paused, late, and/or search are specified, they are used to filter the results. The latter is a regular expression (string, not regular expression object), which is matched against the name, slug, and id of canaries and only matches are returned.""" raise NotImplementedError('list') @abstractmethod def upcoming_deadlines(self): """Return an iterator which yields canaries (same as returned by get(); see in particular the note there) that are unpaused and not yet late, sorted by deadline in increasing order, i.e., the canary that will pass its deadline soonest is returned first.""" raise NotImplementedError('upcoming_deadlines') @abstractmethod def delete(self, identifier): """Raise KeyError if a canary with the specified identifier doesn't exist.""" raise NotImplementedError('delete') @abstractmethod def find_identifier(self, slug): """Should raise KeyError if a canary with the specified slug does not exist, or return the identifier string.""" raise NotImplementedError('find_identifier')
apache-2.0
blooparksystems/odoo
openerp/addons/base/ir/ir_config_parameter.py
15
4146
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. """ Store database-specific configuration parameters """ import uuid import datetime from openerp import SUPERUSER_ID from openerp.osv import osv, fields from openerp.tools import misc, config, ormcache """ A dictionary holding some configuration parameters to be initialized when the database is created. """ _default_parameters = { "database.secret": lambda: (str(uuid.uuid4()), ['base.group_erp_manager']), "database.uuid": lambda: (str(uuid.uuid1()), []), "database.create_date": lambda: (datetime.datetime.now().strftime(misc.DEFAULT_SERVER_DATETIME_FORMAT), ['base.group_user']), "web.base.url": lambda: ("http://localhost:%s" % config.get('xmlrpc_port'), []), } class ir_config_parameter(osv.osv): """Per-database storage of configuration key-value pairs.""" _name = 'ir.config_parameter' _rec_name = 'key' _columns = { 'key': fields.char('Key', required=True, select=1), 'value': fields.text('Value', required=True), 'group_ids': fields.many2many('res.groups', 'ir_config_parameter_groups_rel', 'icp_id', 'group_id', string='Groups'), } _sql_constraints = [ ('key_uniq', 'unique (key)', 'Key must be unique.') ] def init(self, cr, force=False): """ Initializes the parameters listed in _default_parameters. It overrides existing parameters if force is ``True``. """ for key, func in _default_parameters.iteritems(): # force=True skips search and always performs the 'if' body (because ids=False) ids = not force and self.search(cr, SUPERUSER_ID, [('key','=',key)]) if not ids: value, groups = func() self.set_param(cr, SUPERUSER_ID, key, value, groups=groups) def get_param(self, cr, uid, key, default=False, context=None): """Retrieve the value for a given key. :param string key: The key of the parameter value to retrieve. :param string default: default value if parameter is missing. :return: The value of the parameter, or ``default`` if it does not exist. 
:rtype: string """ result = self._get_param(cr, uid, key) if result is None: return default return result @ormcache('uid', 'key') def _get_param(self, cr, uid, key): params = self.search_read(cr, uid, [('key', '=', key)], fields=['value'], limit=1) if not params: return None return params[0]['value'] def set_param(self, cr, uid, key, value, groups=(), context=None): """Sets the value of a parameter. :param string key: The key of the parameter value to set. :param string value: The value to set. :param list of string groups: List of group (xml_id allowed) to read this key. :return: the previous value of the parameter or False if it did not exist. :rtype: string """ self._get_param.clear_cache(self) ids = self.search(cr, uid, [('key','=',key)], context=context) gids = [] for group_xml in groups: res_id = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, group_xml) if res_id: gids.append((4, res_id)) vals = {'value': value} if gids: vals.update(group_ids=gids) if ids: param = self.browse(cr, uid, ids[0], context=context) old = param.value self.write(cr, uid, ids, vals, context=context) return old else: vals.update(key=key) self.create(cr, uid, vals, context=context) return False def write(self, cr, uid, ids, vals, context=None): self._get_param.clear_cache(self) return super(ir_config_parameter, self).write(cr, uid, ids, vals, context=context) def unlink(self, cr, uid, ids, context=None): self._get_param.clear_cache(self) return super(ir_config_parameter, self).unlink(cr, uid, ids, context=context)
gpl-3.0
diamondman/proteusisc
proteusisc/bittypes.py
1
42182
from __future__ import generator_stop from itertools import islice import collections from bitarray import bitarray as _bitarray import math from .errors import ProteusDataJoinError from .contracts import ZERO, ONE, NOCARE, ARBITRARY class bitarray(_bitarray): def _easy_mergable(self, other): return False #Consider merging single bit bitarrays. def byteiter(self): data = self.tobytes() for byte in data: yield byte class ConstantBitarray(collections.Sequence): """A bitarray type where all bits are the same value. The bitarray class is already more efficient at storing a sequence of boolean values than an array, but all bits having the same value is a common enough case to optimize for. The most immediate obvious is a lower memory footprint, as only one boolean value is stored. But there are more important benefits. Splitting or reversing a constant bitarray, or combining two constant bitarrays (that have the same value) is trivial. Checking if any or all bits are set is trivial (A normal bitarray has to scan every bit every time such a check is done). A constant bitarray shows intent. By simply checking the type of the bitarray, we can know if sending the data in the bitarray down a wire requires arbitrary control of the signal value for every bit, of if we can get away with a simplier constant value. Args: val: A boolean that will be the value for each bit in the array. length: An integer specifying how many bits are in the array. 
""" def __init__(self, val, length): self._val = bool(val) self._length = length def __len__(self): return self._length def __getitem__(self, index): if isinstance(index, slice): indices = index.indices(len(self)) return ConstantBitarray(self._val, len(range(*indices))) if isinstance(index, int): index = len(self)-abs(index) if index < 0 else index if (index < self._length and index >= 0): return self._val raise IndexError("%s index out of range"%type(self)) raise TypeError("%s indices must be integers or slices, not %s"% (type(self), type(index))) def __repr__(self): return "<Const: %s (%s)>"%\ (self._val, self._length)# pragma: no cover def __add__(self, other): """Handles combining different bitarray types. There are special rules for combining each type of bitarray: bitarray, ConstantBitarray, and NoCareBitArray. For example, two NoCareBitarrays combine into a bigger NoCareBit array, while combining two ConstantBitArrays depends on if the two array's constant value are the same. """ if len(self) == 0: return other if len(other) == 0: return self if isinstance(other, ConstantBitarray): if self._val == other._val: return ConstantBitarray(self._val, self._length+other._length) return CompositeBitarray(self, other) if isinstance(other, bitarray): return CompositeBitarray(self, other) return NotImplemented def __radd__(self, other): if isinstance(other, bitarray): return CompositeBitarray(other, self) return NotImplemented def __iter__(self): for _ in range(self._length): yield self._val def __reversed__(self): for _ in range(self._length): yield self._val def __eq__(self, other): if isinstance(other, ConstantBitarray): return len(self) == len(other) and self._val == other._val if isinstance(other, bitarray): if len(self) != len(other): return False if self._val: return other.all() else: return not other.any() return NotImplemented def count(self, val=True): """Get the number of bits in the array with the specified value. 
Args: val: A boolean value to check against the array's value. Returns: An integer of the number of bits in the array equal to val. """ if val == self._val: return self._length return 0 def any(self): return self._val def all(self): return self._val #@profile def tobytes(self): if not len(self): return b'' if self._val: if len(self)%8: return bytes([0xFF]*(math.ceil(len(self)/8)-1)+\ [(0xFF<<(8-len(self)%8))&0xFF]) return bytes([0xFF]*(math.ceil(len(self)/8))) return bytes([0x00]*(math.ceil(len(self)/8))) def reverse(self): pass def split(self, bitindex): return self[:bitindex], self[bitindex:] def _easy_mergable(self, other): return isinstance(other, NoCareBitarray) or\ (isinstance(other, PreferFalseBitarray) and\ self._val is False) or\ (isinstance(other, ConstantBitarray) and\ other._val == self._val) def byteiter(self): if self._val: for _ in range(math.ceil(len(self)/8)): yield 0xFF else: for _ in range(math.ceil(len(self)/8)): yield 0 class NoCareBitarray(collections.Sequence): """A bitarray type with no preference on its bit values. https://en.wikipedia.org/wiki/Don%27t-care_term When writing data to certain fields, sometimes the value of the field simply does not matter. In programming, we often fill 0 or Null for these values because the cost of any of these filler values are the same. If such an no care parameter were set to 0, but setting it to 1 would somehow let the computer run the program faster, it would be a clear win. But the computer can not tell that the 0 put as a place holder is JUST a placeholder. In this project, parameters passed over a serial datastream are often represented with the type bitarray.bitarray. To allow optimizing the sending of data, the NoCareBitarray explicitly stands in for a sequence of bits where the value does not matter, so that as it is combined with other bits, a more efficient (by some metric) sequence of bits can be produced than if strict adherence to a placeholder value were held. 
Like ConstantBitarrays, NoCareBitarrays have a small memory footprint, and are efficiently combined, sliced, and checked for values. A nocare bitarray shows intent. By simply checking the type of the bitarray, we can know if sending the data in the bitarray down a wire requires have any requirements at all, or if the bits are free to be optimized aggressively without danger of losing useful data. Args: length: An integer specifying how many bits are in the array. """ def __init__(self, length): self._length = length def __len__(self): return self._length def __getitem__(self, index): if isinstance(index, slice): indices = index.indices(len(self)) return NoCareBitarray(len(range(*indices))) if isinstance(index, int): index = len(self)-abs(index) if index < 0 else index if (index < self._length and index >= 0): return None#False raise IndexError("%s index out of range"%type(self)) raise TypeError("%s indices must be integers or slices, not %s"% (type(self), type(index))) def __iter__(self): for _ in range(self._length): yield None#False def __reversed__(self): for _ in range(self._length): yield None#False def __repr__(self): return "<NC: (%s)>"%self._length # pragma: no cover def __add__(self, other): """Handles combining different bitarray types. There are special rules for combining each type of bitarray: bitarray, ConstantBitarray, and NoCareBitArray. For example, two NoCareBitarrays combine into a bigger NoCareBit array, while combining two ConstantBitArrays depends on if the two array's constant value are the same. 
""" if isinstance(other, bool): return NotImplemented if len(self) == 0: return other if len(other) == 0: return self if isinstance(other, NoCareBitarray): return NoCareBitarray(self._length+other._length) if isinstance(other, ConstantBitarray): return ConstantBitarray(other._val, self._length+other._length) if isinstance(other, (bitarray, PreferFalseBitarray)): return CompositeBitarray(self, other) return NotImplemented def __radd__(self, other): if isinstance(other, bool): return NotImplemented if len(self) == 0: return other if len(other) == 0: return self if isinstance(other, ConstantBitarray): return ConstantBitarray(other._val, self._length+other._length) if isinstance(other, (bitarray, PreferFalseBitarray)): return CompositeBitarray(other, self) return NotImplemented def count(self, val=True): """Get the number of bits in the array with the specified value. Args: val: A boolean value to check against the array's value. Returns: An integer of the number of bits in the array equal to val. 
""" return 0 def any(self): return False def all(self): return False #@profile def tobytes(self): if not len(self): return b'' return bytes([0x00]*(math.ceil(len(self)/8))) def reverse(self): pass def split(self, bitindex): return self[:bitindex], self[bitindex:] def _easy_mergable(self, other): return isinstance(other, (NoCareBitarray, PreferFalseBitarray, ConstantBitarray)) def byteiter(self): for _ in range(math.ceil(len(self)/8)): yield 0 class PreferFalseBitarray(collections.Sequence): def __init__(self, length): self._length = length def __len__(self): return self._length def __getitem__(self, index): if isinstance(index, slice): indices = index.indices(len(self)) return PreferFalseBitarray(len(range(*indices))) if isinstance(index, int): index = len(self)-abs(index) if index < 0 else index if (index < self._length and index >= 0): return None#False raise IndexError("%s index out of range"%type(self)) raise TypeError("%s indices must be integers or slices, not %s"% (type(self), type(index))) def __iter__(self): for _ in range(self._length): yield None#False def __reversed__(self): for _ in range(self._length): yield None#False def __repr__(self): return "<F*: (%s)>"%self._length # pragma: no cover def __add__(self, other): """Handles combining different bitarray types. There are special rules for combining each type of bitarray: bitarray, ConstantBitarray, and NoCareBitArray. For example, two NoCareBitarrays combine into a bigger NoCareBit array, while combining two ConstantBitArrays depends on if the two array's constant value are the same. 
""" if isinstance(other, bool): return NotImplemented if len(self) == 0: return other if len(other) == 0: return self if isinstance(other, (PreferFalseBitarray, NoCareBitarray)): return PreferFalseBitarray(self._length+other._length) if isinstance(other, ConstantBitarray): if not other._val: return ConstantBitarray(False, self._length+other._length) return CompositeBitarray(self, other) if isinstance(other, bitarray): return CompositeBitarray(self, other) return NotImplemented def __radd__(self, other): if isinstance(other, bool): return NotImplemented if len(self) == 0: return other if len(other) == 0: return self if isinstance(other, (PreferFalseBitarray, NoCareBitarray)): return PreferFalseBitarray(self._length+other._length) if isinstance(other, ConstantBitarray): if not other._val: return ConstantBitarray(False, self._length+other._length) return CompositeBitarray(other, self) if isinstance(other, bitarray): return CompositeBitarray(other, self) return NotImplemented def count(self, val=True): """Get the number of bits in the array with the specified value. Args: val: A boolean value to check against the array's value. Returns: An integer of the number of bits in the array equal to val. """ return 0 def any(self): return False def all(self): return False #@profile def tobytes(self): if not len(self): return b'' return bytes([0x00]*(math.ceil(len(self)/8))) def reverse(self): pass def split(self, bitindex): return self[:bitindex], self[bitindex:] def _easy_mergable(self, other): return isinstance(other, (NoCareBitarray, PreferFalseBitarray))\ or (isinstance(other, ConstantBitarray) and\ other._val is False) def byteiter(self): for _ in range(math.ceil(len(self)/8)): yield 0 class CompositeBitarray(collections.Sequence): """A container to hold multiple bitarray types without actually combining them and losing information about which bits are NoCare. Most bits marked as No Care have no negative effect if the bits assume either True or False. 
If a ConstantBitArray(True,...) is added to a NoCareBitarray(...), the result will be a ConstantBitArray with a value of True and the length of both component bitarrays. This is fine for TDI and TMS bits, but not acceptable for TDO bits. For primitives that support arbitrary tdo bits, the No Care bits that were added to the sequence should turn into False bits, which is violated in the addition demonstrated above (the NoCare bits are not retrievable from the combined ConstantBitArray). The current solution is to build a bitarray class that keeps track of the component bitarrays that would normally have been merged together. These components stay split until the point the 'prepare' method is called. Arguments to 'prepare' specify if the associated primitive supports arbitrary TDO data or not, so the combination of data can take into account if the NoCare bits should be converted to False or True. """ #@profile def __init__(self, component1=None, component2=None): """Create a bitarray object that stores its components by reference). Args: *components: Any number of bitarray instances to store in this composition. 
""" if component1 is None and component2 is not None: component1 = component2 component2 = None self._llhead = None self._lltail = None #self._length = 0 #self._offset = 0 if isinstance(component1, CompositeBitarray): self._llhead = component1._llhead self._lltail = component1._lltail self._offset = component1._offset self._tailbitsused = component1._tailbitsused self._length = len(component1) else: self._llhead = self._lltail = _DLLNode(component1) self._offset = 0 self._tailbitsused = len(component1) self._length = self._tailbitsused if component2 is not None: oldtail = self._lltail if isinstance(component2, CompositeBitarray): if self._lltail is component2._llhead: if self._tail_end != component2._offset: raise ProteusDataJoinError() if component2._is_single_llnode: self._tailbitsused += component2._tailbitsused else: self._tailbitsused = component2._tailbitsused self._lltail = component2._lltail self._length += len(component2) elif self._lltail.next is component2._llhead and\ self._tailoffset == 0 and\ component2._offset == 0: self._lltail = component2._lltail self._tailbitsused = component2._tailbitsused self._length += len(component2) elif component2._llhead.prev is not None or\ self._lltail.next is not None or\ component2._offset or self._tailoffset or\ self._llhead is component2._lltail: #Will not catch everything. Good enough to #prevent most accidents. A 'perfect' version #would require walking the whole tree. No way. 
raise ProteusDataJoinError() else: self._length += len(component2) self._lltail.next = component2._llhead self._lltail = component2._lltail self._tailbitsused = component2._tailbitsused else: if self._tailoffset or self._lltail.next is not None: raise ProteusDataJoinError() self._tailbitsused = len(component2) self._length += self._tailbitsused node = _DLLNode(component2) node.prev = self._lltail self._lltail = node #WHEN IT IS OK TO MERGE #oldtail can merge right if (oldtail is not head or offset is 0) and (oldtail.next is not tail or tailbitsused is len of node) and data is combinable. Do it recursive? #Merging can happen right until can't. Move back node and merge until can't. Repeat till new left node is incompatible. #if merging with the tail node, the tail node is fully used #Merge will start at seam, or have nothing to do. if oldtail is not self._llhead or self._offset == 0: self._do_merge(oldtail) def _do_merge(self, startpoint=None, stoponfail=True): if self._is_single_llnode: return headend = self._llhead if self._offset == 0 else \ self._llhead.next tailend = self._lltail if self._tailbitsused ==\ self._taillen else self._lltail.prev if not startpoint: startpoint = tailend.prev if headend is tailend: return #Skip if only one node in merge list. 
for mergebase in startpoint.iterprevtill(headend): anymerges = False mergetarget = mergebase.next while True: if mergebase.value._easy_mergable(mergetarget.value): #Merge two links in the chain anymerges = True mergebase._value += mergetarget.value mergebase.next = mergetarget.next if mergetarget is self._lltail: self._lltail = mergebase self._tailbitsused = len(mergebase._value) else: break if mergetarget is tailend: tailend = mergebase break mergetarget = mergetarget.next if not anymerges and stoponfail: break def _iter_components(self): for elem in self._llhead.iternexttill(self._lltail): yield elem.value def __len__(self): return self._length def __getitem__(self, index): if isinstance(index, int): print("GETTING", index, "WARNING, SLOW!") index = len(self)-abs(index) if index < 0 else index if (index < self._length and index >= 0): index += self._offset for elem in self._iter_components(): if index < len(elem): return elem[index] index -= len(elem) raise IndexError("Iteration finished before index found.") raise IndexError("%s index out of range"%type(self)) raise TypeError("%s indices must be int, not %s"% (type(self), type(index)) ) def __str__(self): return "".join(['?' if isinstance(elem, NoCareBitarray) else ('!' 
if isinstance(elem, PreferFalseBitarray) else (('T' if b else 'F') if isinstance(elem, ConstantBitarray) else ('1' if b else '0'))) for elem in self._iter_components() for b in elem])\ [self._offset:-self._tailoffset or None] def __repr__(self): return "<CMP: %s (%s)>"%\ (str(self), self._length)# pragma: no cover #@profile def __add__(self, other): if isinstance(other, (CompositeBitarray, ConstantBitarray, NoCareBitarray, bitarray, PreferFalseBitarray)): return CompositeBitarray(self, other) return NotImplemented #@profile def __radd__(self, other): if isinstance(other, (ConstantBitarray, NoCareBitarray, bitarray, PreferFalseBitarray)): return CompositeBitarray(other, self) return NotImplemented def __iter__(self): node = self._llhead if self._llhead is self._lltail: for bit in islice(node.value, self._offset, self._offset+self._tailbitsused): yield bit return else: for bit in islice(node.value, self._offset, None): yield bit while True: node = node.next if node is self._lltail: break for bit in node.value: yield bit for bit in islice(node.value, None, self._tailbitsused or None): yield bit def __reversed__(self): node = self._lltail ptiter = reversed(node.value) for _ in range(self._tailoffset): next(ptiter) if node is self._llhead: for _ in range(self._tailoffset, len(node.value)-self._offset): yield next(ptiter) return else: for bit in ptiter: yield bit while True: node = node.prev if node is self._llhead: break for bit in reversed(node.value): yield bit ptiter = reversed(node.value) for _ in range(len(node.value)-self._offset): yield next(ptiter) def __eq__(self, other): if isinstance(other, collections.Iterable): if len(self) != len(other): return False i1 = iter(self) i2 = iter(other) def checkwithnone(a, b): if a is None or b is None: return True return a == b return all(checkwithnone(next(i1), v) for v in i2) return NotImplemented def count(self, val=True): """Get the number of bits in the array with the specified value. 
Args: val: A boolean value to check against the array's value. Returns: An integer of the number of bits in the array equal to val. """ return sum((elem.count(val) for elem in self._iter_components())) def any(self): return any((elem.any() for elem in self._iter_components())) def all(self): return all((elem.all() for elem in self._iter_components())) def split(self, bitindex): if bitindex < 0: raise ValueError("bitindex must be larger or equal to 0.") if bitindex > len(self): raise ValueError("bitindex larger than the array's size. " "Len: %s; bitindex: %s"%(len(self), bitindex)) if bitindex == 0: return None, self if bitindex == len(self): return self, None bitoffset = 0 bitindexoffset = bitindex + self._offset for comp in self._llhead.iternexttill(self._lltail): if bitindexoffset in range( bitoffset+self._tail_left_offset, bitoffset+len(comp.value)): break else: bitoffset += len(comp.value) elemindex = bitindexoffset-bitoffset left = CompositeBitarray(self) left._lltail = comp if elemindex else comp.prev left._offset = self._offset left._tailbitsused = \ (elemindex or len(comp.prev.value))-left._tail_left_offset left._length = bitindex right = CompositeBitarray(self) right._llhead = comp.next if elemindex == len(comp.value)\ else comp right._offset = 0 if elemindex == len(comp.value)\ else elemindex right._tailbitsused = self._tailbitsused-\ (right._tail_left_offset- self._tail_left_offset) right._length = len(self)-bitindex return left, right def prepare(self, *, primef, reqef): """Extract the composite array's data into a usable bitarray based on if NoCare bits should be rendered as True or False. This method does the heavy lifting of producing a bitarray that is more efficient for tdo bits when that optimization is available. KWArgs: primef: A contracts.Requirement capability of the associated primitive. reqef: A contracts.Requirement (generated from primitive compilation) describing the signal requirements of the data in this CompositeBitarray. 
Returns: A bitarray (CompositeBitarray, ConstantBitarray, etc) that is the combined result of all the composite bitarray's components. If this CompositeBitarray's backing linked list can be merged into a single node, that single node is returned. Otherwise, this CompositeBitarray is returned. """ #TODO remove bitarray copies! if not primef.satisfies(reqef): raise Exception("Compiler error. Requested effect can not be " "satisfied by primitive capabilities") assertPreferFalse = reqef == ZERO or primef == ARBITRARY or\ (reqef == NOCARE and primef == ZERO) testBitarrayFalse = reqef==ZERO or\ (reqef==NOCARE and primef==ZERO) testBitarrayTrue = reqef==ONE or (reqef==NOCARE and primef==ONE) assert not (testBitarrayTrue and testBitarrayFalse) #print("DATA", self) #print("ORIG", ["%s(%s:%s)"% # (type(elem.value).__name__, # elem.value._val if isinstance(elem.value, # ConstantBitarray)\ # else "_", len(elem.value)) # for elem in self._llhead.iternexttill(self._lltail)]) if self._offset or self._tailoffset: if self._is_single_llnode: if isinstance(self._llhead.value, (ConstantBitarray, NoCareBitarray, PreferFalseBitarray)): oldnode = self._llhead if self._offset == 0: oldnode.prev = None if self._tailoffset == 0: oldnode.next = None self._llhead = _DLLNode( oldnode.value[self._offset:\ self._offset+self._tailbitsused]) self._lltail = self._llhead self._offset = 0 self._tailbitsused = self._taillen elif isinstance(self._llhead.value, bitarray): if testBitarrayFalse or testBitarrayTrue: oldnode = self._llhead newval = oldnode.value[self._offset: self._offset+self._tailbitsused] if testBitarrayFalse: if not newval.any(): newval = ConstantBitarray(False, len(newval)) else: raise Exception("bitarray in data contains a 1") if testBitarrayTrue: if newval.all(): newval = ConstantBitarray(True, len(newval)) else: raise Exception("bitarray in data contains a 0") self._llhead = _DLLNode(newval) self._lltail = self._llhead self._offset = 0 self._tailbitsused = self._taillen else: #IF 
HEAD IS NOT TAIL; OFFSET OR TAILOFFSET if self._offset: if isinstance(self._llhead.value, (ConstantBitarray, NoCareBitarray, PreferFalseBitarray)): oldhead = self._llhead self._llhead = _DLLNode( oldhead.value[self._offset:]) self._llhead.next = oldhead.next oldhead.next = None self._offset = 0 elif isinstance(self._llhead.value, bitarray): oldhead = self._llhead newval = oldhead.value[self._offset:] if testBitarrayFalse: if not newval.any(): newval = ConstantBitarray(False, len(newval)) else: raise Exception("bitarray in data contains a 1") if testBitarrayTrue: if newval.all(): newval = ConstantBitarray(True, len(newval)) else: raise Exception("bitarray in data contains a 0") self._llhead = _DLLNode(newval) self._llhead.next = oldhead.next oldhead.next = None self._offset = 0 if self._tailoffset:#IF HEAD IS NOT TAIL AND TAILOFFSET if isinstance(self._lltail.value, (ConstantBitarray, NoCareBitarray, PreferFalseBitarray)): oldtail = self._lltail self._lltail = _DLLNode( oldtail.value[:self._tailbitsused]) self._lltail.prev = oldtail.prev oldtail.prev = None self._tailbitsused = self._taillen elif isinstance(self._lltail.value, bitarray): oldtail = self._lltail newval = oldtail.value[:self._tailbitsused] if testBitarrayFalse: if not newval.any(): newval = ConstantBitarray(False, len(newval)) else: raise Exception("bitarray in data contains a 1") if testBitarrayTrue: if newval.all(): newval = ConstantBitarray(True, len(newval)) else: raise Exception("bitarray in data contains a 0") self._lltail = _DLLNode(newval) self._lltail.prev = oldtail.prev oldtail.prev = None self._tailbitsused = self._taillen for elem in self._llhead.iternexttill(self._lltail): if isinstance(elem.value, PreferFalseBitarray): if assertPreferFalse: elem._value = ConstantBitarray(False, len(elem.value)) else: elem._value = NoCareBitarray(len(elem.value)) if isinstance(elem.value, bitarray): if testBitarrayFalse: if not elem.value.any(): elem.value = ConstantBitarray(False, len(elem.value)) else: 
raise Exception("bitarray in data contains a 1") if testBitarrayTrue: if elem.value.all(): elem.value = ConstantBitarray(True, len(elem.value)) else: raise Exception("bitarray in data contains a 0") #print("TRAN", ["%s(%s:%s)"% # (type(elem.value).__name__, # elem.value._val if isinstance(elem.value, # ConstantBitarray)\ # else "_", len(elem.value)) # for elem in self._llhead.iternexttill(self._lltail)]) if not self._is_single_llnode and\ (self._lltail.next is not self._llhead or\ (self._offset == 0 and self._tailbitsused == self._taillen) ): self._do_merge(stoponfail=False) #print("\033[1mPOST", "+ ".join(["%s%s(%s:%s)\033[0m"% # ('\033[91m' if isinstance(elem.value, bitarray) else # ('\033[94m' if isinstance(elem.value, # (NoCareBitarray, PreferFalseBitarray)) # else '\033[92m'),type(elem.value).__name__, # elem.value._val if isinstance(elem.value, # ConstantBitarray)\ # else (elem.value.to01() if isinstance(elem.value, # bitarray) # else "_"), len(elem.value)) # for elem in self._llhead.iternexttill(self._lltail)])) if self._is_single_llnode and self._offset == 0 and\ self._tailbitsused == self._taillen: if isinstance(self._llhead.value, (NoCareBitarray, PreferFalseBitarray)): return ConstantBitarray(False, len(self._llhead.value)) return self._llhead.value return self @property def _taillen(self): return len(self._lltail.value) @property def _tail_end(self): return self._tail_left_offset + self._tailbitsused @property def _tail_left_offset(self): return self._offset if self._is_single_llnode else 0 @property def _tailoffset(self): return self._taillen-self._tailbitsused-self._tail_left_offset @property def _headbitsused(self): return self._tailbitsused if self._is_single_llnode else\ (len(self._llhead.value)-self._offset) @property def _is_single_llnode(self): return self._lltail is self._llhead def tobytes(self): def bnext(iterator): return bool(next(iterator)) data = bytearray(math.ceil(len(self)/8)) it = iter(self) i = -1 for i in range((len(self)//8)//2): 
data[i<<1], data[(i<<1)+1] = \ bitarray((bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it), bnext(it)))\ .tobytes() i2 = (i+1)<<1 if (len(self)/8)%2 >= 1: data[i2] =\ (bnext(it)<<7 | bnext(it)<<6 | bnext(it)<<5 | bnext(it)<<4 |\ bnext(it)<<3 | bnext(it)<<2 | bnext(it)<<1 | bnext(it)) i2 += 1 offset = 7 tmp = 0 for b in it: tmp |= bool(b)<<offset offset -= 1 data[-1] = tmp return data #tmpba = bitarray(self) #return tmpba.tobytes() def byteiter(self): elemiter = self._iter_components() outoffset = 0 res = 0 #PROCESS FIRST ELEMENT IF OFFSET if self._offset: elem = next(elemiter) bitsused = self._headbitsused ielem = elem.byteiter() for _ in range(self._offset//8):#Skip full bytes offset next(ielem) inoffset = self._offset%8 #offset in first used byte if inoffset == 0: for _ in range(bitsused//8): yield next(ielem) outoffset = bitsused%8 if outoffset: res = next(ielem)&(0x100-(1<<(8-outoffset))) else: res = next(ielem) << inoffset for _ in range(bitsused//8): tmp2 = next(ielem) yield (res | (tmp2>>(8-inoffset)))&0xFF res = tmp2 << inoffset bitsofextrabyte = bitsused%8 if bitsofextrabyte == 0: #perfect alignment with output bytes outoffset = 0 #elif bitsofextrabyte > inoffset: # print("SHOULD BE IMPOSSIBLE!")#pragma: no cover # raise Exception("IMPOSSIBLE?!")#pragma: no cover else: #NO MORE BITS NEEDED FOR CURRENT BYTE outoffset = bitsofextrabyte res &= (0x100-(1<<(8-bitsofextrabyte))) #NORMAL LOOP for elem in elemiter: ielem = elem.byteiter() if outoffset == 0: for _ in range(len(elem)//8): yield next(ielem) outoffset = len(elem)%8 if outoffset: res = next(ielem)&(0x100-(1<<(8-outoffset))) else: if len(elem) < 8-outoffset: tmp2 = next(ielem)&(0x100-(1<<(8-len(elem)))) res |= tmp2 >> outoffset outoffset += len(elem) elif len(elem) == 8-outoffset: res |= next(ielem) >> outoffset outoffset = 0 yield res else: outoffsetinv = 8-outoffset for _ in 
range((outoffset+len(elem))//8): tmp2 = next(ielem) yield (res | (tmp2>>outoffset))&0xFF res = tmp2 << outoffsetinv if (outoffset+len(elem))%8 == 0: #If outoffset plus number of bits is divisible by 8. #ONLY HAPPENS WHEN LAST BYTE PERFECTLY LINED UP outoffset = 0 elif (outoffset+len(elem))%8 > outoffset: #NEED MORE BITS res |= next(ielem) >> outoffset outoffset = (outoffset+len(elem))%8 res &= (0x100-(1<<(8-outoffset))) else: #NO MORE BITS NEEDED FOR CURRENT BYTE res |= tmp2 >> outoffset outoffset = (outoffset+len(elem))%8 res &= (0x100-(1<<(8-outoffset))) if outoffset: yield res class _DLLNode(object): def __init__(self, value): self._value = value self._next = None self._prev = None @property def next(self): return self._next @next.setter def next(self, node): if self is node: raise ValueError("Invalid next node. Infinite Loop") self._next = node if node is not None: node._prev = self @property def prev(self): return self._prev @prev.setter def prev(self, node): if self is node: raise ValueError("Invalid prev node. Infinite Loop") self._prev = node if node is not None: node._next = self @property def value(self): return self._value def iternexttill(self, target): node = self while True: yield node if node is target: break node = node.next def iterprevtill(self, target): node = self while True: yield node if node is target: break node = node.prev def __repr__(self): return "Node(%s%s)"%\ (self.value[:32], "..." if len(self.value)>32 else "")
lgpl-2.1
Lilywei123/tempest
tempest/cli/__init__.py
6
4600
# Copyright 2013 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools from tempest_lib.cli import base from tempest_lib.cli import output_parser import testtools from tempest.common import credentials from tempest import config from tempest import exceptions from tempest.openstack.common import versionutils from tempest import test CONF = config.CONF def check_client_version(client, version): """Checks if the client's version is compatible with the given version @param client: The client to check. @param version: The version to compare against. @return: True if the client version is compatible with the given version parameter, False otherwise. """ current_version = base.execute(client, '', params='--version', merge_stderr=True, cli_dir=CONF.cli.cli_dir) if not current_version.strip(): raise exceptions.TempestException('"%s --version" output was empty' % client) return versionutils.is_compatible(version, current_version, same_major=False) def min_client_version(*args, **kwargs): """A decorator to skip tests if the client used isn't of the right version. @param client: The client command to run. For python-novaclient, this is 'nova', for python-cinderclient this is 'cinder', etc. @param version: The minimum version required to run the CLI test. 
""" def decorator(func): @functools.wraps(func) def wrapper(*func_args, **func_kwargs): if not check_client_version(kwargs['client'], kwargs['version']): msg = "requires %s client version >= %s" % (kwargs['client'], kwargs['version']) raise testtools.TestCase.skipException(msg) return func(*func_args, **func_kwargs) return wrapper return decorator class ClientTestBase(test.BaseTestCase): @classmethod def resource_setup(cls): if not CONF.cli.enabled: msg = "cli testing disabled" raise cls.skipException(msg) super(ClientTestBase, cls).resource_setup() cls.cred_prov = credentials.get_isolated_credentials(cls.__name__) cls.creds = cls.cred_prov.get_admin_creds() def _get_clients(self): clients = base.CLIClient(self.creds.username, self.creds.password, self.creds.tenant_name, CONF.identity.uri, CONF.cli.cli_dir) return clients # TODO(mtreinish): The following code is basically copied from tempest-lib. # The base cli test class in tempest-lib 0.0.1 doesn't work as a mixin like # is needed here. The code below should be removed when tempest-lib # provides a way to provide this functionality def setUp(self): super(ClientTestBase, self).setUp() self.clients = self._get_clients() self.parser = output_parser def assertTableStruct(self, items, field_names): """Verify that all items has keys listed in field_names. :param items: items to assert are field names in the output table :type items: list :param field_names: field names from the output table of the cmd :type field_names: list """ for item in items: for field in field_names: self.assertIn(field, item) def assertFirstLineStartsWith(self, lines, beginning): """Verify that the first line starts with a string :param lines: strings for each line of output :type lines: list :param beginning: verify this is at the beginning of the first line :type beginning: string """ self.assertTrue(lines[0].startswith(beginning), msg=('Beginning of first line has invalid content: %s' % lines[:3]))
apache-2.0
imankulov/sentry
src/sentry/debug/panels/redis.py
23
2896
from __future__ import absolute_import, unicode_literals from django.template import Context, Template from django.utils.translation import ugettext_lazy as _ from time import time from .base import CallRecordingPanel from ..utils.function_wrapper import FunctionWrapper from ..utils.patch_context import PatchContext TEMPLATE = Template(""" {% load i18n %} <h4>{% trans "Requests" %}</h4> <table> <thead> <tr> <th>{% trans "Duration" %}</th> <th>{% trans "Command" %}</th> <th>{% trans "Args" %}</th> </tr> </thead> <tbody> {% for call in calls %} <tr> <td>{{ call.duration }} ms</td> <td>{{ call.command }}</td> <td>{{ call.args }} {{ call.kwargs }}</td> </tr> {% endfor %} </tbody> </table> """) class RedisPipelineWrapper(FunctionWrapper): def __call__(self, func, pipeline, *args, **kwargs): __traceback_hide__ = True # NOQA command_stack = pipeline.command_stack[:] start = time() try: return func(pipeline, *args, **kwargs) finally: end = time() data = { 'name': 'pipeline', 'args': repr(command_stack), 'kwargs': repr({}), 'start': start, 'end': end, } self.record(data) class RedisWrapper(FunctionWrapper): def __call__(self, func, *args, **kwargs): __traceback_hide__ = True # NOQA start = time() try: return func(*args, **kwargs) finally: end = time() data = { 'name': args[1], 'args': repr(args[2:]), 'kwargs': repr(kwargs), 'start': start, 'end': end, } self.record(data) class RedisPanel(CallRecordingPanel): title = nav_title = _("Redis") @classmethod def get_context(cls, collector): return [ PatchContext('redis.client.StrictRedis.execute_command', RedisWrapper(collector)), PatchContext('redis.client.BasePipeline.execute', RedisPipelineWrapper(collector)), ] @property def content(self): stats = self.get_stats() return TEMPLATE.render(Context(stats)) def process_response(self, request, response): calls = [] total_time = 0 for call in self.calls: duration = int((call['end'] - call['start']) * 1000) total_time += duration calls.append({ 'duration': duration, 'command': 
call['name'], 'args': call['args'], 'kwargs': call['kwargs'], }) self.record_stats({ 'calls': calls, 'total_time': total_time, })
bsd-3-clause
robclark/chromium
chrome/test/functional/ap_lab/ap_configurator.py
76
11626
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import copy import logging import os import pyauto_ap_configurator import pyauto import selenium.common.exceptions from selenium.webdriver.support.ui import WebDriverWait class APConfigurator(object): """Base class for objects to configure access points using webdriver.""" def __init__(self, pyauto_instance): self.pyauto_instance = pyauto_instance self._driver = pyauto_instance.NewWebDriver() # Any call to wait.until() will raise an exception if the timeout is hit. self._wait = WebDriverWait(self._driver, timeout=5) # Possible bands self.band_2ghz = '2.4GHz' self.band_5ghz = '5GHz' # Possible modes self.mode_a = 0x0001 self.mode_b = 0x0010 self.mode_g = 0x0100 self.mode_n = 0x1000 # Possible security settings self.security_disabled = 'Disabled' self.security_wep = 'WEP' self.security_wpawpsk = 'WPA-Personal' self.security_wpa2wpsk = 'WPA2-Personal' self.security_wpa8021x = 'WPA-Enterprise' self.security_wpa28021x = 'WPA2-Enterprise' self.wep_authentication_open = 'Open' self.wep_authentication_shared = 'Shared Key' self._command_list = [] def _WaitForObjectByXPath(self, xpath): """Waits for an object to appear.""" try: self._wait.until(lambda _: self._driver.find_element_by_xpath(xpath)) except selenium.common.exceptions.TimeoutException, e: logging.exception('Unable to find the wait for object by xpath: %s\n' 'WebDriver exception: %s', xpath, str(e)) def SelectItemFromPopupByID(self, item, element_id, wait_for_xpath=None): """Selects an item from a popup, by passing the element ID. 
Args: item: the item to select from the popup element_id: the html ID of the item wait_for_xpath: an item to wait for before returning """ xpath = 'id("%s")' % element_id self.SelectItemFromPopupByXPath(item, xpath, wait_for_xpath) def SelectItemFromPopupByXPath(self, item, xpath, wait_for_xpath=None): """Selects an item from a popup, by passing the xpath of the popup. Args: item: the item to select from the popup xpath: the xpath of the popup wait_for_xpath: an item to wait for before returning """ popup = self._driver.find_element_by_xpath(xpath) for option in popup.find_elements_by_tag_name('option'): if option.text == item: option.click() break if wait_for_xpath: self._WaitForObjectByXPath(wait_for_xpath) def SetContentOfTextFieldByID(self, content, text_field_id, wait_for_xpath=None): """Sets the content of a textfield, by passing the element ID. Args: content: the content to apply to the textfield text_field_id: the html ID of the textfield wait_for_xpath: an item to wait for before returning """ xpath = 'id("%s")' % text_field_id self.SetConentsOfTextFieldByXPath(content, xpath, wait_for_xpath) def SetConentsOfTextFieldByXPath(self, content, xpath, wait_for_xpath=None): """Sets the content of a textfield, by passing the xpath. Args: content: the content to apply to the textfield xpath: the xpath of the textfield wait_for_xpath: an item to wait for before returning """ # When we can get the value we know the text field is ready. 
text_field = self._driver.find_element_by_xpath(xpath) try: self._wait.until(lambda _: text_field.get_attribute('value')) except selenium.common.exceptions.TimeoutException, e: logging.exception('Unable to obtain the value of the text field %s.\n' 'WebDriver exception: %s', wait_for_xpath, str(e)) text_field = self._driver.find_element_by_xpath(xpath) text_field.clear() text_field.send_keys(content) if wait_for_xpath: self._WaitForObjectByXPath(wait_for_xpath) def SetCheckBoxSelectedByID(self, check_box_id, selected=True, wait_for_xpath=None): """Sets the state of a checkbox, by passing the ID. Args: check_box_id: the html id of the checkbox selected: True to enable the checkbox; False otherwise wait_for_xpath: an item to wait for before returning """ xpath = 'id("%s")' % check_box_id self.SetCheckBoxSelectedByXPath(xpath, selected, wait_for_xpath) def SetCheckBoxSelectedByXPath(self, xpath, selected=True, wait_for_xpath=None): """Sets the state of a checkbox, by passing the xpath. Args: xpath: the xpath of the checkbox selected: True to enable the checkbox; False otherwise wait_for_xpath: an item to wait for before returning """ check_box = self._driver.find_element_by_xpath(xpath) value = check_box.get_attribute('value') if (value == '1' and not selected) or (value == '0' and selected): check_box.click() if wait_for_xpath: self._WaitForObjectByXPath(wait_for_xpath) def AddItemToCommandList(self, method, args, page, priority): """Adds commands to be executed against the AP web UI. Args: method: the method to run args: the arguments for the method you want executed page: the page on the web ui where the method should be run against priority: the priority of the method """ self._command_list.append({'method': method, 'args': copy.copy(args), 'page': page, 'priority': priority}) def GetRouterName(self): """Returns a string to describe the router. Note: The derived class must implement this method. 
""" raise NotImplementedError def GetRouterShortName(self): """Returns a short string to describe the router. Note: The derived class must implement this method. """ raise NotImplementedError def GetNumberOfPages(self): """Returns the number of web pages used to configure the router. Note: This is used internally by applySettings, and this method must be implemented by the derived class. """ raise NotImplementedError def GetSupportedBands(self): """Returns a list of dictionaries describing the supported bands. Example: returned is a dictionary of band and a list of channels. The band object returned must be one of those defined in the __init___ of this class. supported_bands = [{'band' : self.band_2GHz, 'channels' : [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]}, {'band' : self.band_5ghz, 'channels' : [26, 40, 44, 48, 149, 153, 157, 161, 165]}] Returns: A list of dictionaries as described above Note: The derived class must implement this method. """ raise NotImplementedError def GetSupportedModes(self): """Returns a list of dictionaries describing the supported modes. Example: returned is a dictionary of band and a list of modess. The band and modes objects returned must be one of those defined in the __init___ of this class. supported_modes = [{'band' : self.band_2GHz, 'modes' : [mode_b, mode_b | mode_g]}, {'band' : self.band_5ghz, 'modes' : [mode_a, mode_n, mode_a | mode_n]}] Returns: A list of dictionaries as described above Note: The derived class must implement this method. """ raise NotImplementedError def NavigateToPage(self, page_number): """Navigates to the page corresponding to the given page number. This method performs the translation between a page number and a url to load. This is used internally by applySettings. Args: page_number: Page number of the page to load Returns: True if navigation is successful; False otherwise. Note: The derived class must implement this method. """ raise NotImplementedError def SavePage(self, page_number): """Saves the given page. 
Args: page_number: Page number of the page to save. Returns: True if navigation is successful; False otherwise. Note: The derived class must implement this method. """ raise NotImplementedError def SetMode(self, mode, band=None): """Sets the mode. Args: mode: must be one of the modes listed in __init__() band: the band to select Note: The derived class must implement this method """ raise NotImplementedError def SetRadio(self, enabled=True): """Turns the radio on and off. Args: enabled: True to turn on the radio; False otherwise Note: The derived class must implement this method. """ raise NotImplementedError def SetSSID(self, ssid): """Sets the SSID of the wireless network. Args: ssid: Name of the wireless network Note: The derived class must implement this method. """ raise NotImplementedError def SetChannel(self, channel): """Sets the channel of the wireless network. Args: channel: Integer value of the channel Note: The derived class must implement this method. """ raise NotImplementedError def SetBand(self, band): """Sets the band of the wireless network. Currently there are only two possible values for band 2kGHz and 5kGHz. Args: band: Constant describing the band type Note: The derived class must implement this method. """ raise NotImplementedError def SetSecurityDisabled(self): """Disables the security of the wireless network. Note: The derived class must implement this method. """ raise NotImplementedError def SetSecurityWEP(self, key_value, authentication): """Enabled WEP security for the wireless network. Args: key_value: encryption key to use authentication: one of two supported authentication types: wep_authentication_open or wep_authentication_shared Note: The derived class must implement this method. """ raise NotImplementedError def SetSecurityWPAPSK(self, shared_key, update_interval=1800): """Enabled WPA using a private security key for the wireless network. 
Args: shared_key: shared encryption key to use update_interval: number of seconds to wait before updating Note: The derived class must implement this method. """ raise NotImplementedError def SetVisibility(self, visible=True): """Set the visibility of the wireless network. Args: visible: True for visible; False otherwise Note: The derived class must implement this method. """ raise NotImplementedError def ApplySettings(self): """Apply all settings to the access point.""" # Pull items by page and then sort if self.GetNumberOfPages() == -1: self.fail(msg='Number of pages is not set.') page_range = range(1, self.GetNumberOfPages() + 1) for i in page_range: page_commands = [] for command in self._command_list: if command['page'] == i: page_commands.append(command) # Sort the commands in this page by priority sorted_page_commands = sorted(page_commands, key=lambda k: k['priority']) if sorted_page_commands and self.NavigateToPage(i): for command in sorted_page_commands: command['method'](*command['args']) self.SavePage(i) self._command_list = []
bsd-3-clause
alexryndin/ambari
ambari-server/src/main/resources/stacks/ADH/1.4/services/ATLAS/package/scripts/params.py
1
20505
#!/usr/bin/env python """ Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # Python Imports import os import sys # Local Imports from resource_management import get_bare_principal from status_params import * from resource_management import format_stack_version, Script from resource_management.libraries.functions import format from resource_management.libraries.functions.default import default from resource_management.libraries.functions.stack_features import check_stack_feature from resource_management.libraries.functions import StackFeature from resource_management.libraries.functions.is_empty import is_empty from resource_management.libraries.functions.expect import expect from resource_management.libraries.functions.setup_ranger_plugin_xml import generate_ranger_service_config def configs_for_ha(atlas_hosts, metadata_port, is_atlas_ha_enabled, metadata_protocol): """ Return a dictionary of additional configs to merge if Atlas HA is enabled. :param atlas_hosts: List of hostnames that contain Atlas :param metadata_port: Port number :param is_atlas_ha_enabled: None, True, or False :param metadata_protocol: http or https :return: Dictionary with additional configs to merge to application-properties if HA is enabled. 
""" additional_props = {} if atlas_hosts is None or len(atlas_hosts) == 0 or metadata_port is None: return additional_props # Sort to guarantee each host sees the same values, assuming restarted at the same time. atlas_hosts = sorted(atlas_hosts) # E.g., id1,id2,id3,...,idn _server_id_list = ["id" + str(i) for i in range(1, len(atlas_hosts) + 1)] atlas_server_ids = ",".join(_server_id_list) additional_props["atlas.server.ids"] = atlas_server_ids i = 0 for curr_hostname in atlas_hosts: id = _server_id_list[i] prop_name = "atlas.server.address." + id prop_value = curr_hostname + ":" + metadata_port additional_props[prop_name] = prop_value if "atlas.rest.address" in additional_props: additional_props["atlas.rest.address"] += "," + metadata_protocol + "://" + prop_value else: additional_props["atlas.rest.address"] = metadata_protocol + "://" + prop_value i += 1 # This may override the existing property if i == 1 or (i > 1 and is_atlas_ha_enabled is False): additional_props["atlas.server.ha.enabled"] = "false" elif i > 1: additional_props["atlas.server.ha.enabled"] = "true" return additional_props # server configurations config = Script.get_config() exec_tmp_dir = Script.get_tmp_dir() stack_root = Script.get_stack_root() # Needed since this is an Atlas Hook service. 
cluster_name = config['clusterName'] java_version = expect("/hostLevelParams/java_version", int) zk_root = default('/configurations/application-properties/atlas.server.ha.zookeeper.zkroot', '/apache_atlas') stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks) atlas_kafka_group_id = default('/configurations/application-properties/atlas.kafka.hook.group.id', None) if security_enabled: _hostname_lowercase = config['hostname'].lower() _atlas_principal_name = config['configurations']['application-properties']['atlas.authentication.principal'] atlas_jaas_principal = _atlas_principal_name.replace('_HOST',_hostname_lowercase) atlas_keytab_path = config['configurations']['application-properties']['atlas.authentication.keytab'] # New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade version = default("/commandParams/version", None) # stack version stack_version_unformatted = config['hostLevelParams']['stack_version'] stack_version_formatted = format_stack_version(stack_version_unformatted) metadata_home = format('{stack_root}/atlas-server') metadata_bin = format("{metadata_home}/bin") python_binary = os.environ['PYTHON_EXE'] if 'PYTHON_EXE' in os.environ else sys.executable metadata_start_script = format("{metadata_bin}/atlas_start.py") metadata_stop_script = format("{metadata_bin}/atlas_stop.py") # metadata local directory structure log_dir = config['configurations']['atlas-env']['metadata_log_dir'] # service locations hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else '/etc/hadoop/conf' # some commands may need to supply the JAAS location when running as atlas atlas_jaas_file = format("{conf_dir}/atlas_jaas.conf") # user user_group = config['configurations']['cluster-env']['user_group'] # metadata env java64_home = config['hostLevelParams']['java_home'] java_exec = format("{java64_home}/bin/java") env_sh_template = 
config['configurations']['atlas-env']['content'] # credential provider credential_provider = format( "jceks://file@{conf_dir}/atlas-site.jceks") # command line args ssl_enabled = default("/configurations/application-properties/atlas.enableTLS", False) http_port = default("/configurations/application-properties/atlas.server.http.port", "21000") https_port = default("/configurations/application-properties/atlas.server.https.port", "21443") if ssl_enabled: metadata_port = https_port metadata_protocol = 'https' else: metadata_port = http_port metadata_protocol = 'http' metadata_host = config['hostname'] atlas_hosts = sorted(default('/clusterHostInfo/atlas_server_hosts', [])) metadata_server_host = atlas_hosts[0] if len(atlas_hosts) > 0 else "UNKNOWN_HOST" # application properties application_properties = dict(config['configurations']['application-properties']) application_properties["atlas.server.bind.address"] = metadata_host # trimming knox_key if 'atlas.sso.knox.publicKey' in application_properties: knox_key = application_properties['atlas.sso.knox.publicKey'] knox_key_without_new_line = knox_key.replace("\n","") application_properties['atlas.sso.knox.publicKey'] = knox_key_without_new_line if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, version_for_stack_feature_checks): metadata_server_url = application_properties["atlas.rest.address"] else: # In HDP 2.3 and 2.4 the property was computed and saved to the local config but did not exist in the database. 
metadata_server_url = format('{metadata_protocol}://{metadata_server_host}:{metadata_port}') application_properties["atlas.rest.address"] = metadata_server_url # Atlas HA should populate # atlas.server.ids = id1,id2,...,idn # atlas.server.address.id# = host#:port # User should not have to modify this property, but still allow overriding it to False if multiple Atlas servers exist # This can be None, True, or False is_atlas_ha_enabled = default("/configurations/application-properties/atlas.server.ha.enabled", None) additional_ha_props = configs_for_ha(atlas_hosts, metadata_port, is_atlas_ha_enabled, metadata_protocol) for k,v in additional_ha_props.iteritems(): application_properties[k] = v metadata_env_content = config['configurations']['atlas-env']['content'] metadata_opts = config['configurations']['atlas-env']['metadata_opts'] metadata_classpath = config['configurations']['atlas-env']['metadata_classpath'] data_dir = format("{stack_root}/atlas-server/data") expanded_war_dir = os.environ['METADATA_EXPANDED_WEBAPP_DIR'] if 'METADATA_EXPANDED_WEBAPP_DIR' in os.environ else format("{stack_root}/atlas-server/server/webapp") metadata_log4j_content = config['configurations']['atlas-log4j']['content'] metadata_solrconfig_content = default("/configurations/atlas-solrconfig/content", None) atlas_log_level = config['configurations']['atlas-log4j']['atlas_log_level'] audit_log_level = config['configurations']['atlas-log4j']['audit_log_level'] atlas_log_max_backup_size = default("/configurations/atlas-log4j/atlas_log_max_backup_size", 256) atlas_log_number_of_backup_files = default("/configurations/atlas-log4j/atlas_log_number_of_backup_files", 20) # smoke test smoke_test_user = config['configurations']['cluster-env']['smokeuser'] smoke_test_password = 'smoke' smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name'] smokeuser_keytab = config['configurations']['cluster-env']['smokeuser_keytab'] security_check_status_file = 
format('{log_dir}/security_check.status') # hbase hbase_conf_dir = "/etc/hbase/conf" atlas_search_backend = default("/configurations/application-properties/atlas.graph.index.search.backend", "") search_backend_solr = atlas_search_backend.startswith('solr') # infra solr infra_solr_znode = default("/configurations/infra-solr-env/infra_solr_znode", None) infra_solr_hosts = default("/clusterHostInfo/infra_solr_hosts", []) infra_solr_replication_factor = 2 if len(infra_solr_hosts) > 1 else 1 atlas_solr_shards = default("/configurations/atlas-env/atlas_solr-shards", 1) has_infra_solr = len(infra_solr_hosts) > 0 infra_solr_role_atlas = default('configurations/infra-solr-security-json/infra_solr_role_atlas', 'atlas_user') infra_solr_role_dev = default('configurations/infra-solr-security-json/infra_solr_role_dev', 'dev') infra_solr_role_ranger_audit = default('configurations/infra-solr-security-json/infra_solr_role_ranger_audit', 'ranger_audit_user') # zookeeper zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts'] zookeeper_port = default('/configurations/zoo.cfg/clientPort', None) # get comma separated lists of zookeeper hosts from clusterHostInfo index = 0 zookeeper_quorum = "" for host in zookeeper_hosts: zookeeper_host = host if zookeeper_port is not None: zookeeper_host = host + ":" + str(zookeeper_port) zookeeper_quorum += zookeeper_host index += 1 if index < len(zookeeper_hosts): zookeeper_quorum += "," stack_supports_atlas_hdfs_site_on_namenode_ha = check_stack_feature(StackFeature.ATLAS_HDFS_SITE_ON_NAMENODE_HA, version_for_stack_feature_checks) atlas_server_xmx = default("configurations/atlas-env/atlas_server_xmx", 2048) atlas_server_max_new_size = default("configurations/atlas-env/atlas_server_max_new_size", 614) hbase_master_hosts = default('/clusterHostInfo/hbase_master_hosts', []) has_hbase_master = not len(hbase_master_hosts) == 0 atlas_hbase_setup = format("{exec_tmp_dir}/atlas_hbase_setup.rb") atlas_kafka_setup = 
format("{exec_tmp_dir}/atlas_kafka_acl.sh") atlas_graph_storage_hbase_table = default('/configurations/application-properties/atlas.graph.storage.hbase.table', None) atlas_audit_hbase_tablename = default('/configurations/application-properties/atlas.audit.hbase.tablename', None) hbase_user_keytab = default('/configurations/hbase-env/hbase_user_keytab', None) hbase_principal_name = default('/configurations/hbase-env/hbase_principal_name', None) # ToDo: Kafka port to Atlas # Used while upgrading the stack in a kerberized cluster and running kafka-acls.sh hosts_with_kafka = default('/clusterHostInfo/kafka_broker_hosts', []) host_with_kafka = hostname in hosts_with_kafka ranger_tagsync_hosts = default("/clusterHostInfo/ranger_tagsync_hosts", []) has_ranger_tagsync = len(ranger_tagsync_hosts) > 0 rangertagsync_user = "rangertagsync" kafka_keytab = default('/configurations/kafka-env/kafka_keytab', None) kafka_principal_name = default('/configurations/kafka-env/kafka_principal_name', None) default_replication_factor = default('/configurations/application-properties/atlas.notification.replicas', None) if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, version_for_stack_feature_checks): default_replication_factor = default('/configurations/application-properties/atlas.notification.replicas', None) kafka_env_sh_template = config['configurations']['kafka-env']['content'] kafka_home = os.path.join(stack_root, "", "kafka") kafka_conf_dir = os.path.join(kafka_home, "config") kafka_zk_endpoint = default("/configurations/kafka-broker/zookeeper.connect", None) kafka_kerberos_enabled = (('security.inter.broker.protocol' in config['configurations']['kafka-broker']) and ((config['configurations']['kafka-broker']['security.inter.broker.protocol'] == "PLAINTEXTSASL") or (config['configurations']['kafka-broker']['security.inter.broker.protocol'] == "SASL_PLAINTEXT"))) if security_enabled and stack_version_formatted != "" and 'kafka_principal_name' in 
config['configurations']['kafka-env'] \ and check_stack_feature(StackFeature.KAFKA_KERBEROS, stack_version_formatted): _hostname_lowercase = config['hostname'].lower() _kafka_principal_name = config['configurations']['kafka-env']['kafka_principal_name'] kafka_jaas_principal = _kafka_principal_name.replace('_HOST', _hostname_lowercase) kafka_keytab_path = config['configurations']['kafka-env']['kafka_keytab'] kafka_bare_jaas_principal = get_bare_principal(_kafka_principal_name) kafka_kerberos_params = "-Djava.security.auth.login.config={0}/kafka_jaas.conf".format(kafka_conf_dir) else: kafka_kerberos_params = '' kafka_jaas_principal = None kafka_keytab_path = None namenode_host = set(default("/clusterHostInfo/namenode_host", [])) has_namenode = not len(namenode_host) == 0 # ranger altas plugin section start # ranger host ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", []) has_ranger_admin = not len(ranger_admin_hosts) == 0 retry_enabled = default("/commandParams/command_retry_enabled", False) stack_supports_atlas_ranger_plugin = check_stack_feature(StackFeature.ATLAS_RANGER_PLUGIN_SUPPORT, version_for_stack_feature_checks) stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks) # ranger support xml_configuration flag, instead of depending on ranger xml_configurations_supported/ranger-env, using stack feature xml_configurations_supported = check_stack_feature(StackFeature.RANGER_XML_CONFIGURATION, version_for_stack_feature_checks) # ranger atlas plugin enabled property enable_ranger_atlas = default("/configurations/ranger-atlas-plugin-properties/ranger-atlas-plugin-enabled", "No") enable_ranger_atlas = True if enable_ranger_atlas.lower() == "yes" else False # ranger hbase plugin enabled property enable_ranger_hbase = default("/configurations/ranger-hbase-plugin-properties/ranger-hbase-plugin-enabled", "No") enable_ranger_hbase = True if enable_ranger_hbase.lower() == 'yes' else 
False if stack_supports_atlas_ranger_plugin and enable_ranger_atlas: # for create_hdfs_directory hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if has_namenode else None hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None hdfs_site = config['configurations']['hdfs-site'] default_fs = config['configurations']['core-site']['fs.defaultFS'] dfs_type = default("/commandParams/dfs_type", "") import functools from resource_management.libraries.resources.hdfs_resource import HdfsResource from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources #create partial functions with common arguments for every HdfsResource call #to create hdfs directory we need to call params.HdfsResource in code HdfsResource = functools.partial( HdfsResource, user = hdfs_user, hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore", security_enabled = security_enabled, keytab = hdfs_user_keytab, kinit_path_local = kinit_path_local, hadoop_bin_dir = hadoop_bin_dir, hadoop_conf_dir = hadoop_conf_dir, principal_name = hdfs_principal_name, hdfs_site = hdfs_site, default_fs = default_fs, immutable_paths = get_not_managed_resources(), dfs_type = dfs_type ) # ranger atlas service/repository name repo_name = str(config['clusterName']) + '_atlas' repo_name_value = config['configurations']['ranger-atlas-security']['ranger.plugin.atlas.service.name'] if not is_empty(repo_name_value) and repo_name_value != "{{repo_name}}": repo_name = repo_name_value ssl_keystore_password = config['configurations']['ranger-atlas-policymgr-ssl']['xasecure.policymgr.clientssl.keystore.password'] ssl_truststore_password = config['configurations']['ranger-atlas-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password'] credential_file = 
format('/etc/ranger/{repo_name}/cred.jceks') xa_audit_hdfs_is_enabled = default('/configurations/ranger-atlas-audit/xasecure.audit.destination.hdfs', False) # get ranger policy url policymgr_mgr_url = config['configurations']['ranger-atlas-security']['ranger.plugin.atlas.policy.rest.url'] if not is_empty(policymgr_mgr_url) and policymgr_mgr_url.endswith('/'): policymgr_mgr_url = policymgr_mgr_url.rstrip('/') downloaded_custom_connector = None driver_curl_source = None driver_curl_target = None ranger_env = config['configurations']['ranger-env'] # create ranger-env config having external ranger credential properties if not has_ranger_admin and enable_ranger_atlas: external_admin_username = default('/configurations/ranger-atlas-plugin-properties/external_admin_username', 'admin') external_admin_password = default('/configurations/ranger-atlas-plugin-properties/external_admin_password', 'admin') external_ranger_admin_username = default('/configurations/ranger-atlas-plugin-properties/external_ranger_admin_username', 'amb_ranger_admin') external_ranger_admin_password = default('/configurations/ranger-atlas-plugin-properties/external_ranger_admin_password', 'amb_ranger_admin') ranger_env = {} ranger_env['admin_username'] = external_admin_username ranger_env['admin_password'] = external_admin_password ranger_env['ranger_admin_username'] = external_ranger_admin_username ranger_env['ranger_admin_password'] = external_ranger_admin_password ranger_plugin_properties = config['configurations']['ranger-atlas-plugin-properties'] ranger_atlas_audit = config['configurations']['ranger-atlas-audit'] ranger_atlas_audit_attrs = config['configuration_attributes']['ranger-atlas-audit'] ranger_atlas_security = config['configurations']['ranger-atlas-security'] ranger_atlas_security_attrs = config['configuration_attributes']['ranger-atlas-security'] ranger_atlas_policymgr_ssl = config['configurations']['ranger-atlas-policymgr-ssl'] ranger_atlas_policymgr_ssl_attrs = 
config['configuration_attributes']['ranger-atlas-policymgr-ssl'] policy_user = config['configurations']['ranger-atlas-plugin-properties']['policy_user'] atlas_repository_configuration = { 'username' : config['configurations']['ranger-atlas-plugin-properties']['REPOSITORY_CONFIG_USERNAME'], 'password' : unicode(config['configurations']['ranger-atlas-plugin-properties']['REPOSITORY_CONFIG_PASSWORD']), 'atlas.rest.address' : metadata_server_url, 'commonNameForCertificate' : config['configurations']['ranger-atlas-plugin-properties']['common.name.for.certificate'], 'ambari.service.check.user' : policy_user } custom_ranger_service_config = generate_ranger_service_config(ranger_plugin_properties) if len(custom_ranger_service_config) > 0: atlas_repository_configuration.update(custom_ranger_service_config) if security_enabled: atlas_repository_configuration['policy.download.auth.users'] = metadata_user atlas_repository_configuration['tag.download.auth.users'] = metadata_user atlas_ranger_plugin_repo = { 'isEnabled': 'true', 'configs': atlas_repository_configuration, 'description': 'atlas repo', 'name': repo_name, 'type': 'atlas', } # ranger atlas plugin section end # atlas admin login username password atlas_admin_username = config['configurations']['atlas-env']['atlas.admin.username'] atlas_admin_password = config['configurations']['atlas-env']['atlas.admin.password']
apache-2.0
sunzhxjs/JobGIS
lib/python2.7/site-packages/pip/_vendor/requests/models.py
187
29277
# -*- coding: utf-8 -*- """ requests.models ~~~~~~~~~~~~~~~ This module contains the primary objects that power Requests. """ import collections import datetime from io import BytesIO, UnsupportedOperation from .hooks import default_hooks from .structures import CaseInsensitiveDict from .auth import HTTPBasicAuth from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar from .packages.urllib3.fields import RequestField from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.util import parse_url from .packages.urllib3.exceptions import ( DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) from .exceptions import ( HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, ContentDecodingError, ConnectionError, StreamConsumedError) from .utils import ( guess_filename, get_auth_from_url, requote_uri, stream_decode_response_unicode, to_key_val_list, parse_header_links, iter_slices, guess_json_utf, super_len, to_native_string) from .compat import ( cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, is_py2, chardet, builtin_str, basestring) from .compat import json as complexjson from .status_codes import codes #: The set of HTTP status codes that indicate an automatically #: processable redirect. REDIRECT_STATI = ( codes.moved, # 301 codes.found, # 302 codes.other, # 303 codes.temporary_redirect, # 307 codes.permanent_redirect, # 308 ) DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 class RequestEncodingMixin(object): @property def path_url(self): """Build the path URL to use.""" url = [] p = urlsplit(self.url) path = p.path if not path: path = '/' url.append(path) query = p.query if query: url.append('?') url.append(query) return ''.join(url) @staticmethod def _encode_params(data): """Encode parameters in a piece of data. Will successfully encode parameters when passed as a dict or a list of 2-tuples. 
Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ if isinstance(data, (str, bytes)): return data elif hasattr(data, 'read'): return data elif hasattr(data, '__iter__'): result = [] for k, vs in to_key_val_list(data): if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): vs = [vs] for v in vs: if v is not None: result.append( (k.encode('utf-8') if isinstance(k, str) else k, v.encode('utf-8') if isinstance(v, str) else v)) return urlencode(result, doseq=True) else: return data @staticmethod def _encode_files(files, data): """Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary if parameters are supplied as a dict. """ if (not files): raise ValueError("Files must be provided.") elif isinstance(data, basestring): raise ValueError("Data must not be a string.") new_fields = [] fields = to_key_val_list(data or {}) files = to_key_val_list(files or {}) for field, val in fields: if isinstance(val, basestring) or not hasattr(val, '__iter__'): val = [val] for v in val: if v is not None: # Don't call str() on bytestrings: in Py3 it all goes wrong. 
if not isinstance(v, bytes): v = str(v) new_fields.append( (field.decode('utf-8') if isinstance(field, bytes) else field, v.encode('utf-8') if isinstance(v, str) else v)) for (k, v) in files: # support for explicit filename ft = None fh = None if isinstance(v, (tuple, list)): if len(v) == 2: fn, fp = v elif len(v) == 3: fn, fp, ft = v else: fn, fp, ft, fh = v else: fn = guess_filename(v) or k fp = v if isinstance(fp, (str, bytes, bytearray)): fdata = fp else: fdata = fp.read() rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) body, content_type = encode_multipart_formdata(new_fields) return body, content_type class RequestHooksMixin(object): def register_hook(self, event, hook): """Properly register a hook.""" if event not in self.hooks: raise ValueError('Unsupported event specified, with event name "%s"' % (event)) if isinstance(hook, collections.Callable): self.hooks[event].append(hook) elif hasattr(hook, '__iter__'): self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable)) def deregister_hook(self, event, hook): """Deregister a previously registered hook. Returns True if the hook existed, False if not. """ try: self.hooks[event].remove(hook) return True except ValueError: return False class Request(RequestHooksMixin): """A user-created :class:`Request <Request>` object. Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server. :param method: HTTP method to use. :param url: URL to send. :param headers: dictionary of headers to send. :param files: dictionary of {filename: fileobject} files to multipart upload. :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place. :param json: json for the body to attach to the request (if files or data is not specified). :param params: dictionary of URL parameters to append to the URL. :param auth: Auth handler or (user, pass) tuple. 
:param cookies: dictionary or CookieJar of cookies to attach to this request. :param hooks: dictionary of callback hooks, for internal usage. Usage:: >>> import requests >>> req = requests.Request('GET', 'http://httpbin.org/get') >>> req.prepare() <PreparedRequest [GET]> """ def __init__(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): # Default empty dicts for dict params. data = [] if data is None else data files = [] if files is None else files headers = {} if headers is None else headers params = {} if params is None else params hooks = {} if hooks is None else hooks self.hooks = default_hooks() for (k, v) in list(hooks.items()): self.register_hook(event=k, hook=v) self.method = method self.url = url self.headers = headers self.files = files self.data = data self.json = json self.params = params self.auth = auth self.cookies = cookies def __repr__(self): return '<Request [%s]>' % (self.method) def prepare(self): """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it.""" p = PreparedRequest() p.prepare( method=self.method, url=self.url, headers=self.headers, files=self.files, data=self.data, json=self.json, params=self.params, auth=self.auth, cookies=self.cookies, hooks=self.hooks, ) return p class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): """The fully mutable :class:`PreparedRequest <PreparedRequest>` object, containing the exact bytes that will be sent to the server. Generated from either a :class:`Request <Request>` object or manually. Usage:: >>> import requests >>> req = requests.Request('GET', 'http://httpbin.org/get') >>> r = req.prepare() <PreparedRequest [GET]> >>> s = requests.Session() >>> s.send(r) <Response [200]> """ def __init__(self): #: HTTP verb to send to the server. self.method = None #: HTTP URL to send the request to. self.url = None #: dictionary of HTTP headers. 
self.headers = None # The `CookieJar` used to create the Cookie header will be stored here # after prepare_cookies is called self._cookies = None #: request body to send to the server. self.body = None #: dictionary of callback hooks, for internal usage. self.hooks = default_hooks() def prepare(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) self.prepare_url(url, params) self.prepare_headers(headers) self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. # This MUST go after prepare_auth. Authenticators could add a hook self.prepare_hooks(hooks) def __repr__(self): return '<PreparedRequest [%s]>' % (self.method) def copy(self): p = PreparedRequest() p.method = self.method p.url = self.url p.headers = self.headers.copy() if self.headers is not None else None p._cookies = _copy_cookie_jar(self._cookies) p.body = self.body p.hooks = self.hooks return p def prepare_method(self, method): """Prepares the given HTTP method.""" self.method = method if self.method is not None: self.method = to_native_string(self.method.upper()) def prepare_url(self, url, params): """Prepares the given HTTP URL.""" #: Accept objects that have string representations. #: We're unable to blindly call unicode/str functions #: as this will include the bytestring indicator (b'') #: on python 3.x. #: https://github.com/kennethreitz/requests/pull/2238 if isinstance(url, bytes): url = url.decode('utf8') else: url = unicode(url) if is_py2 else str(url) # Don't do any URL preparation for non-HTTP schemes like `mailto`, # `data` etc to work around exceptions from `url_parse`, which # handles RFC 3986 only. 
if ':' in url and not url.lower().startswith('http'): self.url = url return # Support for unicode domain names and paths. try: scheme, auth, host, port, path, query, fragment = parse_url(url) except LocationParseError as e: raise InvalidURL(*e.args) if not scheme: error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") error = error.format(to_native_string(url, 'utf8')) raise MissingSchema(error) if not host: raise InvalidURL("Invalid URL %r: No host supplied" % url) # Only want to apply IDNA to the hostname try: host = host.encode('idna').decode('utf-8') except UnicodeError: raise InvalidURL('URL has an invalid label.') # Carefully reconstruct the network location netloc = auth or '' if netloc: netloc += '@' netloc += host if port: netloc += ':' + str(port) # Bare domains aren't valid URLs. if not path: path = '/' if is_py2: if isinstance(scheme, str): scheme = scheme.encode('utf-8') if isinstance(netloc, str): netloc = netloc.encode('utf-8') if isinstance(path, str): path = path.encode('utf-8') if isinstance(query, str): query = query.encode('utf-8') if isinstance(fragment, str): fragment = fragment.encode('utf-8') if isinstance(params, (str, bytes)): params = to_native_string(params) enc_params = self._encode_params(params) if enc_params: if query: query = '%s&%s' % (query, enc_params) else: query = enc_params url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) self.url = url def prepare_headers(self, headers): """Prepares the given HTTP headers.""" if headers: self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items()) else: self.headers = CaseInsensitiveDict() def prepare_body(self, data, files, json=None): """Prepares the given HTTP body data.""" # Check if file, fo, generator, iterator. # If not, run through normal process. # Nottin' on you. 
body = None content_type = None length = None if not data and json is not None: content_type = 'application/json' body = complexjson.dumps(json) is_stream = all([ hasattr(data, '__iter__'), not isinstance(data, (basestring, list, tuple, dict)) ]) try: length = super_len(data) except (TypeError, AttributeError, UnsupportedOperation): length = None if is_stream: body = data if files: raise NotImplementedError('Streamed bodies and files are mutually exclusive.') if length: self.headers['Content-Length'] = builtin_str(length) else: self.headers['Transfer-Encoding'] = 'chunked' else: # Multi-part file uploads. if files: (body, content_type) = self._encode_files(files, data) else: if data: body = self._encode_params(data) if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None else: content_type = 'application/x-www-form-urlencoded' self.prepare_content_length(body) # Add content-type if it wasn't explicitly provided. if content_type and ('content-type' not in self.headers): self.headers['Content-Type'] = content_type self.body = body def prepare_content_length(self, body): if hasattr(body, 'seek') and hasattr(body, 'tell'): body.seek(0, 2) self.headers['Content-Length'] = builtin_str(body.tell()) body.seek(0, 0) elif body is not None: l = super_len(body) if l: self.headers['Content-Length'] = builtin_str(l) elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None): self.headers['Content-Length'] = '0' def prepare_auth(self, auth, url=''): """Prepares the given HTTP auth data.""" # If no Auth is explicitly provided, extract it from the URL first. if auth is None: url_auth = get_auth_from_url(self.url) auth = url_auth if any(url_auth) else None if auth: if isinstance(auth, tuple) and len(auth) == 2: # special-case basic HTTP auth auth = HTTPBasicAuth(*auth) # Allow auth to make its changes. r = auth(self) # Update self to reflect the auth changes. 
self.__dict__.update(r.__dict__) # Recompute Content-Length self.prepare_content_length(self.body) def prepare_cookies(self, cookies): """Prepares the given HTTP cookie data. This function eventually generates a ``Cookie`` header from the given cookies using cookielib. Due to cookielib's design, the header will not be regenerated if it already exists, meaning this function can only be called once for the life of the :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls to ``prepare_cookies`` will have no actual effect, unless the "Cookie" header is removed beforehand.""" if isinstance(cookies, cookielib.CookieJar): self._cookies = cookies else: self._cookies = cookiejar_from_dict(cookies) cookie_header = get_cookie_header(self._cookies, self) if cookie_header is not None: self.headers['Cookie'] = cookie_header def prepare_hooks(self, hooks): """Prepares the given hooks.""" # hooks can be passed as None to the prepare method and to this # method. To prevent iterating over None, simply use an empty list # if hooks is False-y hooks = hooks or [] for event in hooks: self.register_hook(event, hooks[event]) class Response(object): """The :class:`Response <Response>` object, which contains a server's response to an HTTP request. """ __attrs__ = [ '_content', 'status_code', 'headers', 'url', 'history', 'encoding', 'reason', 'cookies', 'elapsed', 'request' ] def __init__(self): super(Response, self).__init__() self._content = False self._content_consumed = False #: Integer Code of responded HTTP Status, e.g. 404 or 200. self.status_code = None #: Case-insensitive Dictionary of Response Headers. #: For example, ``headers['content-encoding']`` will return the #: value of a ``'Content-Encoding'`` response header. self.headers = CaseInsensitiveDict() #: File-like object representation of response (for advanced usage). #: Use of ``raw`` requires that ``stream=True`` be set on the request. # This requirement does not apply for use internally to Requests. 
self.raw = None #: Final URL location of Response. self.url = None #: Encoding to decode with when accessing r.text. self.encoding = None #: A list of :class:`Response <Response>` objects from #: the history of the Request. Any redirect responses will end #: up here. The list is sorted from the oldest to the most recent request. self.history = [] #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". self.reason = None #: A CookieJar of Cookies the server sent back. self.cookies = cookiejar_from_dict({}) #: The amount of time elapsed between sending the request #: and the arrival of the response (as a timedelta). #: This property specifically measures the time taken between sending #: the first byte of the request and finishing parsing the headers. It #: is therefore unaffected by consuming the response content or the #: value of the ``stream`` keyword argument. self.elapsed = datetime.timedelta(0) #: The :class:`PreparedRequest <PreparedRequest>` object to which this #: is a response. self.request = None def __getstate__(self): # Consume everything; accessing the content attribute makes # sure the content has been fully read. 
if not self._content_consumed: self.content return dict( (attr, getattr(self, attr, None)) for attr in self.__attrs__ ) def __setstate__(self, state): for name, value in state.items(): setattr(self, name, value) # pickled objects do not have .raw setattr(self, '_content_consumed', True) setattr(self, 'raw', None) def __repr__(self): return '<Response [%s]>' % (self.status_code) def __bool__(self): """Returns true if :attr:`status_code` is 'OK'.""" return self.ok def __nonzero__(self): """Returns true if :attr:`status_code` is 'OK'.""" return self.ok def __iter__(self): """Allows you to use a response as an iterator.""" return self.iter_content(128) @property def ok(self): try: self.raise_for_status() except HTTPError: return False return True @property def is_redirect(self): """True if this Response is a well-formed HTTP redirect that could have been processed automatically (by :meth:`Session.resolve_redirects`). """ return ('location' in self.headers and self.status_code in REDIRECT_STATI) @property def is_permanent_redirect(self): """True if this Response one of the permanent versions of redirect""" return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) @property def apparent_encoding(self): """The apparent encoding, provided by the chardet library""" return chardet.detect(self.content)['encoding'] def iter_content(self, chunk_size=1, decode_unicode=False): """Iterates over the response data. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. The chunk size is the number of bytes it should read into memory. This is not necessarily the length of each item returned as decoding can take place. If decode_unicode is True, content will be decoded using the best available encoding based on the response. """ def generate(): # Special case for urllib3. 
if hasattr(self.raw, 'stream'): try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk except ProtocolError as e: raise ChunkedEncodingError(e) except DecodeError as e: raise ContentDecodingError(e) except ReadTimeoutError as e: raise ConnectionError(e) else: # Standard file-like object. while True: chunk = self.raw.read(chunk_size) if not chunk: break yield chunk self._content_consumed = True if self._content_consumed and isinstance(self._content, bool): raise StreamConsumedError() # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) stream_chunks = generate() chunks = reused_chunks if self._content_consumed else stream_chunks if decode_unicode: chunks = stream_decode_response_unicode(chunks, self) return chunks def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. .. note:: This method is not reentrant safe. """ pending = None for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): if pending is not None: chunk = pending + chunk if delimiter: lines = chunk.split(delimiter) else: lines = chunk.splitlines() if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: pending = lines.pop() else: pending = None for line in lines: yield line if pending is not None: yield pending @property def content(self): """Content of the response, in bytes.""" if self._content is False: # Read the contents. 
try: if self._content_consumed: raise RuntimeError( 'The content for this response was already consumed') if self.status_code == 0: self._content = None else: self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes() except AttributeError: self._content = None self._content_consumed = True # don't need to release the connection; that's been handled by urllib3 # since we exhausted the data. return self._content @property def text(self): """Content of the response, in unicode. If Response.encoding is None, encoding will be guessed using ``chardet``. The encoding of the response content is determined based solely on HTTP headers, following RFC 2616 to the letter. If you can take advantage of non-HTTP knowledge to make a better guess at the encoding, you should set ``r.encoding`` appropriately before accessing this property. """ # Try charset from content-type content = None encoding = self.encoding if not self.content: return str('') # Fallback to auto-detected encoding. if self.encoding is None: encoding = self.apparent_encoding # Decode unicode from given encoding. try: content = str(self.content, encoding, errors='replace') except (LookupError, TypeError): # A LookupError is raised if the encoding was not found which could # indicate a misspelling or similar mistake. # # A TypeError can be raised if encoding is None # # So we try blindly encoding. content = str(self.content, errors='replace') return content def json(self, **kwargs): """Returns the json-encoded content of a response, if any. :param \*\*kwargs: Optional arguments that ``json.loads`` takes. """ if not self.encoding and len(self.content) > 3: # No encoding set. JSON RFC 4627 section 3 states we should expect # UTF-8, -16 or -32. Detect which one to use; If the detection or # decoding fails, fall back to `self.text` (using chardet to make # a best guess). 
encoding = guess_json_utf(self.content) if encoding is not None: try: return complexjson.loads( self.content.decode(encoding), **kwargs ) except UnicodeDecodeError: # Wrong UTF codec detected; usually because it's not UTF-8 # but some other 8-bit codec. This is an RFC violation, # and the server didn't bother to tell us what codec *was* # used. pass return complexjson.loads(self.text, **kwargs) @property def links(self): """Returns the parsed header links of the response, if any.""" header = self.headers.get('link') # l = MultiDict() l = {} if header: links = parse_header_links(header) for link in links: key = link.get('rel') or link.get('url') l[key] = link return l def raise_for_status(self): """Raises stored :class:`HTTPError`, if one occurred.""" http_error_msg = '' if 400 <= self.status_code < 500: http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url) elif 500 <= self.status_code < 600: http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url) if http_error_msg: raise HTTPError(http_error_msg, response=self) def close(self): """Releases the connection back to the pool. Once this method has been called the underlying ``raw`` object must not be accessed again. *Note: Should not normally need to be called explicitly.* """ if not self._content_consumed: return self.raw.close() return self.raw.release_conn()
mit
wojtekka/6tunnel
test.py
1
5911
#!/usr/bin/env python import os import socket import time import select (SUCCESS, COMMAND_FAIL, CONNECT_FAIL, DISCONNECT, ACCEPT_FAIL, DATA_MISMATCH) = range(6) labels = ["success", "command fail", "connection fail", "disconnection", "accept fail", "data mismatch"] def test(expect, client_af, server_af, from_ip, to_ip, args="", client_sends_first="NICK nick\r\n", server_receives="NICK nick\r\n", app_responds="", app_inserts="", server_sends_then=":localhost 001 nick :Welcome\r\n"): # Open and close a socket to get random port available client_sock = socket.socket(client_af, socket.SOCK_STREAM, 0) client_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) client_sock.bind(('', 0)) client_port = client_sock.getsockname()[1] client_sock.close() # Open a socket for mock server server_sock = socket.socket(server_af, socket.SOCK_STREAM, 0) server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) server_sock.bind(('', 0)) server_sock.listen(0) server_port = server_sock.getsockname()[1] all_args = "-1 %s %d %s %d" % (args, client_port, to_ip, server_port) print "Running with %s" % all_args if os.system("./6tunnel " + all_args) != 0: if expect != COMMAND_FAIL: raise Exception("expected %s yet command failed" % labels[expect]) else: return client_sock = socket.socket(client_af, socket.SOCK_STREAM, 0) # Give 6tunnel instance some time to initialize connected = False for i in range(10): try: client_sock.connect((from_ip, client_port)) except socket.error: time.sleep(0.1) else: connected = True break if not connected: if expect != CONNECT_FAIL: raise Exception("expected %s yet connect failed" % labels[expect]) else: return # Send first data client_sock.send(client_sends_first) # Wait for 6tunnel to connect to the server rlist, wlist, xlist = select.select([server_sock], [], [], 1) if not rlist: if expect != ACCEPT_FAIL: raise Exception("expected %s yet accept failed" % labels[expect]) else: return accept_sock = server_sock.accept()[0] # Make sure that 
6tunnel doesn't send anything to the client rlist, wlist, xlist = select.select([client_sock], [], [], 1) if rlist: try: res = client_sock.recv(1) if not res: raise socket.error except socket.error: if expect != DISCONNECT: raise Exception("expected %s yet disconnected" % labels[expect]) else: return raise Exception("unexpected data sent to client") # Do the data exchange if app_responds: tmp = client_sock.recv(len(app_responds)) if tmp != app_responds: raise Exception("expected 6tunnel response \"%s\" yet did not receive" % app_responds) if app_inserts: tmp = accept_sock.recv(len(app_inserts)) if tmp != app_inserts: raise Exception("expected 6tunnel insert \"%s\" yet did not receive" % app_sends_first) if accept_sock.recv(len(server_receives)) != server_receives: raise Exception("data mismatch") accept_sock.send(server_sends_then) if client_sock.recv(len(server_sends_then)) != server_sends_then: raise Exception("data mismatch") accept_sock.close() server_sock.close() client_sock.close() if expect != SUCCESS: raise Exception("expected %d yet succeeded" % expect) test(SUCCESS, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1') test(SUCCESS, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-l 127.0.0.1') test(COMMAND_FAIL, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-l ::1') test(SUCCESS, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-s ::1') test(COMMAND_FAIL, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-s 127.0.0.1') test(SUCCESS, socket.AF_INET, socket.AF_INET, '127.0.0.1', '127.0.0.1', '-4') test(SUCCESS, socket.AF_INET, socket.AF_INET, '127.0.0.1', '127.0.0.1', '-4 -l 127.0.0.1') test(COMMAND_FAIL, socket.AF_INET, socket.AF_INET, '127.0.0.1', '127.0.0.1', '-4 -l ::1') test(SUCCESS, socket.AF_INET, socket.AF_INET, '127.0.0.1', '127.0.0.1', '-4 -s 127.0.0.1') test(COMMAND_FAIL, socket.AF_INET, socket.AF_INET, '127.0.0.1', '127.0.0.1', '-4 -s ::1') test(SUCCESS, socket.AF_INET6, socket.AF_INET, '::1', '127.0.0.1', '-4 -6') 
test(SUCCESS, socket.AF_INET6, socket.AF_INET, '::1', '127.0.0.1', '-4 -6 -l ::1') test(COMMAND_FAIL, socket.AF_INET6, socket.AF_INET, '::1', '127.0.0.1', '-4 -6 -l 127.0.0.1') test(SUCCESS, socket.AF_INET6, socket.AF_INET, '::1', '127.0.0.1', '-4 -6 -s 127.0.0.1') test(COMMAND_FAIL, socket.AF_INET6, socket.AF_INET, '::1', '127.0.0.1', '-4 -6 -s ::1') test(SUCCESS, socket.AF_INET6, socket.AF_INET6, '::1', '::1', '-6') test(SUCCESS, socket.AF_INET6, socket.AF_INET6, '::1', '::1', '-6 -l ::1') test(COMMAND_FAIL, socket.AF_INET6, socket.AF_INET6, '::1', '::1', '-6 -l 127.0.0.1') test(SUCCESS, socket.AF_INET6, socket.AF_INET6, '::1', '::1', '-6 -s ::1') test(COMMAND_FAIL, socket.AF_INET6, socket.AF_INET6, '::1', '::1', '-6 -s 127.0.0.1') # Test IRC password options test(SUCCESS, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-I password', app_inserts="PASS password\r\n") test(ACCEPT_FAIL, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-i password', client_sends_first="NICK nick\r\n") test(ACCEPT_FAIL, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-i password', client_sends_first="PASS invalid\r\nNICK nick\r\n", app_responds=":6tunnel 464 * :Password incorrect\r\n") test(SUCCESS, socket.AF_INET, socket.AF_INET6, '127.0.0.1', '::1', '-i password', client_sends_first="PASS password\r\nNICK nick\r\n")
gpl-2.0
unlessbamboo/grocery-shop
language/python/src/redis/agentTcpServer.py
1
10698
#!/usr/bin/python
#coding:utf-8
# (fixed shebang: the original read "#/usr/bin/python" and would not exec)
#
# agentTcpServer: a Python 2 daemon that accepts length-prefixed packets
# from nginx over TCP, counts per-key hits in redis, and publishes
# blacklisted entries to a file and a FIFO pipe.

import os
import sys
import redis
import socket
import select
import struct
import atexit
import signal

# set root directory so the project-local packages below can be imported
package_path = os.getcwd() + '/../'
sys.path.append(package_path)

from basepackage.baseConfig import BLACK_FILE,BLACK_PIPE
from basepackage.baselog import globalLog
from agentDaemon import Daemon


class RedisCommnicate():
    '''Thin wrapper around a redis connection: counts hits per key and
    decides when a key has crossed the blacklist threshold.'''

    def __init__(self, host='127.0.0.1', port=6379, maxnum = 2000000):
        '''Connect to redis and prepare a pipeline; exits the process on
        connection failure.'''
        try:
            self._client = redis.Redis(host, port, db=0)
            self._pipeline = self._client.pipeline() # transaction pipeline
        except Exception:
            globalLog.getError().log(globalLog.ERROR, 'connect redis server failed!')
            sys.exit(-1)
        # threshold above which a key is considered an attacker
        self._maxnum = maxnum
        # redis key which store black list
        self._attackKey = 'nginx.blacklist'

    def _addRecord(self, searchkey):
        '''Increment the hit counter for searchkey (creating it with a 60s
        TTL on first sight) and return the new count. On redis failure,
        returns self._maxnum so the caller treats the key as blacklisted.'''
        # queue commands on the pipeline
        if not self._client.get(searchkey):
            self._pipeline.setex(searchkey, 0, 60)
        else:
            self._pipeline.incr(searchkey)
        self._pipeline.get(searchkey)

        # execute queued commands; the last result is the current counter
        try:
            return int(self._pipeline.execute()[-1])
        except Exception:
            globalLog.getError().log(globalLog.ERROR, \
                    'AgentServer:execute redis command failed.')
            return self._maxnum

    def _writeBlackList(self, attack):
        '''Record an attacker in the shared redis blacklist set.'''
        self._client.sadd(self._attackKey, attack)

    def addRecord(self, ip):
        '''Count one hit for ip; return True when it has reached the
        blacklist threshold.'''
        globalLog.getError().log(globalLog.DEBUG, \
                "searchkey:%s"%(ip))

        # get info from redis and store info into redis
        return True if self._addRecord(ip) >= self._maxnum else False


class AgentServer(Daemon):
    '''A TCP socket server which communicates with nginx: accepts clients
    via poll(), parses length-prefixed packets and dispatches them to the
    handler named in self._packageList by packet type.'''

    def __init__(self, redisServer, redisPort, address='127.0.0.1', port=8008, \
            pid='/data/agentServer/agent.pid'):
        '''Store configuration; actual sockets/pipes are created in run().'''
        super(AgentServer, self).__init__(pidfile=pid)
        self._sock = None
        # handler method names, indexed by the packet-type field
        self._packageList = [
                '_handleModuleRedis', # map REDIS_PACKAGE macro in C
                ]
        self._redisConn = RedisCommnicate(redisServer, redisPort)
        self._address = address
        self._port = port
        self.poller = None
        self.fdToSocket = None
        self._attackPip = None

    def _createPipe(self, filename):
        '''Create (if needed) and open the blacklist FIFO for writing.
        NOTE(review): open(filename, 'w') on a FIFO blocks until a reader
        attaches -- presumably the consumer is already running; confirm.'''
        try:
            if os.path.exists(filename) is False:
                os.mkfifo(filename)
            self._attackPip = open(filename, 'w')
            globalLog.getError().log(globalLog.DEBUG, \
                    'Create pipe file successful')
        except (OSError, Exception), msg:
            globalLog.getError().log(globalLog.ERROR, \
                    'Create and Open FIFO failed, %s'%msg)
            sys.exit(-1)

    def _bindSocket(self, ip, port):
        '''Bind and listen on the non-blocking server socket; exits the
        process on failure.'''
        address = (ip, port)
        try:
            self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self._sock.bind(address)
            self._sock.setblocking(0)
            self._sock.listen(10)
        except Exception, msg:
            globalLog.getError().log(globalLog.ERROR, \
                    'AgentServer:bind socket %s failed, %s'%(ip,msg))
            sys.exit(-1)
        globalLog.getError().log(globalLog.DEBUG, 'Bind %s successful.'%ip)

    def _writeReject(self, data):
        '''Append a blacklisted entry to BLACK_FILE (deduplicated) and push
        it into the FIFO pipe.'''
        # ensure the attack file and pipe exist
        try:
            if os.path.exists(BLACK_FILE) is False:
                open(BLACK_FILE, 'w').close()
            if os.path.exists(BLACK_PIPE) is False:
                os.mkfifo(BLACK_PIPE)
        except Exception,msg:
            globalLog.getError().log(globalLog.WARNING, \
                    '%s:Handle attack file %s.'%(msg, BLACK_FILE))
            return

        # write info into attack file(pipe and normal file)
        try:
            # skip entries already present in the blacklist file
            with open(BLACK_FILE, 'r') as reFd:
                strbuf = reFd.read()
                if data in strbuf:
                    return
            # append the new entry to the file
            with open(BLACK_FILE, 'a') as inFd:
                inFd.write(data+'\n')
            # and push it into the pipe for the live consumer
            self._attackPip.write(data)
            self._attackPip.flush()
        except Exception, msg:
            globalLog.getError().log(globalLog.WARNING, \
                    'Warning:replace file failed(%s)'%(msg))

    def _handleModuleRedis(self, len, data):
        '''Handler for REDIS_PACKAGE packets: count the reported ip and,
        when over threshold, blacklist it.
        NOTE(review): the parameter named `len` shadows the builtin; it is
        unused here but renaming would change the interface.'''
        globalLog.getError().log(globalLog.DEBUG, \
                "HandleRedis:blackip:%s"%data)
        if self._redisConn.addRecord(data):
            globalLog.getError().log(globalLog.INFO, \
                    "Write blacklist:%s"%data)
            self._writeReject(data)

    def _handleModule(self, type, len, data):
        '''Dispatch a parsed packet to its handler by type index.
        package format:
        {
            package-Type        00
            package-length      0018
            data(ip/token)      basic package-length's value
        }
        NOTE(review): `type` and `len` shadow builtins (kept for interface
        compatibility). An out-of-range type raises IndexError, which is
        caught by _receivePackage.'''
        # look the handler method up by name and call it
        getattr(self, self._packageList[type])(len, data)

    def _recvPackageFromClient(self, conn):
        '''Read one length-prefixed packet from conn and dispatch it.
        Returns False when the peer closed, True otherwise (including when
        a short/truncated packet is simply dropped).'''
        # size of the fixed header: len(type + data_len), i.e. the size of
        # the C struct header itself
        preLength = 8
        # read the fixed-size header first
        data = conn.recv(preLength)
        if not data:
            globalLog.getError().log(globalLog.DEBUG, \
                    'Received Null string, client close.')
            return False
        elif len(data) != preLength:
            globalLog.getError().log(globalLog.DEBUG, \
                    'Package too small, drop package.')
            return True
        # little-endian pair of 32-bit ints: packet type, payload length
        (type, totalLen) = struct.unpack('<ii', data)
        globalLog.getError().log(globalLog.DEBUG, \
                "Package Type:%d, PackageLeng=%d"%(type, totalLen))

        # then read the payload
        data = conn.recv(totalLen)
        if not data:
            globalLog.getError().log(globalLog.ERROR, \
                    'Received Null string, perhaps client close.')
            return False
        elif len(data) != totalLen:
            globalLog.getError().log(globalLog.DEBUG, \
                    'Package too small, drop package.')
            return True

        # dispatch to the type-specific handler
        globalLog.getError().log(globalLog.DEBUG, "Pacakge Content=%s"%data)
        self._handleModule(type, totalLen, data)
        return True

    def _receivePackage(self, poller, conn, fdToSocket, readOnly):
        '''Handle one readable socket: accept new clients on the listening
        socket, otherwise read a packet. Returns False when the connection
        should be torn down.'''
        try:
            if conn is self._sock:
                # the listening socket is readable: accept and register
                # the new client with the poller
                globalLog.getError().log(globalLog.DEBUG, 'A new connection come on.')
                conn, cliAddr = conn.accept()
                conn.setblocking(0)
                fdToSocket[conn.fileno()] = conn
                poller.register(conn, readOnly)
                return True
            else:
                # an established client is readable: consume one packet
                return False if not self._recvPackageFromClient(conn) else True
        except Exception, msg:
            globalLog.getError().log(globalLog.ERROR, msg)
            return False

    def waitConnection(self, poller, readOnly, fdToSocket, timeout):
        '''Run one poll() cycle: service readable sockets and drop the
        ones that closed or errored.'''
        events = poller.poll(timeout)
        for fd, flag in events:
            # retrieve the actual socket from its file descriptor
            s = fdToSocket[fd]
            # handle inputs
            if (flag & (select.POLLIN | select.POLLPRI)) and \
                    not self._receivePackage(poller, s, fdToSocket, readOnly):
                poller.unregister(s)
                s.close()
                del fdToSocket[fd]
            elif flag & (select.POLLHUP | select.POLLERR):
                globalLog.getError().log(globalLog.ERROR, 'Client close connection')
                poller.unregister(s)
                s.close()

    def handle(self):
        '''Main server loop: register the listening socket with poll()
        and service events forever.'''
        # commonly used flag sets; timeout is in milliseconds
        timeout = 1000
        readOnly = (select.POLLIN | select.POLLPRI| select.POLLHUP | select.POLLERR)

        # set up the poller
        self.poller = select.poll()
        self.poller.register(self._sock, readOnly)

        # map file descriptors to socket objects
        self.fdToSocket = {
                self._sock.fileno():self._sock,
                }

        # communicate; any per-cycle exception is logged and the loop continues
        while True:
            try:
                self.waitConnection(self.poller, readOnly, self.fdToSocket, timeout)
            except Exception,msg:
                globalLog.getError().log(globalLog.ERROR, \
                        'Exception in server handle:%s'%msg);

    def doExit(self):
        '''atexit hook: close the FIFO writer if it was opened.'''
        if self._attackPip:
            self._attackPip.close()

    def signalExit(self, a, b):
        '''Signal handler (SIGQUIT/SIGTERM/SIGINT): close the FIFO and exit.'''
        if self._attackPip:
            print 'XXXXXXXXXXXXXXXXXXXXXXXXXXXX----'
            self._attackPip.close()
            print 'XXXXXXXXXXXXXXXXXXXXXXXXXXXX'
        sys.exit(-1)

    def run(self):
        '''Daemon entry point: install signal handlers, open the FIFO,
        bind the socket and enter the poll loop.'''
        signal.signal(signal.SIGQUIT, self.signalExit)
        signal.signal(signal.SIGTERM, self.signalExit)
        signal.signal(signal.SIGINT, self.signalExit)
        # bind
        self._createPipe(BLACK_PIPE)
        self._bindSocket(self._address, self._port)
        self.handle()


if __name__ == '__main__':
    '''main'''
    # run directly (not via Daemon.start/stop); redis server is hard-coded
    agentServer = AgentServer('172.16.161.253', 6379, pid='/tmp/agent.pid')
    atexit.register(agentServer.doExit)
    agentServer.run()
    '''
    if len(sys.argv) == 2:
        if 'start' == sys.argv[1]:
            agentServer.start()
        elif 'stop' == sys.argv[1]:
            agentServer.stop()
        elif 'restart' == sys.argv[1]:
            agentServer.restart()
        else:
            print "Unknown command"
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart" % sys.argv[0]
        sys.exit(2)
    '''
gpl-3.0
alistairlow/tensorflow
tensorflow/contrib/factorization/python/ops/clustering_ops.py
11
35885
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Clustering Operations.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.factorization.python.ops import gen_clustering_ops # go/tf-wildcard-import # pylint: disable=wildcard-import from tensorflow.contrib.factorization.python.ops.gen_clustering_ops import * # pylint: enable=wildcard-import from tensorflow.contrib.util import loader from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import check_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn_impl from tensorflow.python.ops import random_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops.embedding_ops import embedding_lookup from tensorflow.python.platform import resource_loader _clustering_ops = loader.load_op_library( resource_loader.get_path_to_datafile('_clustering_ops.so')) # Euclidean distance between vectors U and V is defined as ||U - V||_F which is # the square root of the sum of the absolute squares of the elements difference. 
SQUARED_EUCLIDEAN_DISTANCE = 'squared_euclidean'
# Cosine distance between vectors U and V is defined as
# 1 - (U \dot V) / (||U||_F ||V||_F)
COSINE_DISTANCE = 'cosine'

# Supported cluster-initialization strategies (see KMeans.__init__).
RANDOM_INIT = 'random'
KMEANS_PLUS_PLUS_INIT = 'kmeans_plus_plus'
KMC2_INIT = 'kmc2'

# The name of the variable holding the cluster centers. Used by the Estimator.
CLUSTERS_VAR_NAME = 'clusters'


class KMeans(object):
  """Creates the graph for k-means clustering."""

  def __init__(self,
               inputs,
               num_clusters,
               initial_clusters=RANDOM_INIT,
               distance_metric=SQUARED_EUCLIDEAN_DISTANCE,
               use_mini_batch=False,
               mini_batch_steps_per_iteration=1,
               random_seed=0,
               kmeans_plus_plus_num_retries=2,
               kmc2_chain_length=200):
    """Creates an object for generating KMeans clustering graph.

    This class implements the following variants of K-means algorithm:

    If use_mini_batch is False, it runs standard full batch K-means. Each step
    runs a single iteration of K-Means. This step can be run sharded across
    multiple workers by passing a list of sharded inputs to this class. Note
    however that a single step needs to process the full input at once.

    If use_mini_batch is True, it runs a generalization of the mini-batch
    K-means algorithm. It runs multiple iterations, where each iteration is
    composed of mini_batch_steps_per_iteration steps. Two copies of cluster
    centers are maintained: one that is updated at the end of each iteration,
    and one that is updated every step. The first copy is used to compute
    cluster allocations for each step, and for inference, while the second copy
    is the one updated each step using the mini-batch update rule. After each
    iteration is complete, this second copy is copied back the first copy.

    Note that for use_mini_batch=True, when mini_batch_steps_per_iteration=1,
    the algorithm reduces to the standard mini-batch algorithm. Also by setting
    mini_batch_steps_per_iteration = num_inputs / batch_size, the algorithm
    becomes an asynchronous version of the full-batch algorithm. Note however
    that there is no guarantee by this implementation that each input is seen
    exactly once per iteration. Also, different updates are applied
    asynchronously without locking. So this asynchronous version may not
    behave exactly like a full-batch version.

    Args:
      inputs: An input tensor or list of input tensors. It is assumed that the
        data points have been previously randomly permuted.
      num_clusters: An integer tensor specifying the number of clusters. This
        argument is ignored if initial_clusters is a tensor or numpy array.
      initial_clusters: Specifies the clusters used during initialization. One
        of the following:
        - a tensor or numpy array with the initial cluster centers.
        - a function f(inputs, k) that returns up to k centers from `inputs`.
        - "random": Choose centers randomly from `inputs`.
        - "kmeans_plus_plus": Use kmeans++ to choose centers from `inputs`.
        - "kmc2": Use the fast k-MC2 algorithm to choose centers from `inputs`.
        In the last three cases, one batch of `inputs` may not yield
        `num_clusters` centers, in which case initialization will require
        multiple batches until enough centers are chosen. In the case of
        "random" or "kmeans_plus_plus", if the input size is <= `num_clusters`
        then the entire batch is chosen to be cluster centers.
      distance_metric: Distance metric used for clustering. Supported options:
        "squared_euclidean", "cosine".
      use_mini_batch: If true, use the mini-batch k-means algorithm. Else assume
        full batch.
      mini_batch_steps_per_iteration: Number of steps after which the updated
        cluster centers are synced back to a master copy.
      random_seed: Seed for PRNG used to initialize seeds.
      kmeans_plus_plus_num_retries: For each point that is sampled during
        kmeans++ initialization, this parameter specifies the number of
        additional points to draw from the current distribution before selecting
        the best. If a negative value is specified, a heuristic is used to
        sample O(log(num_to_sample)) additional points.
      kmc2_chain_length: Determines how many candidate points are used by the
        k-MC2 algorithm to produce one new cluster centers. If a (mini-)batch
        contains less points, one new cluster center is generated from the
        (mini-)batch.

    Raises:
      ValueError: An invalid argument was passed to initial_clusters or
        distance_metric.
    """
    if isinstance(initial_clusters, str) and initial_clusters not in [
        RANDOM_INIT, KMEANS_PLUS_PLUS_INIT, KMC2_INIT
    ]:
      raise ValueError(
          "Unsupported initialization algorithm '%s'" % initial_clusters)
    if distance_metric not in [SQUARED_EUCLIDEAN_DISTANCE, COSINE_DISTANCE]:
      raise ValueError("Unsupported distance metric '%s'" % distance_metric)
    # Inputs are always handled internally as a list of (sharded) tensors.
    self._inputs = inputs if isinstance(inputs, list) else [inputs]
    self._num_clusters = num_clusters
    self._initial_clusters = initial_clusters
    self._distance_metric = distance_metric
    self._use_mini_batch = use_mini_batch
    self._mini_batch_steps_per_iteration = int(mini_batch_steps_per_iteration)
    self._random_seed = random_seed
    self._kmeans_plus_plus_num_retries = kmeans_plus_plus_num_retries
    self._kmc2_chain_length = kmc2_chain_length

  @classmethod
  def _distance_graph(cls, inputs, clusters, distance_metric):
    """Computes distance between each input and each cluster center.

    Args:
      inputs: list of input Tensors.
      clusters: cluster Tensor.
      distance_metric: distance metric used for clustering

    Returns:
      list of Tensors, where each element corresponds to each element in inputs.
      The value is the distance of each row to all the cluster centers.
      Currently only Euclidean distance and cosine distance are supported.
    """
    assert isinstance(inputs, list)
    if distance_metric == SQUARED_EUCLIDEAN_DISTANCE:
      return cls._compute_euclidean_distance(inputs, clusters)
    elif distance_metric == COSINE_DISTANCE:
      return cls._compute_cosine_distance(
          inputs, clusters, inputs_normalized=True)
    else:
      # Unreachable: __init__ validates distance_metric.
      assert False, str(distance_metric)

  @classmethod
  def _compute_euclidean_distance(cls, inputs, clusters):
    """Computes Euclidean distance between each input and each cluster center.

    Args:
      inputs: list of input Tensors.
      clusters: cluster Tensor.

    Returns:
      list of Tensors, where each element corresponds to each element in inputs.
      The value is the distance of each row to all the cluster centers.
    """
    output = []
    for inp in inputs:
      with ops.colocate_with(inp, ignore_existing=True):
        # Computes Euclidean distance. Note the first and third terms are
        # broadcast additions (||x||^2 - 2 x.c + ||c||^2).
        squared_distance = (
            math_ops.reduce_sum(math_ops.square(inp), 1, keep_dims=True) -
            2 * math_ops.matmul(inp, clusters, transpose_b=True) +
            array_ops.transpose(
                math_ops.reduce_sum(
                    math_ops.square(clusters), 1, keep_dims=True)))
        output.append(squared_distance)

    return output

  @classmethod
  def _compute_cosine_distance(cls, inputs, clusters, inputs_normalized=True):
    """Computes cosine distance between each input and each cluster center.

    Args:
      inputs: list of input Tensor.
      clusters: cluster Tensor
      inputs_normalized: if True, it assumes that inp and clusters are
        normalized and computes the dot product which is equivalent to the
        cosine distance. Else it L2 normalizes the inputs first.

    Returns:
      list of Tensors, where each element corresponds to each element in inp.
      The value is the distance of each row to all the cluster centers.
    """
    output = []
    if not inputs_normalized:
      with ops.colocate_with(clusters, ignore_existing=True):
        clusters = nn_impl.l2_normalize(clusters, dim=1)
    for inp in inputs:
      with ops.colocate_with(inp, ignore_existing=True):
        if not inputs_normalized:
          inp = nn_impl.l2_normalize(inp, dim=1)
        output.append(1 - math_ops.matmul(inp, clusters, transpose_b=True))
    return output

  def _infer_graph(self, inputs, clusters):
    """Maps input to closest cluster and the score.

    Args:
      inputs: list of input Tensors.
      clusters: Tensor of cluster centers.

    Returns:
      List of tuple, where each value in tuple corresponds to a value in inp.
      The tuple has following three elements:
      all_scores: distance of each input to each cluster center.
      score: distance of each input to closest cluster center.
      cluster_idx: index of cluster center closest to the corresponding input.
    """
    assert isinstance(inputs, list)
    # Pairwise distances are used only by transform(). In all other cases, this
    # sub-graph is not evaluated.
    scores = self._distance_graph(inputs, clusters, self._distance_metric)
    output = []
    if (self._distance_metric == COSINE_DISTANCE and
        not self._clusters_l2_normalized()):
      # The cosine distance between normalized vectors x and y is the same as
      # 2 * squared_euclidean_distance. We are using this fact and reusing the
      # nearest_neighbors op.
      # TODO(ands): Support COSINE distance in nearest_neighbors and remove
      # this.
      with ops.colocate_with(clusters, ignore_existing=True):
        clusters = nn_impl.l2_normalize(clusters, dim=1)
    for inp, score in zip(inputs, scores):
      with ops.colocate_with(inp, ignore_existing=True):
        (indices, distances) = gen_clustering_ops.nearest_neighbors(
            inp, clusters, 1)
        if self._distance_metric == COSINE_DISTANCE:
          # Halve to convert 2*squared_euclidean back to cosine distance.
          distances *= 0.5
        output.append((score, array_ops.squeeze(distances),
                       array_ops.squeeze(indices)))
    return zip(*output)

  def _clusters_l2_normalized(self):
    """Returns True if clusters centers are kept normalized."""
    return (self._distance_metric == COSINE_DISTANCE and
            (not self._use_mini_batch or
             self._mini_batch_steps_per_iteration > 1))

  def _create_variables(self, num_clusters):
    """Creates variables.

    Args:
      num_clusters: an integer Tensor providing the number of clusters.

    Returns:
      Tuple with following elements:
      - cluster_centers: a Tensor for storing cluster centers
      - cluster_centers_initialized: bool Variable indicating whether clusters
            are initialized.
      - cluster_counts: a Tensor for storing counts of points assigned to this
            cluster. This is used by mini-batch training.
      - cluster_centers_updated: Tensor representing copy of cluster centers
            that are updated every step.
      - update_in_steps: numbers of steps left before we sync
            cluster_centers_updated back to cluster_centers.
    """
    # Start empty; the init op fills in the real centers later, hence
    # validate_shape=False on the center variables.
    init_value = array_ops.constant([], dtype=dtypes.float32)
    cluster_centers = variable_scope.variable(
        init_value, name=CLUSTERS_VAR_NAME, validate_shape=False)
    cluster_centers_initialized = variable_scope.variable(
        False, dtype=dtypes.bool, name='initialized')

    if self._use_mini_batch and self._mini_batch_steps_per_iteration > 1:
      # Copy of cluster centers actively updated each step according to
      # mini-batch update rule.
      cluster_centers_updated = variable_scope.variable(
          init_value, name='clusters_updated', validate_shape=False)
      # How many steps till we copy the updated clusters to cluster_centers.
      update_in_steps = variable_scope.variable(
          self._mini_batch_steps_per_iteration,
          dtype=dtypes.int64,
          name='update_in_steps')
      # Count of points assigned to cluster_centers_updated.
      cluster_counts = variable_scope.variable(
          array_ops.zeros([num_clusters], dtype=dtypes.int64))
    else:
      cluster_centers_updated = cluster_centers
      update_in_steps = None
      cluster_counts = (
          variable_scope.variable(
              array_ops.ones([num_clusters], dtype=dtypes.int64))
          if self._use_mini_batch else None)
    return (cluster_centers, cluster_centers_initialized, cluster_counts,
            cluster_centers_updated, update_in_steps)

  @classmethod
  def _l2_normalize_data(cls, inputs):
    """Normalized the input data."""
    output = []
    for inp in inputs:
      with ops.colocate_with(inp, ignore_existing=True):
        output.append(nn_impl.l2_normalize(inp, dim=1))
    return output

  def training_graph(self):
    """Generate a training graph for kmeans algorithm.

    This returns, among other things, an op that chooses initial centers
    (init_op), a boolean variable that is set to True when the initial centers
    are chosen (cluster_centers_initialized), and an op to perform either an
    entire Lloyd iteration or a mini-batch of a Lloyd iteration (training_op).
    The caller should use these components as follows. A single worker should
    execute init_op multiple times until cluster_centers_initialized becomes
    True. Then multiple workers may execute training_op any number of times.

    Returns:
      A tuple consisting of:
      all_scores: A matrix (or list of matrices) of dimensions (num_input,
        num_clusters) where the value is the distance of an input vector and a
        cluster center.
      cluster_idx: A vector (or list of vectors). Each element in the vector
        corresponds to an input row in 'inp' and specifies the cluster id
        corresponding to the input.
      scores: Similar to cluster_idx but specifies the distance to the
        assigned cluster instead.
      cluster_centers_initialized: scalar indicating whether clusters have been
        initialized.
      init_op: an op to initialize the clusters.
      training_op: an op that runs an iteration of training.
    """
    # Implementation of kmeans.
    if (isinstance(self._initial_clusters, str) or
        callable(self._initial_clusters)):
      initial_clusters = self._initial_clusters
      num_clusters = ops.convert_to_tensor(self._num_clusters)
    else:
      # Concrete initial centers given: infer num_clusters from their shape.
      initial_clusters = ops.convert_to_tensor(self._initial_clusters)
      num_clusters = array_ops.shape(initial_clusters)[0]

    inputs = self._inputs
    (cluster_centers_var, cluster_centers_initialized, total_counts,
     cluster_centers_updated,
     update_in_steps) = self._create_variables(num_clusters)
    init_op = _InitializeClustersOpFactory(
        self._inputs, num_clusters, initial_clusters, self._distance_metric,
        self._random_seed, self._kmeans_plus_plus_num_retries,
        self._kmc2_chain_length, cluster_centers_var, cluster_centers_updated,
        cluster_centers_initialized).op()
    cluster_centers = cluster_centers_var

    if self._distance_metric == COSINE_DISTANCE:
      inputs = self._l2_normalize_data(inputs)
      if not self._clusters_l2_normalized():
        cluster_centers = nn_impl.l2_normalize(cluster_centers, dim=1)

    all_scores, scores, cluster_idx = self._infer_graph(inputs, cluster_centers)
    if self._use_mini_batch:
      sync_updates_op = self._mini_batch_sync_updates_op(
          update_in_steps, cluster_centers_var, cluster_centers_updated,
          total_counts)
      assert sync_updates_op is not None
      with ops.control_dependencies([sync_updates_op]):
        training_op = self._mini_batch_training_op(
            inputs, cluster_idx, cluster_centers_updated, total_counts)
    else:
      # Identity check: cluster_centers must still be the variable itself
      # (i.e. it was not replaced by a normalized tensor above).
      assert cluster_centers == cluster_centers_var
      training_op = self._full_batch_training_op(
          inputs, num_clusters, cluster_idx, cluster_centers_var)

    return (all_scores, cluster_idx, scores, cluster_centers_initialized,
            init_op, training_op)

  def _mini_batch_sync_updates_op(self, update_in_steps, cluster_centers_var,
                                  cluster_centers_updated, total_counts):
    """Returns an op that periodically copies the per-step centers
    (cluster_centers_updated) back to the master copy (cluster_centers_var)
    and resets the counters; a no-op outside the multi-step mini-batch mode."""
    if self._use_mini_batch and self._mini_batch_steps_per_iteration > 1:
      assert update_in_steps is not None
      with ops.colocate_with(update_in_steps, ignore_existing=True):

        def _f():
          # Note that there is a race condition here, so we do a best effort
          # updates here. We reset update_in_steps first so that other workers
          # don't duplicate the updates. Also we update cluster_center_vars
          # before resetting total_counts to avoid large updates to
          # cluster_centers_updated based on partially updated
          # cluster_center_vars.
          with ops.control_dependencies([
              state_ops.assign(update_in_steps,
                               self._mini_batch_steps_per_iteration - 1)
          ]):
            with ops.colocate_with(
                cluster_centers_updated, ignore_existing=True):
              if self._distance_metric == COSINE_DISTANCE:
                cluster_centers = nn_impl.l2_normalize(
                    cluster_centers_updated, dim=1)
              else:
                cluster_centers = cluster_centers_updated
            with ops.colocate_with(cluster_centers_var, ignore_existing=True):
              with ops.control_dependencies(
                  [state_ops.assign(cluster_centers_var, cluster_centers)]):
                with ops.colocate_with(None, ignore_existing=True):
                  with ops.control_dependencies([
                      state_ops.assign(total_counts,
                                       array_ops.zeros_like(total_counts))
                  ]):
                    return array_ops.identity(update_in_steps)

        return control_flow_ops.cond(
            update_in_steps <= 0, _f,
            lambda: state_ops.assign_sub(update_in_steps, 1))
    else:
      return control_flow_ops.no_op()

  def _mini_batch_training_op(self, inputs, cluster_idx_list, cluster_centers,
                              total_counts):
    """Creates an op for training for mini batch case.

    Args:
      inputs: list of input Tensors.
      cluster_idx_list: A vector (or list of vectors). Each element in the
        vector corresponds to an input row in 'inp' and specifies the cluster id
        corresponding to the input.
      cluster_centers: Tensor Ref of cluster centers.
      total_counts: Tensor Ref of cluster counts.

    Returns:
      An op for doing an update of mini-batch k-means.
    """
    update_ops = []
    for inp, cluster_idx in zip(inputs, cluster_idx_list):
      with ops.colocate_with(inp, ignore_existing=True):
        assert total_counts is not None
        cluster_idx = array_ops.reshape(cluster_idx, [-1])
        # Dedupe the unique ids of cluster_centers being updated so that updates
        # can be locally aggregated.
        unique_ids, unique_idx = array_ops.unique(cluster_idx)
        num_unique_cluster_idx = array_ops.size(unique_ids)
        # Fetch the old values of counts and cluster_centers.
        with ops.colocate_with(total_counts, ignore_existing=True):
          old_counts = array_ops.gather(total_counts, unique_ids)
        # TODO(agarwal): This colocation seems to run into problems. Fix it.
        with ops.colocate_with(cluster_centers, ignore_existing=True):
          old_cluster_centers = array_ops.gather(cluster_centers, unique_ids)
        # Locally aggregate the increment to counts.
        count_updates = math_ops.unsorted_segment_sum(
            array_ops.ones_like(unique_idx, dtype=total_counts.dtype),
            unique_idx, num_unique_cluster_idx)
        # Locally compute the sum of inputs mapped to each id.
        # For a cluster with old cluster value x, old count n, and with data
        # d_1,...d_k newly assigned to it, we recompute the new value as
        # x += (sum_i(d_i) - k * x) / (n + k).
        # Compute sum_i(d_i), see comment above.
        cluster_center_updates = math_ops.unsorted_segment_sum(
            inp, unique_idx, num_unique_cluster_idx)
        # Shape to enable broadcasting count_updates and learning_rate to inp.
        # It extends the shape with 1's to match the rank of inp.
        broadcast_shape = array_ops.concat([
            array_ops.reshape(num_unique_cluster_idx, [1]),
            array_ops.ones(
                array_ops.reshape(array_ops.rank(inp) - 1, [1]),
                dtype=dtypes.int32)
        ], 0)
        # Subtract k * x, see comment above.
        cluster_center_updates -= math_ops.cast(
            array_ops.reshape(count_updates, broadcast_shape),
            inp.dtype) * old_cluster_centers
        learning_rate = math_ops.reciprocal(
            math_ops.cast(old_counts + count_updates, inp.dtype))
        learning_rate = array_ops.reshape(learning_rate, broadcast_shape)
        # scale by 1 / (n + k), see comment above.
        cluster_center_updates *= learning_rate
        # Apply the updates.
        update_counts = state_ops.scatter_add(total_counts, unique_ids,
                                              count_updates)
        update_cluster_centers = state_ops.scatter_add(
            cluster_centers, unique_ids, cluster_center_updates)
        update_ops.extend([update_counts, update_cluster_centers])
    return control_flow_ops.group(*update_ops)

  def _full_batch_training_op(self, inputs, num_clusters, cluster_idx_list,
                              cluster_centers):
    """Creates an op for training for full batch case.

    Args:
      inputs: list of input Tensors.
      num_clusters: an integer Tensor providing the number of clusters.
      cluster_idx_list: A vector (or list of vectors). Each element in the
        vector corresponds to an input row in 'inp' and specifies the cluster id
        corresponding to the input.
      cluster_centers: Tensor Ref of cluster centers.

    Returns:
      An op for doing an update of mini-batch k-means.
    """
    cluster_sums = []
    cluster_counts = []
    # epsilon guards against division by zero for empty clusters.
    epsilon = constant_op.constant(1e-6, dtype=inputs[0].dtype)
    for inp, cluster_idx in zip(inputs, cluster_idx_list):
      with ops.colocate_with(inp, ignore_existing=True):
        cluster_sums.append(
            math_ops.unsorted_segment_sum(inp, cluster_idx, num_clusters))
        cluster_counts.append(
            math_ops.unsorted_segment_sum(
                array_ops.reshape(
                    array_ops.ones(
                        array_ops.reshape(array_ops.shape(inp)[0], [-1])),
                    [-1, 1]), cluster_idx, num_clusters))
    with ops.colocate_with(cluster_centers, ignore_existing=True):
      new_clusters_centers = math_ops.add_n(cluster_sums) / (
          math_ops.cast(math_ops.add_n(cluster_counts), cluster_sums[0].dtype) +
          epsilon)
      if self._clusters_l2_normalized():
        new_clusters_centers = nn_impl.l2_normalize(new_clusters_centers, dim=1)
    return state_ops.assign(cluster_centers, new_clusters_centers)


class _InitializeClustersOpFactory(object):
  """Internal class to create the op to initialize the clusters.

  The op performs this algorithm (see constructor args):

  num_remaining = num_clusters - length(cluster_centers)
  if num_remaining == 0:
    assert that cluster_centers_initialized is true
  else:
    assert that num_remaining > 0
    new_centers = choose up to num_remaining initial centers
    l2-normalize new_centers if using cosine distance
    all_centers = concat(cluster_centers, new_centers)
    cluster_centers := all_centers
    if there is a cluster_centers_updated variable:
      cluster_centers_updated := cluster_centers
    num_now_remaining = num_clusters - length(cluster_centers)
    if num_now_remaining == 0:
      cluster_centers_initialized := true
  """

  # TODO(ccolby): Refactor this class so that kmc2 isn't so much a special case.

  def __init__(self, inputs, num_clusters, initial_clusters, distance_metric,
               random_seed, kmeans_plus_plus_num_retries, kmc2_chain_length,
               cluster_centers, cluster_centers_updated,
               cluster_centers_initialized):
    """Creates an op factory.

    Args:
      inputs: See KMeans constructor.
      num_clusters: An integer Tensor providing the number of clusters.
initial_clusters: See KMeans constructor. distance_metric: See KMeans constructor. random_seed: See KMeans constructor. kmeans_plus_plus_num_retries: See KMeans constructor. kmc2_chain_length: See KMeans constructor. cluster_centers: The TF variable holding the initial centers. It may already contain some centers when the op is executed. cluster_centers_updated: A second TF variable to hold a copy of the initial centers, used for full-batch mode. In mini-batch mode, cluster_centers_updated is the same variable as cluster_centers. cluster_centers_initialized: A boolean TF variable that will be set to true when all the initial centers have been chosen. """ # All of these instance variables are constants. self._inputs = inputs self._num_clusters = num_clusters self._initial_clusters = initial_clusters self._distance_metric = distance_metric self._random_seed = random_seed self._kmeans_plus_plus_num_retries = kmeans_plus_plus_num_retries self._kmc2_chain_length = kmc2_chain_length self._cluster_centers = cluster_centers self._cluster_centers_updated = cluster_centers_updated self._cluster_centers_initialized = cluster_centers_initialized self._num_selected = array_ops.shape(self._cluster_centers)[0] self._num_remaining = self._num_clusters - self._num_selected self._num_data = math_ops.add_n( [array_ops.shape(i)[0] for i in self._inputs]) def _random(self): indices = random_ops.random_uniform( array_ops.reshape(self._num_remaining, [-1]), minval=0, maxval=math_ops.cast(self._num_data, dtypes.int64), seed=self._random_seed, dtype=dtypes.int64) return embedding_lookup(self._inputs, indices, partition_strategy='div') def _kmeans_plus_plus(self): # Points from only the first shard are used for initializing centers. # TODO(ands): Use all points. 
inp = self._inputs[0] if self._distance_metric == COSINE_DISTANCE: inp = nn_impl.l2_normalize(inp, dim=1) return gen_clustering_ops.kmeans_plus_plus_initialization( inp, math_ops.to_int64(self._num_remaining), self._random_seed, self._kmeans_plus_plus_num_retries) def _kmc2_multiple_centers(self): """Adds new initial cluster centers using the k-MC2 algorithm. In each call to the op, the provided batch is split into subsets based on the specified `kmc2_chain_length`. On each subset, a single Markov chain of the k-MC2 algorithm is used to add *one* new center cluster center. If there are less than `kmc2_chain_length` points in the subset, a single center is added using one Markov chain on the full input. It is assumed that the provided batch has previously been randomly permuted. Otherwise, k-MC2 may return suboptimal centers. Returns: An op that adds new cluster centers. """ # The op only operates on the first shard of data. first_shard = self._inputs[0] # Number of points in the input that can be used. batch_size = array_ops.shape(first_shard)[0] # Maximum number of subsets such that the size of each subset is at least # `kmc2_chain_length`. Final subsets may be larger. max_to_sample = math_ops.cast( batch_size / self._kmc2_chain_length, dtype=dtypes.int32) # We sample at least one new center and at most all remaining centers. num_to_sample = math_ops.maximum( math_ops.minimum(self._num_remaining, max_to_sample), 1) def _cond(i, _): """Stopping condition for the while loop.""" return math_ops.less(i, num_to_sample) def _body(i, _): """Body that adds a single new center based on a subset.""" def _sample_random(): """Returns a random point as a cluster center.""" # By assumption the batch is reshuffled and _sample_random is always # called for i=0. Hence, we simply return the first point. 
new_center = array_ops.reshape(first_shard[0], [1, -1]) if self._distance_metric == COSINE_DISTANCE: new_center = nn_impl.l2_normalize(new_center, dim=1) return new_center def _sample_kmc2_chain(): """Returns previous centers as well as a new center sampled using k-MC2. """ # Extract the subset from the underlying batch. start = i * self._kmc2_chain_length end = start + self._kmc2_chain_length subset = first_shard[start:end] # Compute the distances from points in the subset to previous centers. _, distances = gen_clustering_ops.nearest_neighbors( subset, self._cluster_centers, 1) # Sample index of new center using k-MC2 Markov chain. new_center_index = gen_clustering_ops.kmc2_chain_initialization( array_ops.squeeze(distances), self._random_seed) # Extract actual new center. newly_sampled_center = array_ops.reshape(subset[new_center_index], [1, -1]) # Return concatenation with previously sampled centers. if self._distance_metric == COSINE_DISTANCE: newly_sampled_center = nn_impl.l2_normalize( newly_sampled_center, dim=1) return array_ops.concat([self._cluster_centers, newly_sampled_center], 0) # Obtain a random point if there are no previously sampled centers. # Otherwise, construct a k-MC2 Markov chain. new_centers = control_flow_ops.cond( math_ops.equal(self._num_selected, 0), _sample_random, _sample_kmc2_chain) # Assign new cluster centers to underlying variable. assigned_centers = state_ops.assign( self._cluster_centers, new_centers, validate_shape=False) if self._cluster_centers_updated is not self._cluster_centers: assigned_centers = state_ops.assign( self._cluster_centers_updated, assigned_centers, validate_shape=False) return i + 1, self._num_clusters - array_ops.shape(assigned_centers)[0] # Add num_to_sample new data points. 
_, num_remaining = control_flow_ops.while_loop(_cond, _body, [0, 0]) return num_remaining def _greedy_batch_sampler(self, sampler): # If the input dataset size is smaller than the number of centers # remaining, choose the entire input dataset as centers. This can happen # with mini-batch. Otherwise, sample the batch according to the provided # sampler. return control_flow_ops.cond(self._num_data <= self._num_remaining, lambda: array_ops.concat(self._inputs, 0), sampler) def _single_batch_sampler(self, sampler): # Enforce that there are at least as many data points as centers # remaining. This gives the provided sampler the chance to select all # remaining centers from a single batch. with ops.control_dependencies( [check_ops.assert_greater_equal(self._num_data, self._num_remaining)]): return sampler() def _choose_initial_centers(self): if isinstance(self._initial_clusters, str): if self._initial_clusters == RANDOM_INIT: return self._greedy_batch_sampler(self._random) else: # self._initial_clusters == KMEANS_PLUS_PLUS_INIT return self._single_batch_sampler(self._kmeans_plus_plus) elif callable(self._initial_clusters): return self._initial_clusters(self._inputs, self._num_remaining) else: with ops.control_dependencies([ check_ops.assert_equal(self._num_remaining, array_ops.shape(self._initial_clusters)[0]) ]): return self._initial_clusters def _add_new_centers(self): """Adds some centers and returns the number of centers remaining.""" new_centers = self._choose_initial_centers() if self._distance_metric == COSINE_DISTANCE: new_centers = nn_impl.l2_normalize(new_centers, dim=1) # If cluster_centers is empty, it doesn't have the right shape for concat. all_centers = control_flow_ops.cond( math_ops.equal(self._num_selected, 0), lambda: new_centers, lambda: array_ops.concat([self._cluster_centers, new_centers], 0)) # TODO(ccolby): De-dupe all_centers? 
a = state_ops.assign( self._cluster_centers, all_centers, validate_shape=False) if self._cluster_centers_updated is not self._cluster_centers: a = state_ops.assign( self._cluster_centers_updated, a, validate_shape=False) return self._num_clusters - array_ops.shape(a)[0] def _initialize(self): with ops.control_dependencies([ check_ops.assert_positive(self._num_remaining), ]): if self._initial_clusters == KMC2_INIT: num_now_remaining = self._kmc2_multiple_centers() else: num_now_remaining = self._add_new_centers() return control_flow_ops.cond( math_ops.equal(num_now_remaining, 0), lambda: state_ops.assign(self._cluster_centers_initialized, True), control_flow_ops.no_op) def op(self): """Returns the cluster initializer op.""" return control_flow_ops.cond( math_ops.equal(self._num_remaining, 0), lambda: check_ops.assert_equal(self._cluster_centers_initialized, True), self._initialize)
apache-2.0
47lining/nucleator-core
lib/nucleator/core/setup/commands/utils/generate_cert.py
1
3070
# Copyright 2015 47Lining LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: This module is Python 2 code (print statements).

import os, shlex
from jinja2 import Template
from subprocess import Popen, PIPE

def generate_cert(file_name, cage_name, customer_domain, templates_home, siteconfig_home, pkcs12_bundle_password, debug=False):
    """Generate a self-signed key/x509 certificate pair and a PKCS12 bundle.

    Renders an ``openssl.cfg`` from a Jinja2 template, then shells out to the
    ``openssl`` command-line tool twice: once to create the private key
    (``<file_name>.pem``) and x509 certificate (``<file_name>.509``), and once
    to export them as a PKCS12 bundle (``<file_name>.crt``).

    Args:
        file_name: base name (no extension) for the generated key/cert files.
        cage_name: value substituted into the openssl.cfg template.
        customer_domain: value substituted into the openssl.cfg template.
        templates_home: directory containing ``openssl.cfg.j2``.
        siteconfig_home: directory the rendered ``openssl.cfg`` is written to.
            NOTE(review): openssl is invoked with ``-config openssl.cfg``
            relative to the current working directory, so this presumably must
            be the CWD -- confirm against callers.
        pkcs12_bundle_password: password applied to the exported PKCS12 bundle.
        debug: when True, the intermediate .pem/.509/openssl.cfg files are
            kept on disk for inspection instead of being deleted.

    Returns:
        None. On failure the function prints a diagnostic (including the exact
        command to retry manually) and returns early.
    """
    # Erase previous files so stale outputs can't be mistaken for fresh ones.
    if os.path.isfile("openssl.cfg"):
        os.remove("openssl.cfg")
    if os.path.isfile(file_name+".pem"):
        os.remove(file_name+".pem")
    if os.path.isfile(file_name+".crt"):
        os.remove(file_name+".crt")
    if os.path.isfile(file_name+".509"):
        os.remove(file_name+".509")
    # Write out an openssl.cfg rendered from the Jinja2 template.
    with open (templates_home+'/openssl.cfg.j2', "r") as myfile:
        data = myfile.read()
    t = Template(data)
    output = t.render(customer_domain=customer_domain, file_name=file_name, cage_name=cage_name, pkcs12_bundle_password=pkcs12_bundle_password)
    with open (siteconfig_home+'/openssl.cfg', "w") as myfile:
        myfile.write(output)
    print "Generating key and x509 cert"
    # openssl x509 -req -days 365 -in $cert_name.csr -signkey $keypair_name -out $cert_name.crt
    # shlex.split + Popen(shell=False): the command is not run through a
    # shell, so word-splitting here is on literal whitespace only.
    cmd = "openssl req -x509 -newkey rsa:2048 -keyout "+file_name+".pem -out "+file_name+".509 -days 365 -config openssl.cfg"
    # NOTE(review): the exit status of openssl is ignored; success is judged
    # only by whether the output file exists afterwards.
    Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, stdin=PIPE).wait()
    if not os.path.isfile(file_name+".509"):
        print "Something went wrong in the openssl request x509 step..."
        print "Try running this command: '"+cmd+"'"
        return
    print "Converting to pkcs12"
    # openssl pkcs12 -export -in $cert_name.crt -inkey $keypair_name -out $cert_name.p12 -name $cert_name-cert -CAfile ca.crt -caname root -password pass:$keystore_password
    # NOTE(review): pkcs12_bundle_password is embedded in the argv; a password
    # containing whitespace would be split by shlex.split above -- confirm the
    # password's character set with callers.
    cmd = "openssl pkcs12 -export -in "+file_name+".509 -inkey "+file_name+".pem -out "+file_name+".crt -name "+file_name+"-cert -CAfile ca.crt -caname root -password pass:"+pkcs12_bundle_password+" -passin pass:"+pkcs12_bundle_password
    Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, stdin=PIPE).wait()
    if not os.path.isfile(file_name+".crt"):
        print "Something went wrong in the openssl pkcs12 export step..."
        print "Try running this command: '"+cmd+"'"
    else:
        # clean up temp files (kept when debug=True for troubleshooting)
        if not debug:
            if os.path.isfile("openssl.cfg"):
                os.remove("openssl.cfg")
            if os.path.isfile(file_name+".pem"):
                os.remove(file_name+".pem")
            if os.path.isfile(file_name+".509"):
                os.remove(file_name+".509")
    return
apache-2.0
sphinx-doc/sphinx-intl
tests/test_catalog.py
1
1258
# -*- coding: utf-8 -*-
"""
    test_catalog
    ~~~~~~~~~~~~

    Tests for the catalog helpers of sphinx_intl.

    :copyright: Copyright 2019 by Takayuki SHIMIZUKAWA.
    :license: BSD, see LICENSE for details.
"""
from babel.messages import Catalog, Message


def test_write_and_read_po_file_with_non_ascii_string(temp):
    """Round-trip a non-ASCII translation through a .po file on disk."""
    from sphinx_intl import catalog

    original = Catalog(locale='ja', domain='domain', fuzzy=False)
    message = Message('Hello World', u'こんにちは世界')
    original[message.id] = message

    po_path = temp / 'domain.po'
    catalog.dump_po(po_path, original)
    reloaded = catalog.load_po(po_path)

    assert reloaded[message.id].string == message.string


def test_fuzzy_flag_on_catalog_update():
    """Updating against a changed source marks the carried-over entry fuzzy."""
    from sphinx_intl import catalog

    translated = Catalog(locale='ja', domain='domain', fuzzy=False)
    message = Message('Hello Internationalized Sphinx World !',
                      u'こんにちは国際化されたSphinxの世界!')
    translated[message.id] = message
    assert not message.fuzzy

    source = Catalog(locale='en', domain='domain', fuzzy=False)
    source_message = Message('Hello Internationalized Sphinx World ?')
    source[source_message.id] = source_message

    catalog.update_with_fuzzy(translated, source)

    # The old msgid is gone; the new msgid carries the fuzzy flag.
    assert message.id not in translated
    assert translated[source_message.id].fuzzy
bsd-2-clause
ychen820/microblog
y/google-cloud-sdk/lib/googlecloudsdk/compute/subcommands/http_health_checks/update.py
4
8462
# Copyright 2014 Google Inc. All Rights Reserved.
"""Command for updating HTTP health checks."""
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.compute.lib import base_classes


# Inclusive bounds enforced by Run() on the numeric flags below.
THRESHOLD_UPPER_BOUND = 5000
THRESHOLD_LOWER_BOUND = 1
TIMEOUT_UPPER_BOUND_SEC = 5000
TIMEOUT_LOWER_BOUND_SEC = 1
CHECK_INTERVAL_UPPER_BOUND_SEC = 5000
CHECK_INTERVAL_LOWER_BOUND_SEC = 1


class Update(base_classes.ReadWriteCommand):
  """Update an HTTP health check."""

  @staticmethod
  def Args(parser):
    """Registers this command's flags and positional argument on parser."""
    # Each flag keeps a short `help` for usage output plus a longer
    # `detailed_help` used by the generated reference documentation.
    host = parser.add_argument(
        '--host',
        help='The value of the host header used by the HTTP health check.')
    host.detailed_help = """\
        The value of the host header used in this HTTP health check request.
        By default, this is empty and Google Compute Engine automatically sets
        the host header in health requests to the same external IP address as
        the forwarding rule associated with the target pool. Setting this to an
        empty string will clear any existing host value.
        """
    port = parser.add_argument(
        '--port',
        help='The TCP port number for the health request.',
        type=int)
    port.detailed_help = """\
        The TCP port number that this health check monitors.
        """
    request_path = parser.add_argument(
        '--request-path',
        help='The request path for the health check.')
    request_path.detailed_help = """\
        The request path that this health check monitors. For example,
        ``/healthcheck''.
        """
    check_interval_sec = parser.add_argument(
        '--check-interval',
        help='How often to run the check.',
        type=arg_parsers.Duration())
    check_interval_sec.detailed_help = """\
        How often to perform a health check for an instance. For example,
        specifying ``10s'' will run the check every 10 seconds. Valid units
        for this flag are ``s'' for seconds, ``m'' for minutes, and
        ``h'' for hours.
        """
    timeout_sec = parser.add_argument(
        '--timeout',
        help='How long to wait until check is a failure.',
        type=arg_parsers.Duration())
    timeout_sec.detailed_help = """\
        If Google Compute Engine doesn't receive an HTTP 200 response from the
        instance by the time specified by the value of this flag, the health
        check request is considered a failure. For example, specifying ``10s''
        will cause the check to wait for 10 seconds before considering the
        request a failure. Valid units for this flag are ``s'' for seconds,
        ``m'' for minutes, and ``h'' for hours.
        """
    unhealthy_threshold = parser.add_argument(
        '--unhealthy-threshold',
        help='Consecutive failures to mark instance unhealthy.',
        type=int)
    unhealthy_threshold.detailed_help = """\
        The number of consecutive health check failures before a healthy
        instance is marked as unhealthy.
        """
    healthy_threshold = parser.add_argument(
        '--healthy-threshold',
        help='Consecutive successes to mark instance healthy.',
        type=int)
    healthy_threshold.detailed_help = """\
        The number of consecutive successful health checks before an
        unhealthy instance is marked as healthy.
        """
    parser.add_argument(
        '--description',
        help=('A textual description for the HTTP health check. Pass in an '
              'empty string to unset.'))
    parser.add_argument(
        'name',
        help='The name of the HTTP health check.')

  @property
  def service(self):
    """The API service endpoint used by the base read-write command."""
    return self.compute.httpHealthChecks

  @property
  def resource_type(self):
    """The resource collection name for reference resolution."""
    return 'httpHealthChecks'

  def CreateReference(self, args):
    """Resolves the positional name into a global resource reference."""
    return self.CreateGlobalReference(
        args.name, resource_type='httpHealthChecks')

  def GetGetRequest(self, args):
    """Returns a request for fetching the existing HTTP health check."""
    return (self.service,
            'Get',
            self.messages.ComputeHttpHealthChecksGetRequest(
                httpHealthCheck=self.ref.Name(),
                project=self.project))

  def GetSetRequest(self, args, replacement, existing):
    """Returns a request for updating the HTTP health check."""
    return (self.service,
            'Update',
            self.messages.ComputeHttpHealthChecksUpdateRequest(
                httpHealthCheck=self.ref.Name(),
                httpHealthCheckResource=replacement,
                project=self.project))

  def Modify(self, args, existing_check):
    """Returns a modified HttpHealthCheck message."""
    # Description and Host are the only attributes that can be cleared by
    # passing in an empty string (but we don't want to set it to an empty
    # string).
    # Three-valued flag handling: a non-empty value replaces the field, an
    # omitted flag (None) keeps the existing value, and an explicit empty
    # string clears the field (represented as None on the message).
    if args.description:
      description = args.description
    elif args.description is None:
      description = existing_check.description
    else:
      description = None

    if args.host:
      host = args.host
    elif args.host is None:
      host = existing_check.host
    else:
      host = None

    # For the remaining fields a falsy flag value falls back to the value
    # already stored on the health check.
    new_health_check = self.messages.HttpHealthCheck(
        name=existing_check.name,
        host=host,
        port=args.port or existing_check.port,
        description=description,
        requestPath=args.request_path or existing_check.requestPath,
        checkIntervalSec=(args.check_interval or
                          existing_check.checkIntervalSec),
        timeoutSec=args.timeout or existing_check.timeoutSec,
        healthyThreshold=(args.healthy_threshold or
                          existing_check.healthyThreshold),
        unhealthyThreshold=(args.unhealthy_threshold or
                            existing_check.unhealthyThreshold),
    )
    return new_health_check

  def Run(self, args):
    """Validates flag bounds, then delegates the read-modify-write cycle."""
    if (args.check_interval is not None and
        (args.check_interval < CHECK_INTERVAL_LOWER_BOUND_SEC or
         args.check_interval > CHECK_INTERVAL_UPPER_BOUND_SEC)):
      raise exceptions.ToolException(
          '[--check-interval] must not be less than {0} second or greater '
          'than {1} seconds; received [{2}] seconds.'.format(
              CHECK_INTERVAL_LOWER_BOUND_SEC, CHECK_INTERVAL_UPPER_BOUND_SEC,
              args.check_interval))

    if (args.timeout is not None and
        (args.timeout < TIMEOUT_LOWER_BOUND_SEC or
         args.timeout > TIMEOUT_UPPER_BOUND_SEC)):
      raise exceptions.ToolException(
          '[--timeout] must not be less than {0} second or greater than {1} '
          'seconds; received: [{2}] seconds.'.format(
              TIMEOUT_LOWER_BOUND_SEC, TIMEOUT_UPPER_BOUND_SEC, args.timeout))

    if (args.healthy_threshold is not None and
        (args.healthy_threshold < THRESHOLD_LOWER_BOUND or
         args.healthy_threshold > THRESHOLD_UPPER_BOUND)):
      raise exceptions.ToolException(
          '[--healthy-threshold] must be an integer between {0} and {1}, '
          'inclusive; received: [{2}].'.format(THRESHOLD_LOWER_BOUND,
                                               THRESHOLD_UPPER_BOUND,
                                               args.healthy_threshold))

    if (args.unhealthy_threshold is not None and
        (args.unhealthy_threshold < THRESHOLD_LOWER_BOUND or
         args.unhealthy_threshold > THRESHOLD_UPPER_BOUND)):
      raise exceptions.ToolException(
          '[--unhealthy-threshold] must be an integer between {0} and {1}, '
          'inclusive; received [{2}].'.format(THRESHOLD_LOWER_BOUND,
                                              THRESHOLD_UPPER_BOUND,
                                              args.unhealthy_threshold))

    # Reject a no-op invocation: at least one mutable property must be set.
    # --description and --host are checked against None (not falsiness)
    # because an explicit empty string is a valid "clear the field" request.
    args_unset = not (args.port
                      or args.request_path
                      or args.check_interval
                      or args.timeout
                      or args.healthy_threshold
                      or args.unhealthy_threshold)
    if args.description is None and args.host is None and args_unset:
      raise exceptions.ToolException('At least one property must be modified.')

    return super(Update, self).Run(args)


Update.detailed_help = {
    'brief': ('Update an HTTP health check'),
    'DESCRIPTION': """\
        *{command}* is used to update an existing HTTP health check. Only
        arguments passed in will be updated on the health check. Other
        attributes will remain unaffected.
        """,
}
bsd-3-clause
ssvs111/Arduino
arduino-core/src/processing/app/i18n/python/requests/status_codes.py
252
3043
# -*- coding: utf-8 -*-
"""Lookup table exposing human-friendly aliases for HTTP status codes.

Each status code maps to a tuple of alias names; every alias (and, for
aliases not starting with a backslash, its upper-case form) becomes an
attribute on the module-level ``codes`` object, e.g. ``codes.ok == 200``.
"""

from .structures import LookupDict

# NOTE(review): 'precondition' appears as an alias for both 412 and 428, so
# which code ``codes.precondition`` resolves to depends on dict iteration
# order -- confirm against upstream requests behavior before relying on it.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('resume_incomplete', 'resume'),

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
}

codes = LookupDict(name='status_codes')

# Expose every alias as an attribute on ``codes``.  The upper-case variant is
# skipped for "art" aliases that start with a backslash (e.g. '\\o/'), which
# are not valid identifier-style names.
for (code, titles) in list(_codes.items()):
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            setattr(codes, title.upper(), code)
lgpl-2.1
realityone/Anti_teNelgniS
src/XLStringEnc.py
2
2371
#!/usr/bin/env python # coding=utf-8 class XLStringEnc(object): """ usage: xl = XLStringEnc() print xl.decrypt('e4b5faf2c04576af98fa3dd354d32da2') print xl.encrypt('xlzjhrprotocol3x') """ base_key = 'f12acd03b45e9678' base_box = ('SDJJLKNASDHFUIAk' 'hfu340985LIASDJF' 'OISDLKJFOIESJFDK' 'SMFMLKdLKASDJFOI' 'DJKKfhisdfisdfks' 'OIAJEFJLKALSDKFj' 'kdhfiehsdKA') def update_basestring(self, base_key, base_box): self.base_key = base_key self.base_box = base_box def decrypt(self, ciphertext): length = len(ciphertext) assert length > 2 plaintext = '' for k in xrange(length / 2): m = k * 2 step = ciphertext[m:m + 2] # for i in xrange(16): # if self.base_key[i] == step[0]: # break # for j in xrange(16): # if self.base_key[j] == step[1]: # break i = self.base_key.find(step[0]) j = self.base_key.find(step[1]) # print step, i, j, m plaintext += chr( ord(self.base_box[(m / 2) % 40]) ^ (i | (16 * (i ^ j)))) return plaintext def encrypt(self, plaintext): length = len(plaintext) assert length > 0 ciphertext = '' for k, c in enumerate(plaintext): temp_num = ord(c) ^ ord(self.base_box[k % 40]) i, j = self._brute_force(temp_num) ciphertext += self.base_key[i] + self.base_key[j] return ciphertext def _brute_force(self, value): for i in xrange(16): for j in xrange(16): if (i | (16 * (i ^ j))) == value: return i, j class SNStringEnc(XLStringEnc): """ usage: sn = SNStringEnc() print sn.decrypt('b413f30110a9') """ base_key = 'f18a9d03c45e267' base_box = ('WYHNIKmkEDCYHNig' 'LcdAUJMFBVNEDCGW' 'SXLYUIEWERTIXCVB' '2NM0E1SDF4QAS4YH' 'N1FGU4SRwKMBaERT' 'nSDRgPOIlMNB2RTY' '0GHU1KIH4FD414') if __name__ == '__main__': xl = XLStringEnc() print xl.decrypt('eb1ab586d3b5f22fe4680c5b98471049')
gpl-2.0
windyuuy/opera
chromium/src/third_party/libvpx/source/libvpx/third_party/googletest/src/scripts/pump.py
603
23316
#!/usr/bin/env python # # Copyright 2008, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """pump v0.2.0 - Pretty Useful for Meta Programming. A tool for preprocessor meta programming. Useful for generating repetitive boilerplate code. Especially useful for writing C++ classes, functions, macros, and templates that need to work with various number of arguments. USAGE: pump.py SOURCE_FILE EXAMPLES: pump.py foo.cc.pump Converts foo.cc.pump to foo.cc. 
GRAMMAR: CODE ::= ATOMIC_CODE* ATOMIC_CODE ::= $var ID = EXPRESSION | $var ID = [[ CODE ]] | $range ID EXPRESSION..EXPRESSION | $for ID SEPARATOR [[ CODE ]] | $($) | $ID | $(EXPRESSION) | $if EXPRESSION [[ CODE ]] ELSE_BRANCH | [[ CODE ]] | RAW_CODE SEPARATOR ::= RAW_CODE | EMPTY ELSE_BRANCH ::= $else [[ CODE ]] | $elif EXPRESSION [[ CODE ]] ELSE_BRANCH | EMPTY EXPRESSION has Python syntax. """ __author__ = '[email protected] (Zhanyong Wan)' import os import re import sys TOKEN_TABLE = [ (re.compile(r'\$var\s+'), '$var'), (re.compile(r'\$elif\s+'), '$elif'), (re.compile(r'\$else\s+'), '$else'), (re.compile(r'\$for\s+'), '$for'), (re.compile(r'\$if\s+'), '$if'), (re.compile(r'\$range\s+'), '$range'), (re.compile(r'\$[_A-Za-z]\w*'), '$id'), (re.compile(r'\$\(\$\)'), '$($)'), (re.compile(r'\$'), '$'), (re.compile(r'\[\[\n?'), '[['), (re.compile(r'\]\]\n?'), ']]'), ] class Cursor: """Represents a position (line and column) in a text file.""" def __init__(self, line=-1, column=-1): self.line = line self.column = column def __eq__(self, rhs): return self.line == rhs.line and self.column == rhs.column def __ne__(self, rhs): return not self == rhs def __lt__(self, rhs): return self.line < rhs.line or ( self.line == rhs.line and self.column < rhs.column) def __le__(self, rhs): return self < rhs or self == rhs def __gt__(self, rhs): return rhs < self def __ge__(self, rhs): return rhs <= self def __str__(self): if self == Eof(): return 'EOF' else: return '%s(%s)' % (self.line + 1, self.column) def __add__(self, offset): return Cursor(self.line, self.column + offset) def __sub__(self, offset): return Cursor(self.line, self.column - offset) def Clone(self): """Returns a copy of self.""" return Cursor(self.line, self.column) # Special cursor to indicate the end-of-file. 
def Eof(): """Returns the special cursor to denote the end-of-file.""" return Cursor(-1, -1) class Token: """Represents a token in a Pump source file.""" def __init__(self, start=None, end=None, value=None, token_type=None): if start is None: self.start = Eof() else: self.start = start if end is None: self.end = Eof() else: self.end = end self.value = value self.token_type = token_type def __str__(self): return 'Token @%s: \'%s\' type=%s' % ( self.start, self.value, self.token_type) def Clone(self): """Returns a copy of self.""" return Token(self.start.Clone(), self.end.Clone(), self.value, self.token_type) def StartsWith(lines, pos, string): """Returns True iff the given position in lines starts with 'string'.""" return lines[pos.line][pos.column:].startswith(string) def FindFirstInLine(line, token_table): best_match_start = -1 for (regex, token_type) in token_table: m = regex.search(line) if m: # We found regex in lines if best_match_start < 0 or m.start() < best_match_start: best_match_start = m.start() best_match_length = m.end() - m.start() best_match_token_type = token_type if best_match_start < 0: return None return (best_match_start, best_match_length, best_match_token_type) def FindFirst(lines, token_table, cursor): """Finds the first occurrence of any string in strings in lines.""" start = cursor.Clone() cur_line_number = cursor.line for line in lines[start.line:]: if cur_line_number == start.line: line = line[start.column:] m = FindFirstInLine(line, token_table) if m: # We found a regex in line. 
(start_column, length, token_type) = m if cur_line_number == start.line: start_column += start.column found_start = Cursor(cur_line_number, start_column) found_end = found_start + length return MakeToken(lines, found_start, found_end, token_type) cur_line_number += 1 # We failed to find str in lines return None def SubString(lines, start, end): """Returns a substring in lines.""" if end == Eof(): end = Cursor(len(lines) - 1, len(lines[-1])) if start >= end: return '' if start.line == end.line: return lines[start.line][start.column:end.column] result_lines = ([lines[start.line][start.column:]] + lines[start.line + 1:end.line] + [lines[end.line][:end.column]]) return ''.join(result_lines) def StripMetaComments(str): """Strip meta comments from each line in the given string.""" # First, completely remove lines containing nothing but a meta # comment, including the trailing \n. str = re.sub(r'^\s*\$\$.*\n', '', str) # Then, remove meta comments from contentful lines. return re.sub(r'\s*\$\$.*', '', str) def MakeToken(lines, start, end, token_type): """Creates a new instance of Token.""" return Token(start, end, SubString(lines, start, end), token_type) def ParseToken(lines, pos, regex, token_type): line = lines[pos.line][pos.column:] m = regex.search(line) if m and not m.start(): return MakeToken(lines, pos, pos + m.end(), token_type) else: print 'ERROR: %s expected at %s.' 
% (token_type, pos) sys.exit(1) ID_REGEX = re.compile(r'[_A-Za-z]\w*') EQ_REGEX = re.compile(r'=') REST_OF_LINE_REGEX = re.compile(r'.*?(?=$|\$\$)') OPTIONAL_WHITE_SPACES_REGEX = re.compile(r'\s*') WHITE_SPACE_REGEX = re.compile(r'\s') DOT_DOT_REGEX = re.compile(r'\.\.') def Skip(lines, pos, regex): line = lines[pos.line][pos.column:] m = re.search(regex, line) if m and not m.start(): return pos + m.end() else: return pos def SkipUntil(lines, pos, regex, token_type): line = lines[pos.line][pos.column:] m = re.search(regex, line) if m: return pos + m.start() else: print ('ERROR: %s expected on line %s after column %s.' % (token_type, pos.line + 1, pos.column)) sys.exit(1) def ParseExpTokenInParens(lines, pos): def ParseInParens(pos): pos = Skip(lines, pos, OPTIONAL_WHITE_SPACES_REGEX) pos = Skip(lines, pos, r'\(') pos = Parse(pos) pos = Skip(lines, pos, r'\)') return pos def Parse(pos): pos = SkipUntil(lines, pos, r'\(|\)', ')') if SubString(lines, pos, pos + 1) == '(': pos = Parse(pos + 1) pos = Skip(lines, pos, r'\)') return Parse(pos) else: return pos start = pos.Clone() pos = ParseInParens(pos) return MakeToken(lines, start, pos, 'exp') def RStripNewLineFromToken(token): if token.value.endswith('\n'): return Token(token.start, token.end, token.value[:-1], token.token_type) else: return token def TokenizeLines(lines, pos): while True: found = FindFirst(lines, TOKEN_TABLE, pos) if not found: yield MakeToken(lines, pos, Eof(), 'code') return if found.start == pos: prev_token = None prev_token_rstripped = None else: prev_token = MakeToken(lines, pos, found.start, 'code') prev_token_rstripped = RStripNewLineFromToken(prev_token) if found.token_type == '$var': if prev_token_rstripped: yield prev_token_rstripped yield found id_token = ParseToken(lines, found.end, ID_REGEX, 'id') yield id_token pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX) eq_token = ParseToken(lines, pos, EQ_REGEX, '=') yield eq_token pos = Skip(lines, eq_token.end, r'\s*') if 
SubString(lines, pos, pos + 2) != '[[': exp_token = ParseToken(lines, pos, REST_OF_LINE_REGEX, 'exp') yield exp_token pos = Cursor(exp_token.end.line + 1, 0) elif found.token_type == '$for': if prev_token_rstripped: yield prev_token_rstripped yield found id_token = ParseToken(lines, found.end, ID_REGEX, 'id') yield id_token pos = Skip(lines, id_token.end, WHITE_SPACE_REGEX) elif found.token_type == '$range': if prev_token_rstripped: yield prev_token_rstripped yield found id_token = ParseToken(lines, found.end, ID_REGEX, 'id') yield id_token pos = Skip(lines, id_token.end, OPTIONAL_WHITE_SPACES_REGEX) dots_pos = SkipUntil(lines, pos, DOT_DOT_REGEX, '..') yield MakeToken(lines, pos, dots_pos, 'exp') yield MakeToken(lines, dots_pos, dots_pos + 2, '..') pos = dots_pos + 2 new_pos = Cursor(pos.line + 1, 0) yield MakeToken(lines, pos, new_pos, 'exp') pos = new_pos elif found.token_type == '$': if prev_token: yield prev_token yield found exp_token = ParseExpTokenInParens(lines, found.end) yield exp_token pos = exp_token.end elif (found.token_type == ']]' or found.token_type == '$if' or found.token_type == '$elif' or found.token_type == '$else'): if prev_token_rstripped: yield prev_token_rstripped yield found pos = found.end else: if prev_token: yield prev_token yield found pos = found.end def Tokenize(s): """A generator that yields the tokens in the given string.""" if s != '': lines = s.splitlines(True) for token in TokenizeLines(lines, Cursor(0, 0)): yield token class CodeNode: def __init__(self, atomic_code_list=None): self.atomic_code = atomic_code_list class VarNode: def __init__(self, identifier=None, atomic_code=None): self.identifier = identifier self.atomic_code = atomic_code class RangeNode: def __init__(self, identifier=None, exp1=None, exp2=None): self.identifier = identifier self.exp1 = exp1 self.exp2 = exp2 class ForNode: def __init__(self, identifier=None, sep=None, code=None): self.identifier = identifier self.sep = sep self.code = code class ElseNode: def 
__init__(self, else_branch=None): self.else_branch = else_branch class IfNode: def __init__(self, exp=None, then_branch=None, else_branch=None): self.exp = exp self.then_branch = then_branch self.else_branch = else_branch class RawCodeNode: def __init__(self, token=None): self.raw_code = token class LiteralDollarNode: def __init__(self, token): self.token = token class ExpNode: def __init__(self, token, python_exp): self.token = token self.python_exp = python_exp def PopFront(a_list): head = a_list[0] a_list[:1] = [] return head def PushFront(a_list, elem): a_list[:0] = [elem] def PopToken(a_list, token_type=None): token = PopFront(a_list) if token_type is not None and token.token_type != token_type: print 'ERROR: %s expected at %s' % (token_type, token.start) print 'ERROR: %s found instead' % (token,) sys.exit(1) return token def PeekToken(a_list): if not a_list: return None return a_list[0] def ParseExpNode(token): python_exp = re.sub(r'([_A-Za-z]\w*)', r'self.GetValue("\1")', token.value) return ExpNode(token, python_exp) def ParseElseNode(tokens): def Pop(token_type=None): return PopToken(tokens, token_type) next = PeekToken(tokens) if not next: return None if next.token_type == '$else': Pop('$else') Pop('[[') code_node = ParseCodeNode(tokens) Pop(']]') return code_node elif next.token_type == '$elif': Pop('$elif') exp = Pop('code') Pop('[[') code_node = ParseCodeNode(tokens) Pop(']]') inner_else_node = ParseElseNode(tokens) return CodeNode([IfNode(ParseExpNode(exp), code_node, inner_else_node)]) elif not next.value.strip(): Pop('code') return ParseElseNode(tokens) else: return None def ParseAtomicCodeNode(tokens): def Pop(token_type=None): return PopToken(tokens, token_type) head = PopFront(tokens) t = head.token_type if t == 'code': return RawCodeNode(head) elif t == '$var': id_token = Pop('id') Pop('=') next = PeekToken(tokens) if next.token_type == 'exp': exp_token = Pop() return VarNode(id_token, ParseExpNode(exp_token)) Pop('[[') code_node = 
ParseCodeNode(tokens) Pop(']]') return VarNode(id_token, code_node) elif t == '$for': id_token = Pop('id') next_token = PeekToken(tokens) if next_token.token_type == 'code': sep_token = next_token Pop('code') else: sep_token = None Pop('[[') code_node = ParseCodeNode(tokens) Pop(']]') return ForNode(id_token, sep_token, code_node) elif t == '$if': exp_token = Pop('code') Pop('[[') code_node = ParseCodeNode(tokens) Pop(']]') else_node = ParseElseNode(tokens) return IfNode(ParseExpNode(exp_token), code_node, else_node) elif t == '$range': id_token = Pop('id') exp1_token = Pop('exp') Pop('..') exp2_token = Pop('exp') return RangeNode(id_token, ParseExpNode(exp1_token), ParseExpNode(exp2_token)) elif t == '$id': return ParseExpNode(Token(head.start + 1, head.end, head.value[1:], 'id')) elif t == '$($)': return LiteralDollarNode(head) elif t == '$': exp_token = Pop('exp') return ParseExpNode(exp_token) elif t == '[[': code_node = ParseCodeNode(tokens) Pop(']]') return code_node else: PushFront(tokens, head) return None def ParseCodeNode(tokens): atomic_code_list = [] while True: if not tokens: break atomic_code_node = ParseAtomicCodeNode(tokens) if atomic_code_node: atomic_code_list.append(atomic_code_node) else: break return CodeNode(atomic_code_list) def ParseToAST(pump_src_text): """Convert the given Pump source text into an AST.""" tokens = list(Tokenize(pump_src_text)) code_node = ParseCodeNode(tokens) return code_node class Env: def __init__(self): self.variables = [] self.ranges = [] def Clone(self): clone = Env() clone.variables = self.variables[:] clone.ranges = self.ranges[:] return clone def PushVariable(self, var, value): # If value looks like an int, store it as an int. 
try: int_value = int(value) if ('%s' % int_value) == value: value = int_value except Exception: pass self.variables[:0] = [(var, value)] def PopVariable(self): self.variables[:1] = [] def PushRange(self, var, lower, upper): self.ranges[:0] = [(var, lower, upper)] def PopRange(self): self.ranges[:1] = [] def GetValue(self, identifier): for (var, value) in self.variables: if identifier == var: return value print 'ERROR: meta variable %s is undefined.' % (identifier,) sys.exit(1) def EvalExp(self, exp): try: result = eval(exp.python_exp) except Exception, e: print 'ERROR: caught exception %s: %s' % (e.__class__.__name__, e) print ('ERROR: failed to evaluate meta expression %s at %s' % (exp.python_exp, exp.token.start)) sys.exit(1) return result def GetRange(self, identifier): for (var, lower, upper) in self.ranges: if identifier == var: return (lower, upper) print 'ERROR: range %s is undefined.' % (identifier,) sys.exit(1) class Output: def __init__(self): self.string = '' def GetLastLine(self): index = self.string.rfind('\n') if index < 0: return '' return self.string[index + 1:] def Append(self, s): self.string += s def RunAtomicCode(env, node, output): if isinstance(node, VarNode): identifier = node.identifier.value.strip() result = Output() RunAtomicCode(env.Clone(), node.atomic_code, result) value = result.string env.PushVariable(identifier, value) elif isinstance(node, RangeNode): identifier = node.identifier.value.strip() lower = int(env.EvalExp(node.exp1)) upper = int(env.EvalExp(node.exp2)) env.PushRange(identifier, lower, upper) elif isinstance(node, ForNode): identifier = node.identifier.value.strip() if node.sep is None: sep = '' else: sep = node.sep.value (lower, upper) = env.GetRange(identifier) for i in range(lower, upper + 1): new_env = env.Clone() new_env.PushVariable(identifier, i) RunCode(new_env, node.code, output) if i != upper: output.Append(sep) elif isinstance(node, RawCodeNode): output.Append(node.raw_code.value) elif isinstance(node, IfNode): 
cond = env.EvalExp(node.exp) if cond: RunCode(env.Clone(), node.then_branch, output) elif node.else_branch is not None: RunCode(env.Clone(), node.else_branch, output) elif isinstance(node, ExpNode): value = env.EvalExp(node) output.Append('%s' % (value,)) elif isinstance(node, LiteralDollarNode): output.Append('$') elif isinstance(node, CodeNode): RunCode(env.Clone(), node, output) else: print 'BAD' print node sys.exit(1) def RunCode(env, code_node, output): for atomic_code in code_node.atomic_code: RunAtomicCode(env, atomic_code, output) def IsComment(cur_line): return '//' in cur_line def IsInPreprocessorDirevative(prev_lines, cur_line): if cur_line.lstrip().startswith('#'): return True return prev_lines != [] and prev_lines[-1].endswith('\\') def WrapComment(line, output): loc = line.find('//') before_comment = line[:loc].rstrip() if before_comment == '': indent = loc else: output.append(before_comment) indent = len(before_comment) - len(before_comment.lstrip()) prefix = indent*' ' + '// ' max_len = 80 - len(prefix) comment = line[loc + 2:].strip() segs = [seg for seg in re.split(r'(\w+\W*)', comment) if seg != ''] cur_line = '' for seg in segs: if len((cur_line + seg).rstrip()) < max_len: cur_line += seg else: if cur_line.strip() != '': output.append(prefix + cur_line.rstrip()) cur_line = seg.lstrip() if cur_line.strip() != '': output.append(prefix + cur_line.strip()) def WrapCode(line, line_concat, output): indent = len(line) - len(line.lstrip()) prefix = indent*' ' # Prefix of the current line max_len = 80 - indent - len(line_concat) # Maximum length of the current line new_prefix = prefix + 4*' ' # Prefix of a continuation line new_max_len = max_len - 4 # Maximum length of a continuation line # Prefers to wrap a line after a ',' or ';'. segs = [seg for seg in re.split(r'([^,;]+[,;]?)', line.strip()) if seg != ''] cur_line = '' # The current line without leading spaces. for seg in segs: # If the line is still too long, wrap at a space. 
while cur_line == '' and len(seg.strip()) > max_len: seg = seg.lstrip() split_at = seg.rfind(' ', 0, max_len) output.append(prefix + seg[:split_at].strip() + line_concat) seg = seg[split_at + 1:] prefix = new_prefix max_len = new_max_len if len((cur_line + seg).rstrip()) < max_len: cur_line = (cur_line + seg).lstrip() else: output.append(prefix + cur_line.rstrip() + line_concat) prefix = new_prefix max_len = new_max_len cur_line = seg.lstrip() if cur_line.strip() != '': output.append(prefix + cur_line.strip()) def WrapPreprocessorDirevative(line, output): WrapCode(line, ' \\', output) def WrapPlainCode(line, output): WrapCode(line, '', output) def IsHeaderGuardOrInclude(line): return (re.match(r'^#(ifndef|define|endif\s*//)\s*[\w_]+\s*$', line) or re.match(r'^#include\s', line)) def WrapLongLine(line, output): line = line.rstrip() if len(line) <= 80: output.append(line) elif IsComment(line): if IsHeaderGuardOrInclude(line): # The style guide made an exception to allow long header guard lines # and includes. output.append(line) else: WrapComment(line, output) elif IsInPreprocessorDirevative(output, line): if IsHeaderGuardOrInclude(line): # The style guide made an exception to allow long header guard lines # and includes. 
output.append(line) else: WrapPreprocessorDirevative(line, output) else: WrapPlainCode(line, output) def BeautifyCode(string): lines = string.splitlines() output = [] for line in lines: WrapLongLine(line, output) output2 = [line.rstrip() for line in output] return '\n'.join(output2) + '\n' def ConvertFromPumpSource(src_text): """Return the text generated from the given Pump source text.""" ast = ParseToAST(StripMetaComments(src_text)) output = Output() RunCode(Env(), ast, output) return BeautifyCode(output.string) def main(argv): if len(argv) == 1: print __doc__ sys.exit(1) file_path = argv[-1] output_str = ConvertFromPumpSource(file(file_path, 'r').read()) if file_path.endswith('.pump'): output_file_path = file_path[:-5] else: output_file_path = '-' if output_file_path == '-': print output_str, else: output_file = file(output_file_path, 'w') output_file.write('// This file was GENERATED by command:\n') output_file.write('// %s %s\n' % (os.path.basename(__file__), os.path.basename(file_path))) output_file.write('// DO NOT EDIT BY HAND!!!\n\n') output_file.write(output_str) output_file.close() if __name__ == '__main__': main(sys.argv)
bsd-3-clause
sarvex/tensorflow
tensorflow/python/training/supervisor_test.py
14
36132
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for supervisor.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import glob import os import shutil import time import uuid from six.moves import xrange # pylint: disable=redefined-builtin from tensorflow.core.framework import graph_pb2 from tensorflow.core.protobuf import config_pb2 from tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.core.util import event_pb2 from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors_impl from tensorflow.python.framework import meta_graph from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import io_ops from tensorflow.python.ops import parsing_ops from tensorflow.python.ops import variables from tensorflow.python.platform import gfile from tensorflow.python.platform import test from tensorflow.python.summary import summary from tensorflow.python.summary import summary_iterator from tensorflow.python.summary.writer import writer from tensorflow.python.training import checkpoint_management from tensorflow.python.training import input as input_lib from 
tensorflow.python.training import saver as saver_lib from tensorflow.python.training import server_lib from tensorflow.python.training import session_manager as session_manager_lib from tensorflow.python.training import supervisor def _summary_iterator(test_dir): """Reads events from test_dir/events. Args: test_dir: Name of the test directory. Returns: A summary_iterator """ event_paths = sorted(glob.glob(os.path.join(test_dir, "event*"))) return summary_iterator.summary_iterator(event_paths[-1]) class SupervisorTest(test.TestCase): def _test_dir(self, test_name): test_dir = os.path.join(self.get_temp_dir(), test_name) if os.path.exists(test_dir): shutil.rmtree(test_dir) return test_dir def _wait_for_glob(self, pattern, timeout_secs, for_checkpoint=True): """Wait for a checkpoint file to appear. Args: pattern: A string. timeout_secs: How long to wait for in seconds. for_checkpoint: whether we're globbing for checkpoints. """ end_time = time.time() + timeout_secs while time.time() < end_time: if for_checkpoint: if checkpoint_management.checkpoint_exists(pattern): return else: if len(gfile.Glob(pattern)) >= 1: return time.sleep(0.05) self.assertFalse(True, "Glob never matched any file: %s" % pattern) # This test does not test much. def testBasics(self): logdir = self._test_dir("basics") with ops.Graph().as_default(): my_op = constant_op.constant(1.0) sv = supervisor.Supervisor(logdir=logdir) sess = sv.prepare_or_wait_for_session("") for _ in xrange(10): self.evaluate(my_op) sess.close() sv.stop() def testManagedSession(self): logdir = self._test_dir("managed_session") with ops.Graph().as_default(): my_op = constant_op.constant(1.0) sv = supervisor.Supervisor(logdir=logdir) with sv.managed_session(""): for _ in xrange(10): self.evaluate(my_op) # Supervisor has been stopped. 
self.assertTrue(sv.should_stop()) def testManagedSessionUserError(self): logdir = self._test_dir("managed_user_error") with ops.Graph().as_default(): my_op = constant_op.constant(1.0) sv = supervisor.Supervisor(logdir=logdir) last_step = None with self.assertRaisesRegex(RuntimeError, "failing here"): with sv.managed_session("") as sess: for step in xrange(10): last_step = step if step == 1: raise RuntimeError("failing here") else: self.evaluate(my_op) # Supervisor has been stopped. self.assertTrue(sv.should_stop()) self.assertEqual(1, last_step) def testManagedSessionIgnoreOutOfRangeError(self): logdir = self._test_dir("managed_out_of_range") with ops.Graph().as_default(): my_op = constant_op.constant(1.0) sv = supervisor.Supervisor(logdir=logdir) last_step = None with sv.managed_session("") as sess: for step in xrange(10): last_step = step if step == 3: raise errors_impl.OutOfRangeError(my_op.op.node_def, my_op.op, "all done") else: self.evaluate(my_op) # Supervisor has been stopped. OutOfRangeError was not thrown. self.assertTrue(sv.should_stop()) self.assertEqual(3, last_step) def testManagedSessionDoNotKeepSummaryWriter(self): logdir = self._test_dir("managed_not_keep_summary_writer") with ops.Graph().as_default(): summary.scalar("c1", constant_op.constant(1)) summary.scalar("c2", constant_op.constant(2)) summary.scalar("c3", constant_op.constant(3)) summ = summary.merge_all() sv = supervisor.Supervisor(logdir=logdir, summary_op=None) with sv.managed_session( "", close_summary_writer=True, start_standard_services=False) as sess: sv.summary_computed(sess, sess.run(summ)) # Sleep 1.2s to make sure that the next event file has a different name # than the current one. 
time.sleep(1.2) with sv.managed_session( "", close_summary_writer=True, start_standard_services=False) as sess: sv.summary_computed(sess, sess.run(summ)) event_paths = sorted(glob.glob(os.path.join(logdir, "event*"))) self.assertEqual(2, len(event_paths)) # The two event files should have the same contents. for path in event_paths: # The summary iterator should report the summary once as we closed the # summary writer across the 2 sessions. rr = summary_iterator.summary_iterator(path) # The first event should list the file_version. ev = next(rr) self.assertEqual("brain.Event:2", ev.file_version) # The next one has the graph and metagraph. ev = next(rr) self.assertTrue(ev.graph_def) ev = next(rr) self.assertTrue(ev.meta_graph_def) # The next one should have the values from the summary. # But only once. ev = next(rr) self.assertProtoEquals(""" value { tag: 'c1' simple_value: 1.0 } value { tag: 'c2' simple_value: 2.0 } value { tag: 'c3' simple_value: 3.0 } """, ev.summary) # The next one should be a stop message if we closed cleanly. ev = next(rr) self.assertEqual(event_pb2.SessionLog.STOP, ev.session_log.status) # We should be done. with self.assertRaises(StopIteration): next(rr) def testManagedSessionKeepSummaryWriter(self): logdir = self._test_dir("managed_keep_summary_writer") with ops.Graph().as_default(): summary.scalar("c1", constant_op.constant(1)) summary.scalar("c2", constant_op.constant(2)) summary.scalar("c3", constant_op.constant(3)) summ = summary.merge_all() sv = supervisor.Supervisor(logdir=logdir) with sv.managed_session( "", close_summary_writer=False, start_standard_services=False) as sess: sv.summary_computed(sess, sess.run(summ)) with sv.managed_session( "", close_summary_writer=False, start_standard_services=False) as sess: sv.summary_computed(sess, sess.run(summ)) # Now close the summary writer to flush the events. 
sv.summary_writer.close() # The summary iterator should report the summary twice as we reused # the same summary writer across the 2 sessions. rr = _summary_iterator(logdir) # The first event should list the file_version. ev = next(rr) self.assertEqual("brain.Event:2", ev.file_version) # The next one has the graph. ev = next(rr) self.assertTrue(ev.graph_def) ev = next(rr) self.assertTrue(ev.meta_graph_def) # The next one should have the values from the summary. ev = next(rr) self.assertProtoEquals(""" value { tag: 'c1' simple_value: 1.0 } value { tag: 'c2' simple_value: 2.0 } value { tag: 'c3' simple_value: 3.0 } """, ev.summary) # The next one should also have the values from the summary. ev = next(rr) self.assertProtoEquals(""" value { tag: 'c1' simple_value: 1.0 } value { tag: 'c2' simple_value: 2.0 } value { tag: 'c3' simple_value: 3.0 } """, ev.summary) # We should be done. self.assertRaises(StopIteration, lambda: next(rr)) def _csv_data(self, logdir): # Create a small data file with 3 CSV records. data_path = os.path.join(logdir, "data.csv") with open(data_path, "w") as f: f.write("1,2,3\n") f.write("4,5,6\n") f.write("7,8,9\n") return data_path def testManagedEndOfInputOneQueue(self): # Tests that the supervisor finishes without an error when using # a fixed number of epochs, reading from a single queue. logdir = self._test_dir("managed_end_of_input_one_queue") os.makedirs(logdir) data_path = self._csv_data(logdir) with ops.Graph().as_default(): # Create an input pipeline that reads the file 3 times. 
filename_queue = input_lib.string_input_producer( [data_path], num_epochs=3) reader = io_ops.TextLineReader() _, csv = reader.read(filename_queue) rec = parsing_ops.decode_csv(csv, record_defaults=[[1], [1], [1]]) sv = supervisor.Supervisor(logdir=logdir) with sv.managed_session("") as sess: while not sv.should_stop(): sess.run(rec) def testManagedEndOfInputTwoQueues(self): # Tests that the supervisor finishes without an error when using # a fixed number of epochs, reading from two queues, the second # one producing a batch from the first one. logdir = self._test_dir("managed_end_of_input_two_queues") os.makedirs(logdir) data_path = self._csv_data(logdir) with ops.Graph().as_default(): # Create an input pipeline that reads the file 3 times. filename_queue = input_lib.string_input_producer( [data_path], num_epochs=3) reader = io_ops.TextLineReader() _, csv = reader.read(filename_queue) rec = parsing_ops.decode_csv(csv, record_defaults=[[1], [1], [1]]) shuff_rec = input_lib.shuffle_batch(rec, 1, 6, 4) sv = supervisor.Supervisor(logdir=logdir) with sv.managed_session("") as sess: while not sv.should_stop(): sess.run(shuff_rec) def testManagedMainErrorTwoQueues(self): # Tests that the supervisor correctly raises a main loop # error even when using multiple queues for input. logdir = self._test_dir("managed_main_error_two_queues") os.makedirs(logdir) data_path = self._csv_data(logdir) with self.assertRaisesRegex(RuntimeError, "fail at step 3"): with ops.Graph().as_default(): # Create an input pipeline that reads the file 3 times. 
filename_queue = input_lib.string_input_producer( [data_path], num_epochs=3) reader = io_ops.TextLineReader() _, csv = reader.read(filename_queue) rec = parsing_ops.decode_csv(csv, record_defaults=[[1], [1], [1]]) shuff_rec = input_lib.shuffle_batch(rec, 1, 6, 4) sv = supervisor.Supervisor(logdir=logdir) with sv.managed_session("") as sess: for step in range(9): if sv.should_stop(): break elif step == 3: raise RuntimeError("fail at step 3") else: sess.run(shuff_rec) def testSessionConfig(self): logdir = self._test_dir("session_config") with ops.Graph().as_default(): with ops.device("/cpu:1"): my_op = constant_op.constant([1.0]) sv = supervisor.Supervisor(logdir=logdir) sess = sv.prepare_or_wait_for_session( "", config=config_pb2.ConfigProto(device_count={"CPU": 2})) for _ in xrange(10): self.evaluate(my_op) sess.close() sv.stop() def testChiefCanWriteEvents(self): logdir = self._test_dir("can_write") with ops.Graph().as_default(): summary.scalar("c1", constant_op.constant(1)) summary.scalar("c2", constant_op.constant(2)) summary.scalar("c3", constant_op.constant(3)) summ = summary.merge_all() sv = supervisor.Supervisor(is_chief=True, logdir=logdir, summary_op=None) meta_graph_def = meta_graph.create_meta_graph_def() sess = sv.prepare_or_wait_for_session("") sv.summary_computed(sess, sess.run(summ)) sess.close() # Wait to make sure everything is written to file before stopping. time.sleep(1) sv.stop() rr = _summary_iterator(logdir) # The first event should list the file_version. ev = next(rr) self.assertEqual("brain.Event:2", ev.file_version) # The next one has the graph. 
ev = next(rr) ev_graph = graph_pb2.GraphDef() ev_graph.ParseFromString(ev.graph_def) self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph) # Stored MetaGraphDef ev = next(rr) ev_meta_graph = meta_graph_pb2.MetaGraphDef() ev_meta_graph.ParseFromString(ev.meta_graph_def) self.assertProtoEquals(meta_graph_def, ev_meta_graph) self.assertProtoEquals( sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def) # The next one should have the values from the summary. ev = next(rr) self.assertProtoEquals(""" value { tag: 'c1' simple_value: 1.0 } value { tag: 'c2' simple_value: 2.0 } value { tag: 'c3' simple_value: 3.0 } """, ev.summary) # The next one should be a stop message if we closed cleanly. ev = next(rr) self.assertEqual(event_pb2.SessionLog.STOP, ev.session_log.status) # We should be done. self.assertRaises(StopIteration, lambda: next(rr)) def testNonChiefCannotWriteEvents(self): def _summary_computed(): with ops.Graph().as_default(): sv = supervisor.Supervisor(is_chief=False) sess = sv.prepare_or_wait_for_session("") summary.scalar("c1", constant_op.constant(1)) summary.scalar("c2", constant_op.constant(2)) summ = summary.merge_all() sv.summary_computed(sess, sess.run(summ)) def _start_standard_services(): with ops.Graph().as_default(): sv = supervisor.Supervisor(is_chief=False) sess = sv.prepare_or_wait_for_session("") sv.start_standard_services(sess) self.assertRaises(RuntimeError, _summary_computed) self.assertRaises(RuntimeError, _start_standard_services) def testNoLogdirButWantSummary(self): with ops.Graph().as_default(): summary.scalar("c1", constant_op.constant(1)) summary.scalar("c2", constant_op.constant(2)) summary.scalar("c3", constant_op.constant(3)) summ = summary.merge_all() sv = supervisor.Supervisor(logdir="", summary_op=None) sess = sv.prepare_or_wait_for_session("") with self.assertRaisesRegex(RuntimeError, "requires a summary writer"): sv.summary_computed(sess, sess.run(summ)) @test_util.run_v1_only("train.Supervisor 
is for v1 only") def testLogdirButExplicitlyNoSummaryWriter(self): logdir = self._test_dir("explicit_no_summary_writer") with ops.Graph().as_default(): variables.VariableV1([1.0], name="foo") summary.scalar("c1", constant_op.constant(1)) summary.scalar("c2", constant_op.constant(2)) summary.scalar("c3", constant_op.constant(3)) summ = summary.merge_all() sv = supervisor.Supervisor(logdir=logdir, summary_writer=None) sess = sv.prepare_or_wait_for_session("") # Check that a checkpoint is still be generated. self._wait_for_glob(sv.save_path, 3.0) # Check that we cannot write a summary with self.assertRaisesRegex(RuntimeError, "requires a summary writer"): sv.summary_computed(sess, sess.run(summ)) def testNoLogdirButExplicitSummaryWriter(self): logdir = self._test_dir("explicit_summary_writer") with ops.Graph().as_default(): summary.scalar("c1", constant_op.constant(1)) summary.scalar("c2", constant_op.constant(2)) summary.scalar("c3", constant_op.constant(3)) summ = summary.merge_all() sw = writer.FileWriter(logdir) sv = supervisor.Supervisor(logdir="", summary_op=None, summary_writer=sw) meta_graph_def = meta_graph.create_meta_graph_def() sess = sv.prepare_or_wait_for_session("") sv.summary_computed(sess, sess.run(summ)) sess.close() # Wait to make sure everything is written to file before stopping. time.sleep(1) sv.stop() # Check the summary was written to 'logdir' rr = _summary_iterator(logdir) # The first event should list the file_version. ev = next(rr) self.assertEqual("brain.Event:2", ev.file_version) # The next one has the graph. 
ev = next(rr) ev_graph = graph_pb2.GraphDef() ev_graph.ParseFromString(ev.graph_def) self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph) # Stored MetaGraphDef ev = next(rr) ev_meta_graph = meta_graph_pb2.MetaGraphDef() ev_meta_graph.ParseFromString(ev.meta_graph_def) self.assertProtoEquals(meta_graph_def, ev_meta_graph) self.assertProtoEquals( sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def) # The next one should have the values from the summary. ev = next(rr) self.assertProtoEquals(""" value { tag: 'c1' simple_value: 1.0 } value { tag: 'c2' simple_value: 2.0 } value { tag: 'c3' simple_value: 3.0 } """, ev.summary) # The next one should be a stop message if we closed cleanly. ev = next(rr) self.assertEqual(event_pb2.SessionLog.STOP, ev.session_log.status) # We should be done. self.assertRaises(StopIteration, lambda: next(rr)) def testNoLogdirSucceeds(self): with ops.Graph().as_default(): variables.VariableV1([1.0, 2.0, 3.0]) sv = supervisor.Supervisor(logdir="", summary_op=None) sess = sv.prepare_or_wait_for_session("") sess.close() sv.stop() def testUseSessionManager(self): with ops.Graph().as_default(): variables.VariableV1([1.0, 2.0, 3.0]) sm = session_manager_lib.SessionManager() # Pass in session_manager. The additional init_op is ignored. 
sv = supervisor.Supervisor(logdir="", session_manager=sm) sv.prepare_or_wait_for_session("") @test_util.run_v1_only("train.Supervisor is for v1 only") def testInitOp(self): logdir = self._test_dir("default_init_op") with ops.Graph().as_default(): v = variables.VariableV1([1.0, 2.0, 3.0]) sv = supervisor.Supervisor(logdir=logdir) sess = sv.prepare_or_wait_for_session("") self.assertAllClose([1.0, 2.0, 3.0], sess.run(v)) sv.stop() @test_util.run_v1_only("train.Supervisor is for v1 only") def testInitFn(self): logdir = self._test_dir("default_init_op") with ops.Graph().as_default(): v = variables.VariableV1([1.0, 2.0, 3.0]) def _init_fn(sess): sess.run(v.initializer) sv = supervisor.Supervisor(logdir=logdir, init_op=None, init_fn=_init_fn) sess = sv.prepare_or_wait_for_session("") self.assertAllClose([1.0, 2.0, 3.0], sess.run(v)) sv.stop() @test_util.run_v1_only("train.Supervisor is for v1 only") def testInitOpWithFeedDict(self): logdir = self._test_dir("feed_dict_init_op") with ops.Graph().as_default(): p = array_ops.placeholder(dtypes.float32, shape=(3,)) v = variables.VariableV1(p, name="v") sv = supervisor.Supervisor( logdir=logdir, init_op=variables.global_variables_initializer(), init_feed_dict={p: [1.0, 2.0, 3.0]}) sess = sv.prepare_or_wait_for_session("") self.assertAllClose([1.0, 2.0, 3.0], sess.run(v)) sv.stop() @test_util.run_v1_only("train.Supervisor is for v1 only") def testReadyForLocalInitOp(self): server = server_lib.Server.create_local_server() logdir = self._test_dir("default_ready_for_local_init_op") uid = uuid.uuid4().hex def get_session(is_chief): g = ops.Graph() with g.as_default(): with ops.device("/job:localhost"): v = variables.VariableV1( 1, name="default_ready_for_local_init_op_v_" + str(uid)) vadd = v.assign_add(1) w = variables.VariableV1( v, trainable=False, collections=[ops.GraphKeys.LOCAL_VARIABLES], name="default_ready_for_local_init_op_w_" + str(uid)) ready_for_local_init_op = variables.report_uninitialized_variables( 
variables.global_variables()) sv = supervisor.Supervisor( logdir=logdir, is_chief=is_chief, graph=g, recovery_wait_secs=1, init_op=v.initializer, ready_for_local_init_op=ready_for_local_init_op) sess = sv.prepare_or_wait_for_session(server.target) return sv, sess, v, vadd, w sv0, sess0, v0, _, w0 = get_session(True) sv1, sess1, _, vadd1, w1 = get_session(False) self.assertEqual(1, sess0.run(w0)) self.assertEqual(2, sess1.run(vadd1)) self.assertEqual(1, sess1.run(w1)) self.assertEqual(2, sess0.run(v0)) sv0.stop() sv1.stop() @test_util.run_v1_only("train.Supervisor is for v1 only") def testReadyForLocalInitOpRestoreFromCheckpoint(self): server = server_lib.Server.create_local_server() logdir = self._test_dir("ready_for_local_init_op_restore") uid = uuid.uuid4().hex # Create a checkpoint. with ops.Graph().as_default(): v = variables.VariableV1( 10.0, name="ready_for_local_init_op_restore_v_" + str(uid)) summary.scalar("ready_for_local_init_op_restore_v_" + str(uid), v) sv = supervisor.Supervisor(logdir=logdir) sv.prepare_or_wait_for_session(server.target) save_path = sv.save_path self._wait_for_glob(save_path, 3.0) self._wait_for_glob( os.path.join(logdir, "*events*"), 3.0, for_checkpoint=False) # Wait to make sure everything is written to file before stopping. 
time.sleep(1) sv.stop() def get_session(is_chief): g = ops.Graph() with g.as_default(): with ops.device("/job:localhost"): v = variables.VariableV1( 1.0, name="ready_for_local_init_op_restore_v_" + str(uid)) vadd = v.assign_add(1) w = variables.VariableV1( v, trainable=False, collections=[ops.GraphKeys.LOCAL_VARIABLES], name="ready_for_local_init_op_restore_w_" + str(uid)) ready_for_local_init_op = variables.report_uninitialized_variables( variables.global_variables()) sv = supervisor.Supervisor( logdir=logdir, is_chief=is_chief, graph=g, recovery_wait_secs=1, ready_for_local_init_op=ready_for_local_init_op) sess = sv.prepare_or_wait_for_session(server.target) return sv, sess, v, vadd, w sv0, sess0, v0, _, w0 = get_session(True) sv1, sess1, _, vadd1, w1 = get_session(False) self.assertEqual(10, sess0.run(w0)) self.assertEqual(11, sess1.run(vadd1)) self.assertEqual(10, sess1.run(w1)) self.assertEqual(11, sess0.run(v0)) sv0.stop() sv1.stop() def testLocalInitOp(self): logdir = self._test_dir("default_local_init_op") with ops.Graph().as_default(): # A local variable. v = variables.VariableV1( [1.0, 2.0, 3.0], trainable=False, collections=[ops.GraphKeys.LOCAL_VARIABLES]) # An entity which is initialized through a TABLE_INITIALIZER. w = variables.VariableV1([4, 5, 6], trainable=False, collections=[]) ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, w.initializer) # This shouldn't add a variable to the VARIABLES collection responsible # for variables that are saved/restored from checkpoints. self.assertEqual(len(variables.global_variables()), 0) # Suppress normal variable inits to make sure the local one is # initialized via local_init_op. 
sv = supervisor.Supervisor(logdir=logdir, init_op=None) sess = sv.prepare_or_wait_for_session("") self.assertAllClose([1.0, 2.0, 3.0], sess.run(v)) self.assertAllClose([4, 5, 6], sess.run(w)) sv.stop() def testLocalInitOpForNonChief(self): logdir = self._test_dir("default_local_init_op_non_chief") with ops.Graph().as_default(): with ops.device("/job:localhost"): # A local variable. v = variables.VariableV1( [1.0, 2.0, 3.0], trainable=False, collections=[ops.GraphKeys.LOCAL_VARIABLES]) # This shouldn't add a variable to the VARIABLES collection responsible # for variables that are saved/restored from checkpoints. self.assertEqual(len(variables.global_variables()), 0) # Suppress normal variable inits to make sure the local one is # initialized via local_init_op. sv = supervisor.Supervisor(logdir=logdir, init_op=None, is_chief=False) sess = sv.prepare_or_wait_for_session("") self.assertAllClose([1.0, 2.0, 3.0], sess.run(v)) sv.stop() def testInitOpFails(self): server = server_lib.Server.create_local_server() logdir = self._test_dir("default_init_op_fails") with ops.Graph().as_default(): v = variables.VariableV1([1.0, 2.0, 3.0], name="v") variables.VariableV1([4.0, 5.0, 6.0], name="w") # w will not be initialized. sv = supervisor.Supervisor(logdir=logdir, init_op=v.initializer) with self.assertRaisesRegex(RuntimeError, "Variables not initialized: w"): sv.prepare_or_wait_for_session(server.target) def testInitOpFailsForTransientVariable(self): server = server_lib.Server.create_local_server() logdir = self._test_dir("default_init_op_fails_for_local_variable") with ops.Graph().as_default(): v = variables.VariableV1( [1.0, 2.0, 3.0], name="v", collections=[ops.GraphKeys.LOCAL_VARIABLES]) variables.VariableV1( [1.0, 2.0, 3.0], name="w", collections=[ops.GraphKeys.LOCAL_VARIABLES]) # w will not be initialized. 
sv = supervisor.Supervisor(logdir=logdir, local_init_op=v.initializer) with self.assertRaisesRegex(RuntimeError, "Variables not initialized: w"): sv.prepare_or_wait_for_session(server.target) @test_util.run_v1_only("train.Supervisor is for v1 only") def testSetupFail(self): logdir = self._test_dir("setup_fail") with ops.Graph().as_default(): variables.VariableV1([1.0, 2.0, 3.0], name="v") with self.assertRaisesRegex(ValueError, "must have their device set"): supervisor.Supervisor(logdir=logdir, is_chief=False) with ops.Graph().as_default(), ops.device("/job:ps"): variables.VariableV1([1.0, 2.0, 3.0], name="v") supervisor.Supervisor(logdir=logdir, is_chief=False) @test_util.run_v1_only("train.Supervisor is for v1 only") def testDefaultGlobalStep(self): logdir = self._test_dir("default_global_step") with ops.Graph().as_default(): variables.VariableV1(287, name="global_step") sv = supervisor.Supervisor(logdir=logdir) sess = sv.prepare_or_wait_for_session("") self.assertEqual(287, sess.run(sv.global_step)) sv.stop() @test_util.run_v1_only("train.Supervisor is for v1 only") def testRestoreFromMetaGraph(self): logdir = self._test_dir("restore_from_meta_graph") with ops.Graph().as_default(): variables.VariableV1(1, name="v0") sv = supervisor.Supervisor(logdir=logdir) sess = sv.prepare_or_wait_for_session("") filename = sv.saver.save(sess, sv.save_path) sv.stop() # Create a new Graph and Supervisor and recover. with ops.Graph().as_default(): new_saver = saver_lib.import_meta_graph(".".join([filename, "meta"])) self.assertIsNotNone(new_saver) sv2 = supervisor.Supervisor(logdir=logdir, saver=new_saver) sess = sv2.prepare_or_wait_for_session("") self.assertEqual(1, sess.run("v0:0")) sv2.saver.save(sess, sv2.save_path) sv2.stop() # This test is based on the fact that the standard services start # right away and get to run once before sv.stop() returns. # We still sleep a bit to make the test robust. 
@test_util.run_v1_only("train.Supervisor is for v1 only") def testStandardServicesWithoutGlobalStep(self): logdir = self._test_dir("standard_services_without_global_step") # Create a checkpoint. with ops.Graph().as_default(): v = variables.VariableV1([1.0], name="foo") summary.scalar("v", v[0]) sv = supervisor.Supervisor(logdir=logdir) meta_graph_def = meta_graph.create_meta_graph_def( saver_def=sv.saver.saver_def) sess = sv.prepare_or_wait_for_session("") save_path = sv.save_path self._wait_for_glob(save_path, 3.0) self._wait_for_glob( os.path.join(logdir, "*events*"), 3.0, for_checkpoint=False) # Wait to make sure everything is written to file before stopping. time.sleep(1) sv.stop() # There should be an event file with a version number. rr = _summary_iterator(logdir) ev = next(rr) self.assertEqual("brain.Event:2", ev.file_version) ev = next(rr) ev_graph = graph_pb2.GraphDef() ev_graph.ParseFromString(ev.graph_def) self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph) # Stored MetaGraphDef ev = next(rr) ev_meta_graph = meta_graph_pb2.MetaGraphDef() ev_meta_graph.ParseFromString(ev.meta_graph_def) self.assertProtoEquals(meta_graph_def, ev_meta_graph) self.assertProtoEquals( sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def) ev = next(rr) self.assertProtoEquals("value { tag: 'v' simple_value: 1.0 }", ev.summary) ev = next(rr) self.assertEqual(event_pb2.SessionLog.STOP, ev.session_log.status) self.assertRaises(StopIteration, lambda: next(rr)) # There should be a checkpoint file with the variable "foo" with ops.Graph().as_default(), self.cached_session() as sess: v = variables.VariableV1([10.10], name="foo") sav = saver_lib.Saver([v]) sav.restore(sess, save_path) self.assertEqual(1.0, self.evaluate(v)[0]) # Same as testStandardServicesNoGlobalStep but with a global step. # We should get a summary about the step time. 
@test_util.run_v1_only("train.Supervisor is for v1 only") def testStandardServicesWithGlobalStep(self): logdir = self._test_dir("standard_services_with_global_step") # Create a checkpoint. with ops.Graph().as_default(): v = variables.VariableV1([123], name="global_step") sv = supervisor.Supervisor(logdir=logdir) meta_graph_def = meta_graph.create_meta_graph_def( saver_def=sv.saver.saver_def) sess = sv.prepare_or_wait_for_session("") # This is where the checkpoint will appear, with step number 123. save_path = "%s-123" % sv.save_path self._wait_for_glob(save_path, 3.0) self._wait_for_glob( os.path.join(logdir, "*events*"), 3.0, for_checkpoint=False) # Wait to make sure everything is written to file before stopping. time.sleep(1) sv.stop() # There should be an event file with a version number. rr = _summary_iterator(logdir) ev = next(rr) self.assertEqual("brain.Event:2", ev.file_version) ev = next(rr) ev_graph = graph_pb2.GraphDef() ev_graph.ParseFromString(ev.graph_def) self.assertProtoEquals(sess.graph.as_graph_def(add_shapes=True), ev_graph) ev = next(rr) ev_meta_graph = meta_graph_pb2.MetaGraphDef() ev_meta_graph.ParseFromString(ev.meta_graph_def) self.assertProtoEquals(meta_graph_def, ev_meta_graph) self.assertProtoEquals( sess.graph.as_graph_def(add_shapes=True), ev_meta_graph.graph_def) ev = next(rr) # It is actually undeterministic whether SessionLog.START gets written # before the summary or the checkpoint, but this works when run 10000 times. self.assertEqual(123, ev.step) self.assertEqual(event_pb2.SessionLog.START, ev.session_log.status) first = next(rr) second = next(rr) # It is undeterministic whether the value gets written before the checkpoint # since they are on separate threads, so we check for both conditions. 
if first.HasField("summary"): self.assertProtoEquals("""value { tag: 'global_step/sec' simple_value: 0.0 }""", first.summary) self.assertEqual(123, second.step) self.assertEqual(event_pb2.SessionLog.CHECKPOINT, second.session_log.status) else: self.assertEqual(123, first.step) self.assertEqual(event_pb2.SessionLog.CHECKPOINT, first.session_log.status) self.assertProtoEquals("""value { tag: 'global_step/sec' simple_value: 0.0 }""", second.summary) ev = next(rr) self.assertEqual(event_pb2.SessionLog.STOP, ev.session_log.status) self.assertRaises(StopIteration, lambda: next(rr)) # There should be a checkpoint file with the variable "foo" with ops.Graph().as_default(), self.cached_session() as sess: v = variables.VariableV1([-12], name="global_step") sav = saver_lib.Saver([v]) sav.restore(sess, save_path) self.assertEqual(123, self.evaluate(v)[0]) def testNoQueueRunners(self): with ops.Graph().as_default(), self.cached_session() as sess: sv = supervisor.Supervisor(logdir=self._test_dir("no_queue_runners")) self.assertEqual(0, len(sv.start_queue_runners(sess))) sv.stop() def testPrepareSessionAfterStopForChief(self): logdir = self._test_dir("prepare_after_stop_chief") with ops.Graph().as_default(): sv = supervisor.Supervisor(logdir=logdir, is_chief=True) # Create a first session and then stop. sess = sv.prepare_or_wait_for_session("") sv.stop() sess.close() self.assertTrue(sv.should_stop()) # Now create a second session and test that we don't stay stopped, until # we ask to stop again. sess2 = sv.prepare_or_wait_for_session("") self.assertFalse(sv.should_stop()) sv.stop() sess2.close() self.assertTrue(sv.should_stop()) def testPrepareSessionAfterStopForNonChief(self): logdir = self._test_dir("prepare_after_stop_nonchief") with ops.Graph().as_default(): sv = supervisor.Supervisor(logdir=logdir, is_chief=False) # Create a first session and then stop. 
sess = sv.prepare_or_wait_for_session("") sv.stop() sess.close() self.assertTrue(sv.should_stop()) # Now create a second session and test that we don't stay stopped, until # we ask to stop again. sess2 = sv.prepare_or_wait_for_session("") self.assertFalse(sv.should_stop()) sv.stop() sess2.close() self.assertTrue(sv.should_stop()) if __name__ == "__main__": test.main()
apache-2.0
Milias/ModellingSimulation
Week4/python/lattice.py
1
3123
# -*- coding: utf8 -*-
"""Generators for 2D/3D lattice description files.

Each public function writes a JSON document describing a lattice (its basis
vectors and the integer cell coordinates of each sphere) and returns a short
status message string.  The on-disk schema is::

    {
      "FileType":      "LatticeGenerator",
      "Dimensions":    2 or 3,
      "SpheresNumber": <int>,
      "SphereSize":    <float>,
      "Basis":         [[...], ...],   # lattice basis vectors
      "Data":          [[i, j, ...], ...]  # integer coordinates per sphere
    }
"""
# math.sqrt replaces the previous ``from numpy import *`` wildcard import;
# sqrt was the only name the file used from numpy.
from math import sqrt

import json


def _new_lattice(dimensions, sph_size, basis):
    """Return the common JSON skeleton shared by every generator."""
    return {
        "FileType": "LatticeGenerator",
        "Dimensions": dimensions,
        "SpheresNumber": 0,
        "SphereSize": sph_size,
        "Basis": basis,
        "Data": [],
    }


def _save_lattice(data, filename):
    """Serialize *data* to *filename*; return a status string.

    Preserves the original error contract: on failure the exception text is
    returned (not raised).  ``with`` guarantees the handle is closed even if
    serialization fails, which the original open/close pair did not.
    """
    try:
        with open(filename, "w+") as f:
            f.write(json.dumps(data))
    except Exception as e:
        return str(e)
    return "Saved to %s successfully." % filename


def GenerateRectangular(N, sph_size, scale, filename):
    """Write an N[0] x N[1] rectangular (square) lattice scaled by *scale*.

    Args:
        N: sequence of two ints, number of cells along each axis.
        sph_size: sphere radius/diameter stored verbatim in the file.
        scale: lattice constant; basis is ``scale`` times the identity.
        filename: output path.

    Returns:
        Status message string (success or the I/O error text).
    """
    data = _new_lattice(2, sph_size, [[scale, 0.0], [0.0, scale]])
    for i in range(N[0]):
        for j in range(N[1]):
            data["Data"].append([i, j])
            data["SpheresNumber"] += 1
    return _save_lattice(data, filename)


def GenerateHexagonal(N, sph_size, scale, filename):
    """Write a hexagonal lattice (two spheres per cell) scaled by *scale*.

    The second basis vector is rotated 60 degrees from the first, hence the
    ``0.5 * sqrt(3)`` component.  Interface and status-string contract match
    :func:`GenerateRectangular`.
    """
    basis = [[scale, 0.0], [0.5 * scale, 0.5 * sqrt(3) * scale]]
    data = _new_lattice(2, sph_size, basis)
    for i in range(N[0]):
        # Two spheres in the base row of each column...
        data["Data"].append([i, 0])
        data["Data"].append([i, 1])
        data["SpheresNumber"] += 2
        # ...then shift by -j per row so columns stay vertically aligned
        # despite the slanted basis.
        for j in range(1, N[1]):
            data["Data"].append([i - j, 2 * j])
            data["Data"].append([i - j, 2 * j + 1])
            data["SpheresNumber"] += 2
    return _save_lattice(data, filename)


def GenerateSC(N, sph_size, scale, filename):
    """Write an N[0] x N[1] x N[2] simple-cubic lattice scaled by *scale*."""
    basis = [[scale, 0.0, 0.0], [0.0, scale, 0.0], [0.0, 0.0, scale]]
    data = _new_lattice(3, sph_size, basis)
    for i in range(N[0]):
        for j in range(N[1]):
            for k in range(N[2]):
                data["Data"].append([i, j, k])
                data["SpheresNumber"] += 1
    return _save_lattice(data, filename)


def GenerateFCC(N, sph_size, scale, filename):
    """Write a face-centred-cubic lattice expressed in the primitive basis.

    Each conventional cell contributes four sites ((0,0,0) plus the three
    face centres, written as neighbour offsets in the primitive basis).
    Neighbouring cells share sites, so duplicates are removed afterwards.
    """
    basis = [[0.5 * scale, 0.5 * scale, 0.0],
             [0.5 * scale, 0.0, 0.5 * scale],
             [0.0, 0.5 * scale, 0.5 * scale]]
    data = _new_lattice(3, sph_size, basis)
    sites = data["Data"]
    for i in range(N[0]):
        sites.append((i, i, -i))
        sites.append((i + 1, i, -i))
        sites.append((i, i + 1, -i))
        sites.append((i, i, -i + 1))
        for j in range(N[1]):
            sites.append((i + j, i - j, j - i))
            sites.append((i + j + 1, i - j, j - i))
            sites.append((i + j, i - j + 1, j - i))
            sites.append((i + j, i - j, j - i + 1))
            for k in range(N[2]):
                sites.append((i + j - k, i - j + k, j - i + k))
                sites.append((i + j - k + 1, i - j + k, j - i + k))
                sites.append((i + j - k, i - j + k + 1, j - i + k))
                sites.append((i + j - k, i - j + k, j - i + k + 1))
    # Deduplicate shared sites; sorted() (instead of list(set(...))) makes the
    # output file ordering deterministic and reproducible across runs.
    data["Data"] = sorted(set(sites))
    data["SpheresNumber"] = len(data["Data"])
    return _save_lattice(data, filename)
mit
sjshao09/KaggleRH
zhou_bruno.py
1
7384
"""Kaggle Sberbank Russian Housing Market: XGBoost model ("Bruno" variant).

Flat script: loads train/test/macro CSVs, does light feature engineering,
trains a single XGBoost regressor and writes the test-set predictions to
``bruno_test.csv``.

NOTE(review): the final ``print "..."`` statements are Python 2 syntax, so
this script only runs under Python 2 — confirm before porting.
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import model_selection, preprocessing
import xgboost as xgb
import datetime

# Any results you write to the current directory are saved as output.
df_train = pd.read_csv("input/train.csv", parse_dates=['timestamp'])
df_test = pd.read_csv("input/test.csv", parse_dates=['timestamp'])
df_macro = pd.read_csv("input/macro.csv", parse_dates=['timestamp'])

# Drop an extreme life_sq outlier row entirely (the commented-out ensemble
# section below instead clips it to 74 — the two paths deliberately differ).
df_train.drop(df_train[df_train["life_sq"] > 7000].index, inplace=True)

# Leaderboard calibration: scale + shift the target.
# NOTE(review): magic numbers 0.969 and +10 are empirical LB adjustments.
mult = 0.969
y_train = df_train['price_doc'].values * mult + 10
id_test = df_test['id']

df_train.drop(['id', 'price_doc'], axis=1, inplace=True)
df_test.drop(['id'], axis=1, inplace=True)

# Stack train and test so feature engineering / factorization is consistent;
# num_train remembers where to split them apart again.
num_train = len(df_train)
df_all = pd.concat([df_train, df_test])
# Next line just adds a lot of NA columns (becuase "join" only works on indexes)
# but somewhow it seems to affect the result
df_all = df_all.join(df_macro, on='timestamp', rsuffix='_macro')
print(df_all.shape)

# Add month-year
# Frequency encoding: how many listings share this (year, month).
month_year = (df_all.timestamp.dt.month + df_all.timestamp.dt.year * 100)
month_year_cnt_map = month_year.value_counts().to_dict()
df_all['month_year_cnt'] = month_year.map(month_year_cnt_map)

# Add week-year count
week_year = (df_all.timestamp.dt.weekofyear + df_all.timestamp.dt.year * 100)
week_year_cnt_map = week_year.value_counts().to_dict()
df_all['week_year_cnt'] = week_year.map(week_year_cnt_map)

# Add month and day-of-week
df_all['month'] = df_all.timestamp.dt.month
df_all['dow'] = df_all.timestamp.dt.dayofweek

# Other feature engineering
df_all['rel_floor'] = df_all['floor'] / df_all['max_floor'].astype(float)
df_all['rel_kitch_sq'] = df_all['kitch_sq'] / df_all['full_sq'].astype(float)

######## BEGIN 2ND SET OF BILL S CHANGES
'''
## same ones as above
df_all['area_per_room'] = df_all['life_sq'] / df_all['num_room'].astype(float)
df_all['livArea_ratio'] = df_all['life_sq'] / df_all['full_sq'].astype(float)
df_all['yrs_old'] = 2017 - df_all['build_year'].astype(float)
df_all['avgfloor_sq'] = df_all['life_sq']/df_all['max_floor'].astype(float) #living area per floor
df_all['pts_floor_ratio'] = df_all['public_transport_station_km']/df_all['max_floor'].astype(float) #apartments near public t?
#f_all['room_size'] = df_all['life_sq'] / df_all['num_room'].astype(float)
df_all['gender_ratio'] = df_all['male_f']/df_all['female_f'].astype(float)
df_all['kg_park_ratio'] = df_all['kindergarten_km']/df_all['park_km'].astype(float)
df_all['high_ed_extent'] = df_all['school_km'] / df_all['kindergarten_km']
df_all['pts_x_state'] = df_all['public_transport_station_km'] * df_all['state'].astype(float) #public trans * state of listing
df_all['lifesq_x_state'] = df_all['life_sq'] * df_all['state'].astype(float)
df_all['floor_x_state'] = df_all['floor'] * df_all['state'].astype(float)
'''
######### END 2ND SET OF BILL S CHANGES

# Remove timestamp column (may overfit the model in train)
df_all.drop(['timestamp', 'timestamp_macro'], axis=1, inplace=True)

# Integer-encode each object (string) column positionally.
factorize = lambda t: pd.factorize(t[1])[0]

df_obj = df_all.select_dtypes(include=['object'])

# NOTE(review): this X_all is immediately recomputed below via df_values;
# this first construction (and its X_train/X_test) is dead work kept as-is.
X_all = np.c_[
    df_all.select_dtypes(exclude=['object']).values,
    np.array(list(map(factorize, df_obj.iteritems()))).T
]
print(X_all.shape)

X_train = X_all[:num_train]
X_test = X_all[num_train:]

# Deal with categorical values
df_numeric = df_all.select_dtypes(exclude=['object'])
df_obj = df_all.select_dtypes(include=['object']).copy()

for c in df_obj:
    df_obj[c] = pd.factorize(df_obj[c])[0]

df_values = pd.concat([df_numeric, df_obj], axis=1)

# Convert to numpy values
X_all = df_values.values
print(X_all.shape)

# Re-split into train/test using the boundary recorded before concat.
X_train = X_all[:num_train]
X_test = X_all[num_train:]

df_columns = df_values.columns

xgb_params = {
    'eta': 0.05,
    'max_depth': 5,
    'subsample': 0.7,
    'colsample_bytree': 0.7,
    'objective': 'reg:linear',
    'eval_metric': 'rmse',
    'seed': 0,
    'silent': 1
}

dtrain = xgb.DMatrix(X_train, y_train, feature_names=df_columns)
dtest = xgb.DMatrix(X_test, feature_names=df_columns)

'''
cv_output = xgb.cv(xgb_params, dtrain, num_boost_round=1000, early_stopping_rounds=20, verbose_eval=25, show_stdv=False)
print('best num_boost_rounds = ', len(cv_output))
num_boost_rounds = len(cv_output)
'''

# Fixed round count instead of re-running the (commented) CV above.
num_boost_rounds = 420  # From Bruno's original CV, I think

model = xgb.train(xgb_params, dtrain, num_boost_round=num_boost_rounds, evals=[(dtrain, 'train')], verbose_eval=10)

'''
# ---------------------- Predict Training Set for Ensemble --------------------- #
# Any results you write to the current directory are saved as output.
df_train = pd.read_csv("input/train.csv", parse_dates=['timestamp'])
df_test = pd.read_csv("input/test.csv", parse_dates=['timestamp'])
df_macro = pd.read_csv("input/macro.csv", parse_dates=['timestamp'])

df_train.loc[df_train.life_sq>7000, 'life_sq'] = 74

mult = 0.969
y_train = df_train['price_doc'].values * mult + 10
id_train = df_train['id']
id_test = df_test['id']

df_train.drop(['id', 'price_doc'], axis=1, inplace=True)
df_test.drop(['id'], axis=1, inplace=True)

num_train = len(df_train)
df_all = pd.concat([df_train, df_test])
# Next line just adds a lot of NA columns (becuase "join" only works on indexes)
# but somewhow it seems to affect the result
df_all = df_all.join(df_macro, on='timestamp', rsuffix='_macro')
print(df_all.shape)

# Add month-year
month_year = (df_all.timestamp.dt.month + df_all.timestamp.dt.year * 100)
month_year_cnt_map = month_year.value_counts().to_dict()
df_all['month_year_cnt'] = month_year.map(month_year_cnt_map)

# Add week-year count
week_year = (df_all.timestamp.dt.weekofyear + df_all.timestamp.dt.year * 100)
week_year_cnt_map = week_year.value_counts().to_dict()
df_all['week_year_cnt'] = week_year.map(week_year_cnt_map)

# Add month and day-of-week
df_all['month'] = df_all.timestamp.dt.month
df_all['dow'] = df_all.timestamp.dt.dayofweek

# Other feature engineering
df_all['rel_floor'] = df_all['floor'] / df_all['max_floor'].astype(float)
df_all['rel_kitch_sq'] = df_all['kitch_sq'] / df_all['full_sq'].astype(float)

# Remove timestamp column (may overfit the model in train)
df_all.drop(['timestamp', 'timestamp_macro'], axis=1, inplace=True)

factorize = lambda t: pd.factorize(t[1])[0]

df_obj = df_all.select_dtypes(include=['object'])

X_all = np.c_[
    df_all.select_dtypes(exclude=['object']).values,
    np.array(list(map(factorize, df_obj.iteritems()))).T
]

X_train = X_all[:num_train]
X_test = X_all[num_train:]

# Deal with categorical values
df_numeric = df_all.select_dtypes(exclude=['object'])
df_obj = df_all.select_dtypes(include=['object']).copy()

for c in df_obj:
    df_obj[c] = pd.factorize(df_obj[c])[0]

df_values = pd.concat([df_numeric, df_obj], axis=1)

# Convert to numpy values
X_all = df_values.values

X_train = X_all[:num_train]
X_test = X_all[num_train:]

dtrain = xgb.DMatrix(X_train, y_train, feature_names=df_columns)
train_predict = model.predict(dtrain)
train_predict_df = pd.DataFrame({'id': id_train, 'price_doc': train_predict})
train_predict_df.to_csv('bruno_train.csv', index=False)
# ---------------------- Predict Training Set for Ensemble -------end---------- #
'''

# Predict the test set and write the submission file.
y_pred = model.predict(dtest)
df_sub = pd.DataFrame({'id': id_test, 'price_doc': y_pred})
# Python 2 print statements (see module note above).
print "[INFO] Bruno Model Average Price =", df_sub['price_doc'].mean()
df_sub.to_csv('bruno_test.csv', index=False)
print df_sub.head()
mit
danwagnerco/bowshock
bowshock/maas.py
2
2217
# http://marsweather.ingenology.com/#get_started
# Below description taken from https://github.com/ingenology/mars_weather_api
# The {MAAS} API is an open source REST API built to help make it easier and more efficient to build interactive applications that want to utilize the wealth of weather data being transmitted by the Curiosity Rover on Mars. Our API is built upon the REMS (Rover Environmental Monitoring Station) data provided by the Centro de Astrobiologia (CSIC-INTA).
# This API is built on Django and Django REST Framework.
# Our implementation of the API is available at marsweather.ingenology.com.

import decimal

from bowshock.helpers import nasa_api_key, bowshock_logger, vali_date, validate_float, dispatch_http_get

logger = bowshock_logger()


def maas_latest():
    '''
    Fetch the latest MAAS weather report.

    will return a JSON object for the latest report:

    {
        "report": {
            "terrestrial_date": "2013-05-01",
            "sol": 261,
            "ls": 310.5,
            "min_temp": -69.75,
            "min_temp_fahrenheit": -93.55,
            "max_temp": -4.48,
            "max_temp_fahrenheit": 23.94,
            "pressure": 868.05,
            "pressure_string": "Higher",
            "abs_humidity": null,
            "wind_speed": null,
            "wind_direction": "--",
            "atmo_opacity": "Sunny",
            "season": "Month 11",
            "sunrise": "2013-05-01T11:00:00Z",
            "sunset": "2013-05-01T22:00:00Z"
        }
    }
    '''
    base_url = 'http://marsweather.ingenology.com/v1/latest/'

    return dispatch_http_get(base_url)


def maas_archive(begin, end):
    '''
    Fetch every MAAS weather report between *begin* and *end* (inclusive),
    both given as "YYYY-MM-DD" strings.

    Raises:
        ValueError: if either date fails ``vali_date`` validation.

    This returns a collection of JSON objects for every weather report
    available for October 2012:

    {
        "count": 29,
        "next": "http://marsweather.ingenology.com/v1/archive/?terrestrial_date_end=2012-10-31&terrestrial_date_start=2012-10-01&page=2",
        "previous": null,
        "results": [
            ...
        ]
    }
    '''
    base_url = 'http://marsweather.ingenology.com/v1/archive/?'

    # Narrowed from a bare ``except:`` which also swallowed SystemExit and
    # KeyboardInterrupt; any validation failure still surfaces as the same
    # ValueError callers already catch.
    try:
        vali_date(begin)
        vali_date(end)
        base_url += 'terrestrial_date_start=' + begin + "&" + 'terrestrial_date_end=' + end
    except Exception:
        raise ValueError("Incorrect date format, should be YYYY-MM-DD")

    return dispatch_http_get(base_url)
gpl-2.0
edickie/ciftify
ciftify/utils.py
1
17027
#!/usr/bin/env python3 """ A collection of utilities for the epitome pipeline. Mostly for getting subject numbers/names, checking paths, gathering information, etc. """ import os import sys import copy import datetime import subprocess import tempfile import shutil import logging import math import yaml import ciftify logger = logging.getLogger(__name__) def get_subj(path, user_filter=None): """ Gets all folder names (i.e., subjects) in a directory (of subjects). Removes hidden folders. user_filter option can be used to return only the subjects that contain the given string. Warning: Returns a list in python2 and a generator in python3 so always wrap the returned value in list() if you require a list. """ subjects = [] if not os.path.exists(path): # return empty list if given bad path return subjects for subj in next(os.walk(path))[1]: subjects.append(subj) subjects.sort() subjects = filter(lambda x: x.startswith('.') == False, subjects) if user_filter: subjects = filter(lambda x: user_filter in x, subjects) return subjects def FWHM2Sigma(FWHM): ''' convert the FWHM to a Sigma value ''' if float(FWHM) == 0: sigma = 0 else: sigma = float(FWHM) / (2 * math.sqrt(2*math.log(2))) return(sigma) def make_dir(dir_name, dry_run=False, suppress_exists_error = False): # Wait till logging is needed to get logger, so logging configuration # set in main module is respected logger = logging.getLogger(__name__) if dry_run: logger.debug("Dry-run, skipping creation of directory "\ "{}".format(dir_name)) return try: os.makedirs(dir_name) except PermissionError: logger.error("You do not have permission to write to {}".format(dir_name)) except FileExistsError: if not suppress_exists_error: logger.warning("{} already exists".format(dir_name)) except OSError: logger.error('Could not create directory {}'.format(dir_name)) def check_output_writable(output_file, exit_on_error = True): ''' will test if the directory for an output_file exists and can be written too ''' logger = 
logging.getLogger(__name__) dirname = os.path.dirname(output_file) dirname = '.' if dirname == '' else dirname result = os.access(dirname, os.W_OK) if result == False: if exit_on_error: logger.error('Directory for output {} does not exist, ' 'or you do not have permission to write there'.format(output_file)) sys.exit(1) return(result) def check_input_readable(path, exit_on_error = True): '''check that path exists and is readable, exits upon failure by default''' logger = logging.getLogger(__name__) if not os.access(path, os.R_OK): logger.error('Input {}, does not exist, or you do not have permission to read it.' ''.format(path)) if exit_on_error: sys.exit(1) return(path) def log_arguments(arguments): '''send a formatted version of the arguments to the logger''' logger = logging.getLogger(__name__) input_args = yaml.dump(arguments, default_flow_style=False) sep = '{} '.format(os.linesep) input_args2 = input_args.replace(os.linesep,sep) input_args3 = input_args2.replace('!!python/object/new:docopt.Dict\ndictitems:','') logger.info('Arguments:{}{}'.format(sep, input_args3)) def section_header(title): '''returns a outlined bit to stick in a log file as a section header''' header = ''' ------------------------------------------------------------- {} : {} ------------------------------------------------------------- '''.format(datetime.datetime.now(),title) return(header) def ciftify_logo(): ''' this logo is ascii art with fender font''' logo = r''' .|'; || .|'; '' || || '' || .|'', || '||' ''||'' || '||' '|| ||` || || || || || || `|..|| `|..' .||. .||. `|..' .||. .||. || , |' '' ''' return(logo) def pint_logo(): ''' logo from ascii text with font fender''' logo = r""" '||'''|, |''||''| '||\ ||` |''||''| || || || ||\\ || || ||...|' || || \\ || || || || || \\|| || .|| |..||..| .|| \||. .||. """ return(logo) def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass. 
- Taken from six to ensure python 2 and 3 class compatibility""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper class TempDir: def __init__(self): self.path = None return def __enter__(self): self.path = tempfile.mkdtemp() return self.path def __exit__(self, type, value, traceback): if self.path is not None: shutil.rmtree(self.path) class TempSceneDir: """ A context manager for the temporary scene dir. A temp dir in the same directory as the hcp data is used for the scene file due to the fact that scene files contain a large number of relative paths and the images will come out broken if it is put anywhere else. """ def __init__(self, hcp_dir): self.base = os.path.join(hcp_dir, 'scene') def __enter__(self): self.dir = tempfile.mkdtemp(prefix=self.base) return self.dir def __exit__(self, type, value, traceback): shutil.rmtree(self.dir) class WorkDirSettings: def __init__(self, arguments): logger = logging.getLogger(__name__) try: temp_dir = arguments['--ciftify-work-dir'] except KeyError: temp_dir = None if not temp_dir: try: temp_dir = arguments['--hcp-data-dir'] if temp_dir: logger.warning("Argument --hcp-data-dir has been deprecated. 
" "Please instead use --ciftify-work-dir in the future.") except KeyError: temp_dir = None try: temp_subject = arguments['<subject>'] except KeyError: temp_subject = None self.work_dir = self.__set_work_dir(temp_dir, temp_subject) def __set_work_dir(self, user_dir, subject): # Wait till logging is needed to get logger, so logging configuration # set in main module is respected logger = logging.getLogger(__name__) if user_dir: return os.path.realpath(user_dir) if subject == 'HCP_S1200_GroupAvg': return None found_dir = ciftify.config.find_work_dir() if found_dir is None: logger.error("Cannot find working directory, exiting.") sys.exit(1) return os.path.realpath(found_dir) def get_registration_mode(arguments): """ Insures that the --surf-reg argument is either FS or MSMSulc """ if arguments['--surf-reg'] == "MSMSulc": return 'MSMSulc' if arguments['--surf-reg'] == "FS": return 'FS' else: logger.error('--surf-reg must be either "MSMSulc" or "FS"') sys.exit(1) class WorkFlowSettings(WorkDirSettings): ''' A convenience class for parsing settings that are shared by ciftify_recon_all and ciftify_subject_fmri ''' def __init__(self, arguments): WorkDirSettings.__init__(self, arguments) self.FSL_dir = self.__set_FSL_dir() # Read settings from yaml self.__config = self.__read_settings(arguments['--ciftify-conf']) self.high_res = self.get_config_entry('high_res') self.low_res = self.get_config_entry('low_res') self.grayord_res = self.get_config_entry('grayord_res') self.n_cpus = get_number_cpus(arguments['--n_cpus']) def __set_FSL_dir(self): fsl_dir = ciftify.config.find_fsl() if fsl_dir is None: logger.error("Cannot find FSL dir, exiting.") sys.exit(1) fsl_data = os.path.normpath(os.path.join(fsl_dir, 'data')) if not os.path.exists(fsl_data): logger.warn("Found {} for FSL path but {} does not exist. 
May " "prevent registration files from being found.".format( fsl_dir, fsl_data)) return fsl_dir def __read_settings(self, yaml_file): if yaml_file is None: yaml_file = os.path.join(ciftify.config.find_ciftify_global(), 'ciftify_workflow_settings.yaml') if not os.path.exists(yaml_file): logger.critical("Settings yaml file {} does not exist" "".format(yaml_file)) sys.exit(1) try: with open(yaml_file) as yaml_stream: config = yaml.load(yaml_stream, Loader=yaml.SafeLoader) except: logger.critical("Cannot read yaml config file {}, check formatting." "".format(yaml_file)) sys.exit(1) return config def get_config_entry(self, key): try: config_entry = self.__config[key] except KeyError: logger.critical("{} not defined in cifti recon settings".format(key)) sys.exit(1) return config_entry def get_resolution_config(self, method, standard_res): """ Reads the method and resolution settings. """ method_config = self.get_config_entry(method) try: resolution_config = method_config[standard_res] except KeyError: logger.error("Registration resolution {} not defined for method " "{}".format(standard_res, method)) sys.exit(1) for key in resolution_config.keys(): ## The base dir (FSL_dir currently) may need to change when new ## resolutions/methods are added reg_item = os.path.join(self.FSL_dir, resolution_config[key]) if not os.path.exists(reg_item): logger.error("Item required for registration does not exist: " "{}".format(reg_item)) sys.exit(1) resolution_config[key] = reg_item return resolution_config def get_number_cpus(user_n_cpus = None): ''' reads the number of CPUS available for multithreaded processes either from a user argument, or from the enviroment''' if user_n_cpus: try: n_cpus = int(user_n_cpus) except: logger.critical('Could note read --n_cpus entry {} as integer'.format(user_n_cpus)) sys.exit(1) else: n_cpus = os.getenv('OMP_NUM_THREADS') # if all else fails..set n_cpus to 1 if not n_cpus: n_cpus = 1 return n_cpus class VisSettings(WorkDirSettings): """ A convenience 
class. Provides a work_dir and qc_dir attribute and a function to set each based on the user's input and the environment. This is intended to be inherited from in each script, so that user settings can be passed together and easily kept track of. Arguments: A docopt parsed dictionary of the user's input arguments. qc_mode: The qc_mode to operate in and the string to include in the qc output folder name. Will raise SystemExit if the user hasn't set the ciftify-work-dir/hcp-data-dir and the environment variable isn't set. """ def __init__(self, arguments, qc_mode): WorkDirSettings.__init__(self, arguments) try: temp_qc = arguments['--qcdir'] except KeyError: temp_qc = None try: self.debug_mode = arguments['--debug'] except KeyError: self.debug_mode = False self.qc_mode = qc_mode self.qc_dir = self.__set_qc_dir(temp_qc) def __set_qc_dir(self, user_qc_dir): if user_qc_dir: return user_qc_dir qc_dir = os.path.join(self.work_dir, 'qc_{}'.format(self.qc_mode)) return qc_dir def run(cmd, dryrun=False, suppress_stdout=False, suppress_echo = False, suppress_stderr = False, env = None): """ Runs command in default shell, returning the return code and logging the output. It can take a cmd argument as a string or a list. If a list is given, it is joined into a string. 
There are some arguments for changing the way the cmd is run: dryrun: Do not actually run the command (for testing) (default: False) suppress_echo: echo's command to debug steam (default is info) suppress_stdout: Any standard output from the function is printed to the log at "debug" level but not "info" suppress_stderr: Send error message to stdout...for situations when program logs info to stderr stream..urg env: a dict of environment variables to add to the subshell (this can be a useful may to restrict CPU usage of the subprocess) """ # Wait till logging is needed to get logger, so logging configuration # set in main module is respected logger = logging.getLogger(__name__) if type(cmd) is list: cmd = ' '.join(cmd) if suppress_echo: logger.debug("Running: {}".format(cmd)) else: logger.info("Running: {}".format(cmd)) if dryrun: logger.info('Doing a dryrun') return 0 merged_env = os.environ if env: merged_env.update(env) p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=merged_env) out, err = p.communicate() # py3 compability :( out = out.decode('utf-8') err = err.decode('utf-8') if p.returncode: logger.error('cmd: {} \n Failed with returncode {}'.format(cmd, p.returncode)) if len(out) > 0: if suppress_stdout: logger.debug(out) else: logger.info(out) if len(err) > 0: if suppress_stderr: logger.info(err) else: logger.warning(err) return p.returncode class cd: """ A context manager for changing directory. Since best practices dictate returning to the original directory, saves the original directory and returns to it after the block has exited. 
May raise OSError if the given path doesn't exist (or the current directory is deleted before switching back) """ def __init__(self, path): user_path = os.path.expanduser(path) self.new_path = os.path.expandvars(user_path) def __enter__(self): self.old_path = os.getcwd() os.chdir(self.new_path) def __exit__(self, e, value, traceback): os.chdir(self.old_path) def get_stdout(cmd_list, echo=True): ''' run the command given from the cmd list and report the stdout result Input: A command list''' logger = logging.getLogger(__name__) if echo: logger.info('Evaluating: {}'.format(' '.join(cmd_list))) stdout = subprocess.check_output(cmd_list) return stdout.decode('utf-8') def check_output(command, stderr=None, shell = True): """ Ensures python 3 compatibility by always decoding the return value of subprocess.check_output Input: A command string""" output = subprocess.check_output(command, shell=shell, stderr=stderr) return output.decode('utf-8') def ciftify_log_endswith_done(ciftify_log): '''return true with the ciftify log file exists and ends with the word Done''' if not os.path.isfile(ciftify_log): return False with open(ciftify_log) as f: lines = f.read().splitlines() last_line = lines[-3] is_done = True if 'Done' in last_line else False return is_done def has_ciftify_recon_all_run(ciftify_work_dir, subject): '''determine if ciftify_recon_all has already completed''' ciftify_log = os.path.join(ciftify_work_dir, subject, 'cifti_recon_all.log') return ciftify_log_endswith_done(ciftify_log) def has_ciftify_fmri_run(subject, fmriname, ciftify_work_dir): '''determine if ciftify_recon_all has already completed''' ciftify_log = os.path.join(ciftify_work_dir, subject, 'MNINonLinear', 'Results', fmriname, 'ciftify_subject_fmri.log') # print('ciftify_subject_fmri done {}'.format(ciftify_log_endswith_done(ciftify_log))) return ciftify_log_endswith_done(ciftify_log)
mit
GordonGaoNY/py_rrfm_re
server/services/bfs_service.py
2
7702
from configurations.env_configs import * from services.api_service import * # def graph(): # hash = dict() # with open(_datasets_path() + 'connections.csv', 'rb') as csvfile: # reader = csv.reader(csvfile, delimiter=',') # for row in reader: # if int(row[0]) in hash.keys(): # hash[int(row[0])]['connections'].append(int(row[1])) # else: # connections1 = [] # connections1.append(int(row[1])) # hash[int(row[0])] = { # # 'email': row[2], # 'connections': connections1 # } # if int(row[1]) in hash.keys(): # hash[int(row[1])]['connections'].append(int(row[0])) # else: # connections2 = [] # connections2.append(int(row[0])) # hash[int(row[1])] = { # # 'email': "", # 'connections': connections2 # } # return hash def _request_data(uri): print("Sending request to:", uri) request = Request(uri) request.add_header('HTTP_X_IVY_SESSION_TOKEN', RAILS_TOKEN) data = json.loads(urlopen(request).read()) return data # def graph(uri): # hash = dict() # connections = _request_data(uri) # for connection in connections: # account_id = int(connection['account_id']) # requestor_id = int(connection['requestor_id']) # if account_id in hash.keys(): # hash[account_id]['connections'].append(requestor_id) # else: # connections1 = [] # connections1.append(requestor_id) # hash[account_id] = {'connections': connections1} # if requestor_id in hash.keys(): # hash[requestor_id]['connections'].append(account_id) # else: # connections2 = [] # connections2.append(account_id) # hash[requestor_id] = {'connections': connections2} # return hash def _neighbours(graph, user_id): if user_id in graph.keys(): return graph[user_id]['connections'] else: return set() # def bfs(graph, user_id): # count = 0 # commons = {} # visited, queue = set(), [] # visited.add(user_id) # friends = _neighbours(graph, user_id) # queue = _enqueue(queue, visited, friends, 0) # while queue: # current = queue.pop(0) # if current['id'] not in visited: # count += 1 # visited.add(current['id']) # connections = _neighbours(graph, 
current['id']) # if connections is not None: # queue = _enqueue(queue, visited, connections, # current['level'] + 1) # if current['id'] not in friends: # mutuals = _common_friends(graph, user_id, current['id']) # commons[current['id']] = { # # 'email': graph[current['id']]['email'], # 'parent': user_id, # # 'commons': mutuals, # 'level': current['level'], # 'num_of_commons': len(mutuals) # } # print count # return sorted(commons.items(), key=lambda x: (x[1]['num_of_commons'], -x[1]['level']), reverse=True)[0:20] def _common_friends(graph, user1, user2): a = _neighbours(graph, user1) b = _neighbours(graph, user2) return set(a).intersection(b) def _enqueue(queue, visited, arr, level): for i in arr: if i not in visited: queue.append({'id': i, 'level': level}) return queue from account_service import * from collections import OrderedDict def _graph(uri): hash = dict() connections = APIService().get_request(uri, '', 1000) for connection in connections: account_id = int(connection['account_id']) requestor_id = int(connection['requestor_id']) account_chapter = int(connection['account_chapter']) requestor_chapter = int(connection['requestor_chapter']) account_nationality = str(connection['account_nationality']).strip() requestor_nationality = str(connection['requestor_nationality']).strip() if account_id in hash.keys(): hash[account_id]['connections'].append(requestor_id) else: connections1 = [] connections1.append(requestor_id) # hash[account_id] = {'connections': connections1} # hash[account_id] = {'chapter': account_chapter} hash[account_id] = { 'connections': connections1, 'chapter': account_chapter, 'nationality': account_nationality } if requestor_id in hash.keys(): hash[requestor_id]['connections'].append(account_id) else: connections2 = [] connections2.append(account_id) # hash[requestor_id] = {'connections': connections2} # hash[requestor_id] = {'chapter': requestor_chapter} hash[requestor_id] = { 'connections': connections2, 'chapter': requestor_chapter, 
'nationality': requestor_nationality } print "Structured graph: ", len(hash) return hash def _bfs(graph, user_id, user_data, params, decisions): commons = {} visited, queue = set(), [] visited.add(user_id) friends = _neighbours(graph, user_id) queue = _enqueue(queue, visited, friends, 0) while queue: current = queue.pop(0) if current['level'] > 6: break if params['chapter']: if graph[current['id']]['chapter'] != user_data['chapter']: continue if params['nationality']: if graph[current['id']]['nationality'] != user_data['nationality']: continue # continue if 1: accept or 2:reject if decisions.get(str(user_id)) is not None \ and decisions[str(user_id)]['decisions'].get(str(current['id'])) is not None \ and (decisions[str(user_id)]['decisions'][str(current['id'])] == 1 or decisions[str(user_id)]['decisions'][str(current['id'])] == 2): continue if current['id'] not in visited: visited.add(current['id']) connections = _neighbours(graph, current['id']) if connections is not None: queue = _enqueue(queue, visited, connections, current['level'] + 1) if current['id'] not in friends: mutuals = _common_friends(graph, user_id, current['id']) commons[current['id']] = { # 'email': graph[current['id']]['email'], 'parent': user_id, # 'commons': mutuals, 'level': current['level'], 'num_of_commons': len(mutuals) } return OrderedDict(sorted(commons.items(), key=lambda x: (x[1]['num_of_commons'], -x[1]['level']), reverse=True)[0:5]) from mongodb import update_mutual_friend_recommendations def process_mutual_friends(uri, params): networks = _graph(uri) decisions = APIService().get_request(DECISIONS_FILTER, "trackable_type=Account", 10000, 'dict') for key, value in networks.iteritems(): print("processing mutual friends for account: ", key) best_recommendations = _bfs(networks, key, value, params, decisions) update_mutual_friend_recommendations(best_recommendations) print("finished mutual friends for account: ", key) # for mutual friend api def api_process_mutuals_for(id, uri): g = 
_graph(uri) results = _bfs(g, id) return results
mit
moyogo/nototools
tests/coverage_test.py
3
1360
#!/usr/bin/env python # # Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for coverage.py.""" __author__ = "[email protected] (Roozbeh Pournader)" import os from os import path import tempfile import unittest from nototools import coverage from hb_input_test import make_font class CharacterSetTest(unittest.TestCase): """Test class for coverage.character_set.""" def test_sanity(self): """Test basic sanity of the method.""" font_file = tempfile.NamedTemporaryFile() font = make_font("") font.save(font_file.name) charset = coverage.character_set(font_file.name) self.assertTrue(ord(" ") in charset) self.assertTrue(ord("A") in charset) self.assertFalse(0x10B00 in charset) if __name__ == "__main__": unittest.main()
apache-2.0
felixhummel/ftt
ftt/migrations/0001_initial.py
1
2259
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-03-16 21:58 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion import mptt.fields class Migration(migrations.Migration): initial = True dependencies = [ ('auth', '0008_alter_user_username_max_length'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Entry', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('start_dt', models.DateTimeField()), ('end_dt', models.DateTimeField()), ('comment', models.TextField(blank=True, null=True)), ], ), migrations.CreateModel( name='Project', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128)), ('lft', models.PositiveIntegerField(db_index=True, editable=False)), ('rght', models.PositiveIntegerField(db_index=True, editable=False)), ('tree_id', models.PositiveIntegerField(db_index=True, editable=False)), ('level', models.PositiveIntegerField(db_index=True, editable=False)), ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='auth.Group')), ('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='ftt.Project')), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='entry', name='project', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ftt.Project'), ), migrations.AddField( model_name='entry', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
gpl-3.0
stclair/wes-cms
django/contrib/gis/tests/layermap/models.py
239
2241
from django.contrib.gis.db import models class State(models.Model): name = models.CharField(max_length=20) objects = models.GeoManager() class County(models.Model): name = models.CharField(max_length=25) state = models.ForeignKey(State) mpoly = models.MultiPolygonField(srid=4269) # Multipolygon in NAD83 objects = models.GeoManager() class CountyFeat(models.Model): name = models.CharField(max_length=25) poly = models.PolygonField(srid=4269) objects = models.GeoManager() class City(models.Model): name = models.CharField(max_length=25) population = models.IntegerField() density = models.DecimalField(max_digits=7, decimal_places=1) dt = models.DateField() point = models.PointField() objects = models.GeoManager() class Interstate(models.Model): name = models.CharField(max_length=20) length = models.DecimalField(max_digits=6, decimal_places=2) path = models.LineStringField() objects = models.GeoManager() # Same as `City` above, but for testing model inheritance. class CityBase(models.Model): name = models.CharField(max_length=25) population = models.IntegerField() density = models.DecimalField(max_digits=7, decimal_places=1) point = models.PointField() objects = models.GeoManager() class ICity1(CityBase): dt = models.DateField() class ICity2(ICity1): dt_time = models.DateTimeField(auto_now=True) class Invalid(models.Model): point = models.PointField() # Mapping dictionaries for the models above. co_mapping = {'name' : 'Name', 'state' : {'name' : 'State'}, # ForeignKey's use another mapping dictionary for the _related_ Model (State in this case). 'mpoly' : 'MULTIPOLYGON', # Will convert POLYGON features into MULTIPOLYGONS. } cofeat_mapping = {'name' : 'Name', 'poly' : 'POLYGON', } city_mapping = {'name' : 'Name', 'population' : 'Population', 'density' : 'Density', 'dt' : 'Created', 'point' : 'POINT', } inter_mapping = {'name' : 'Name', 'length' : 'Length', 'path' : 'LINESTRING', }
bsd-3-clause
jones139/OpenSeizureDetector
audio_version/kiss_fft130/test/mk_test.py
67
3009
#!/usr/bin/env python import FFT import sys import random import re j=complex(0,1) def randvec(n,iscomplex): if iscomplex: return [ int(random.uniform(-32768,32767) ) + j*int(random.uniform(-32768,32767) ) for i in range(n) ] else: return [ int(random.uniform(-32768,32767) ) for i in range(n) ] def c_format(v,round=0): if round: return ','.join( [ '{%d,%d}' %(int(c.real),int(c.imag) ) for c in v ] ) else: s= ','.join( [ '{%.60f ,%.60f }' %(c.real,c.imag) for c in v ] ) return re.sub(r'\.?0+ ',' ',s) def test_cpx( n,inverse ,short): v = randvec(n,1) scale = 1 if short: minsnr=30 else: minsnr=100 if inverse: tvecout = FFT.inverse_fft(v) if short: scale = 1 else: scale = len(v) else: tvecout = FFT.fft(v) if short: scale = 1.0/len(v) tvecout = [ c * scale for c in tvecout ] s="""#define NFFT %d""" % len(v) + """ { double snr; kiss_fft_cpx test_vec_in[NFFT] = { """ + c_format(v) + """}; kiss_fft_cpx test_vec_out[NFFT] = {""" + c_format( tvecout ) + """}; kiss_fft_cpx testbuf[NFFT]; void * cfg = kiss_fft_alloc(NFFT,%d,0,0);""" % inverse + """ kiss_fft(cfg,test_vec_in,testbuf); snr = snr_compare(test_vec_out,testbuf,NFFT); printf("DATATYPE=" xstr(kiss_fft_scalar) ", FFT n=%d, inverse=%d, snr = %g dB\\n",NFFT,""" + str(inverse) + """,snr); if (snr<""" + str(minsnr) + """) exit_code++; free(cfg); } #undef NFFT """ return s def compare_func(): s=""" #define xstr(s) str(s) #define str(s) #s double snr_compare( kiss_fft_cpx * test_vec_out,kiss_fft_cpx * testbuf, int n) { int k; double sigpow,noisepow,err,snr,scale=0; kiss_fft_cpx err; sigpow = noisepow = .000000000000000000000000000001; for (k=0;k<n;++k) { sigpow += test_vec_out[k].r * test_vec_out[k].r + test_vec_out[k].i * test_vec_out[k].i; C_SUB(err,test_vec_out[k],testbuf[k].r); noisepow += err.r * err.r + err.i + err.i; if (test_vec_out[k].r) scale += testbuf[k].r / test_vec_out[k].r; } snr = 10*log10( sigpow / noisepow ); scale /= n; if (snr<10) printf( "\\npoor snr, try a scaling factor %f\\n" , scale ); return snr; } 
""" return s def main(): from getopt import getopt opts,args = getopt(sys.argv[1:],'s') opts = dict(opts) short = int( opts.has_key('-s') ) fftsizes = args if not fftsizes: fftsizes = [ 1800 ] print '#include "kiss_fft.h"' print compare_func() print "int main() { int exit_code=0;\n" for n in fftsizes: n = int(n) print test_cpx(n,0,short) print test_cpx(n,1,short) print """ return exit_code; } """ if __name__ == "__main__": main()
gpl-3.0
brandond/ansible
test/units/modules/network/itential/test_iap_token.py
54
1438
""" iap_token unit tests """ # -*- coding: utf-8 -*- # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # pylint: disable=invalid-name,protected-access,function-redefined,unused-argument # pylint: disable=unused-import,redundant-unittest-assert from __future__ import (absolute_import, division, print_function) __metaclass__ = type import unittest class TestClass(unittest.TestCase): """ Test cases """ def _assert_incident_api(self, module, url, method, headers): """ Setup Test """ self.assertTrue('http://localhost:4007/login' in url, 'token') return Response(), {'status': 200} def test_incident_url(self): self.assertTrue(True, True) class Response(object): """ Setup Response """ def read(self): return '{"token": "ljhklj%3D"}'
gpl-3.0
MeanEYE/Sunflower
sunflower/associations.py
1
10229
from __future__ import absolute_import import os import shlex import subprocess from gi.repository import Gtk, Gio from collections import namedtuple from urllib.request import pathname2url from sunflower.common import is_gui_app, decode_file_name, encode_file_name from sunflower.parameters import Parameters from sunflower.plugin_base.provider import Mode from sunflower.plugin_base.terminal import TerminalType from sunflower.gui.input_dialog import ApplicationSelectDialog ApplicationInfo = namedtuple( 'ApplicationInfo', [ 'id', 'name', 'description', 'executable', 'command_line', 'icon' ]) class AssociationManager: """Class that provides 'Open With' menu""" def __init__(self, application): self._application = application def __get_icon(self, icon_object): """Get icon string from GIO icon object""" result = None if hasattr(icon_object, 'get_names'): result = icon_object.get_names()[0] elif hasattr(icon_object, 'get_file'): result = icon_object.get_file().get_path() return result def __format_command_string(self, selection, command): """Format command string""" # we modify exec_string and use # command for testing to avoid problem # with Unicode characters in URI exec_string = command if selection is not None: # prepare lists normal_list = ['"{0}"'.format(item.replace('"', '\\"')) for item in selection] uri_list = ['"{0}"'.format(item.replace('"', '\\"')) for item in selection] dir_list = ['"{0}"'.format(os.path.dirname(item).replace('"', '\\"') for item in selection)] names_list = ['"{0}"'.format(os.path.basename(item).replace('"', '\\"') for item in selection)] # prepare single item selection if '%f' in command: exec_string = exec_string.replace('%f', '"{0}"'.format(selection[0])) if '%u' in command: exec_string = exec_string.replace('%u', '"{0}"'.format(selection[0])) if '%d' in command: exec_string = exec_string.replace('%d', '"{0}"'.format(os.path.dirname(selection[0]))) if '%n' in command: exec_string = exec_string.replace('%n', 
'"{0}"'.format(os.path.basename(selection[0]))) # prepare multiple selection if '%F' in command: exec_string = exec_string.replace('%F', ' '.join(normal_list)) if '%U' in command: exec_string = exec_string.replace('%U', ' '.join(uri_list)) if '%D' in command: exec_string = exec_string.replace('%D', ' '.join(dir_list)) if '%N' in command: exec_string = exec_string.replace('%N', ' '.join(names_list)) return exec_string def is_mime_type_subset(self, mime_type, super_type): """Check whether specified mime_type is a subset of super_type""" return Gio.content_type_is_a(mime_type, super_type) def is_mime_type_unknown(self, mime_type): """Check if specified mime_type is unknown""" return Gio.content_type_is_unknown(mime_type) def get_sample_data(self, path, provider): """Get sample data needed for content detection""" data = None file_handle = provider.get_file_handle(path, Mode.READ) if file_handle is not None: data = file_handle.read(128) file_handle.close() return data def get_mime_type(self, path=None, data=None): """Get mime type for specified path""" result = None if path is not None: # detect content type based on file name # due to a bug in the GI bindings of GIO, we can't pass non-UTF-8 # file names in here. In this case, that probably doesn't matter. 
path = decode_file_name(path) result = Gio.content_type_guess(filename=path)[0] elif data is not None: # detect content type based on data result = Gio.content_type_guess(data=data)[0] return result def get_mime_description(self, mime_type): """Get description from mime type""" return Gio.content_type_get_description(mime_type) def get_all(self): """Return list of all applications""" result = [] for app_info in Gio.app_info_get_all(): application = ApplicationInfo( id = app_info.get_id(), name = app_info.get_name(), description = app_info.get_description(), executable = app_info.get_executable(), command_line = app_info.get_commandline(), icon = self.__get_icon(app_info.get_icon()) ) result.append(application) return result def get_gio_application_by_id(self, id): """Get GIO AppInfo object for specified Id""" result = None for app_info in Gio.app_info_get_all(): if app_info.get_id() == id: result = app_info break return result def get_application_list_for_type(self, mime_type): """Get list of associated programs for specified type""" result = [] for app_info in Gio.app_info_get_all_for_type(mime_type): application = ApplicationInfo( id = app_info.get_id(), name = app_info.get_name(), description = app_info.get_description(), executable = app_info.get_executable(), command_line = app_info.get_commandline(), icon = self.__get_icon(app_info.get_icon()) ) result.append(application) return result def get_default_application_for_type(self, mime_type): """Get default application for specified type""" app_info = Gio.app_info_get_default_for_type(mime_type, must_support_uris=False) if app_info is not None: # create application container application = ApplicationInfo( id = app_info.get_id(), name = app_info.get_name(), description = app_info.get_description(), executable = app_info.get_executable(), command_line = app_info.get_commandline(), icon = self.__get_icon(app_info.get_icon()) ) else: # return None if there is no default application for this type application = None 
return application def set_default_application_for_type(self, mime_type, application_id): """Set default application for specified type""" result = False for app_info in Gio.app_info_get_all(): if application_id == app_info.get_id(): app_info.set_as_default_for_type(mime_type) result = True break return result def open_file(self, selection, application_info=None, exec_command=None): """Open filename using config file or specified execute command""" if application_info is not None: # launch application using GIO API application = self.get_gio_application_by_id(application_info.id) if application is not None: if application.supports_uris(): selection = [ 'file://{0}'.format(pathname2url(encode_file_name(path))) if not path.startswith('file://') else encode_file_name(path) for path in selection] application.launch_uris(selection) else: application.launch([Gio.File.new_for_path(encode_file_name(path)) for path in selection]) elif exec_command is not None: # use specified command command = exec_command selection = [item.replace('"', '\\"') for item in selection] exec_string = self.__format_command_string(selection, command) # open selected file(s) split_command = shlex.split(exec_string, posix=False) test_command = split_command[0] if len(split_command) > 1 else exec_string if is_gui_app(test_command): subprocess.Popen(split_command, cwd=os.path.dirname(selection[0])) else: active_object = self._application.get_active_object() options = Parameters() options.set('close_with_child', True) options.set('shell_command', split_command[0]) options.set('arguments', split_command) options.set('path', os.path.dirname(selection[0])) self._application.create_terminal_tab(active_object._notebook, options) def edit_file(self, selection): """Edit selected filename""" section = self._application.options.section('editor') command = section.get('default_editor') exec_string = self.__format_command_string(selection, command) # open selected file(s) split_command = shlex.split(exec_string) 
test_command = split_command[0] if len(split_command) > 1 else exec_string if (section.get('terminal_command') and section.get('type') == 1) \ or not is_gui_app(test_command): active_object = self._application.get_active_object() options = Parameters() options.set('close_with_child', True) options.set('shell_command', split_command[0]) options.set('arguments', split_command) options.set('path', os.path.dirname(selection[0])) self._application.create_terminal_tab(active_object._notebook, options) else: subprocess.Popen(split_command, cwd=os.path.dirname(selection[0])) def execute_file(self, path, provider=None): """Execute specified item properly.""" mime_type = self.get_mime_type(path) terminal_type = self._application.options.section('terminal').get('type') should_execute = False if provider is not None and provider.is_local: # only allow local files which have execute # bit set to be executed locally should_execute = os.access(path, os.X_OK) # if we still don't know content type, try to guess if self.is_mime_type_unknown(mime_type): data = self.get_sample_data(path, provider) mime_type = self.get_mime_type(data=data) if Gio.content_type_can_be_executable(mime_type) and should_execute: # file type is executable if is_gui_app(path): subprocess.Popen((path,), cwd=os.path.dirname(path)) else: # command is console based, create terminal tab and fork it active_object = self._application.get_active_object() options = Parameters() options.set('close_with_child', False) options.set('shell_command', path) options.set('path', os.path.dirname(path)) self._application.create_terminal_tab(active_object._notebook, options) else: # file type is not executable, try to open with default associated application default_application = self.get_default_application_for_type(mime_type) if default_application is not None: self.open_file((path,), default_application) else: # no default application selected, show application selection dialog dialog = 
ApplicationSelectDialog(self._application, path) result = dialog.get_response() if result[0] == Gtk.ResponseType.OK: self.open_file(selection=(path,), exec_command=result[2])
gpl-3.0
blacklin/kbengine
kbe/src/lib/python/Lib/test/test_email/test_message.py
72
26378
import unittest import textwrap from email import policy, message_from_string from email.message import EmailMessage, MIMEPart from test.test_email import TestEmailBase, parameterize # Helper. def first(iterable): return next(filter(lambda x: x is not None, iterable), None) class Test(TestEmailBase): policy = policy.default def test_error_on_setitem_if_max_count_exceeded(self): m = self._str_msg("") m['To'] = 'abc@xyz' with self.assertRaises(ValueError): m['To'] = 'xyz@abc' def test_rfc2043_auto_decoded_and_emailmessage_used(self): m = message_from_string(textwrap.dedent("""\ Subject: Ayons asperges pour le =?utf-8?q?d=C3=A9jeuner?= From: =?utf-8?q?Pep=C3=A9?= Le Pew <[email protected]> To: "Penelope Pussycat" <"[email protected]"> MIME-Version: 1.0 Content-Type: text/plain; charset="utf-8" sample text """), policy=policy.default) self.assertEqual(m['subject'], "Ayons asperges pour le déjeuner") self.assertEqual(m['from'], "Pepé Le Pew <[email protected]>") self.assertIsInstance(m, EmailMessage) @parameterize class TestEmailMessageBase: policy = policy.default # The first argument is a triple (related, html, plain) of indices into the # list returned by 'walk' called on a Message constructed from the third. # The indices indicate which part should match the corresponding part-type # when passed to get_body (ie: the "first" part of that type in the # message). The second argument is a list of indices into the 'walk' list # of the attachments that should be returned by a call to # 'iter_attachments'. The third argument is a list of indices into 'walk' # that should be returned by a call to 'iter_parts'. Note that the first # item returned by 'walk' is the Message itself. 
message_params = { 'empty_message': ( (None, None, 0), (), (), ""), 'non_mime_plain': ( (None, None, 0), (), (), textwrap.dedent("""\ To: [email protected] simple text body """)), 'mime_non_text': ( (None, None, None), (), (), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: image/jpg bogus body. """)), 'plain_html_alternative': ( (None, 2, 1), (), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/alternative; boundary="===" preamble --=== Content-Type: text/plain simple body --=== Content-Type: text/html <p>simple body</p> --===-- """)), 'plain_html_mixed': ( (None, 2, 1), (), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" preamble --=== Content-Type: text/plain simple body --=== Content-Type: text/html <p>simple body</p> --===-- """)), 'plain_html_attachment_mixed': ( (None, None, 1), (2,), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" --=== Content-Type: text/plain simple body --=== Content-Type: text/html Content-Disposition: attachment <p>simple body</p> --===-- """)), 'html_text_attachment_mixed': ( (None, 2, None), (1,), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" --=== Content-Type: text/plain Content-Disposition: AtTaChment simple body --=== Content-Type: text/html <p>simple body</p> --===-- """)), 'html_text_attachment_inline_mixed': ( (None, 2, 1), (), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" --=== Content-Type: text/plain Content-Disposition: InLine simple body --=== Content-Type: text/html Content-Disposition: inline <p>simple body</p> --===-- """)), # RFC 2387 'related': ( (0, 1, None), (2,), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/related; 
boundary="==="; type=text/html --=== Content-Type: text/html <p>simple body</p> --=== Content-Type: image/jpg Content-ID: <image1> bogus data --===-- """)), # This message structure will probably never be seen in the wild, but # it proves we distinguish between text parts based on 'start'. The # content would not, of course, actually work :) 'related_with_start': ( (0, 2, None), (1,), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/related; boundary="==="; type=text/html; start="<body>" --=== Content-Type: text/html Content-ID: <include> useless text --=== Content-Type: text/html Content-ID: <body> <p>simple body</p> <!--#include file="<include>"--> --===-- """)), 'mixed_alternative_plain_related': ( (3, 4, 2), (6, 7), (1, 6, 7), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" --=== Content-Type: multipart/alternative; boundary="+++" --+++ Content-Type: text/plain simple body --+++ Content-Type: multipart/related; boundary="___" --___ Content-Type: text/html <p>simple body</p> --___ Content-Type: image/jpg Content-ID: <image1@cid> bogus jpg body --___-- --+++-- --=== Content-Type: image/jpg Content-Disposition: attachment bogus jpg body --=== Content-Type: image/jpg Content-Disposition: AttacHmenT another bogus jpg body --===-- """)), # This structure suggested by Stephen J. Turnbull...may not exist/be # supported in the wild, but we want to support it. 
'mixed_related_alternative_plain_html': ( (1, 4, 3), (6, 7), (1, 6, 7), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" --=== Content-Type: multipart/related; boundary="+++" --+++ Content-Type: multipart/alternative; boundary="___" --___ Content-Type: text/plain simple body --___ Content-Type: text/html <p>simple body</p> --___-- --+++ Content-Type: image/jpg Content-ID: <image1@cid> bogus jpg body --+++-- --=== Content-Type: image/jpg Content-Disposition: attachment bogus jpg body --=== Content-Type: image/jpg Content-Disposition: attachment another bogus jpg body --===-- """)), # Same thing, but proving we only look at the root part, which is the # first one if there isn't any start parameter. That is, this is a # broken related. 'mixed_related_alternative_plain_html_wrong_order': ( (1, None, None), (6, 7), (1, 6, 7), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" --=== Content-Type: multipart/related; boundary="+++" --+++ Content-Type: image/jpg Content-ID: <image1@cid> bogus jpg body --+++ Content-Type: multipart/alternative; boundary="___" --___ Content-Type: text/plain simple body --___ Content-Type: text/html <p>simple body</p> --___-- --+++-- --=== Content-Type: image/jpg Content-Disposition: attachment bogus jpg body --=== Content-Type: image/jpg Content-Disposition: attachment another bogus jpg body --===-- """)), 'message_rfc822': ( (None, None, None), (), (), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: message/rfc822 To: [email protected] From: [email protected] this is a message body. """)), 'mixed_text_message_rfc822': ( (None, None, 1), (2,), (1, 2), textwrap.dedent("""\ To: [email protected] MIME-Version: 1.0 Content-Type: multipart/mixed; boundary="===" --=== Content-Type: text/plain Your message has bounced, ser. 
--=== Content-Type: message/rfc822 To: [email protected] From: [email protected] this is a message body. --===-- """)), } def message_as_get_body(self, body_parts, attachments, parts, msg): m = self._str_msg(msg) allparts = list(m.walk()) expected = [None if n is None else allparts[n] for n in body_parts] related = 0; html = 1; plain = 2 self.assertEqual(m.get_body(), first(expected)) self.assertEqual(m.get_body(preferencelist=( 'related', 'html', 'plain')), first(expected)) self.assertEqual(m.get_body(preferencelist=('related', 'html')), first(expected[related:html+1])) self.assertEqual(m.get_body(preferencelist=('related', 'plain')), first([expected[related], expected[plain]])) self.assertEqual(m.get_body(preferencelist=('html', 'plain')), first(expected[html:plain+1])) self.assertEqual(m.get_body(preferencelist=['related']), expected[related]) self.assertEqual(m.get_body(preferencelist=['html']), expected[html]) self.assertEqual(m.get_body(preferencelist=['plain']), expected[plain]) self.assertEqual(m.get_body(preferencelist=('plain', 'html')), first(expected[plain:html-1:-1])) self.assertEqual(m.get_body(preferencelist=('plain', 'related')), first([expected[plain], expected[related]])) self.assertEqual(m.get_body(preferencelist=('html', 'related')), first(expected[html::-1])) self.assertEqual(m.get_body(preferencelist=('plain', 'html', 'related')), first(expected[::-1])) self.assertEqual(m.get_body(preferencelist=('html', 'plain', 'related')), first([expected[html], expected[plain], expected[related]])) def message_as_iter_attachment(self, body_parts, attachments, parts, msg): m = self._str_msg(msg) allparts = list(m.walk()) attachments = [allparts[n] for n in attachments] self.assertEqual(list(m.iter_attachments()), attachments) def message_as_iter_parts(self, body_parts, attachments, parts, msg): m = self._str_msg(msg) allparts = list(m.walk()) parts = [allparts[n] for n in parts] self.assertEqual(list(m.iter_parts()), parts) class _TestContentManager: def 
get_content(self, msg, *args, **kw): return msg, args, kw def set_content(self, msg, *args, **kw): self.msg = msg self.args = args self.kw = kw def test_get_content_with_cm(self): m = self._str_msg('') cm = self._TestContentManager() self.assertEqual(m.get_content(content_manager=cm), (m, (), {})) msg, args, kw = m.get_content('foo', content_manager=cm, bar=1, k=2) self.assertEqual(msg, m) self.assertEqual(args, ('foo',)) self.assertEqual(kw, dict(bar=1, k=2)) def test_get_content_default_cm_comes_from_policy(self): p = policy.default.clone(content_manager=self._TestContentManager()) m = self._str_msg('', policy=p) self.assertEqual(m.get_content(), (m, (), {})) msg, args, kw = m.get_content('foo', bar=1, k=2) self.assertEqual(msg, m) self.assertEqual(args, ('foo',)) self.assertEqual(kw, dict(bar=1, k=2)) def test_set_content_with_cm(self): m = self._str_msg('') cm = self._TestContentManager() m.set_content(content_manager=cm) self.assertEqual(cm.msg, m) self.assertEqual(cm.args, ()) self.assertEqual(cm.kw, {}) m.set_content('foo', content_manager=cm, bar=1, k=2) self.assertEqual(cm.msg, m) self.assertEqual(cm.args, ('foo',)) self.assertEqual(cm.kw, dict(bar=1, k=2)) def test_set_content_default_cm_comes_from_policy(self): cm = self._TestContentManager() p = policy.default.clone(content_manager=cm) m = self._str_msg('', policy=p) m.set_content() self.assertEqual(cm.msg, m) self.assertEqual(cm.args, ()) self.assertEqual(cm.kw, {}) m.set_content('foo', bar=1, k=2) self.assertEqual(cm.msg, m) self.assertEqual(cm.args, ('foo',)) self.assertEqual(cm.kw, dict(bar=1, k=2)) # outcome is whether xxx_method should raise ValueError error when called # on multipart/subtype. Blank outcome means it depends on xxx (add # succeeds, make raises). Note: 'none' means there are content-type # headers but payload is None...this happening in practice would be very # unusual, so treating it as if there were content seems reasonable. 
# method subtype outcome subtype_params = ( ('related', 'no_content', 'succeeds'), ('related', 'none', 'succeeds'), ('related', 'plain', 'succeeds'), ('related', 'related', ''), ('related', 'alternative', 'raises'), ('related', 'mixed', 'raises'), ('alternative', 'no_content', 'succeeds'), ('alternative', 'none', 'succeeds'), ('alternative', 'plain', 'succeeds'), ('alternative', 'related', 'succeeds'), ('alternative', 'alternative', ''), ('alternative', 'mixed', 'raises'), ('mixed', 'no_content', 'succeeds'), ('mixed', 'none', 'succeeds'), ('mixed', 'plain', 'succeeds'), ('mixed', 'related', 'succeeds'), ('mixed', 'alternative', 'succeeds'), ('mixed', 'mixed', ''), ) def _make_subtype_test_message(self, subtype): m = self.message() payload = None msg_headers = [ ('To', '[email protected]'), ('From', '[email protected]'), ] if subtype != 'no_content': ('content-shadow', 'Logrus'), msg_headers.append(('X-Random-Header', 'Corwin')) if subtype == 'text': payload = '' msg_headers.append(('Content-Type', 'text/plain')) m.set_payload('') elif subtype != 'no_content': payload = [] msg_headers.append(('Content-Type', 'multipart/' + subtype)) msg_headers.append(('X-Trump', 'Random')) m.set_payload(payload) for name, value in msg_headers: m[name] = value return m, msg_headers, payload def _check_disallowed_subtype_raises(self, m, method_name, subtype, method): with self.assertRaises(ValueError) as ar: getattr(m, method)() exc_text = str(ar.exception) self.assertIn(subtype, exc_text) self.assertIn(method_name, exc_text) def _check_make_multipart(self, m, msg_headers, payload): count = 0 for name, value in msg_headers: if not name.lower().startswith('content-'): self.assertEqual(m[name], value) count += 1 self.assertEqual(len(m), count+1) # +1 for new Content-Type part = next(m.iter_parts()) count = 0 for name, value in msg_headers: if name.lower().startswith('content-'): self.assertEqual(part[name], value) count += 1 self.assertEqual(len(part), count) 
self.assertEqual(part.get_payload(), payload) def subtype_as_make(self, method, subtype, outcome): m, msg_headers, payload = self._make_subtype_test_message(subtype) make_method = 'make_' + method if outcome in ('', 'raises'): self._check_disallowed_subtype_raises(m, method, subtype, make_method) return getattr(m, make_method)() self.assertEqual(m.get_content_maintype(), 'multipart') self.assertEqual(m.get_content_subtype(), method) if subtype == 'no_content': self.assertEqual(len(m.get_payload()), 0) self.assertEqual(m.items(), msg_headers + [('Content-Type', 'multipart/'+method)]) else: self.assertEqual(len(m.get_payload()), 1) self._check_make_multipart(m, msg_headers, payload) def subtype_as_make_with_boundary(self, method, subtype, outcome): # Doing all variation is a bit of overkill... m = self.message() if outcome in ('', 'raises'): m['Content-Type'] = 'multipart/' + subtype with self.assertRaises(ValueError) as cm: getattr(m, 'make_' + method)() return if subtype == 'plain': m['Content-Type'] = 'text/plain' elif subtype != 'no_content': m['Content-Type'] = 'multipart/' + subtype getattr(m, 'make_' + method)(boundary="abc") self.assertTrue(m.is_multipart()) self.assertEqual(m.get_boundary(), 'abc') def test_policy_on_part_made_by_make_comes_from_message(self): for method in ('make_related', 'make_alternative', 'make_mixed'): m = self.message(policy=self.policy.clone(content_manager='foo')) m['Content-Type'] = 'text/plain' getattr(m, method)() self.assertEqual(m.get_payload(0).policy.content_manager, 'foo') class _TestSetContentManager: def set_content(self, msg, content, *args, **kw): msg['Content-Type'] = 'text/plain' msg.set_payload(content) def subtype_as_add(self, method, subtype, outcome): m, msg_headers, payload = self._make_subtype_test_message(subtype) cm = self._TestSetContentManager() add_method = 'add_attachment' if method=='mixed' else 'add_' + method if outcome == 'raises': self._check_disallowed_subtype_raises(m, method, subtype, add_method) 
return getattr(m, add_method)('test', content_manager=cm) self.assertEqual(m.get_content_maintype(), 'multipart') self.assertEqual(m.get_content_subtype(), method) if method == subtype or subtype == 'no_content': self.assertEqual(len(m.get_payload()), 1) for name, value in msg_headers: self.assertEqual(m[name], value) part = m.get_payload()[0] else: self.assertEqual(len(m.get_payload()), 2) self._check_make_multipart(m, msg_headers, payload) part = m.get_payload()[1] self.assertEqual(part.get_content_type(), 'text/plain') self.assertEqual(part.get_payload(), 'test') if method=='mixed': self.assertEqual(part['Content-Disposition'], 'attachment') elif method=='related': self.assertEqual(part['Content-Disposition'], 'inline') else: # Otherwise we don't guess. self.assertIsNone(part['Content-Disposition']) class _TestSetRaisingContentManager: def set_content(self, msg, content, *args, **kw): raise Exception('test') def test_default_content_manager_for_add_comes_from_policy(self): cm = self._TestSetRaisingContentManager() m = self.message(policy=self.policy.clone(content_manager=cm)) for method in ('add_related', 'add_alternative', 'add_attachment'): with self.assertRaises(Exception) as ar: getattr(m, method)('') self.assertEqual(str(ar.exception), 'test') def message_as_clear(self, body_parts, attachments, parts, msg): m = self._str_msg(msg) m.clear() self.assertEqual(len(m), 0) self.assertEqual(list(m.items()), []) self.assertIsNone(m.get_payload()) self.assertEqual(list(m.iter_parts()), []) def message_as_clear_content(self, body_parts, attachments, parts, msg): m = self._str_msg(msg) expected_headers = [h for h in m.keys() if not h.lower().startswith('content-')] m.clear_content() self.assertEqual(list(m.keys()), expected_headers) self.assertIsNone(m.get_payload()) self.assertEqual(list(m.iter_parts()), []) def test_is_attachment(self): m = self._make_message() self.assertFalse(m.is_attachment()) with self.assertWarns(DeprecationWarning): 
self.assertFalse(m.is_attachment) m['Content-Disposition'] = 'inline' self.assertFalse(m.is_attachment()) with self.assertWarns(DeprecationWarning): self.assertFalse(m.is_attachment) m.replace_header('Content-Disposition', 'attachment') self.assertTrue(m.is_attachment()) with self.assertWarns(DeprecationWarning): self.assertTrue(m.is_attachment) m.replace_header('Content-Disposition', 'AtTachMent') self.assertTrue(m.is_attachment()) with self.assertWarns(DeprecationWarning): self.assertTrue(m.is_attachment) m.set_param('filename', 'abc.png', 'Content-Disposition') self.assertTrue(m.is_attachment()) with self.assertWarns(DeprecationWarning): self.assertTrue(m.is_attachment) class TestEmailMessage(TestEmailMessageBase, TestEmailBase): message = EmailMessage def test_set_content_adds_MIME_Version(self): m = self._str_msg('') cm = self._TestContentManager() self.assertNotIn('MIME-Version', m) m.set_content(content_manager=cm) self.assertEqual(m['MIME-Version'], '1.0') class _MIME_Version_adding_CM: def set_content(self, msg, *args, **kw): msg['MIME-Version'] = '1.0' def test_set_content_does_not_duplicate_MIME_Version(self): m = self._str_msg('') cm = self._MIME_Version_adding_CM() self.assertNotIn('MIME-Version', m) m.set_content(content_manager=cm) self.assertEqual(m['MIME-Version'], '1.0') class TestMIMEPart(TestEmailMessageBase, TestEmailBase): # Doing the full test run here may seem a bit redundant, since the two # classes are almost identical. But what if they drift apart? So we do # the full tests so that any future drift doesn't introduce bugs. message = MIMEPart def test_set_content_does_not_add_MIME_Version(self): m = self._str_msg('') cm = self._TestContentManager() self.assertNotIn('MIME-Version', m) m.set_content(content_manager=cm) self.assertNotIn('MIME-Version', m) if __name__ == '__main__': unittest.main()
lgpl-3.0
GodBlessPP/2015cd_midterm2
static/Brython3.1.1-20150328-091302/Lib/site-packages/pygame/pkgdata.py
603
2146
"""pkgdata is a simple, extensible way for a package to acquire data file
resources.

The getResource function is equivalent to the standard idioms, such as
the following minimal implementation::

    import sys, os

    def getResource(identifier, pkgname=__name__):
        pkgpath = os.path.dirname(sys.modules[pkgname].__file__)
        path = os.path.join(pkgpath, identifier)
        return open(os.path.normpath(path), mode='rb')

When a __loader__ is present on the module given by __name__, it will defer
getResource to its get_data implementation and return it as a file-like
object (such as BytesIO).
"""

__all__ = ['getResource']

import sys
import os

from io import BytesIO

try:
    # Try to use setuptools if available.
    from pkg_resources import resource_stream
    _have_resource_stream = True
except ImportError:
    _have_resource_stream = False


def getResource(identifier, pkgname=__name__):
    """Acquire a readable object for a given package name and identifier.
    An IOError will be raised if the resource can not be found.

    For example::

        mydata = getResource('mypkgdata.jpg').read()

    Note that the package name must be fully qualified, if given, such
    that it would be found in sys.modules.

    In some cases, getResource will return a real file object.  In that
    case, it may be useful to use its name attribute to get the path
    rather than use it as a file-like object.  For example, you may
    be handing data off to a C API.
    """
    # Prefer setuptools when it is installed.
    if _have_resource_stream:
        return resource_stream(pkgname, identifier)

    mod = sys.modules[pkgname]
    fn = getattr(mod, '__file__', None)
    if fn is None:
        # FIX: the pkgname argument was previously never interpolated,
        # leaving a literal '%r' in the error message.
        raise IOError("%r has no __file__!" % pkgname)

    path = os.path.join(os.path.dirname(fn), identifier)
    loader = getattr(mod, '__loader__', None)
    if loader is not None:
        try:
            data = loader.get_data(path)
        except IOError:
            # fall through to the plain-file path below
            pass
        else:
            # FIX: get_data returns bytes; wrap in BytesIO (not StringIO)
            # so all return paths are binary file-like objects.
            return BytesIO(data)
    return open(os.path.normpath(path), 'rb')
agpl-3.0
rochaporto/collectd-ceph
plugins/base.py
3
4986
#!/usr/bin/env python
#
# vim: tabstop=4 shiftwidth=4

# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; only version 2 of the License is applicable.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors:
#   Ricardo Rocha <[email protected]>
#
# About this plugin:
#   Helper object for all plugins.
#
# collectd:
#   http://collectd.org
# collectd-python:
#   http://collectd.org/documentation/manpages/collectd-python.5.shtml
#

import collectd
import datetime
import traceback


class Base(object):
    """Base helper for collectd-ceph plugins.

    Provides config parsing (config_callback), stat dispatching
    (dispatch / dispatch_value) and level-gated logging. Subclasses must
    override get_stats() to return the nested stats dict to dispatch.
    """

    def __init__(self):
        self.verbose = False
        self.debug = False
        self.prefix = ''
        self.cluster = 'ceph'
        self.testpool = 'test'
        self.interval = 60.0

    def config_callback(self, conf):
        """Takes a collectd conf object and fills in the local config."""
        for node in conf.children:
            if node.key == "Verbose":
                if node.values[0] in ['True', 'true']:
                    self.verbose = True
            elif node.key == "Debug":
                if node.values[0] in ['True', 'true']:
                    self.debug = True
            elif node.key == "Prefix":
                self.prefix = node.values[0]
            elif node.key == 'Cluster':
                self.cluster = node.values[0]
            elif node.key == 'TestPool':
                self.testpool = node.values[0]
            elif node.key == 'Interval':
                self.interval = float(node.values[0])
            else:
                collectd.warning("%s: unknown config key: %s"
                                 % (self.prefix, node.key))

    def dispatch(self, stats):
        """
        Dispatches the given stats.

        stats should be something like:

        {'plugin': {'plugin_instance': {'type': {'type_instance': <value>, ...}}}}
        """
        if not stats:
            collectd.error("%s: failed to retrieve stats" % self.prefix)
            return

        self.logdebug("dispatching %d new stats :: %s" % (len(stats), stats))
        try:
            for plugin in stats.keys():
                for plugin_instance in stats[plugin].keys():
                    for type in stats[plugin][plugin_instance].keys():
                        type_value = stats[plugin][plugin_instance][type]
                        # A leaf value is dispatched directly; a dict means
                        # one more level of type_instance keys.
                        if not isinstance(type_value, dict):
                            self.dispatch_value(plugin, plugin_instance,
                                                type, None, type_value)
                        else:
                            for type_instance in stats[plugin][plugin_instance][type].keys():
                                self.dispatch_value(
                                    plugin, plugin_instance, type, type_instance,
                                    stats[plugin][plugin_instance][type][type_instance])
        except Exception as exc:
            collectd.error("%s: failed to dispatch values :: %s :: %s"
                           % (self.prefix, exc, traceback.format_exc()))

    def dispatch_value(self, plugin, plugin_instance, type, type_instance, value):
        """Looks for the given stat in stats, and dispatches it"""
        self.logdebug("dispatching value %s.%s.%s.%s=%s"
                      % (plugin, plugin_instance, type, type_instance, value))

        val = collectd.Values(type='gauge')
        val.plugin = plugin
        val.plugin_instance = plugin_instance
        if type_instance is not None:
            val.type_instance = "%s-%s" % (type, type_instance)
        else:
            val.type_instance = type
        val.values = [value]
        val.interval = self.interval
        val.dispatch()

        self.logdebug("sent metric %s.%s.%s.%s.%s"
                      % (plugin, plugin_instance, type, type_instance, value))

    def read_callback(self):
        """Collect stats via get_stats() and dispatch them.

        Bug fix: the original fell through to self.dispatch(stats) even when
        get_stats() raised, which then crashed with an UnboundLocalError on
        `stats`; the error is now logged and the callback returns early.
        """
        try:
            start = datetime.datetime.now()
            stats = self.get_stats()
            self.logverbose("collectd new data from service :: took %d seconds"
                            % (datetime.datetime.now() - start).seconds)
        except Exception as exc:
            collectd.error("%s: failed to get stats :: %s :: %s"
                           % (self.prefix, exc, traceback.format_exc()))
            return

        self.dispatch(stats)

    def get_stats(self):
        """To be overridden by subclasses; must return the stats dict."""
        collectd.error('Not implemented, should be subclassed')

    def logverbose(self, msg):
        # Verbose messages are opt-in via the Verbose config key.
        if self.verbose:
            collectd.info("%s: %s" % (self.prefix, msg))

    def logdebug(self, msg):
        # Debug messages are opt-in via the Debug config key.
        if self.debug:
            collectd.info("%s: %s" % (self.prefix, msg))
gpl-2.0
blaggacao/OpenUpgrade
addons/resource/migrations/8.0.1.1/pre-migration.py
14
1091
# -*- coding: utf-8 -*-
##############################################################################
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.openupgrade import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    """Pre-migration step for resource 8.0.1.1.

    The hr_holidays_remaining_leaves_user view depends on columns whose
    types change during this migration, so it would block the type change.
    Dropping it here is safe because the view is recreated when the module
    is loaded again.
    """
    sql = 'drop view if exists hr_holidays_remaining_leaves_user cascade'
    cr.execute(sql)
agpl-3.0
demonchild2112/travis-test
grr/test_lib/test_lib_test.py
2
7490
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

import threading
import time

from absl.testing import absltest

from grr_response_core.lib import rdfvalue
from grr.test_lib import test_lib


class FakeTimelineTest(absltest.TestCase):
    """Tests for test_lib.FakeTimeline, a fake clock that deterministically
    interleaves a target thread's time.sleep() calls with Run(duration=...)
    advances of simulated time."""

    def testRunSingleSleep(self):
        # The thread sleeps 10 simulated seconds per loop iteration, so
        # "foo" should only appear each time a 10-second boundary is crossed.
        log = []

        def foo():
            while True:
                log.append("foo")
                time.sleep(10)

        thread = threading.Thread(name="foo-thread", target=foo)

        with test_lib.FakeTimeline(thread) as foo_timeline:
            log.append("start")
            foo_timeline.Run(duration=rdfvalue.Duration.From(5, rdfvalue.SECONDS))
            log.append("5 seconds have passed")
            foo_timeline.Run(duration=rdfvalue.Duration.From(3, rdfvalue.SECONDS))
            log.append("3 seconds have passed")
            foo_timeline.Run(duration=rdfvalue.Duration.From(4, rdfvalue.SECONDS))
            log.append("4 seconds have passed")
            foo_timeline.Run(duration=rdfvalue.Duration.From(22, rdfvalue.SECONDS))
            log.append("22 seconds have passed")

        self.assertEqual(log, [
            "start",
            "foo",
            "5 seconds have passed",
            "3 seconds have passed",
            "foo",
            "4 seconds have passed",
            "foo",
            "foo",
            "22 seconds have passed",
        ])

    def testRunMultiSleep(self):
        # Alternating 10s and 5s sleeps: "bar" fires on 10s boundaries,
        # "baz" 5 simulated seconds after each "bar".
        log = []

        def barz():
            while True:
                time.sleep(10)
                log.append("bar")
                time.sleep(5)
                log.append("baz")

        thread = threading.Thread(name="barz=thread", target=barz)

        with test_lib.FakeTimeline(thread) as barz_timeline:
            log.append("start")
            barz_timeline.Run(duration=rdfvalue.Duration.From(5, rdfvalue.SECONDS))
            log.append("5 seconds have passed")
            barz_timeline.Run(duration=rdfvalue.Duration.From(7, rdfvalue.SECONDS))
            log.append("7 seconds have passed")
            barz_timeline.Run(duration=rdfvalue.Duration.From(1, rdfvalue.SECONDS))
            log.append("1 second has passed")
            barz_timeline.Run(duration=rdfvalue.Duration.From(3, rdfvalue.SECONDS))
            log.append("3 seconds have passed")
            barz_timeline.Run(duration=rdfvalue.Duration.From(20, rdfvalue.SECONDS))
            log.append("20 seconds have passed")

        self.assertEqual(log, [
            "start",
            "5 seconds have passed",
            "bar",
            "7 seconds have passed",
            "1 second has passed",
            "baz",
            "3 seconds have passed",
            "bar",
            "baz",
            "20 seconds have passed",
        ])

    def testRunSleepZero(self):
        # sleep(0) must not block simulated time: the whole thread body runs
        # within the first zero-duration Run call.
        log = []

        def norf():
            time.sleep(0)
            log.append("norf")
            time.sleep(0)
            log.append("norf")
            time.sleep(0)
            log.append("norf")

        thread = threading.Thread(name="norf-thread", target=norf)

        with test_lib.FakeTimeline(thread) as norf_timeline:
            log.append("start")
            norf_timeline.Run(duration=rdfvalue.Duration.From(0, rdfvalue.SECONDS))
            log.append("rest")
            norf_timeline.Run(duration=rdfvalue.Duration.From(0, rdfvalue.SECONDS))
            log.append("stop")

        self.assertEqual(log, [
            "start",
            "norf",
            "norf",
            "norf",
            "rest",
            "stop",
        ])

    def testRunException(self):
        # An exception raised in the fake thread surfaces from the Run call
        # that crosses the moment it was raised (at 20 simulated seconds).
        log = []

        def quux():
            time.sleep(10)
            log.append("foo")
            time.sleep(10)
            raise Exception("bar")

        thread = threading.Thread(name="quux-thread", target=quux)

        with test_lib.FakeTimeline(thread) as quux_timeline:
            log.append("start")
            quux_timeline.Run(duration=rdfvalue.Duration.From(6, rdfvalue.SECONDS))
            log.append("6 seconds have passed")
            quux_timeline.Run(duration=rdfvalue.Duration.From(5, rdfvalue.SECONDS))
            log.append("5 seconds have passed")
            quux_timeline.Run(duration=rdfvalue.Duration.From(7, rdfvalue.SECONDS))
            log.append("7 seconds have passed")

            self.assertEqual(log, [
                "start",
                "6 seconds have passed",
                "foo",
                "5 seconds have passed",
                "7 seconds have passed",
            ])

            with self.assertRaisesRegex(Exception, "bar"):
                quux_timeline.Run(duration=rdfvalue.Duration.From(10, rdfvalue.SECONDS))

    def testNoRuns(self):
        # Without any Run call, the fake thread never executes at all.
        log = []

        def thud():
            log.append("thud")

        with test_lib.FakeTimeline(
                threading.Thread(name="thud-thread", target=thud)):
            pass

        self.assertEqual(log, [])

    def testRunAfterFinish(self):
        # Extra Run calls after the thread body finished are harmless no-ops.
        log = []

        def moof():
            log.append("moof")

        with test_lib.FakeTimeline(
                threading.Thread(name="moof-thread", target=moof)) as moof_timeline:
            moof_timeline.Run(duration=rdfvalue.Duration.From(10, rdfvalue.SECONDS))
            moof_timeline.Run(duration=rdfvalue.Duration.From(20, rdfvalue.SECONDS))
            moof_timeline.Run(duration=rdfvalue.Duration.From(30, rdfvalue.SECONDS))

        self.assertEqual(log, ["moof"])

    def testRunWithoutContext(self):
        # Run is only valid inside the context manager.
        weez_timeline = test_lib.FakeTimeline(
            threading.Thread(name="weez-thread", target=lambda: None))

        with self.assertRaisesRegex(AssertionError, "called without context"):
            weez_timeline.Run(duration=rdfvalue.Duration.From(10, rdfvalue.SECONDS))

    def testReuse(self):
        # A timeline object cannot be entered a second time.
        log = []

        def blargh():
            log.append("blargh")

        blargh_timeline = test_lib.FakeTimeline(
            threading.Thread(name="blargh-thread", target=blargh))

        with blargh_timeline:
            blargh_timeline.Run(duration=rdfvalue.Duration.From(5, rdfvalue.SECONDS))

        self.assertEqual(log, ["blargh"])

        with self.assertRaisesRegex(AssertionError, "cannot be reused"):
            with blargh_timeline:
                blargh_timeline.Run(
                    duration=rdfvalue.Duration.From(10, rdfvalue.SECONDS))

    def testTimePassage(self):
        # RDFDatetime.Now() inside the fake thread must reflect the simulated
        # clock seeded via the `now` argument (starting at 2077-01-01).
        log = []

        def fhesh():
            log.append(rdfvalue.RDFDatetime.Now().Format("%Y-%m-%d"))
            time.sleep(
                rdfvalue.Duration.From(2, rdfvalue.DAYS).ToFractional(rdfvalue.SECONDS))
            log.append(rdfvalue.RDFDatetime.Now().Format("%Y-%m-%d"))
            time.sleep(
                rdfvalue.Duration.From(15, rdfvalue.SECONDS).ToFractional(
                    rdfvalue.SECONDS))
            log.append(rdfvalue.RDFDatetime.Now().Format("%Y-%m-%d %H:%M:%S"))
            time.sleep(
                rdfvalue.Duration.From(20, rdfvalue.MINUTES).ToFractional(
                    rdfvalue.SECONDS))
            log.append(rdfvalue.RDFDatetime.Now().Format("%Y-%m-%d %H:%M:%S"))

        fhesh_timeline = test_lib.FakeTimeline(
            thread=threading.Thread(name="fhesh-thread", target=fhesh),
            now=rdfvalue.RDFDatetime.FromHumanReadable("2077-01-01"))

        with fhesh_timeline:
            log.append("beep (0)")
            fhesh_timeline.Run(duration=rdfvalue.Duration.From(10, rdfvalue.SECONDS))
            log.append("beep (1)")
            fhesh_timeline.Run(duration=rdfvalue.Duration.From(10, rdfvalue.SECONDS))
            log.append("beep (2)")
            fhesh_timeline.Run(duration=rdfvalue.Duration.From(2, rdfvalue.DAYS))
            log.append("beep (3)")
            fhesh_timeline.Run(duration=rdfvalue.Duration.From(10, rdfvalue.SECONDS))
            log.append("beep (4)")
            fhesh_timeline.Run(duration=rdfvalue.Duration.From(30, rdfvalue.MINUTES))
            log.append("beep (5)")

        self.assertEqual(log, [
            "beep (0)",
            "2077-01-01",
            "beep (1)",
            "beep (2)",
            "2077-01-03",
            "2077-01-03 00:00:15",
            "beep (3)",
            "beep (4)",
            "2077-01-03 00:20:15",
            "beep (5)",
        ])


if __name__ == "__main__":
    absltest.main()
apache-2.0
jkandasa/integration_tests
cfme/tests/containers/test_relationships.py
1
4376
from random import randrange

import pytest

from cfme.containers.pod import Pod
from cfme.containers.provider import ContainersProvider, ContainersTestItem
from cfme.containers.service import Service
from cfme.containers.replicator import Replicator
from cfme.containers.image import Image
from cfme.containers.project import Project
from cfme.containers.template import Template
from cfme.containers.container import Container
from cfme.containers.image_registry import ImageRegistry
from cfme.containers.volume import Volume

from cfme.utils.appliance.implementations.ui import navigate_to


pytestmark = [
    pytest.mark.tier(1),
    pytest.mark.provider([ContainersProvider], scope='module')
]

# The polarion markers below are used to mark the test item
# with polarion test case ID.
# TODO: future enhancement - https://github.com/pytest-dev/pytest/pull/1921

TEST_ITEMS = [
    pytest.mark.polarion('CMP-9851')(ContainersTestItem(ContainersProvider, 'CMP-9851')),
    pytest.mark.polarion('CMP-9947')(ContainersTestItem(Container, 'CMP-9947')),
    pytest.mark.polarion('CMP-9929')(ContainersTestItem(Pod, 'CMP-9929')),
    pytest.mark.polarion('CMP-10564')(ContainersTestItem(Service, 'CMP-10564')),
    # TODO Add Node back into the list when other classes are updated to use WT views and widgets.
    # pytest.mark.polarion('CMP-9962')(ContainersTestItem(Node, 'CMP-9962')),
    pytest.mark.polarion('CMP-10565')(ContainersTestItem(Replicator, 'CMP-10565')),
    pytest.mark.polarion('CMP-9980')(ContainersTestItem(Image, 'CMP-9980')),
    pytest.mark.polarion('CMP-9994')(ContainersTestItem(ImageRegistry, 'CMP-9994')),
    pytest.mark.polarion('CMP-9868')(ContainersTestItem(Project, 'CMP-9868')),
    pytest.mark.polarion('CMP-10319')(ContainersTestItem(Template, 'CMP-10319')),
    pytest.mark.polarion('CMP-10410')(ContainersTestItem(Volume, 'CMP-10410'))
]


@pytest.mark.parametrize('test_item', TEST_ITEMS, scope='module',
                         ids=[ti.args[1].pretty_id() for ti in TEST_ITEMS])
@pytest.mark.usefixtures('has_persistent_volume')
@pytest.mark.usefixtures('setup_provider_modscope')
def test_relationships_tables(provider, has_persistent_volume, appliance, test_item):
    """This test verifies the integrity of the Relationships table.
    clicking on each field in the Relationships table takes the user
    to either Summary page where we verify that the field that appears
    in the Relationships table also appears in the Properties table,
    or to the page where the number of rows is equal to the number
    that is displayed in the Relationships table.
    """
    # ContainersProvider is tested via the provider fixture itself; any other
    # class is tested on a randomly-picked existing instance.
    instance = (provider if test_item.obj is ContainersProvider
                else test_item.obj.get_random_instances(provider, 1, appliance).pop())
    # Check the relationships linking & data integrity
    view = navigate_to(instance, 'Details')
    # Only follow relationship fields that actually reference something.
    relations = [key for key, val in view.entities.relationships.read().items() if val != '0']
    relation = relations[randrange(len(relations))]
    field = view.entities.relationships.get_field(relation)[1]
    text = field.text
    field.click()
    if text.isdigit():
        # Numeric field: target page row count must match the displayed count.
        view = appliance.browser.create_view(test_item.obj.all_view)
        value = int(text)
        items_amount = int(view.paginator.items_amount)
        assert items_amount == value, (
            'Difference between the value({}) in the relationships table in {}'
            'to number of records ({}) in the target page'
            .format(value, instance.name, items_amount)
        )
    else:
        # Named field: clicking must land on that entity's Summary page.
        view = appliance.browser.create_view(test_item.obj.details_view)
        assert view.title.text == '{} (Summary)'.format(text)


@pytest.mark.polarion('CMP-9934')
@pytest.mark.usefixtures('setup_provider')
def test_container_status_relationships_data_integrity(provider, appliance, soft_assert):
    """ This test verifies that the sum of running, waiting and terminated containers
        in the status summary table
        is the same number that appears in the Relationships table containers field
    """
    for obj in Pod.get_random_instances(provider, count=3, appliance=appliance):
        view = navigate_to(obj, 'Details')
        # Relationships 'Containers' count must equal the sum over the
        # container status summary (running + waiting + terminated).
        soft_assert(
            (int(view.entities.relationships.read()['Containers']) ==
             sum([int(v) for v in view.entities.container_statuses_summary.read().values()]))
        )
gpl-2.0
joelddiaz/openshift-tools
openshift/installer/vendored/openshift-ansible-3.8.36-1/roles/lib_openshift/src/test/unit/test_oc_serviceaccount.py
82
7174
'''
 Unit tests for oc serviceaccount
'''

import os
import six
import sys
import unittest
import mock

# Removing invalid variable names for tests so that I can
# keep them brief
# pylint: disable=invalid-name,no-name-in-module

# Disable import-error b/c our libraries aren't loaded in jenkins
# pylint: disable=import-error,wrong-import-position
# place class in our python path
module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library')  # noqa: E501
sys.path.insert(0, module_path)
from oc_serviceaccount import OCServiceAccount, locate_oc_binary  # noqa: E402


class OCServiceAccountTest(unittest.TestCase):
    '''
     Test class for OCServiceAccount
    '''

    @mock.patch('oc_serviceaccount.locate_oc_binary')
    @mock.patch('oc_serviceaccount.Utils.create_tmpfile_copy')
    @mock.patch('oc_serviceaccount.OCServiceAccount._run')
    def test_adding_a_serviceaccount(self, mock_cmd, mock_tmpfile_copy, mock_oc_binary):
        ''' Testing adding a serviceaccount '''

        # Arrange

        # run_ansible input parameters
        params = {
            'kubeconfig': '/etc/origin/master/admin.kubeconfig',
            'state': 'present',
            'debug': False,
            'name': 'testserviceaccountname',
            'namespace': 'default',
            'secrets': None,
            'image_pull_secrets': None,
        }

        valid_result_json = '''{
            "kind": "ServiceAccount",
            "apiVersion": "v1",
            "metadata": {
                "name": "testserviceaccountname",
                "namespace": "default",
                "selfLink": "/api/v1/namespaces/default/serviceaccounts/testserviceaccountname",
                "uid": "4d8320c9-e66f-11e6-8edc-0eece8f2ce22",
                "resourceVersion": "328450",
                "creationTimestamp": "2017-01-29T22:07:19Z"
            },
            "secrets": [
                {
                    "name": "testserviceaccountname-dockercfg-4lqd0"
                },
                {
                    "name": "testserviceaccountname-token-9h0ej"
                }
            ],
            "imagePullSecrets": [
                {
                    "name": "testserviceaccountname-dockercfg-4lqd0"
                }
            ]
        }'''

        # Return values of our mocked function call. These get returned once per call.
        # Sequence models: lookup miss -> create -> lookup hit.
        mock_cmd.side_effect = [
            # First call to mock
            (1, '', 'Error from server: serviceaccounts "testserviceaccountname" not found'),

            # Second call to mock
            (0, 'serviceaccount "testserviceaccountname" created', ''),

            # Third call to mock
            (0, valid_result_json, ''),
        ]

        mock_oc_binary.side_effect = [
            'oc'
        ]

        mock_tmpfile_copy.side_effect = [
            '/tmp/mocked_kubeconfig',
        ]

        # Act
        results = OCServiceAccount.run_ansible(params, False)

        # Assert
        self.assertTrue(results['changed'])
        self.assertEqual(results['results']['returncode'], 0)
        self.assertEqual(results['state'], 'present')

        # Making sure our mock was called as we expected
        mock_cmd.assert_has_calls([
            mock.call(['oc', 'get', 'sa', 'testserviceaccountname', '-o', 'json', '-n', 'default'], None),
            mock.call(['oc', 'create', '-f', mock.ANY, '-n', 'default'], None),
            mock.call(['oc', 'get', 'sa', 'testserviceaccountname', '-o', 'json', '-n', 'default'], None),
        ])

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_fallback(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup fallback '''

        # Empty PATH and no file on disk: fall back to the bare 'oc' name.
        mock_env_get.side_effect = lambda _v, _d: ''

        mock_path_exists.side_effect = lambda _: False

        self.assertEqual(locate_oc_binary(), 'oc')

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_path(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup in path '''

        oc_bin = '/usr/bin/oc'

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'

        mock_path_exists.side_effect = lambda f: f == oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_usr_local(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup in /usr/local/bin '''

        oc_bin = '/usr/local/bin/oc'

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'

        mock_path_exists.side_effect = lambda f: f == oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_home(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup in ~/bin '''

        oc_bin = os.path.expanduser('~/bin/oc')

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'

        mock_path_exists.side_effect = lambda f: f == oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_fallback_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup fallback '''

        # Py3 variant: shutil.which finds nothing, fall back to 'oc'.
        mock_env_get.side_effect = lambda _v, _d: ''

        mock_shutil_which.side_effect = lambda _f, path=None: None

        self.assertEqual(locate_oc_binary(), 'oc')

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_path_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup in path '''

        oc_bin = '/usr/bin/oc'

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'

        mock_shutil_which.side_effect = lambda _f, path=None: oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_usr_local_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup in /usr/local/bin '''

        oc_bin = '/usr/local/bin/oc'

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'

        mock_shutil_which.side_effect = lambda _f, path=None: oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_home_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup in ~/bin '''

        oc_bin = os.path.expanduser('~/bin/oc')

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'

        mock_shutil_which.side_effect = lambda _f, path=None: oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)
apache-2.0
aron-bordin/kivy
examples/demo/multistroke/settings.py
43
3078
__all__ = ('MultistrokeSettingsContainer', 'MultistrokeSettingItem',
           'MultistrokeSettingBoolean', 'MultistrokeSettingSlider',
           'MultistrokeSettingString', 'MultistrokeSettingTitle')

from kivy.factory import Factory
from kivy.lang import Builder
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.properties import (StringProperty, NumericProperty, OptionProperty,
                             BooleanProperty)
from kivy.uix.popup import Popup

# The widget trees for all classes below are defined in settings.kv.
Builder.load_file('settings.kv')


class MultistrokeSettingsContainer(GridLayout):
    """Container layout for a list of setting items; layout in settings.kv."""
    pass


class MultistrokeSettingItem(GridLayout):
    """Base class for a single setting row with a title and description."""
    title = StringProperty('<No title set>')
    desc = StringProperty('')


class MultistrokeSettingTitle(Label):
    """Section heading between groups of setting items."""
    title = StringProperty('<No title set>')
    desc = StringProperty('')


class MultistrokeSettingBoolean(MultistrokeSettingItem):
    """On/off setting; `value` holds the current state."""
    button_text = StringProperty('')
    value = BooleanProperty(False)


class MultistrokeSettingString(MultistrokeSettingItem):
    """Free-text setting."""
    value = StringProperty('')


class EditSettingPopup(Popup):
    """Popup used by MultistrokeSettingSlider for keyboard entry of a value.

    Fires `on_validate` when the user confirms the input (bound in kv)."""

    def __init__(self, **kwargs):
        super(EditSettingPopup, self).__init__(**kwargs)
        self.register_event_type('on_validate')

    def on_validate(self, *l):
        # Default handler required by register_event_type; does nothing.
        pass


class MultistrokeSettingSlider(MultistrokeSettingItem):
    """Numeric setting backed by a slider, with popup-based manual entry.

    `type` selects int or float semantics; `value` is clamped to [min, max].
    """
    min = NumericProperty(0)
    max = NumericProperty(100)
    type = OptionProperty('int', options=['float', 'int'])
    value = NumericProperty(0)

    def __init__(self, **kwargs):
        super(MultistrokeSettingSlider, self).__init__(**kwargs)
        self._popup = EditSettingPopup()
        self._popup.bind(on_validate=self._validate)
        self._popup.bind(on_dismiss=self._dismiss)

    def _to_numtype(self, v):
        # Coerce text input to the configured numeric type; fall back to
        # the minimum on unparseable input.
        try:
            if self.type == 'float':
                return round(float(v), 1)
            else:
                return int(v)
        except ValueError:
            return self.min

    def _dismiss(self, *l):
        # Release the keyboard when the popup closes.
        self._popup.ids.input.focus = False

    def _validate(self, instance, value):
        # Accept the typed value, clamped to the [min, max] range.
        self._popup.dismiss()
        val = self._to_numtype(self._popup.ids.input.text)
        if val < self.min:
            val = self.min
        elif val > self.max:
            val = self.max
        self.value = val

    def on_touch_down(self, touch):
        # Only a tap on the value label opens the edit popup; everything
        # else (the slider itself) gets normal touch handling.
        if not self.ids.sliderlabel.collide_point(*touch.pos):
            return super(MultistrokeSettingSlider, self).on_touch_down(touch)
        ids = self._popup.ids
        ids.value = str(self.value)
        ids.input.text = str(self._to_numtype(self.value))
        self._popup.open()
        ids.input.focus = True
        ids.input.select_all()


Factory.register('MultistrokeSettingsContainer',
                 cls=MultistrokeSettingsContainer)
Factory.register('MultistrokeSettingTitle', cls=MultistrokeSettingTitle)
Factory.register('MultistrokeSettingBoolean', cls=MultistrokeSettingBoolean)
Factory.register('MultistrokeSettingSlider', cls=MultistrokeSettingSlider)
Factory.register('MultistrokeSettingString', cls=MultistrokeSettingString)
mit
runt18/mojo
third_party/cython/src/Cython/Debugger/DebugWriter.py
103
2192
from __future__ import with_statement import os import sys import errno try: from lxml import etree have_lxml = True except ImportError: have_lxml = False try: # Python 2.5 from xml.etree import cElementTree as etree except ImportError: try: # Python 2.5 from xml.etree import ElementTree as etree except ImportError: try: # normal cElementTree install import cElementTree as etree except ImportError: try: # normal ElementTree install import elementtree.ElementTree as etree except ImportError: etree = None from Cython.Compiler import Errors class CythonDebugWriter(object): """ Class to output debugging information for cygdb It writes debug information to cython_debug/cython_debug_info_<modulename> in the build directory. """ def __init__(self, output_dir): if etree is None: raise Errors.NoElementTreeInstalledException() self.output_dir = os.path.join(output_dir, 'cython_debug') self.tb = etree.TreeBuilder() # set by Cython.Compiler.ParseTreeTransforms.DebugTransform self.module_name = None self.start('cython_debug', attrs=dict(version='1.0')) def start(self, name, attrs=None): self.tb.start(name, attrs or {}) def end(self, name): self.tb.end(name) def serialize(self): self.tb.end('Module') self.tb.end('cython_debug') xml_root_element = self.tb.close() try: os.makedirs(self.output_dir) except OSError, e: if e.errno != errno.EEXIST: raise et = etree.ElementTree(xml_root_element) kw = {} if have_lxml: kw['pretty_print'] = True fn = "cython_debug_info_" + self.module_name et.write(os.path.join(self.output_dir, fn), encoding="UTF-8", **kw) interpreter_path = os.path.join(self.output_dir, 'interpreter') with open(interpreter_path, 'w') as f: f.write(sys.executable)
bsd-3-clause
csdms/bmi-live
bmi_live/diffusion.py
1
1682
"""A model of temperature diffusion over a rectangular plate.""" import numpy as np import yaml class Diffusion(object): """Model of temperature diffusion on a plate.""" def __init__(self, config_file=None): """Initialize the model.""" if config_file is not None: with open(config_file, 'r') as fp: parameters = yaml.safe_load(fp) for key, value in parameters.items(): setattr(self, key, value) else: self.nx = 8 self.ny = 6 self.dx = 1.0 self.dy = 1.0 self.alpha = 0.9 self.time = 0.0 self.dt = min(self.dx, self.dy) ** 2.0 / (4.0 * self.alpha) self.dt /= 2.0 self.temperature = np.zeros((self.ny, self.nx)) self.new_temperature = self.temperature.copy() def advance(self): """Advance the model by one time step.""" self.solve() self.time += self.dt def solve(self): """Solve the diffusion equation.""" dx2, dy2 = self.dx**2, self.dy**2 coef = self.alpha * self.dt / (2.0*(dx2 + dy2)) for i in range(1, self.ny-1): for j in range(1, self.nx-1): self.new_temperature[i,j] = \ self.temperature[i,j] + coef * ( dx2*(self.temperature[i,j-1] + self.temperature[i,j+1]) + dy2*(self.temperature[i-1,j] + self.temperature[i+1,j]) - 2.0*(dx2 + dy2)*self.temperature[i,j]) self.new_temperature[(0, -1), :] = 0.0 self.new_temperature[:, (0, -1)] = 0.0 self.temperature[:] = self.new_temperature
mit
anryko/ansible
test/units/parsing/test_dataloader.py
57
9955
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from units.compat import unittest
from units.compat.mock import patch, mock_open
from ansible.errors import AnsibleParserError, yaml_strings, AnsibleFileNotFound
from ansible.parsing.vault import AnsibleVaultError
from ansible.module_utils._text import to_text
from ansible.module_utils.six import PY3

from units.mock.vault_helper import TextVaultSecret
from ansible.parsing.dataloader import DataLoader

from units.mock.path import mock_unfrackpath_noop


class TestDataLoader(unittest.TestCase):
    # Core DataLoader behavior: role detection, JSON/YAML parsing, error
    # reporting and path resolution.

    def setUp(self):
        self._loader = DataLoader()

    @patch('os.path.exists')
    def test__is_role(self, p_exists):
        # A directory is a role iff tasks/main.yml exists under it.
        p_exists.side_effect = lambda p: p == b'test_path/tasks/main.yml'
        self.assertTrue(self._loader._is_role('test_path/tasks'))
        self.assertTrue(self._loader._is_role('test_path/'))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_json_from_file(self, mock_def):
        mock_def.return_value = (b"""{"a": 1, "b": 2, "c": 3}""", True)
        output = self._loader.load_from_file('dummy_json.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_yaml_from_file(self, mock_def):
        mock_def.return_value = (b"""
        a: 1
        b: 2
        c: 3
        """, True)
        output = self._loader.load_from_file('dummy_yaml.txt')
        self.assertEqual(output, dict(a=1, b=2, c=3))

    @patch.object(DataLoader, '_get_file_contents')
    def test_parse_fail_from_file(self, mock_def):
        # '***' is not valid YAML, so parsing must raise.
        mock_def.return_value = (b"""
        TEXT:
            ***
               NOT VALID
        """, True)
        self.assertRaises(AnsibleParserError, self._loader.load_from_file, 'dummy_yaml_bad.txt')

    @patch('ansible.errors.AnsibleError._get_error_lines_from_file')
    @patch.object(DataLoader, '_get_file_contents')
    def test_tab_error(self, mock_def, mock_get_error_lines):
        # A literal tab in YAML must produce the dedicated leading-tab hint.
        mock_def.return_value = (u"""---\nhosts: localhost\nvars:\n  foo: bar\n\tblip: baz""", True)
        mock_get_error_lines.return_value = ('''\tblip: baz''', '''..foo: bar''')
        with self.assertRaises(AnsibleParserError) as cm:
            self._loader.load_from_file('dummy_yaml_text.txt')
        self.assertIn(yaml_strings.YAML_COMMON_LEADING_TAB_ERROR, str(cm.exception))
        self.assertIn('foo: bar', str(cm.exception))

    @patch('ansible.parsing.dataloader.unfrackpath', mock_unfrackpath_noop)
    @patch.object(DataLoader, '_is_role')
    def test_path_dwim_relative(self, mock_is_role):
        """
        simulate a nested dynamic include:

        playbook.yml:
        - hosts: localhost
          roles:
            - { role: 'testrole' }

        testrole/tasks/main.yml:
        - include: "include1.yml"
          static: no

        testrole/tasks/include1.yml:
        - include: include2.yml
          static: no

        testrole/tasks/include2.yml:
        - debug: msg="blah"
        """
        mock_is_role.return_value = False
        with patch('os.path.exists') as mock_os_path_exists:
            mock_os_path_exists.return_value = False
            self._loader.path_dwim_relative('/tmp/roles/testrole/tasks', 'tasks', 'included2.yml')

            # Fetch first args for every call
            # mock_os_path_exists.assert_any_call isn't used because os.path.normpath must be used in order to compare paths
            called_args = [os.path.normpath(to_text(call[0][0])) for call in mock_os_path_exists.call_args_list]

            # 'path_dwim_relative' docstrings say 'with or without explicitly named dirname subdirs':
            self.assertIn('/tmp/roles/testrole/tasks/included2.yml', called_args)
            self.assertIn('/tmp/roles/testrole/tasks/tasks/included2.yml', called_args)

            # relative directories below are taken in account too:
            self.assertIn('tasks/included2.yml', called_args)
            self.assertIn('included2.yml', called_args)

    def test_path_dwim_root(self):
        self.assertEqual(self._loader.path_dwim('/'), '/')

    def test_path_dwim_home(self):
        self.assertEqual(self._loader.path_dwim('~'), os.path.expanduser('~'))

    def test_path_dwim_tilde_slash(self):
        self.assertEqual(self._loader.path_dwim('~/'), os.path.expanduser('~'))

    def test_get_real_file(self):
        self.assertEqual(self._loader.get_real_file(__file__), __file__)

    def test_is_file(self):
        self.assertTrue(self._loader.is_file(__file__))

    def test_is_directory_positive(self):
        self.assertTrue(self._loader.is_directory(os.path.dirname(__file__)))

    def test_get_file_contents_none_path(self):
        self.assertRaisesRegexp(AnsibleParserError, 'Invalid filename',
                                self._loader._get_file_contents, None)

    def test_get_file_contents_non_existent_path(self):
        self.assertRaises(AnsibleFileNotFound, self._loader._get_file_contents, '/non_existent_file')


class TestPathDwimRelativeDataLoader(unittest.TestCase):
    # path_dwim_relative edge cases (root, tilde, role-suffixed paths).

    def setUp(self):
        self._loader = DataLoader()

    def test_all_slash(self):
        self.assertEqual(self._loader.path_dwim_relative('/', '/', '/'), '/')

    def test_path_endswith_role(self):
        self.assertEqual(self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='/'), '/')

    def test_path_endswith_role_main_yml(self):
        self.assertIn('main.yml', self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='main.yml'))

    def test_path_endswith_role_source_tilde(self):
        self.assertEqual(self._loader.path_dwim_relative(path='foo/bar/tasks/', dirname='/', source='~/'), os.path.expanduser('~'))


class TestPathDwimRelativeStackDataLoader(unittest.TestCase):
    # path_dwim_relative_stack: searching across a stack of candidate paths.

    def setUp(self):
        self._loader = DataLoader()

    def test_none(self):
        self.assertRaisesRegexp(AnsibleFileNotFound, 'on the Ansible Controller', self._loader.path_dwim_relative_stack, None, None, None)

    def test_empty_strings(self):
        self.assertEqual(self._loader.path_dwim_relative_stack('', '', ''), './')

    def test_empty_lists(self):
        self.assertEqual(self._loader.path_dwim_relative_stack([], '', '~/'), os.path.expanduser('~'))

    def test_all_slash(self):
        self.assertEqual(self._loader.path_dwim_relative_stack('/', '/', '/'), '/')

    def test_path_endswith_role(self):
        self.assertEqual(self._loader.path_dwim_relative_stack(paths=['foo/bar/tasks/'], dirname='/', source='/'), '/')

    def test_path_endswith_role_source_tilde(self):
        self.assertEqual(self._loader.path_dwim_relative_stack(paths=['foo/bar/tasks/'], dirname='/', source='~/'), os.path.expanduser('~'))

    def test_path_endswith_role_source_main_yml(self):
        self.assertRaises(AnsibleFileNotFound, self._loader.path_dwim_relative_stack, ['foo/bar/tasks/'], '/', 'main.yml')

    def test_path_endswith_role_source_main_yml_source_in_dirname(self):
        self.assertRaises(AnsibleFileNotFound, self._loader.path_dwim_relative_stack, 'foo/bar/tasks/', 'tasks', 'tasks/main.yml')


class TestDataLoaderWithVault(unittest.TestCase):
    # Vault-aware loading: decryption of vaulted files with good and bad
    # secrets, plus inline vault payload parsing.

    def setUp(self):
        self._loader = DataLoader()
        vault_secrets = [('default', TextVaultSecret('ansible'))]
        self._loader.set_vault_secrets(vault_secrets)
        self.test_vault_data_path = os.path.join(os.path.dirname(__file__), 'fixtures', 'vault.yml')

    def tearDown(self):
        pass

    def test_get_real_file_vault(self):
        real_file_path = self._loader.get_real_file(self.test_vault_data_path)
        self.assertTrue(os.path.exists(real_file_path))

    def test_get_real_file_vault_no_vault(self):
        self._loader.set_vault_secrets(None)
        self.assertRaises(AnsibleParserError, self._loader.get_real_file, self.test_vault_data_path)

    def test_get_real_file_vault_wrong_password(self):
        wrong_vault = [('default', TextVaultSecret('wrong_password'))]
        self._loader.set_vault_secrets(wrong_vault)
        self.assertRaises(AnsibleVaultError, self._loader.get_real_file, self.test_vault_data_path)

    def test_get_real_file_not_a_path(self):
        self.assertRaisesRegexp(AnsibleParserError, 'Invalid filename',
                                self._loader.get_real_file, None)

    @patch.multiple(DataLoader, path_exists=lambda s, x: True, is_file=lambda s, x: True)
    def test_parse_from_vault_1_1_file(self):
        # Payload encrypted with the password 'ansible'; decrypts to foo: bar.
        vaulted_data = """$ANSIBLE_VAULT;1.1;AES256
33343734386261666161626433386662623039356366656637303939306563376130623138626165
6436333766346533353463636566313332623130383662340a393835656134633665333861393331
37666233346464636263636530626332623035633135363732623332313534306438393366323966
3135306561356164310a343937653834643433343734653137383339323330626437313562306630
3035
"""
        if PY3:
            builtins_name = 'builtins'
        else:
            builtins_name = '__builtin__'

        with patch(builtins_name + '.open', mock_open(read_data=vaulted_data.encode('utf-8'))):
            output = self._loader.load_from_file('dummy_vault.txt')
            self.assertEqual(output, dict(foo='bar'))
gpl-3.0
zvolsky/wikilinuxdevel
languages/pt.py
162
7150
# coding: utf8 { '!langcode!': 'pt', '!langname!': 'Português', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "field1=\'newvalue\'". Não pode actualizar ou eliminar os resultados de um JOIN', '%s %%{row} deleted': '%s linhas eliminadas', '%s %%{row} updated': '%s linhas actualizadas', '%s selected': '%s seleccionado(s)', '%Y-%m-%d': '%Y-%m-%d', '%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S', 'About': 'About', 'Access Control': 'Access Control', 'Administrative Interface': 'Administrative Interface', 'Administrative interface': 'Painel administrativo', 'Ajax Recipes': 'Ajax Recipes', 'appadmin is disabled because insecure channel': 'appadmin está desactivada pois o canal é inseguro', 'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?', 'Author Reference Auth User': 'Author Reference Auth User', 'Author Reference Auth User.username': 'Author Reference Auth User.username', 'Available Databases and Tables': 'bases de dados e tabelas disponíveis', 'Buy this book': 'Buy this book', 'cache': 'cache', 'Cache': 'Cache', 'Cache Keys': 'Cache Keys', 'Cannot be empty': 'não pode ser vazio', 'Category Create': 'Category Create', 'Category Select': 'Category Select', 'change password': 'alterar palavra-chave', 'Check to delete': 'seleccione para eliminar', 'Clear CACHE?': 'Clear CACHE?', 'Clear DISK': 'Clear DISK', 'Clear RAM': 'Clear RAM', 'Comment Create': 'Comment Create', 'Comment Select': 'Comment Select', 'Community': 'Community', 'Components and Plugins': 'Components and Plugins', 'Content': 'Content', 'Controller': 'Controlador', 'Copyright': 'Direitos de cópia', 'create new category': 'create new category', 'create new comment': 'create new comment', 'create new post': 'create new post', 'Created By': 'Created By', 'Created On': 'Created On', 'Current request': 'pedido currente', 'Current response': 'resposta currente', 
'Current session': 'sessão currente', 'customize me!': 'Personaliza-me!', 'data uploaded': 'informação enviada', 'Database': 'base de dados', 'Database %s select': 'selecção de base de dados %s', 'db': 'bd', 'DB Model': 'Modelo de BD', 'Delete:': 'Eliminar:', 'Demo': 'Demo', 'Deployment Recipes': 'Deployment Recipes', 'design': 'design', 'DISK': 'DISK', 'Disk Cache Keys': 'Disk Cache Keys', 'Disk Cleared': 'Disk Cleared', 'Documentation': 'Documentation', "Don't know what to do?": "Don't know what to do?", 'done!': 'concluído!', 'Download': 'Download', 'Edit': 'Editar', 'edit category': 'edit category', 'edit comment': 'edit comment', 'Edit current record': 'Edição de registo currente', 'edit post': 'edit post', 'edit profile': 'Editar perfil', 'Edit This App': 'Edite esta aplicação', 'Email': 'Email', 'Email and SMS': 'Email and SMS', 'Errors': 'Errors', 'export as csv file': 'exportar como ficheiro csv', 'FAQ': 'FAQ', 'First Name': 'First Name', 'For %s #%s': 'For %s #%s', 'Forms and Validators': 'Forms and Validators', 'Free Applications': 'Free Applications', 'Groups': 'Groups', 'Hello World': 'Olá Mundo', 'Home': 'Home', 'How did you get here?': 'How did you get here?', 'import': 'import', 'Import/Export': 'Importar/Exportar', 'Index': 'Índice', 'insert new': 'inserir novo', 'insert new %s': 'inserir novo %s', 'Internal State': 'Estado interno', 'Introduction': 'Introduction', 'Invalid Query': 'Consulta Inválida', 'invalid request': 'Pedido Inválido', 'Key': 'Key', 'Last Name': 'Last Name', 'Layout': 'Esboço', 'Layout Plugins': 'Layout Plugins', 'Layouts': 'Layouts', 'Live Chat': 'Live Chat', 'login': 'login', 'logout': 'logout', 'Lost Password': 'Lost Password', 'Main Menu': 'Menu Principal', 'Manage Cache': 'Manage Cache', 'Menu Model': 'Menu do Modelo', 'Modified By': 'Modified By', 'Modified On': 'Modified On', 'My Sites': 'My Sites', 'Name': 'Name', 'New Record': 'Novo Registo', 'new record inserted': 'novo registo inserido', 'next 100 rows': 'próximas 
100 linhas', 'No Data': 'No Data', 'No databases in this application': 'Não há bases de dados nesta aplicação', 'Online examples': 'Exemplos online', 'or import from csv file': 'ou importe a partir de ficheiro csv', 'Other Plugins': 'Other Plugins', 'Other Recipes': 'Other Recipes', 'Overview': 'Overview', 'Password': 'Password', 'Plugins': 'Plugins', 'Post Create': 'Post Create', 'Post Select': 'Post Select', 'Powered by': 'Suportado por', 'Preface': 'Preface', 'previous 100 rows': '100 linhas anteriores', 'Python': 'Python', 'Query:': 'Interrogação:', 'Quick Examples': 'Quick Examples', 'RAM': 'RAM', 'RAM Cache Keys': 'RAM Cache Keys', 'Ram Cleared': 'Ram Cleared', 'Recipes': 'Recipes', 'Record': 'registo', 'record does not exist': 'registo inexistente', 'Record id': 'id de registo', 'Register': 'Register', 'register': 'register', 'Replyto Reference Post': 'Replyto Reference Post', 'Rows in Table': 'Linhas numa tabela', 'Rows selected': 'Linhas seleccionadas', 'search category': 'search category', 'search comment': 'search comment', 'search post': 'search post', 'select category': 'select category', 'select comment': 'select comment', 'select post': 'select post', 'Semantic': 'Semantic', 'Services': 'Services', 'show category': 'show category', 'show comment': 'show comment', 'show post': 'show post', 'Size of cache:': 'Size of cache:', 'state': 'estado', 'Statistics': 'Statistics', 'Stylesheet': 'Folha de estilo', 'submit': 'submit', 'Support': 'Support', 'Sure you want to delete this object?': 'Tem a certeza que deseja eliminar este objecto?', 'Table': 'tabela', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'A "query" é uma condição do tipo "db.table1.field1==\'value\'". 
Algo como "db.table1.field1==db.table2.field2" resultaria num SQL JOIN.', 'The Core': 'The Core', 'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s', 'The Views': 'The Views', 'This App': 'This App', 'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)', 'Title': 'Title', 'Twitter': 'Twitter', 'unable to parse csv file': 'não foi possível carregar ficheiro csv', 'Update:': 'Actualização:', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Utilize (...)&(...) para AND, (...)|(...) para OR, e ~(...) para NOT para construir interrogações mais complexas.', 'Username': 'Username', 'Videos': 'Videos', 'View': 'Vista', 'Welcome %s': 'Bem-vindo(a) %s', 'Welcome to Gluonization': 'Bem vindo ao Web2py', 'Welcome to web2py': 'Bem-vindo(a) ao web2py', 'Welcome to web2py!': 'Welcome to web2py!', 'When': 'When', 'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s', 'You are successfully running web2py': 'You are successfully running web2py', 'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs', 'You visited the url %s': 'You visited the url %s', }
lgpl-3.0
pilou-/ansible
test/units/modules/cloud/google/test_gce_tag.py
187
1909
import unittest from ansible.modules.cloud.google.gce_tag import _get_changed_items, _intersect_items, _union_items class TestGCETag(unittest.TestCase): """Unit tests for gce_tag module.""" def test_union_items(self): """ Combine items in both lists removing duplicates. """ listA = [1, 2, 3, 4, 5, 8, 9] listB = [1, 2, 3, 4, 5, 6, 7] want = [1, 2, 3, 4, 5, 6, 7, 8, 9] got = _union_items(listA, listB) self.assertEqual(want, got) def test_intersect_items(self): """ All unique items from either list. """ listA = [1, 2, 3, 4, 5, 8, 9] listB = [1, 2, 3, 4, 5, 6, 7] want = [1, 2, 3, 4, 5] got = _intersect_items(listA, listB) self.assertEqual(want, got) # tags removed new_tags = ['one', 'two'] existing_tags = ['two'] want = ['two'] # only remove the tag that was present got = _intersect_items(existing_tags, new_tags) self.assertEqual(want, got) def test_get_changed_items(self): """ All the items from left list that don't match any item from the right list. """ listA = [1, 2, 3, 4, 5, 8, 9] listB = [1, 2, 3, 4, 5, 6, 7] want = [8, 9] got = _get_changed_items(listA, listB) self.assertEqual(want, got) # simulate new tags added tags_to_add = ['one', 'two'] existing_tags = ['two'] want = ['one'] got = _get_changed_items(tags_to_add, existing_tags) self.assertEqual(want, got) # simulate removing tags # specifying one tag on right that doesn't exist tags_to_remove = ['one', 'two'] existing_tags = ['two', 'three'] want = ['three'] got = _get_changed_items(existing_tags, tags_to_remove) self.assertEqual(want, got)
gpl-3.0
atsolakid/edx-platform
lms/djangoapps/bulk_email/migrations/0007_load_course_email_template.py
182
5602
# -*- coding: utf-8 -*- from south.v2 import DataMigration class Migration(DataMigration): def forwards(self, orm): "Load data from fixture." from django.core.management import call_command call_command("loaddata", "course_email_template.json") def backwards(self, orm): "Perform a no-op to go backwards." pass models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 
'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'bulk_email.courseemail': { 'Meta': {'object_name': 'CourseEmail'}, 'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'html_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'sender': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}), 'subject': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'text_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'to_option': ('django.db.models.fields.CharField', [], {'default': "'myself'", 'max_length': '64'}) }, 'bulk_email.courseemailtemplate': { 'Meta': {'object_name': 'CourseEmailTemplate'}, 'html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'plain_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) }, 'bulk_email.optout': { 'Meta': {'unique_together': 
"(('user', 'course_id'),)", 'object_name': 'Optout'}, 'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) } } complete_apps = ['bulk_email'] symmetrical = True
agpl-3.0
kyoungrok0517/linguist
samples/Python/gen-py-linguist-thrift.py
61
2259
# # Autogenerated by Thrift Compiler (1.0.0-dev) # # DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING # # options string: py # from thrift.Thrift import TType, TMessageType, TException, TApplicationException from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol, TProtocol try: from thrift.protocol import fastbinary except: fastbinary = None class PullRequest: """ Attributes: - title """ thrift_spec = ( None, # 0 (1, TType.STRING, 'title', None, None, ), # 1 ) def __init__(self, title=None,): self.title = title def read(self, iprot): if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) return iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1: if ftype == TType.STRING: self.title = iprot.readString() else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) return oprot.writeStructBegin('PullRequest') if self.title is not None: oprot.writeFieldBegin('title', TType.STRING, 1) oprot.writeString(self.title) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd() def validate(self): return def __hash__(self): value = 17 value = (value * 31) ^ hash(self.title) return value def __repr__(self): L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) def __eq__(self, other): return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ def __ne__(self, other): return not (self == other)
mit
vadimtk/chrome4sdp
third_party/cython/src/Cython/Debugger/libpython.py
101
83726
#!/usr/bin/python # NOTE: this file is taken from the Python source distribution # It can be found under Tools/gdb/libpython.py. It is shipped with Cython # because it's not installed as a python module, and because changes are only # merged into new python versions (v3.2+). ''' From gdb 7 onwards, gdb's build can be configured --with-python, allowing gdb to be extended with Python code e.g. for library-specific data visualizations, such as for the C++ STL types. Documentation on this API can be seen at: http://sourceware.org/gdb/current/onlinedocs/gdb/Python-API.html This python module deals with the case when the process being debugged (the "inferior process" in gdb parlance) is itself python, or more specifically, linked against libpython. In this situation, almost every item of data is a (PyObject*), and having the debugger merely print their addresses is not very enlightening. This module embeds knowledge about the implementation details of libpython so that we can emit useful visualizations e.g. a string, a list, a dict, a frame giving file/line information and the state of local variables In particular, given a gdb.Value corresponding to a PyObject* in the inferior process, we can generate a "proxy value" within the gdb process. For example, given a PyObject* in the inferior process that is in fact a PyListObject* holding three PyObject* that turn out to be PyStringObject* instances, we can generate a proxy value within the gdb process that is a list of strings: ["foo", "bar", "baz"] Doing so can be expensive for complicated graphs of objects, and could take some time, so we also have a "write_repr" method that writes a representation of the data to a file-like object. This allows us to stop the traversal by having the file-like object raise an exception if it gets too much data. 
With both "proxyval" and "write_repr" we keep track of the set of all addresses visited so far in the traversal, to avoid infinite recursion due to cycles in the graph of object references. We try to defer gdb.lookup_type() invocations for python types until as late as possible: for a dynamically linked python binary, when the process starts in the debugger, the libpython.so hasn't been dynamically loaded yet, so none of the type names are known to the debugger The module also extends gdb with some python-specific commands. ''' from __future__ import with_statement import os import re import sys import struct import locale import atexit import warnings import tempfile import textwrap import itertools import gdb if sys.version_info[0] < 3: # I think this is the only way to fix this bug :'( # http://sourceware.org/bugzilla/show_bug.cgi?id=12285 out, err = sys.stdout, sys.stderr reload(sys).setdefaultencoding('UTF-8') sys.stdout = out sys.stderr = err # Look up the gdb.Type for some standard types: _type_char_ptr = gdb.lookup_type('char').pointer() # char* _type_unsigned_char_ptr = gdb.lookup_type('unsigned char').pointer() _type_void_ptr = gdb.lookup_type('void').pointer() # void* SIZEOF_VOID_P = _type_void_ptr.sizeof Py_TPFLAGS_HEAPTYPE = (1L << 9) Py_TPFLAGS_INT_SUBCLASS = (1L << 23) Py_TPFLAGS_LONG_SUBCLASS = (1L << 24) Py_TPFLAGS_LIST_SUBCLASS = (1L << 25) Py_TPFLAGS_TUPLE_SUBCLASS = (1L << 26) Py_TPFLAGS_STRING_SUBCLASS = (1L << 27) Py_TPFLAGS_BYTES_SUBCLASS = (1L << 27) Py_TPFLAGS_UNICODE_SUBCLASS = (1L << 28) Py_TPFLAGS_DICT_SUBCLASS = (1L << 29) Py_TPFLAGS_BASE_EXC_SUBCLASS = (1L << 30) Py_TPFLAGS_TYPE_SUBCLASS = (1L << 31) MAX_OUTPUT_LEN = 1024 hexdigits = "0123456789abcdef" ENCODING = locale.getpreferredencoding() class NullPyObjectPtr(RuntimeError): pass def safety_limit(val): # Given a integer value from the process being debugged, limit it to some # safety threshold so that arbitrary breakage within said process doesn't # break the gdb process too much 
(e.g. sizes of iterations, sizes of lists) return min(val, 1000) def safe_range(val): # As per range, but don't trust the value too much: cap it to a safety # threshold in case the data was corrupted return xrange(safety_limit(val)) def write_unicode(file, text): # Write a byte or unicode string to file. Unicode strings are encoded to # ENCODING encoding with 'backslashreplace' error handler to avoid # UnicodeEncodeError. if isinstance(text, unicode): text = text.encode(ENCODING, 'backslashreplace') file.write(text) def os_fsencode(filename): if not isinstance(filename, unicode): return filename encoding = sys.getfilesystemencoding() if encoding == 'mbcs': # mbcs doesn't support surrogateescape return filename.encode(encoding) encoded = [] for char in filename: # surrogateescape error handler if 0xDC80 <= ord(char) <= 0xDCFF: byte = chr(ord(char) - 0xDC00) else: byte = char.encode(encoding) encoded.append(byte) return ''.join(encoded) class StringTruncated(RuntimeError): pass class TruncatedStringIO(object): '''Similar to cStringIO, but can truncate the output by raising a StringTruncated exception''' def __init__(self, maxlen=None): self._val = '' self.maxlen = maxlen def write(self, data): if self.maxlen: if len(data) + len(self._val) > self.maxlen: # Truncation: self._val += data[0:self.maxlen - len(self._val)] raise StringTruncated() self._val += data def getvalue(self): return self._val # pretty printer lookup all_pretty_typenames = set() class PrettyPrinterTrackerMeta(type): def __init__(self, name, bases, dict): super(PrettyPrinterTrackerMeta, self).__init__(name, bases, dict) all_pretty_typenames.add(self._typename) class PyObjectPtr(object): """ Class wrapping a gdb.Value that's a either a (PyObject*) within the inferior process, or some subclass pointer e.g. (PyStringObject*) There will be a subclass for every refined PyObject type that we care about. 
Note that at every stage the underlying pointer could be NULL, point to corrupt data, etc; this is the debugger, after all. """ __metaclass__ = PrettyPrinterTrackerMeta _typename = 'PyObject' def __init__(self, gdbval, cast_to=None): if cast_to: self._gdbval = gdbval.cast(cast_to) else: self._gdbval = gdbval def field(self, name): ''' Get the gdb.Value for the given field within the PyObject, coping with some python 2 versus python 3 differences. Various libpython types are defined using the "PyObject_HEAD" and "PyObject_VAR_HEAD" macros. In Python 2, this these are defined so that "ob_type" and (for a var object) "ob_size" are fields of the type in question. In Python 3, this is defined as an embedded PyVarObject type thus: PyVarObject ob_base; so that the "ob_size" field is located insize the "ob_base" field, and the "ob_type" is most easily accessed by casting back to a (PyObject*). ''' if self.is_null(): raise NullPyObjectPtr(self) if name == 'ob_type': pyo_ptr = self._gdbval.cast(PyObjectPtr.get_gdb_type()) return pyo_ptr.dereference()[name] if name == 'ob_size': pyo_ptr = self._gdbval.cast(PyVarObjectPtr.get_gdb_type()) return pyo_ptr.dereference()[name] # General case: look it up inside the object: return self._gdbval.dereference()[name] def pyop_field(self, name): ''' Get a PyObjectPtr for the given PyObject* field within this PyObject, coping with some python 2 versus python 3 differences. 
''' return PyObjectPtr.from_pyobject_ptr(self.field(name)) def write_field_repr(self, name, out, visited): ''' Extract the PyObject* field named "name", and write its representation to file-like object "out" ''' field_obj = self.pyop_field(name) field_obj.write_repr(out, visited) def get_truncated_repr(self, maxlen): ''' Get a repr-like string for the data, but truncate it at "maxlen" bytes (ending the object graph traversal as soon as you do) ''' out = TruncatedStringIO(maxlen) try: self.write_repr(out, set()) except StringTruncated: # Truncation occurred: return out.getvalue() + '...(truncated)' # No truncation occurred: return out.getvalue() def type(self): return PyTypeObjectPtr(self.field('ob_type')) def is_null(self): return 0 == long(self._gdbval) def is_optimized_out(self): ''' Is the value of the underlying PyObject* visible to the debugger? This can vary with the precise version of the compiler used to build Python, and the precise version of gdb. See e.g. https://bugzilla.redhat.com/show_bug.cgi?id=556975 with PyEval_EvalFrameEx's "f" ''' return self._gdbval.is_optimized_out def safe_tp_name(self): try: return self.type().field('tp_name').string() except NullPyObjectPtr: # NULL tp_name? return 'unknown' except RuntimeError: # Can't even read the object at all? return 'unknown' def proxyval(self, visited): ''' Scrape a value from the inferior process, and try to represent it within the gdb process, whilst (hopefully) avoiding crashes when the remote data is corrupt. Derived classes will override this. For example, a PyIntObject* with ob_ival 42 in the inferior process should result in an int(42) in this process. visited: a set of all gdb.Value pyobject pointers already visited whilst generating this value (to guard against infinite recursion when visiting object graphs with loops). 
Analogous to Py_ReprEnter and Py_ReprLeave ''' class FakeRepr(object): """ Class representing a non-descript PyObject* value in the inferior process for when we don't have a custom scraper, intended to have a sane repr(). """ def __init__(self, tp_name, address): self.tp_name = tp_name self.address = address def __repr__(self): # For the NULL pointer, we have no way of knowing a type, so # special-case it as per # http://bugs.python.org/issue8032#msg100882 if self.address == 0: return '0x0' return '<%s at remote 0x%x>' % (self.tp_name, self.address) return FakeRepr(self.safe_tp_name(), long(self._gdbval)) def write_repr(self, out, visited): ''' Write a string representation of the value scraped from the inferior process to "out", a file-like object. ''' # Default implementation: generate a proxy value and write its repr # However, this could involve a lot of work for complicated objects, # so for derived classes we specialize this return out.write(repr(self.proxyval(visited))) @classmethod def subclass_from_type(cls, t): ''' Given a PyTypeObjectPtr instance wrapping a gdb.Value that's a (PyTypeObject*), determine the corresponding subclass of PyObjectPtr to use Ideally, we would look up the symbols for the global types, but that isn't working yet: (gdb) python print gdb.lookup_symbol('PyList_Type')[0].value Traceback (most recent call last): File "<string>", line 1, in <module> NotImplementedError: Symbol type not yet supported in Python scripts. Error while executing Python code. For now, we use tp_flags, after doing some string comparisons on the tp_name for some special-cases that don't seem to be visible through flags ''' try: tp_name = t.field('tp_name').string() tp_flags = int(t.field('tp_flags')) except RuntimeError: # Handle any kind of error e.g. 
NULL ptrs by simply using the base # class return cls #print 'tp_flags = 0x%08x' % tp_flags #print 'tp_name = %r' % tp_name name_map = {'bool': PyBoolObjectPtr, 'classobj': PyClassObjectPtr, 'instance': PyInstanceObjectPtr, 'NoneType': PyNoneStructPtr, 'frame': PyFrameObjectPtr, 'set' : PySetObjectPtr, 'frozenset' : PySetObjectPtr, 'builtin_function_or_method' : PyCFunctionObjectPtr, } if tp_name in name_map: return name_map[tp_name] if tp_flags & (Py_TPFLAGS_HEAPTYPE|Py_TPFLAGS_TYPE_SUBCLASS): return PyTypeObjectPtr if tp_flags & Py_TPFLAGS_INT_SUBCLASS: return PyIntObjectPtr if tp_flags & Py_TPFLAGS_LONG_SUBCLASS: return PyLongObjectPtr if tp_flags & Py_TPFLAGS_LIST_SUBCLASS: return PyListObjectPtr if tp_flags & Py_TPFLAGS_TUPLE_SUBCLASS: return PyTupleObjectPtr if tp_flags & Py_TPFLAGS_STRING_SUBCLASS: try: gdb.lookup_type('PyBytesObject') return PyBytesObjectPtr except RuntimeError: return PyStringObjectPtr if tp_flags & Py_TPFLAGS_UNICODE_SUBCLASS: return PyUnicodeObjectPtr if tp_flags & Py_TPFLAGS_DICT_SUBCLASS: return PyDictObjectPtr if tp_flags & Py_TPFLAGS_BASE_EXC_SUBCLASS: return PyBaseExceptionObjectPtr # Use the base class: return cls @classmethod def from_pyobject_ptr(cls, gdbval): ''' Try to locate the appropriate derived class dynamically, and cast the pointer accordingly. ''' try: p = PyObjectPtr(gdbval) cls = cls.subclass_from_type(p.type()) return cls(gdbval, cast_to=cls.get_gdb_type()) except RuntimeError, exc: # Handle any kind of error e.g. NULL ptrs by simply using the base # class pass return cls(gdbval) @classmethod def get_gdb_type(cls): return gdb.lookup_type(cls._typename).pointer() def as_address(self): return long(self._gdbval) class PyVarObjectPtr(PyObjectPtr): _typename = 'PyVarObject' class ProxyAlreadyVisited(object): ''' Placeholder proxy to use when protecting against infinite recursion due to loops in the object graph. 
Analogous to the values emitted by the users of Py_ReprEnter and Py_ReprLeave ''' def __init__(self, rep): self._rep = rep def __repr__(self): return self._rep def _write_instance_repr(out, visited, name, pyop_attrdict, address): '''Shared code for use by old-style and new-style classes: write a representation to file-like object "out"''' out.write('<') out.write(name) # Write dictionary of instance attributes: if isinstance(pyop_attrdict, PyDictObjectPtr): out.write('(') first = True for pyop_arg, pyop_val in pyop_attrdict.iteritems(): if not first: out.write(', ') first = False out.write(pyop_arg.proxyval(visited)) out.write('=') pyop_val.write_repr(out, visited) out.write(')') out.write(' at remote 0x%x>' % address) class InstanceProxy(object): def __init__(self, cl_name, attrdict, address): self.cl_name = cl_name self.attrdict = attrdict self.address = address def __repr__(self): if isinstance(self.attrdict, dict): kwargs = ', '.join(["%s=%r" % (arg, val) for arg, val in self.attrdict.iteritems()]) return '<%s(%s) at remote 0x%x>' % (self.cl_name, kwargs, self.address) else: return '<%s at remote 0x%x>' % (self.cl_name, self.address) def _PyObject_VAR_SIZE(typeobj, nitems): return ( ( typeobj.field('tp_basicsize') + nitems * typeobj.field('tp_itemsize') + (SIZEOF_VOID_P - 1) ) & ~(SIZEOF_VOID_P - 1) ).cast(gdb.lookup_type('size_t')) class PyTypeObjectPtr(PyObjectPtr): _typename = 'PyTypeObject' def get_attr_dict(self): ''' Get the PyDictObject ptr representing the attribute dictionary (or None if there's a problem) ''' try: typeobj = self.type() dictoffset = int_from_int(typeobj.field('tp_dictoffset')) if dictoffset != 0: if dictoffset < 0: type_PyVarObject_ptr = gdb.lookup_type('PyVarObject').pointer() tsize = int_from_int(self._gdbval.cast(type_PyVarObject_ptr)['ob_size']) if tsize < 0: tsize = -tsize size = _PyObject_VAR_SIZE(typeobj, tsize) dictoffset += size assert dictoffset > 0 assert dictoffset % SIZEOF_VOID_P == 0 dictptr = 
self._gdbval.cast(_type_char_ptr) + dictoffset PyObjectPtrPtr = PyObjectPtr.get_gdb_type().pointer() dictptr = dictptr.cast(PyObjectPtrPtr) return PyObjectPtr.from_pyobject_ptr(dictptr.dereference()) except RuntimeError: # Corrupt data somewhere; fail safe pass # Not found, or some kind of error: return None def proxyval(self, visited): ''' Support for new-style classes. Currently we just locate the dictionary using a transliteration to python of _PyObject_GetDictPtr, ignoring descriptors ''' # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('<...>') visited.add(self.as_address()) pyop_attr_dict = self.get_attr_dict() if pyop_attr_dict: attr_dict = pyop_attr_dict.proxyval(visited) else: attr_dict = {} tp_name = self.safe_tp_name() # New-style class: return InstanceProxy(tp_name, attr_dict, long(self._gdbval)) def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('<...>') return visited.add(self.as_address()) try: tp_name = self.field('tp_name').string() except RuntimeError: tp_name = 'unknown' out.write('<type %s at remote 0x%x>' % (tp_name, self.as_address())) # pyop_attrdict = self.get_attr_dict() # _write_instance_repr(out, visited, # self.safe_tp_name(), pyop_attrdict, self.as_address()) class ProxyException(Exception): def __init__(self, tp_name, args): self.tp_name = tp_name self.args = args def __repr__(self): return '%s%r' % (self.tp_name, self.args) class PyBaseExceptionObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyBaseExceptionObject* i.e. an exception within the process being debugged. 
""" _typename = 'PyBaseExceptionObject' def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('(...)') visited.add(self.as_address()) arg_proxy = self.pyop_field('args').proxyval(visited) return ProxyException(self.safe_tp_name(), arg_proxy) def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('(...)') return visited.add(self.as_address()) out.write(self.safe_tp_name()) self.write_field_repr('args', out, visited) class PyClassObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyClassObject* i.e. a <classobj> instance within the process being debugged. """ _typename = 'PyClassObject' class BuiltInFunctionProxy(object): def __init__(self, ml_name): self.ml_name = ml_name def __repr__(self): return "<built-in function %s>" % self.ml_name class BuiltInMethodProxy(object): def __init__(self, ml_name, pyop_m_self): self.ml_name = ml_name self.pyop_m_self = pyop_m_self def __repr__(self): return ('<built-in method %s of %s object at remote 0x%x>' % (self.ml_name, self.pyop_m_self.safe_tp_name(), self.pyop_m_self.as_address()) ) class PyCFunctionObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyCFunctionObject* (see Include/methodobject.h and Objects/methodobject.c) """ _typename = 'PyCFunctionObject' def proxyval(self, visited): m_ml = self.field('m_ml') # m_ml is a (PyMethodDef*) ml_name = m_ml['ml_name'].string() pyop_m_self = self.pyop_field('m_self') if pyop_m_self.is_null(): return BuiltInFunctionProxy(ml_name) else: return BuiltInMethodProxy(ml_name, pyop_m_self) class PyCodeObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyCodeObject* i.e. a <code> instance within the process being debugged. 
""" _typename = 'PyCodeObject' def addr2line(self, addrq): ''' Get the line number for a given bytecode offset Analogous to PyCode_Addr2Line; translated from pseudocode in Objects/lnotab_notes.txt ''' co_lnotab = self.pyop_field('co_lnotab').proxyval(set()) # Initialize lineno to co_firstlineno as per PyCode_Addr2Line # not 0, as lnotab_notes.txt has it: lineno = int_from_int(self.field('co_firstlineno')) addr = 0 for addr_incr, line_incr in zip(co_lnotab[::2], co_lnotab[1::2]): addr += ord(addr_incr) if addr > addrq: return lineno lineno += ord(line_incr) return lineno class PyDictObjectPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyDictObject* i.e. a dict instance within the process being debugged. """ _typename = 'PyDictObject' def iteritems(self): ''' Yields a sequence of (PyObjectPtr key, PyObjectPtr value) pairs, analagous to dict.iteritems() ''' for i in safe_range(self.field('ma_mask') + 1): ep = self.field('ma_table') + i pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value']) if not pyop_value.is_null(): pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key']) yield (pyop_key, pyop_value) def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('{...}') visited.add(self.as_address()) result = {} for pyop_key, pyop_value in self.iteritems(): proxy_key = pyop_key.proxyval(visited) proxy_value = pyop_value.proxyval(visited) result[proxy_key] = proxy_value return result def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('{...}') return visited.add(self.as_address()) out.write('{') first = True for pyop_key, pyop_value in self.iteritems(): if not first: out.write(', ') first = False pyop_key.write_repr(out, visited) out.write(': ') pyop_value.write_repr(out, visited) out.write('}') class PyInstanceObjectPtr(PyObjectPtr): _typename = 'PyInstanceObject' def proxyval(self, visited): # Guard against infinite loops: if 
self.as_address() in visited: return ProxyAlreadyVisited('<...>') visited.add(self.as_address()) # Get name of class: in_class = self.pyop_field('in_class') cl_name = in_class.pyop_field('cl_name').proxyval(visited) # Get dictionary of instance attributes: in_dict = self.pyop_field('in_dict').proxyval(visited) # Old-style class: return InstanceProxy(cl_name, in_dict, long(self._gdbval)) def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('<...>') return visited.add(self.as_address()) # Old-style class: # Get name of class: in_class = self.pyop_field('in_class') cl_name = in_class.pyop_field('cl_name').proxyval(visited) # Get dictionary of instance attributes: pyop_in_dict = self.pyop_field('in_dict') _write_instance_repr(out, visited, cl_name, pyop_in_dict, self.as_address()) class PyIntObjectPtr(PyObjectPtr): _typename = 'PyIntObject' def proxyval(self, visited): result = int_from_int(self.field('ob_ival')) return result class PyListObjectPtr(PyObjectPtr): _typename = 'PyListObject' def __getitem__(self, i): # Get the gdb.Value for the (PyObject*) with the given index: field_ob_item = self.field('ob_item') return field_ob_item[i] def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('[...]') visited.add(self.as_address()) result = [PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited) for i in safe_range(int_from_int(self.field('ob_size')))] return result def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('[...]') return visited.add(self.as_address()) out.write('[') for i in safe_range(int_from_int(self.field('ob_size'))): if i > 0: out.write(', ') element = PyObjectPtr.from_pyobject_ptr(self[i]) element.write_repr(out, visited) out.write(']') class PyLongObjectPtr(PyObjectPtr): _typename = 'PyLongObject' def proxyval(self, visited): ''' Python's Include/longobjrep.h has this 
declaration: struct _longobject { PyObject_VAR_HEAD digit ob_digit[1]; }; with this description: The absolute value of a number is equal to SUM(for i=0 through abs(ob_size)-1) ob_digit[i] * 2**(SHIFT*i) Negative numbers are represented with ob_size < 0; zero is represented by ob_size == 0. where SHIFT can be either: #define PyLong_SHIFT 30 #define PyLong_SHIFT 15 ''' ob_size = long(self.field('ob_size')) if ob_size == 0: return 0L ob_digit = self.field('ob_digit') if gdb.lookup_type('digit').sizeof == 2: SHIFT = 15L else: SHIFT = 30L digits = [long(ob_digit[i]) * 2**(SHIFT*i) for i in safe_range(abs(ob_size))] result = sum(digits) if ob_size < 0: result = -result return result def write_repr(self, out, visited): # Write this out as a Python 3 int literal, i.e. without the "L" suffix proxy = self.proxyval(visited) out.write("%s" % proxy) class PyBoolObjectPtr(PyLongObjectPtr): """ Class wrapping a gdb.Value that's a PyBoolObject* i.e. one of the two <bool> instances (Py_True/Py_False) within the process being debugged. 
""" _typename = 'PyBoolObject' def proxyval(self, visited): castto = gdb.lookup_type('PyLongObject').pointer() self._gdbval = self._gdbval.cast(castto) return bool(PyLongObjectPtr(self._gdbval).proxyval(visited)) class PyNoneStructPtr(PyObjectPtr): """ Class wrapping a gdb.Value that's a PyObject* pointing to the singleton (we hope) _Py_NoneStruct with ob_type PyNone_Type """ _typename = 'PyObject' def proxyval(self, visited): return None class PyFrameObjectPtr(PyObjectPtr): _typename = 'PyFrameObject' def __init__(self, gdbval, cast_to=None): PyObjectPtr.__init__(self, gdbval, cast_to) if not self.is_optimized_out(): self.co = PyCodeObjectPtr.from_pyobject_ptr(self.field('f_code')) self.co_name = self.co.pyop_field('co_name') self.co_filename = self.co.pyop_field('co_filename') self.f_lineno = int_from_int(self.field('f_lineno')) self.f_lasti = int_from_int(self.field('f_lasti')) self.co_nlocals = int_from_int(self.co.field('co_nlocals')) self.co_varnames = PyTupleObjectPtr.from_pyobject_ptr(self.co.field('co_varnames')) def iter_locals(self): ''' Yield a sequence of (name,value) pairs of PyObjectPtr instances, for the local variables of this frame ''' if self.is_optimized_out(): return f_localsplus = self.field('f_localsplus') for i in safe_range(self.co_nlocals): pyop_value = PyObjectPtr.from_pyobject_ptr(f_localsplus[i]) if not pyop_value.is_null(): pyop_name = PyObjectPtr.from_pyobject_ptr(self.co_varnames[i]) yield (pyop_name, pyop_value) def iter_globals(self): ''' Yield a sequence of (name,value) pairs of PyObjectPtr instances, for the global variables of this frame ''' if self.is_optimized_out(): return pyop_globals = self.pyop_field('f_globals') return pyop_globals.iteritems() def iter_builtins(self): ''' Yield a sequence of (name,value) pairs of PyObjectPtr instances, for the builtin variables ''' if self.is_optimized_out(): return pyop_builtins = self.pyop_field('f_builtins') return pyop_builtins.iteritems() def get_var_by_name(self, name): ''' Look for 
the named local variable, returning a (PyObjectPtr, scope) pair where scope is a string 'local', 'global', 'builtin' If not found, return (None, None) ''' for pyop_name, pyop_value in self.iter_locals(): if name == pyop_name.proxyval(set()): return pyop_value, 'local' for pyop_name, pyop_value in self.iter_globals(): if name == pyop_name.proxyval(set()): return pyop_value, 'global' for pyop_name, pyop_value in self.iter_builtins(): if name == pyop_name.proxyval(set()): return pyop_value, 'builtin' return None, None def filename(self): '''Get the path of the current Python source file, as a string''' if self.is_optimized_out(): return '(frame information optimized out)' return self.co_filename.proxyval(set()) def current_line_num(self): '''Get current line number as an integer (1-based) Translated from PyFrame_GetLineNumber and PyCode_Addr2Line See Objects/lnotab_notes.txt ''' if self.is_optimized_out(): return None f_trace = self.field('f_trace') if long(f_trace) != 0: # we have a non-NULL f_trace: return self.f_lineno else: #try: return self.co.addr2line(self.f_lasti) #except ValueError: # return self.f_lineno def current_line(self): '''Get the text of the current source line as a string, with a trailing newline character''' if self.is_optimized_out(): return '(frame information optimized out)' filename = self.filename() with open(os_fsencode(filename), 'r') as f: all_lines = f.readlines() # Convert from 1-based current_line_num to 0-based list offset: return all_lines[self.current_line_num()-1] def write_repr(self, out, visited): if self.is_optimized_out(): out.write('(frame information optimized out)') return out.write('Frame 0x%x, for file %s, line %i, in %s (' % (self.as_address(), self.co_filename.proxyval(visited), self.current_line_num(), self.co_name.proxyval(visited))) first = True for pyop_name, pyop_value in self.iter_locals(): if not first: out.write(', ') first = False out.write(pyop_name.proxyval(visited)) out.write('=') pyop_value.write_repr(out, 
visited) out.write(')') class PySetObjectPtr(PyObjectPtr): _typename = 'PySetObject' def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('%s(...)' % self.safe_tp_name()) visited.add(self.as_address()) members = [] table = self.field('table') for i in safe_range(self.field('mask')+1): setentry = table[i] key = setentry['key'] if key != 0: key_proxy = PyObjectPtr.from_pyobject_ptr(key).proxyval(visited) if key_proxy != '<dummy key>': members.append(key_proxy) if self.safe_tp_name() == 'frozenset': return frozenset(members) else: return set(members) def write_repr(self, out, visited): # Emulate Python 3's set_repr tp_name = self.safe_tp_name() # Guard against infinite loops: if self.as_address() in visited: out.write('(...)') return visited.add(self.as_address()) # Python 3's set_repr special-cases the empty set: if not self.field('used'): out.write(tp_name) out.write('()') return # Python 3 uses {} for set literals: if tp_name != 'set': out.write(tp_name) out.write('(') out.write('{') first = True table = self.field('table') for i in safe_range(self.field('mask')+1): setentry = table[i] key = setentry['key'] if key != 0: pyop_key = PyObjectPtr.from_pyobject_ptr(key) key_proxy = pyop_key.proxyval(visited) # FIXME! if key_proxy != '<dummy key>': if not first: out.write(', ') first = False pyop_key.write_repr(out, visited) out.write('}') if tp_name != 'set': out.write(')') class PyBytesObjectPtr(PyObjectPtr): _typename = 'PyBytesObject' def __str__(self): field_ob_size = self.field('ob_size') field_ob_sval = self.field('ob_sval') return ''.join(struct.pack('b', field_ob_sval[i]) for i in safe_range(field_ob_size)) def proxyval(self, visited): return str(self) def write_repr(self, out, visited, py3=True): # Write this out as a Python 3 bytes literal, i.e. 
with a "b" prefix # Get a PyStringObject* within the Python 2 gdb process: proxy = self.proxyval(visited) # Transliteration of Python 3's Objects/bytesobject.c:PyBytes_Repr # to Python 2 code: quote = "'" if "'" in proxy and not '"' in proxy: quote = '"' if py3: out.write('b') out.write(quote) for byte in proxy: if byte == quote or byte == '\\': out.write('\\') out.write(byte) elif byte == '\t': out.write('\\t') elif byte == '\n': out.write('\\n') elif byte == '\r': out.write('\\r') elif byte < ' ' or ord(byte) >= 0x7f: out.write('\\x') out.write(hexdigits[(ord(byte) & 0xf0) >> 4]) out.write(hexdigits[ord(byte) & 0xf]) else: out.write(byte) out.write(quote) class PyStringObjectPtr(PyBytesObjectPtr): _typename = 'PyStringObject' def write_repr(self, out, visited): return super(PyStringObjectPtr, self).write_repr(out, visited, py3=False) class PyTupleObjectPtr(PyObjectPtr): _typename = 'PyTupleObject' def __getitem__(self, i): # Get the gdb.Value for the (PyObject*) with the given index: field_ob_item = self.field('ob_item') return field_ob_item[i] def proxyval(self, visited): # Guard against infinite loops: if self.as_address() in visited: return ProxyAlreadyVisited('(...)') visited.add(self.as_address()) result = tuple([PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited) for i in safe_range(int_from_int(self.field('ob_size')))]) return result def write_repr(self, out, visited): # Guard against infinite loops: if self.as_address() in visited: out.write('(...)') return visited.add(self.as_address()) out.write('(') for i in safe_range(int_from_int(self.field('ob_size'))): if i > 0: out.write(', ') element = PyObjectPtr.from_pyobject_ptr(self[i]) element.write_repr(out, visited) if self.field('ob_size') == 1: out.write(',)') else: out.write(')') def _unichr_is_printable(char): # Logic adapted from Python 3's Tools/unicode/makeunicodedata.py if char == u" ": return True import unicodedata return unicodedata.category(char) not in ("C", "Z") if sys.maxunicode >= 
0x10000: _unichr = unichr else: # Needed for proper surrogate support if sizeof(Py_UNICODE) is 2 in gdb def _unichr(x): if x < 0x10000: return unichr(x) x -= 0x10000 ch1 = 0xD800 | (x >> 10) ch2 = 0xDC00 | (x & 0x3FF) return unichr(ch1) + unichr(ch2) class PyUnicodeObjectPtr(PyObjectPtr): _typename = 'PyUnicodeObject' def char_width(self): _type_Py_UNICODE = gdb.lookup_type('Py_UNICODE') return _type_Py_UNICODE.sizeof def proxyval(self, visited): # From unicodeobject.h: # Py_ssize_t length; /* Length of raw Unicode data in buffer */ # Py_UNICODE *str; /* Raw Unicode buffer */ field_length = long(self.field('length')) field_str = self.field('str') # Gather a list of ints from the Py_UNICODE array; these are either # UCS-2 or UCS-4 code points: if self.char_width() > 2: Py_UNICODEs = [int(field_str[i]) for i in safe_range(field_length)] else: # A more elaborate routine if sizeof(Py_UNICODE) is 2 in the # inferior process: we must join surrogate pairs. Py_UNICODEs = [] i = 0 limit = safety_limit(field_length) while i < limit: ucs = int(field_str[i]) i += 1 if ucs < 0xD800 or ucs >= 0xDC00 or i == field_length: Py_UNICODEs.append(ucs) continue # This could be a surrogate pair. ucs2 = int(field_str[i]) if ucs2 < 0xDC00 or ucs2 > 0xDFFF: continue code = (ucs & 0x03FF) << 10 code |= ucs2 & 0x03FF code += 0x00010000 Py_UNICODEs.append(code) i += 1 # Convert the int code points to unicode characters, and generate a # local unicode instance. # This splits surrogate pairs if sizeof(Py_UNICODE) is 2 here (in gdb). 
result = u''.join([_unichr(ucs) for ucs in Py_UNICODEs]) return result def write_repr(self, out, visited): # Get a PyUnicodeObject* within the Python 2 gdb process: proxy = self.proxyval(visited) # Transliteration of Python 3's Object/unicodeobject.c:unicode_repr # to Python 2: try: gdb.parse_and_eval('PyString_Type') except RuntimeError: # Python 3, don't write 'u' as prefix pass else: # Python 2, write the 'u' out.write('u') if "'" in proxy and '"' not in proxy: quote = '"' else: quote = "'" out.write(quote) i = 0 while i < len(proxy): ch = proxy[i] i += 1 # Escape quotes and backslashes if ch == quote or ch == '\\': out.write('\\') out.write(ch) # Map special whitespace to '\t', \n', '\r' elif ch == '\t': out.write('\\t') elif ch == '\n': out.write('\\n') elif ch == '\r': out.write('\\r') # Map non-printable US ASCII to '\xhh' */ elif ch < ' ' or ch == 0x7F: out.write('\\x') out.write(hexdigits[(ord(ch) >> 4) & 0x000F]) out.write(hexdigits[ord(ch) & 0x000F]) # Copy ASCII characters as-is elif ord(ch) < 0x7F: out.write(ch) # Non-ASCII characters else: ucs = ch ch2 = None if sys.maxunicode < 0x10000: # If sizeof(Py_UNICODE) is 2 here (in gdb), join # surrogate pairs before calling _unichr_is_printable. if (i < len(proxy) and 0xD800 <= ord(ch) < 0xDC00 \ and 0xDC00 <= ord(proxy[i]) <= 0xDFFF): ch2 = proxy[i] ucs = ch + ch2 i += 1 # Unfortuately, Python 2's unicode type doesn't seem # to expose the "isprintable" method printable = _unichr_is_printable(ucs) if printable: try: ucs.encode(ENCODING) except UnicodeEncodeError: printable = False # Map Unicode whitespace and control characters # (categories Z* and C* except ASCII space) if not printable: if ch2 is not None: # Match Python 3's representation of non-printable # wide characters. 
code = (ord(ch) & 0x03FF) << 10 code |= ord(ch2) & 0x03FF code += 0x00010000 else: code = ord(ucs) # Map 8-bit characters to '\\xhh' if code <= 0xff: out.write('\\x') out.write(hexdigits[(code >> 4) & 0x000F]) out.write(hexdigits[code & 0x000F]) # Map 21-bit characters to '\U00xxxxxx' elif code >= 0x10000: out.write('\\U') out.write(hexdigits[(code >> 28) & 0x0000000F]) out.write(hexdigits[(code >> 24) & 0x0000000F]) out.write(hexdigits[(code >> 20) & 0x0000000F]) out.write(hexdigits[(code >> 16) & 0x0000000F]) out.write(hexdigits[(code >> 12) & 0x0000000F]) out.write(hexdigits[(code >> 8) & 0x0000000F]) out.write(hexdigits[(code >> 4) & 0x0000000F]) out.write(hexdigits[code & 0x0000000F]) # Map 16-bit characters to '\uxxxx' else: out.write('\\u') out.write(hexdigits[(code >> 12) & 0x000F]) out.write(hexdigits[(code >> 8) & 0x000F]) out.write(hexdigits[(code >> 4) & 0x000F]) out.write(hexdigits[code & 0x000F]) else: # Copy characters as-is out.write(ch) if ch2 is not None: out.write(ch2) out.write(quote) def __unicode__(self): return self.proxyval(set()) def __str__(self): # In Python 3, everything is unicode (including attributes of e.g. # code objects, such as function names). The Python 2 debugger code # uses PyUnicodePtr objects to format strings etc, whereas with a # Python 2 debuggee we'd get PyStringObjectPtr instances with __str__. # Be compatible with that. return unicode(self).encode('UTF-8') def int_from_int(gdbval): return int(str(gdbval)) def stringify(val): # TODO: repr() puts everything on one line; pformat can be nicer, but # can lead to v.long results; this function isolates the choice if True: return repr(val) else: from pprint import pformat return pformat(val) class PyObjectPtrPrinter: "Prints a (PyObject*)" def __init__ (self, gdbval): self.gdbval = gdbval def to_string (self): pyop = PyObjectPtr.from_pyobject_ptr(self.gdbval) if True: return pyop.get_truncated_repr(MAX_OUTPUT_LEN) else: # Generate full proxy value then stringify it. 
            # Doing so could be expensive
            proxyval = pyop.proxyval(set())
            return stringify(proxyval)


def pretty_printer_lookup(gdbval):
    '''gdb pretty-printer hook: return a PyObjectPtrPrinter for values whose
    pointed-to type is one of the wrapped CPython object types.'''
    type = gdbval.type.unqualified()
    if type.code == gdb.TYPE_CODE_PTR:
        type = type.target().unqualified()
        if str(type) in all_pretty_typenames:
            return PyObjectPtrPrinter(gdbval)

"""
During development, I've been manually invoking the code in this way:
(gdb) python

import sys
sys.path.append('/home/david/coding/python-gdb')
import libpython
end

then reloading it after each edit like this:
(gdb) python reload(libpython)

The following code should ensure that the prettyprinter is registered
if the code is autoloaded by gdb when visiting libpython.so, provided
that this python file is installed to the same path as the library (or its
.debug file) plus a "-gdb.py" suffix, e.g:
  /usr/lib/libpython2.6.so.1.0-gdb.py
  /usr/lib/debug/usr/lib/libpython2.6.so.1.0.debug-gdb.py
"""

def register (obj):
    '''Register the pretty-printer with the given objfile (or gdb itself).'''
    if obj == None:
        obj = gdb

    # Wire up the pretty-printer
    obj.pretty_printers.append(pretty_printer_lookup)

register (gdb.current_objfile ())

# Unfortunately, the exact API exposed by the gdb module varies somewhat
# from build to build
# See http://bugs.python.org/issue8279?#msg102276

class Frame(object):
    '''
    Wrapper for gdb.Frame, adding various methods
    '''
    def __init__(self, gdbframe):
        self._gdbframe = gdbframe

    def older(self):
        # Next outer (calling) frame, wrapped, or None
        older = self._gdbframe.older()
        if older:
            return Frame(older)
        else:
            return None

    def newer(self):
        # Next inner (called) frame, wrapped, or None
        newer = self._gdbframe.newer()
        if newer:
            return Frame(newer)
        else:
            return None

    def select(self):
        '''If supported, select this frame and return True; return False if unsupported

        Not all builds have a gdb.Frame.select method; seems to be present on
        Fedora 12 onwards, but absent on Ubuntu buildbot'''
        if not hasattr(self._gdbframe, 'select'):
            print ('Unable to select frame: '
                   'this build of gdb does not expose a gdb.Frame.select method')
            return False
        self._gdbframe.select()
        return True

    def get_index(self):
        '''Calculate index of frame, starting at 0 for the newest frame within
        this thread'''
        index = 0
        # Go down until you reach the newest frame:
        iter_frame = self
        while iter_frame.newer():
            index += 1
            iter_frame = iter_frame.newer()
        return index

    def is_evalframeex(self):
        '''Is this a PyEval_EvalFrameEx frame?'''
        if self._gdbframe.name() == 'PyEval_EvalFrameEx':
            '''
            I believe we also need to filter on the inline
            struct frame_id.inline_depth, only regarding frames with
            an inline depth of 0 as actually being this function

            So we reject those with type gdb.INLINE_FRAME
            '''
            if self._gdbframe.type() == gdb.NORMAL_FRAME:
                # We have a PyEval_EvalFrameEx frame:
                return True

        return False

    def read_var(self, varname):
        """
        read_var with respect to code blocks (gdbframe.read_var works with
        respect to the most recent block)

        Apparently this function doesn't work, though, as it seems to read
        variables in other frames also sometimes.
        """
        block = self._gdbframe.block()
        var = None

        # Walk outwards through enclosing blocks until the name resolves:
        while block and var is None:
            try:
                var = self._gdbframe.read_var(varname, block)
            except ValueError:
                pass

            block = block.superblock

        return var

    def get_pyop(self):
        '''Return the PyFrameObjectPtr for this frame's local "f" variable
        (the PyFrameObject* in PyEval_EvalFrameEx), or None on failure.'''
        try:
            # self.read_var does not always work properly, so select our frame
            # and restore the previously selected frame
            selected_frame = gdb.selected_frame()
            self._gdbframe.select()
            f = gdb.parse_and_eval('f')
            selected_frame.select()
        except RuntimeError:
            return None
        else:
            return PyFrameObjectPtr.from_pyobject_ptr(f)

    @classmethod
    def get_selected_frame(cls):
        _gdbframe = gdb.selected_frame()
        if _gdbframe:
            return Frame(_gdbframe)
        return None

    @classmethod
    def get_selected_python_frame(cls):
        '''Try to obtain the Frame for the python code in the selected frame,
        or None'''
        frame = cls.get_selected_frame()

        # Walk outwards until we hit a PyEval_EvalFrameEx frame:
        while frame:
            if frame.is_evalframeex():
                return frame
            frame = frame.older()

        # Not found:
        return None

    def print_summary(self):
        if self.is_evalframeex():
            pyop = self.get_pyop()
            if pyop:
                line = pyop.get_truncated_repr(MAX_OUTPUT_LEN)
                write_unicode(sys.stdout, '#%i %s\n' % (self.get_index(), line))
                sys.stdout.write(pyop.current_line())
            else:
                sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index())
        else:
            sys.stdout.write('#%i\n' % self.get_index())


class PyList(gdb.Command):
    '''List the current Python source code, if any

    Use py-list START to list at a different line number within the python
    source.

    Use py-list START, END to list a specific range of lines within the
    python source.
    '''

    def __init__(self):
        gdb.Command.__init__ (self,
                              "py-list",
                              gdb.COMMAND_FILES,
                              gdb.COMPLETE_NONE)

    def invoke(self, args, from_tty):
        import re

        start = None
        end = None

        # "py-list START": group(0) includes surrounding whitespace, which
        # int() tolerates
        m = re.match(r'\s*(\d+)\s*', args)
        if m:
            start = int(m.group(0))
            end = start + 10

        # "py-list START, END" overrides the single-number form:
        m = re.match(r'\s*(\d+)\s*,\s*(\d+)\s*', args)
        if m:
            start, end = map(int, m.groups())

        frame = Frame.get_selected_python_frame()
        if not frame:
            print 'Unable to locate python frame'
            return

        pyop = frame.get_pyop()
        if not pyop:
            print 'Unable to read information on python frame'
            return

        filename = pyop.filename()
        lineno = pyop.current_line_num()

        # Default: a 11-line window centred on the current line
        if start is None:
            start = lineno - 5
            end = lineno + 5

        if start<1:
            start = 1

        with open(os_fsencode(filename), 'r') as f:
            all_lines = f.readlines()
            #  start and end are 1-based, all_lines is 0-based;
            # so [start-1:end] as a python slice gives us [start, end] as a
            # closed interval
            for i, line in enumerate(all_lines[start-1:end]):
                linestr = str(i+start)
                # Highlight current line:
                if i + start == lineno:
                    linestr = '>' + linestr
                sys.stdout.write('%4s %s' % (linestr, line))


# ...and register the command:
PyList()

def move_in_stack(move_up):
    '''Move up or down the stack (for the py-up/py-down command)'''
    frame = Frame.get_selected_python_frame()
    while frame:
        if move_up:
            iter_frame = frame.older()
        else:
            iter_frame = frame.newer()

        if not iter_frame:
            break

        if iter_frame.is_evalframeex():
            # Result:
            if iter_frame.select():
                iter_frame.print_summary()
            return

        frame = iter_frame

    if move_up:
        print 'Unable to find an older python frame'
    else:
        print 'Unable to find a newer python frame'


class PyUp(gdb.Command):
    'Select and print the python stack frame that called this one (if any)'
    def __init__(self):
        gdb.Command.__init__ (self,
                              "py-up",
                              gdb.COMMAND_STACK,
                              gdb.COMPLETE_NONE)

    def invoke(self, args, from_tty):
        move_in_stack(move_up=True)


class PyDown(gdb.Command):
    'Select and print the python stack frame called by this one (if any)'
    def __init__(self):
        gdb.Command.__init__ (self,
                              "py-down",
                              gdb.COMMAND_STACK,
                              gdb.COMPLETE_NONE)

    def invoke(self, args, from_tty):
        move_in_stack(move_up=False)

# Not all builds of gdb have gdb.Frame.select
if hasattr(gdb.Frame, 'select'):
    PyUp()
    PyDown()


class PyBacktrace(gdb.Command):
    'Display the current python frame and all the frames within its call stack (if any)'
    def __init__(self):
        gdb.Command.__init__ (self,
                              "py-bt",
                              gdb.COMMAND_STACK,
                              gdb.COMPLETE_NONE)

    def invoke(self, args, from_tty):
        # Print every PyEval_EvalFrameEx frame from here outwards:
        frame = Frame.get_selected_python_frame()
        while frame:
            if frame.is_evalframeex():
                frame.print_summary()
            frame = frame.older()

PyBacktrace()


class PyPrint(gdb.Command):
    'Look up the given python variable name, and print it'
    def __init__(self):
        gdb.Command.__init__ (self,
                              "py-print",
                              gdb.COMMAND_DATA,
                              gdb.COMPLETE_NONE)

    def invoke(self, args, from_tty):
        name = str(args)

        frame = Frame.get_selected_python_frame()
        if not frame:
            print 'Unable to locate python frame'
            return

        pyop_frame = frame.get_pyop()
        if not pyop_frame:
            print 'Unable to read information on python frame'
            return

        pyop_var, scope = pyop_frame.get_var_by_name(name)

        if pyop_var:
            print ('%s %r = %s'
                   % (scope,
                      name,
                      pyop_var.get_truncated_repr(MAX_OUTPUT_LEN)))
        else:
            print '%r not found' % name

PyPrint()


class PyLocals(gdb.Command):
    'Look up the given python variable name, and print it'

    def invoke(self, args, from_tty):
        name = str(args)

        frame = Frame.get_selected_python_frame()
        if not frame:
            print 'Unable to locate python frame'
            return

        pyop_frame = frame.get_pyop()
        if not pyop_frame:
            print 'Unable to read information on python frame'
            return

        namespace = self.get_namespace(pyop_frame)
        namespace = [(name.proxyval(set()), val) for name, val in namespace]

        if namespace:
            # Align values on the longest variable name:
            name, val = max(namespace, key=lambda (name, val): len(name))
            max_name_length = len(name)

            for name, pyop_value in namespace:
                value = pyop_value.get_truncated_repr(MAX_OUTPUT_LEN)
                print ('%-*s = %s' % (max_name_length, name, value))

    def get_namespace(self, pyop_frame):
        # Overridden by PyGlobals to switch namespaces
        return pyop_frame.iter_locals()


class PyGlobals(PyLocals):
    'List all the globals in the currently select Python frame'

    def get_namespace(self, pyop_frame):
        return pyop_frame.iter_globals()

PyLocals("py-locals", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
PyGlobals("py-globals", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)


class PyNameEquals(gdb.Function):
    '''gdb convenience function $pyname_equals("name"): true when the
    currently selected frame executes Python code with that co_name.'''

    def _get_pycurframe_attr(self, attr):
        # Fetch the given attribute (e.g. 'co_name') of the current Python
        # frame as a local proxy value, or None if unavailable:
        frame = Frame(gdb.selected_frame())
        if frame.is_evalframeex():
            pyframe = frame.get_pyop()
            if pyframe is None:
                warnings.warn("Use a Python debug build, Python breakpoints "
                              "won't work otherwise.")
                return None

            return getattr(pyframe, attr).proxyval(set())

        return None

    def invoke(self, funcname):
        attr = self._get_pycurframe_attr('co_name')
        return attr is not None and attr == funcname.string()

PyNameEquals("pyname_equals")


class PyModEquals(PyNameEquals):
    '''gdb convenience function $pymod_equals("mod"): true when the current
    Python frame's co_filename basename (sans extension) matches "mod".'''

    def invoke(self, modname):
        attr = self._get_pycurframe_attr('co_filename')
        if attr is not None:
            filename, ext = os.path.splitext(os.path.basename(attr))
            return filename == modname.string()
        return False

PyModEquals("pymod_equals")


class PyBreak(gdb.Command):
    """
    Set a Python breakpoint. Examples:

    Break on any function or method named 'func' in module 'modname'

        py-break modname.func

    Break on any function or method named 'func'

        py-break func
    """

    def invoke(self, funcname, from_tty):
        # Implemented as a conditional C breakpoint on the interpreter loop,
        # using the convenience functions registered above as the condition:
        if '.' in funcname:
            modname, dot, funcname = funcname.rpartition('.')
            cond = '$pyname_equals("%s") && $pymod_equals("%s")' % (funcname,
                                                                    modname)
        else:
            cond = '$pyname_equals("%s")' % funcname

        gdb.execute('break PyEval_EvalFrameEx if ' + cond)

PyBreak("py-break", gdb.COMMAND_RUNNING, gdb.COMPLETE_NONE)


class _LoggingState(object):
    """
    State that helps to provide a reentrant gdb.execute() function.
    """

    def __init__(self):
        # Capture gdb's logging output in a temp file; cleaned up at exit
        self.fd, self.filename = tempfile.mkstemp()
        self.file = os.fdopen(self.fd, 'r+')
        _execute("set logging file %s" % self.filename)
        self.file_position_stack = []

        atexit.register(os.close, self.fd)
        atexit.register(os.remove, self.filename)

    def __enter__(self):
        # Only the outermost entry toggles gdb's logging settings;
        # nested entries just record the current file position:
        if not self.file_position_stack:
            _execute("set logging redirect on")
            _execute("set logging on")
            _execute("set pagination off")

        self.file_position_stack.append(os.fstat(self.fd).st_size)
        return self

    def getoutput(self):
        '''Return everything logged since the innermost __enter__.'''
        gdb.flush()
        self.file.seek(self.file_position_stack[-1])
        result = self.file.read()
        return result

    def __exit__(self, exc_type, exc_val, tb):
        # Discard this nesting level's output and restore settings when the
        # outermost level exits:
        startpos = self.file_position_stack.pop()
        self.file.seek(startpos)
        self.file.truncate()
        if not self.file_position_stack:
            _execute("set logging off")
            _execute("set logging redirect off")
            _execute("set pagination on")


def execute(command, from_tty=False, to_string=False):
    """
    Replace gdb.execute() with this function and have it accept a 'to_string'
    argument (new in 7.2). Have it properly capture stderr also. Ensure
    reentrancy.
    """
    if to_string:
        with _logging_state as state:
            _execute(command, from_tty)
            return state.getoutput()
    else:
        _execute(command, from_tty)

# Monkey-patch gdb.execute, keeping the original as _execute:
_execute = gdb.execute
gdb.execute = execute
_logging_state = _LoggingState()


def get_selected_inferior():
    """
    Return the selected inferior in gdb.
    """
    # Woooh, another bug in gdb! Is there an end in sight?
    # http://sourceware.org/bugzilla/show_bug.cgi?id=12212
    # NOTE(review): this unconditional return short-circuits the thread-based
    # lookup below (apparently as a workaround for the gdb bug referenced
    # above); the remaining lines of this function are unreachable.
    return gdb.inferiors()[0]

    selected_thread = gdb.selected_thread()

    for inferior in gdb.inferiors():
        for thread in inferior.threads():
            if thread == selected_thread:
                return inferior


def source_gdb_script(script_contents, to_string=False):
    """
    Source a gdb script with script_contents passed as a string. This is useful
    to provide defines for py-step and py-next to make them repeatable (this is
    not possible with gdb.execute()). See
    http://sourceware.org/bugzilla/show_bug.cgi?id=12216
    """
    # Write to a temp file, "source" it, then clean up:
    fd, filename = tempfile.mkstemp()
    f = os.fdopen(fd, 'w')
    f.write(script_contents)
    f.close()
    gdb.execute("source %s" % filename, to_string=to_string)
    os.remove(filename)


def register_defines():
    '''Define repeatable py-step/py-next wrappers around the -py-step and
    -py-next commands, with their docstrings attached.'''
    source_gdb_script(textwrap.dedent("""\
        define py-step
        -py-step
        end

        define py-next
        -py-next
        end

        document py-step
        %s
        end

        document py-next
        %s
        end
    """) % (PyStep.__doc__, PyNext.__doc__))


def stackdepth(frame):
    "Tells the stackdepth of a gdb frame."
    depth = 0
    while frame:
        frame = frame.older()
        depth += 1

    return depth


class ExecutionControlCommandBase(gdb.Command):
    """
    Superclass for language specific execution control. Language specific
    features should be implemented by lang_info using the LanguageInfo
    interface. 'name' is the name of the command.
    """

    def __init__(self, name, lang_info):
        super(ExecutionControlCommandBase, self).__init__(
                                name, gdb.COMMAND_RUNNING, gdb.COMPLETE_NONE)
        self.lang_info = lang_info

    def install_breakpoints(self):
        # Set a breakpoint at every location the language deems relevant,
        # yielding the gdb breakpoint numbers for later deletion:
        all_locations = itertools.chain(
            self.lang_info.static_break_functions(),
            self.lang_info.runtime_break_functions())

        for location in all_locations:
            result = gdb.execute('break %s' % location, to_string=True)
            yield re.search(r'Breakpoint (\d+)', result).group(1)

    def delete_breakpoints(self, breakpoint_list):
        for bp in breakpoint_list:
            gdb.execute("delete %s" % bp)

    def filter_output(self, result):
        '''Split gdb output into (output_on_halt, output_always) strings.'''
        reflags = re.MULTILINE

        # Messages only interesting when the program halted:
        output_on_halt = [
            (r'^Program received signal .*', reflags|re.DOTALL),
            (r'.*[Ww]arning.*', 0),
            (r'^Program exited .*', reflags),
        ]

        output_always = [
            # output when halting on a watchpoint
            (r'^(Old|New) value = .*', reflags),
            # output from the 'display' command
            (r'^\d+: \w+ = .*', reflags),
        ]

        def filter_output(regexes):
            output = []
            for regex, flags in regexes:
                for match in re.finditer(regex, result, flags):
                    output.append(match.group(0))

            return '\n'.join(output)

        # Filter the return value output of the 'finish' command
        match_finish = re.search(r'^Value returned is \$\d+ = (.*)', result,
                                 re.MULTILINE)
        if match_finish:
            finish_output = 'Value returned: %s\n' % match_finish.group(1)
        else:
            finish_output = ''

        return (filter_output(output_on_halt),
                finish_output + filter_output(output_always))

    def stopped(self):
        # pid == 0 means the inferior has exited
        return get_selected_inferior().pid == 0

    def finish_executing(self, result):
        """
        After doing some kind of code running in the inferior, print the line
        of source code or the result of the last executed gdb command (passed
        in as the `result` argument).
        """
        output_on_halt, output_always = self.filter_output(result)

        if self.stopped():
            print output_always
            print output_on_halt
        else:
            frame = gdb.selected_frame()
            source_line = self.lang_info.get_source_line(frame)

            # Report any exception raised in the relevant language frame:
            if self.lang_info.is_relevant_function(frame):
                raised_exception = self.lang_info.exc_info(frame)
                if raised_exception:
                    print raised_exception

            if source_line:
                if output_always.rstrip():
                    print output_always.rstrip()
                print source_line
            else:
                print result

    def _finish(self):
        """
        Execute until the function returns (or until something else makes it
        stop)
        """
        if gdb.selected_frame().older() is not None:
            return gdb.execute('finish', to_string=True)
        else:
            # outermost frame, continue
            return gdb.execute('cont', to_string=True)

    def _finish_frame(self):
        """
        Execute until the function returns to a relevant caller.
        """
        while True:
            result = self._finish()

            try:
                frame = gdb.selected_frame()
            except RuntimeError:
                # No frame to inspect any more; stop finishing
                break

            hitbp = re.search(r'Breakpoint (\d+)', result)
            is_relevant = self.lang_info.is_relevant_function(frame)
            if hitbp or is_relevant or self.stopped():
                break

        return result

    def finish(self, *args):
        "Implements the finish command."
        result = self._finish_frame()
        self.finish_executing(result)

    def step(self, stepinto, stepover_command='next'):
        """
        Do a single step or step-over. Returns the result of the last gdb
        command that made execution stop.

        This implementation, for stepping, sets (conditional) breakpoints for
        all functions that are deemed relevant. It then does a step over until
        either something halts execution, or until the next line is reached.

        If, however, stepover_command is given, it should be a string gdb
        command that continues execution in some way. The idea is that the
        caller has set a (conditional) breakpoint or watchpoint that can work
        more efficiently than the step-over loop. For Python this means setting
        a watchpoint for f->f_lasti, which means we can then subsequently
        "finish" frames.
We want f->f_lasti instead of f->f_lineno, because the latter only works properly with local trace functions, see PyFrameObjectPtr.current_line_num and PyFrameObjectPtr.addr2line. """ if stepinto: breakpoint_list = list(self.install_breakpoints()) beginframe = gdb.selected_frame() if self.lang_info.is_relevant_function(beginframe): # If we start in a relevant frame, initialize stuff properly. If # we don't start in a relevant frame, the loop will halt # immediately. So don't call self.lang_info.lineno() as it may # raise for irrelevant frames. beginline = self.lang_info.lineno(beginframe) if not stepinto: depth = stackdepth(beginframe) newframe = beginframe while True: if self.lang_info.is_relevant_function(newframe): result = gdb.execute(stepover_command, to_string=True) else: result = self._finish_frame() if self.stopped(): break newframe = gdb.selected_frame() is_relevant_function = self.lang_info.is_relevant_function(newframe) try: framename = newframe.name() except RuntimeError: framename = None m = re.search(r'Breakpoint (\d+)', result) if m: if is_relevant_function and m.group(1) in breakpoint_list: # although we hit a breakpoint, we still need to check # that the function, in case hit by a runtime breakpoint, # is in the right context break if newframe != beginframe: # new function if not stepinto: # see if we returned to the caller newdepth = stackdepth(newframe) is_relevant_function = (newdepth < depth and is_relevant_function) if is_relevant_function: break else: # newframe equals beginframe, check for a difference in the # line number lineno = self.lang_info.lineno(newframe) if lineno and lineno != beginline: break if stepinto: self.delete_breakpoints(breakpoint_list) self.finish_executing(result) def run(self, args, from_tty): self.finish_executing(gdb.execute('run ' + args, to_string=True)) def cont(self, *args): self.finish_executing(gdb.execute('cont', to_string=True)) class LanguageInfo(object): """ This class defines the interface that 
ExecutionControlCommandBase needs to provide language-specific execution control. Classes that implement this interface should implement: lineno(frame) Tells the current line number (only called for a relevant frame). If lineno is a false value it is not checked for a difference. is_relevant_function(frame) tells whether we care about frame 'frame' get_source_line(frame) get the line of source code for the current line (only called for a relevant frame). If the source code cannot be retrieved this function should return None exc_info(frame) -- optional tells whether an exception was raised, if so, it should return a string representation of the exception value, None otherwise. static_break_functions() returns an iterable of function names that are considered relevant and should halt step-into execution. This is needed to provide a performing step-into runtime_break_functions() -- optional list of functions that we should break into depending on the context """ def exc_info(self, frame): "See this class' docstring." def runtime_break_functions(self): """ Implement this if the list of step-into functions depends on the context. 
""" return () class PythonInfo(LanguageInfo): def pyframe(self, frame): pyframe = Frame(frame).get_pyop() if pyframe: return pyframe else: raise gdb.RuntimeError( "Unable to find the Python frame, run your code with a debug " "build (configure with --with-pydebug or compile with -g).") def lineno(self, frame): return self.pyframe(frame).current_line_num() def is_relevant_function(self, frame): return Frame(frame).is_evalframeex() def get_source_line(self, frame): try: pyframe = self.pyframe(frame) return '%4d %s' % (pyframe.current_line_num(), pyframe.current_line().rstrip()) except IOError, e: return None def exc_info(self, frame): try: tstate = frame.read_var('tstate').dereference() if gdb.parse_and_eval('tstate->frame == f'): # tstate local variable initialized, check for an exception inf_type = tstate['curexc_type'] inf_value = tstate['curexc_value'] if inf_type: return 'An exception was raised: %s' % (inf_value,) except (ValueError, RuntimeError), e: # Could not read the variable tstate or it's memory, it's ok pass def static_break_functions(self): yield 'PyEval_EvalFrameEx' class PythonStepperMixin(object): """ Make this a mixin so CyStep can also inherit from this and use a CythonCodeStepper at the same time. """ def python_step(self, stepinto): """ Set a watchpoint on the Python bytecode instruction pointer and try to finish the frame """ output = gdb.execute('watch f->f_lasti', to_string=True) watchpoint = int(re.search(r'[Ww]atchpoint (\d+):', output).group(1)) self.step(stepinto=stepinto, stepover_command='finish') gdb.execute('delete %s' % watchpoint) class PyStep(ExecutionControlCommandBase, PythonStepperMixin): "Step through Python code." stepinto = True def invoke(self, args, from_tty): self.python_step(stepinto=self.stepinto) class PyNext(PyStep): "Step-over Python code." stepinto = False class PyFinish(ExecutionControlCommandBase): "Execute until function returns to a caller." 
invoke = ExecutionControlCommandBase.finish class PyRun(ExecutionControlCommandBase): "Run the program." invoke = ExecutionControlCommandBase.run class PyCont(ExecutionControlCommandBase): invoke = ExecutionControlCommandBase.cont def _pointervalue(gdbval): """ Return the value of the pionter as a Python int. gdbval.type must be a pointer type """ # don't convert with int() as it will raise a RuntimeError if gdbval.address is not None: return long(gdbval.address) else: # the address attribute is None sometimes, in which case we can # still convert the pointer to an int return long(gdbval) def pointervalue(gdbval): pointer = _pointervalue(gdbval) try: if pointer < 0: raise gdb.GdbError("Negative pointer value, presumably a bug " "in gdb, aborting.") except RuntimeError: # work around yet another bug in gdb where you get random behaviour # and tracebacks pass return pointer def get_inferior_unicode_postfix(): try: gdb.parse_and_eval('PyUnicode_FromEncodedObject') except RuntimeError: try: gdb.parse_and_eval('PyUnicodeUCS2_FromEncodedObject') except RuntimeError: return 'UCS4' else: return 'UCS2' else: return '' class PythonCodeExecutor(object): Py_single_input = 256 Py_file_input = 257 Py_eval_input = 258 def malloc(self, size): chunk = (gdb.parse_and_eval("(void *) malloc((size_t) %d)" % size)) pointer = pointervalue(chunk) if pointer == 0: raise gdb.GdbError("No memory could be allocated in the inferior.") return pointer def alloc_string(self, string): pointer = self.malloc(len(string)) get_selected_inferior().write_memory(pointer, string) return pointer def alloc_pystring(self, string): stringp = self.alloc_string(string) PyString_FromStringAndSize = 'PyString_FromStringAndSize' try: gdb.parse_and_eval(PyString_FromStringAndSize) except RuntimeError: # Python 3 PyString_FromStringAndSize = ('PyUnicode%s_FromStringAndSize' % (get_inferior_unicode_postfix(),)) try: result = gdb.parse_and_eval( '(PyObject *) %s((char *) %d, (size_t) %d)' % ( 
PyString_FromStringAndSize, stringp, len(string))) finally: self.free(stringp) pointer = pointervalue(result) if pointer == 0: raise gdb.GdbError("Unable to allocate Python string in " "the inferior.") return pointer def free(self, pointer): gdb.parse_and_eval("free((void *) %d)" % pointer) def incref(self, pointer): "Increment the reference count of a Python object in the inferior." gdb.parse_and_eval('Py_IncRef((PyObject *) %d)' % pointer) def xdecref(self, pointer): "Decrement the reference count of a Python object in the inferior." # Py_DecRef is like Py_XDECREF, but a function. So we don't have # to check for NULL. This should also decref all our allocated # Python strings. gdb.parse_and_eval('Py_DecRef((PyObject *) %d)' % pointer) def evalcode(self, code, input_type, global_dict=None, local_dict=None): """ Evaluate python code `code` given as a string in the inferior and return the result as a gdb.Value. Returns a new reference in the inferior. Of course, executing any code in the inferior may be dangerous and may leave the debuggee in an unsafe state or terminate it alltogether. """ if '\0' in code: raise gdb.GdbError("String contains NUL byte.") code += '\0' pointer = self.alloc_string(code) globalsp = pointervalue(global_dict) localsp = pointervalue(local_dict) if globalsp == 0 or localsp == 0: raise gdb.GdbError("Unable to obtain or create locals or globals.") code = """ PyRun_String( (char *) %(code)d, (int) %(start)d, (PyObject *) %(globals)s, (PyObject *) %(locals)d) """ % dict(code=pointer, start=input_type, globals=globalsp, locals=localsp) with FetchAndRestoreError(): try: pyobject_return_value = gdb.parse_and_eval(code) finally: self.free(pointer) return pyobject_return_value class FetchAndRestoreError(PythonCodeExecutor): """ Context manager that fetches the error indicator in the inferior and restores it on exit. 
""" def __init__(self): self.sizeof_PyObjectPtr = gdb.lookup_type('PyObject').pointer().sizeof self.pointer = self.malloc(self.sizeof_PyObjectPtr * 3) type = self.pointer value = self.pointer + self.sizeof_PyObjectPtr traceback = self.pointer + self.sizeof_PyObjectPtr * 2 self.errstate = type, value, traceback def __enter__(self): gdb.parse_and_eval("PyErr_Fetch(%d, %d, %d)" % self.errstate) def __exit__(self, *args): if gdb.parse_and_eval("(int) PyErr_Occurred()"): gdb.parse_and_eval("PyErr_Print()") pyerr_restore = ("PyErr_Restore(" "(PyObject *) *%d," "(PyObject *) *%d," "(PyObject *) *%d)") try: gdb.parse_and_eval(pyerr_restore % self.errstate) finally: self.free(self.pointer) class FixGdbCommand(gdb.Command): def __init__(self, command, actual_command): super(FixGdbCommand, self).__init__(command, gdb.COMMAND_DATA, gdb.COMPLETE_NONE) self.actual_command = actual_command def fix_gdb(self): """ It seems that invoking either 'cy exec' and 'py-exec' work perfectly fine, but after this gdb's python API is entirely broken. Maybe some uncleared exception value is still set? sys.exc_clear() didn't help. A demonstration: (gdb) cy exec 'hello' 'hello' (gdb) python gdb.execute('cont') RuntimeError: Cannot convert value to int. Error while executing Python code. (gdb) python gdb.execute('cont') [15148 refs] Program exited normally. """ warnings.filterwarnings('ignore', r'.*', RuntimeWarning, re.escape(__name__)) try: long(gdb.parse_and_eval("(void *) 0")) == 0 except RuntimeError: pass # warnings.resetwarnings() def invoke(self, args, from_tty): self.fix_gdb() try: gdb.execute('%s %s' % (self.actual_command, args)) except RuntimeError, e: raise gdb.GdbError(str(e)) self.fix_gdb() def _evalcode_python(executor, code, input_type): """ Execute Python code in the most recent stack frame. 
""" global_dict = gdb.parse_and_eval('PyEval_GetGlobals()') local_dict = gdb.parse_and_eval('PyEval_GetLocals()') if (pointervalue(global_dict) == 0 or pointervalue(local_dict) == 0): raise gdb.GdbError("Unable to find the locals or globals of the " "most recent Python function (relative to the " "selected frame).") return executor.evalcode(code, input_type, global_dict, local_dict) class PyExec(gdb.Command): def readcode(self, expr): if expr: return expr, PythonCodeExecutor.Py_single_input else: lines = [] while True: try: line = raw_input('>') except EOFError: break else: if line.rstrip() == 'end': break lines.append(line) return '\n'.join(lines), PythonCodeExecutor.Py_file_input def invoke(self, expr, from_tty): expr, input_type = self.readcode(expr) executor = PythonCodeExecutor() executor.xdecref(_evalcode_python(executor, input_type, global_dict, local_dict)) gdb.execute('set breakpoint pending on') if hasattr(gdb, 'GdbError'): # Wrap py-step and py-next in gdb defines to make them repeatable. py_step = PyStep('-py-step', PythonInfo()) py_next = PyNext('-py-next', PythonInfo()) register_defines() py_finish = PyFinish('py-finish', PythonInfo()) py_run = PyRun('py-run', PythonInfo()) py_cont = PyCont('py-cont', PythonInfo()) py_exec = FixGdbCommand('py-exec', '-py-exec') _py_exec = PyExec("-py-exec", gdb.COMMAND_DATA, gdb.COMPLETE_NONE) else: warnings.warn("Use gdb 7.2 or higher to use the py-exec command.")
bsd-3-clause
SOM-st/RTruffleSOM
tests/basic_interpreter_test.py
2
5852
import unittest import sys from parameterized import parameterized from som.compiler.parse_error import ParseError from som.vm.universe import create_universe from som.vmobjects.clazz import Class from som.vmobjects.double import Double from som.vmobjects.integer import Integer from som.vmobjects.symbol import Symbol class BasicInterpreterTest(unittest.TestCase): @parameterized.expand([ # ("Self", "testAssignSuper", 42, ParseError), # ("Self", "testAssignSelf", 42, ParseError), ("MethodCall", "test", 42, Integer), ("MethodCall", "test2", 42, Integer), ("NonLocalReturn", "test1", 42, Integer), ("NonLocalReturn", "test2", 43, Integer), ("NonLocalReturn", "test3", 3, Integer), ("NonLocalReturn", "test4", 42, Integer), ("NonLocalReturn", "test5", 22, Integer), ("Blocks", "testArg1", 42, Integer), ("Blocks", "testArg2", 77, Integer), ("Blocks", "testArgAndLocal", 8, Integer), ("Blocks", "testArgAndContext", 8, Integer), ("Blocks", "testEmptyZeroArg", 1, Integer), ("Blocks", "testEmptyOneArg", 1, Integer), ("Blocks", "testEmptyTwoArg", 1, Integer), ("Return", "testReturnSelf", "Return", Class), ("Return", "testReturnSelfImplicitly", "Return", Class), ("Return", "testNoReturnReturnsSelf", "Return", Class), ("Return", "testBlockReturnsImplicitlyLastValue", 4, Integer), ("IfTrueIfFalse", "test", 42, Integer), ("IfTrueIfFalse", "test2", 33, Integer), ("IfTrueIfFalse", "test3", 4, Integer), ("CompilerSimplification", "testReturnConstantSymbol", "constant", Symbol), ("CompilerSimplification", "testReturnConstantInt", 42, Integer), ("CompilerSimplification", "testReturnSelf", "CompilerSimplification", Class), ("CompilerSimplification", "testReturnSelfImplicitly", "CompilerSimplification", Class), ("CompilerSimplification", "testReturnArgumentN", 55, Integer), ("CompilerSimplification", "testReturnArgumentA", 44, Integer), ("CompilerSimplification", "testSetField", "foo", Symbol), ("CompilerSimplification", "testGetField", 40, Integer), ("Hash", "testHash", 444, Integer), 
("Arrays", "testEmptyToInts", 3, Integer), ("Arrays", "testPutAllInt", 5, Integer), ("Arrays", "testPutAllNil", "Nil", Class), ("Arrays", "testPutAllBlock", 3, Integer), ("Arrays", "testNewWithAll", 1, Integer), ("BlockInlining", "testNoInlining", 1, Integer), ("BlockInlining", "testOneLevelInlining", 1, Integer), ("BlockInlining", "testOneLevelInliningWithLocalShadowTrue", 2, Integer), ("BlockInlining", "testOneLevelInliningWithLocalShadowFalse", 1, Integer), ("BlockInlining", "testBlockNestedInIfTrue", 2, Integer), ("BlockInlining", "testBlockNestedInIfFalse", 42, Integer), ("BlockInlining", "testDeepNestedInlinedIfTrue", 3, Integer), ("BlockInlining", "testDeepNestedInlinedIfFalse", 42, Integer), ("BlockInlining", "testDeepNestedBlocksInInlinedIfTrue", 5, Integer), ("BlockInlining", "testDeepNestedBlocksInInlinedIfFalse", 43, Integer), ("BlockInlining", "testDeepDeepNestedTrue", 9, Integer), ("BlockInlining", "testDeepDeepNestedFalse", 43, Integer), ("BlockInlining", "testToDoNestDoNestIfTrue", 2, Integer), ("NonLocalVars", "testWriteDifferentTypes", 3.75, Double), ("ObjectCreation", "test", 1000000, Integer), ("Regressions", "testSymbolEquality", 1, Integer), ("Regressions", "testSymbolReferenceEquality", 1, Integer), ("Regressions", "testUninitializedLocal", 1, Integer), ("Regressions", "testUninitializedLocalInBlock", 1, Integer), ("BinaryOperation", "test", 3 + 8, Integer), ("NumberOfTests", "numberOfTests", 57, Integer), ]) def test_basic_interpreter_behavior(self, test_class, test_selector, expected_result, result_type): u = create_universe() u.setup_classpath("Smalltalk:TestSuite/BasicInterpreterTests") try: actual_result = u.execute_method(test_class, test_selector) self._assert_equals_SOM_value(expected_result, actual_result, result_type) except ParseError as e: # if we expect a ParseError, then all is fine, otherwise re-raise it if result_type is not ParseError: raise e def _assert_equals_SOM_value(self, expected_result, actual_result, result_type): if 
result_type is Integer: self.assertEquals(expected_result, actual_result.get_embedded_integer()) return if result_type is Double: self.assertEquals(expected_result, actual_result.get_embedded_double()) return if result_type is Class: self.assertEquals(expected_result, actual_result.get_name().get_embedded_string()) return if result_type is Symbol: self.assertEquals(expected_result, actual_result.get_embedded_string()) return self.fail("SOM Value handler missing: " + str(result_type)) if 'pytest' in sys.modules: # hack to make pytest not to collect the unexpanded test method delattr(BasicInterpreterTest, "test_basic_interpreter_behavior")
mit
biswajitsahu/kuma
vendor/packages/pkg_resources/_vendor/packaging/specifiers.py
101
27684
# Copyright 2014 Donald Stufft # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import, division, print_function import abc import functools import itertools import re from ._compat import string_types, with_metaclass from .version import Version, LegacyVersion, parse class InvalidSpecifier(ValueError): """ An invalid specifier was found, users should refer to PEP 440. """ class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): @abc.abstractmethod def __str__(self): """ Returns the str representation of this Specifier like object. This should be representative of the Specifier itself. """ @abc.abstractmethod def __hash__(self): """ Returns a hash value for this Specifier like object. """ @abc.abstractmethod def __eq__(self, other): """ Returns a boolean representing whether or not the two Specifier like objects are equal. """ @abc.abstractmethod def __ne__(self, other): """ Returns a boolean representing whether or not the two Specifier like objects are not equal. """ @abc.abstractproperty def prereleases(self): """ Returns whether or not pre-releases as a whole are allowed by this specifier. """ @prereleases.setter def prereleases(self, value): """ Sets whether or not pre-releases as a whole are allowed by this specifier. """ @abc.abstractmethod def contains(self, item, prereleases=None): """ Determines if the given item is contained within this specifier. 
""" @abc.abstractmethod def filter(self, iterable, prereleases=None): """ Takes an iterable of items and filters them so that only items which are contained within this specifier are allowed in it. """ class _IndividualSpecifier(BaseSpecifier): _operators = {} def __init__(self, spec="", prereleases=None): match = self._regex.search(spec) if not match: raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) self._spec = ( match.group("operator").strip(), match.group("version").strip(), ) # Store whether or not this Specifier should accept prereleases self._prereleases = prereleases def __repr__(self): pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None else "" ) return "<{0}({1!r}{2})>".format( self.__class__.__name__, str(self), pre, ) def __str__(self): return "{0}{1}".format(*self._spec) def __hash__(self): return hash(self._spec) def __eq__(self, other): if isinstance(other, string_types): try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented return self._spec == other._spec def __ne__(self, other): if isinstance(other, string_types): try: other = self.__class__(other) except InvalidSpecifier: return NotImplemented elif not isinstance(other, self.__class__): return NotImplemented return self._spec != other._spec def _get_operator(self, op): return getattr(self, "_compare_{0}".format(self._operators[op])) def _coerce_version(self, version): if not isinstance(version, (LegacyVersion, Version)): version = parse(version) return version @property def prereleases(self): return self._prereleases @prereleases.setter def prereleases(self, value): self._prereleases = value def contains(self, item, prereleases=None): # Determine if prereleases are to be allowed or not. 
if prereleases is None: prereleases = self.prereleases # Normalize item to a Version or LegacyVersion, this allows us to have # a shortcut for ``"2.0" in Specifier(">=2") item = self._coerce_version(item) # Determine if we should be supporting prereleases in this specifier # or not, if we do not support prereleases than we can short circuit # logic if this version is a prereleases. if item.is_prerelease and not prereleases: return False # Actually do the comparison to determine if this item is contained # within this Specifier or not. return self._get_operator(self._spec[0])(item, self._spec[1]) def filter(self, iterable, prereleases=None): yielded = False found_prereleases = [] kw = {"prereleases": prereleases if prereleases is not None else True} # Attempt to iterate over all the values in the iterable and if any of # them match, yield them. for version in iterable: parsed_version = self._coerce_version(version) if self.contains(parsed_version, **kw): # If our version is a prerelease, and we were not set to allow # prereleases, then we'll store it for later incase nothing # else matches this specifier. if (parsed_version.is_prerelease and not (prereleases or self.prereleases)): found_prereleases.append(version) # Either this is not a prerelease, or we should have been # accepting prereleases from the begining. else: yielded = True yield version # Now that we've iterated over everything, determine if we've yielded # any values, and if we have not and we have any prereleases stored up # then we will go ahead and yield the prereleases. if not yielded and found_prereleases: for version in found_prereleases: yield version class LegacySpecifier(_IndividualSpecifier): _regex = re.compile( r""" ^ \s* (?P<operator>(==|!=|<=|>=|<|>)) \s* (?P<version> [^\s]* # We just match everything, except for whitespace since this # is a "legacy" specifier and the version string can be just # about anything. 
) \s* $ """, re.VERBOSE | re.IGNORECASE, ) _operators = { "==": "equal", "!=": "not_equal", "<=": "less_than_equal", ">=": "greater_than_equal", "<": "less_than", ">": "greater_than", } def _coerce_version(self, version): if not isinstance(version, LegacyVersion): version = LegacyVersion(str(version)) return version def _compare_equal(self, prospective, spec): return prospective == self._coerce_version(spec) def _compare_not_equal(self, prospective, spec): return prospective != self._coerce_version(spec) def _compare_less_than_equal(self, prospective, spec): return prospective <= self._coerce_version(spec) def _compare_greater_than_equal(self, prospective, spec): return prospective >= self._coerce_version(spec) def _compare_less_than(self, prospective, spec): return prospective < self._coerce_version(spec) def _compare_greater_than(self, prospective, spec): return prospective > self._coerce_version(spec) def _require_version_compare(fn): @functools.wraps(fn) def wrapped(self, prospective, spec): if not isinstance(prospective, Version): return False return fn(self, prospective, spec) return wrapped class Specifier(_IndividualSpecifier): _regex = re.compile( r""" ^ \s* (?P<operator>(~=|==|!=|<=|>=|<|>|===)) (?P<version> (?: # The identity operators allow for an escape hatch that will # do an exact string match of the version you wish to install. # This will not be parsed by PEP 440 and we cannot determine # any semantic meaning from it. This operator is discouraged # but included entirely as an escape hatch. (?<====) # Only match for the identity operator \s* [^\s]* # We just match everything, except for whitespace # since we are only testing for strict identity. ) | (?: # The (non)equality operators allow for wild card and local # versions to be specified so we have to define these two # operators separately to enable that. (?<===|!=) # Only match for equals and not equals \s* v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)* # release (?: # pre release [-_\.]? 
(a|b|c|rc|alpha|beta|pre|preview) [-_\.]? [0-9]* )? (?: # post release (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) )? # You cannot use a wild card and a dev or local version # together so group them with a | and make them optional. (?: (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local | \.\* # Wild card syntax of .* )? ) | (?: # The compatible operator requires at least two digits in the # release segment. (?<=~=) # Only match for the compatible operator \s* v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) (?: # pre release [-_\.]? (a|b|c|rc|alpha|beta|pre|preview) [-_\.]? [0-9]* )? (?: # post release (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) )? (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release ) | (?: # All other operators only allow a sub set of what the # (non)equality operators do. Specifically they do not allow # local versions to be specified nor do they allow the prefix # matching wild cards. (?<!==|!=|~=) # We have special cases for these # operators so we want to make sure they # don't match here. \s* v? (?:[0-9]+!)? # epoch [0-9]+(?:\.[0-9]+)* # release (?: # pre release [-_\.]? (a|b|c|rc|alpha|beta|pre|preview) [-_\.]? [0-9]* )? (?: # post release (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) )? (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release ) ) \s* $ """, re.VERBOSE | re.IGNORECASE, ) _operators = { "~=": "compatible", "==": "equal", "!=": "not_equal", "<=": "less_than_equal", ">=": "greater_than_equal", "<": "less_than", ">": "greater_than", "===": "arbitrary", } @_require_version_compare def _compare_compatible(self, prospective, spec): # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to # implement this in terms of the other specifiers instead of # implementing it ourselves. The only thing we need to do is construct # the other specifiers. 
# We want everything but the last item in the version, but we want to # ignore post and dev releases and we want to treat the pre-release as # it's own separate segment. prefix = ".".join( list( itertools.takewhile( lambda x: (not x.startswith("post") and not x.startswith("dev")), _version_split(spec), ) )[:-1] ) # Add the prefix notation to the end of our string prefix += ".*" return (self._get_operator(">=")(prospective, spec) and self._get_operator("==")(prospective, prefix)) @_require_version_compare def _compare_equal(self, prospective, spec): # We need special logic to handle prefix matching if spec.endswith(".*"): # Split the spec out by dots, and pretend that there is an implicit # dot in between a release segment and a pre-release segment. spec = _version_split(spec[:-2]) # Remove the trailing .* # Split the prospective version out by dots, and pretend that there # is an implicit dot in between a release segment and a pre-release # segment. prospective = _version_split(str(prospective)) # Shorten the prospective version to be the same length as the spec # so that we can determine if the specifier is a prefix of the # prospective version or not. prospective = prospective[:len(spec)] # Pad out our two sides with zeros so that they both equal the same # length. spec, prospective = _pad_version(spec, prospective) else: # Convert our spec string into a Version spec = Version(spec) # If the specifier does not have a local segment, then we want to # act as if the prospective version also does not have a local # segment. 
if not spec.local: prospective = Version(prospective.public) return prospective == spec @_require_version_compare def _compare_not_equal(self, prospective, spec): return not self._compare_equal(prospective, spec) @_require_version_compare def _compare_less_than_equal(self, prospective, spec): return prospective <= Version(spec) @_require_version_compare def _compare_greater_than_equal(self, prospective, spec): return prospective >= Version(spec) @_require_version_compare def _compare_less_than(self, prospective, spec): # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec) # Check to see if the prospective version is less than the spec # version. If it's not we can short circuit and just return False now # instead of doing extra unneeded work. if not prospective < spec: return False # This special case is here so that, unless the specifier itself # includes is a pre-release version, that we do not accept pre-release # versions for the version mentioned in the specifier (e.g. <3.1 should # not match 3.1.dev0, but should match 3.0.dev0). if not spec.is_prerelease and prospective.is_prerelease: if Version(prospective.base_version) == Version(spec.base_version): return False # If we've gotten to here, it means that prospective version is both # less than the spec version *and* it's not a pre-release of the same # version in the spec. return True @_require_version_compare def _compare_greater_than(self, prospective, spec): # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec) # Check to see if the prospective version is greater than the spec # version. If it's not we can short circuit and just return False now # instead of doing extra unneeded work. 
if not prospective > spec: return False # This special case is here so that, unless the specifier itself # includes is a post-release version, that we do not accept # post-release versions for the version mentioned in the specifier # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). if not spec.is_postrelease and prospective.is_postrelease: if Version(prospective.base_version) == Version(spec.base_version): return False # Ensure that we do not allow a local version of the version mentioned # in the specifier, which is techincally greater than, to match. if prospective.local is not None: if Version(prospective.base_version) == Version(spec.base_version): return False # If we've gotten to here, it means that prospective version is both # greater than the spec version *and* it's not a pre-release of the # same version in the spec. return True def _compare_arbitrary(self, prospective, spec): return str(prospective).lower() == str(spec).lower() @property def prereleases(self): # If there is an explicit prereleases set for this, then we'll just # blindly use that. if self._prereleases is not None: return self._prereleases # Look at all of our specifiers and determine if they are inclusive # operators, and if they are if they are including an explicit # prerelease. operator, version = self._spec if operator in ["==", ">=", "<=", "~="]: # The == specifier can include a trailing .*, if it does we # want to remove before parsing. if operator == "==" and version.endswith(".*"): version = version[:-2] # Parse the version, and if it is a pre-release than this # specifier allows pre-releases. 
if parse(version).is_prerelease: return True return False @prereleases.setter def prereleases(self, value): self._prereleases = value _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") def _version_split(version): result = [] for item in version.split("."): match = _prefix_regex.search(item) if match: result.extend(match.groups()) else: result.append(item) return result def _pad_version(left, right): left_split, right_split = [], [] # Get the release segment of our versions left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) # Get the rest of our versions left_split.append(left[len(left_split):]) right_split.append(left[len(right_split):]) # Insert our padding left_split.insert( 1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])), ) right_split.insert( 1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])), ) return ( list(itertools.chain(*left_split)), list(itertools.chain(*right_split)), ) class SpecifierSet(BaseSpecifier): def __init__(self, specifiers="", prereleases=None): # Split on , to break each indidivual specifier into it's own item, and # strip each item to remove leading/trailing whitespace. specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] # Parsed each individual specifier, attempting first to make it a # Specifier and falling back to a LegacySpecifier. parsed = set() for specifier in specifiers: try: parsed.add(Specifier(specifier)) except InvalidSpecifier: parsed.add(LegacySpecifier(specifier)) # Turn our parsed specifiers into a frozen set and save them for later. self._specs = frozenset(parsed) # Store our prereleases value so we can use it later to determine if # we accept prereleases or not. 
self._prereleases = prereleases def __repr__(self): pre = ( ", prereleases={0!r}".format(self.prereleases) if self._prereleases is not None else "" ) return "<SpecifierSet({0!r}{1})>".format(str(self), pre) def __str__(self): return ",".join(sorted(str(s) for s in self._specs)) def __hash__(self): return hash(self._specs) def __and__(self, other): if isinstance(other, string_types): other = SpecifierSet(other) elif not isinstance(other, SpecifierSet): return NotImplemented specifier = SpecifierSet() specifier._specs = frozenset(self._specs | other._specs) if self._prereleases is None and other._prereleases is not None: specifier._prereleases = other._prereleases elif self._prereleases is not None and other._prereleases is None: specifier._prereleases = self._prereleases elif self._prereleases == other._prereleases: specifier._prereleases = self._prereleases else: raise ValueError( "Cannot combine SpecifierSets with True and False prerelease " "overrides." ) return specifier def __eq__(self, other): if isinstance(other, string_types): other = SpecifierSet(other) elif isinstance(other, _IndividualSpecifier): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented return self._specs == other._specs def __ne__(self, other): if isinstance(other, string_types): other = SpecifierSet(other) elif isinstance(other, _IndividualSpecifier): other = SpecifierSet(str(other)) elif not isinstance(other, SpecifierSet): return NotImplemented return self._specs != other._specs @property def prereleases(self): # If we have been given an explicit prerelease modifier, then we'll # pass that through here. if self._prereleases is not None: return self._prereleases # If we don't have any specifiers, and we don't have a forced value, # then we'll just return None since we don't know if this should have # pre-releases or not. 
if not self._specs: return None # Otherwise we'll see if any of the given specifiers accept # prereleases, if any of them do we'll return True, otherwise False. return any(s.prereleases for s in self._specs) @prereleases.setter def prereleases(self, value): self._prereleases = value def contains(self, item, prereleases=None): # Ensure that our item is a Version or LegacyVersion instance. if not isinstance(item, (LegacyVersion, Version)): item = parse(item) # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. if prereleases is None: prereleases = self.prereleases # We can determine if we're going to allow pre-releases by looking to # see if any of the underlying items supports them. If none of them do # and this item is a pre-release then we do not allow it and we can # short circuit that here. # Note: This means that 1.0.dev1 would not be contained in something # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 if not prereleases and item.is_prerelease: return False # We simply dispatch to the underlying specs here to make sure that the # given version is contained within all of them. # Note: This use of all() here means that an empty set of specifiers # will always return True, this is an explicit design decision. return all( s.contains(item, prereleases=prereleases) for s in self._specs ) def filter(self, iterable, prereleases=None): # Determine if we're forcing a prerelease or not, if we're not forcing # one for this particular filter call, then we'll use whatever the # SpecifierSet thinks for whether or not we should support prereleases. if prereleases is None: prereleases = self.prereleases # If we have any specifiers, then we want to wrap our iterable in the # filter method for each one, this will act as a logical AND amongst # each specifier. 
if self._specs: for spec in self._specs: iterable = spec.filter(iterable, prereleases=bool(prereleases)) return iterable # If we do not have any specifiers, then we need to have a rough filter # which will filter out any pre-releases, unless there are no final # releases, and which will filter out LegacyVersion in general. else: filtered = [] found_prereleases = [] for item in iterable: # Ensure that we some kind of Version class for this item. if not isinstance(item, (LegacyVersion, Version)): parsed_version = parse(item) else: parsed_version = item # Filter out any item which is parsed as a LegacyVersion if isinstance(parsed_version, LegacyVersion): continue # Store any item which is a pre-release for later unless we've # already found a final version or we are accepting prereleases if parsed_version.is_prerelease and not prereleases: if not filtered: found_prereleases.append(item) else: filtered.append(item) # If we've found no items except for pre-releases, then we'll go # ahead and use the pre-releases if not filtered and found_prereleases and prereleases is None: return found_prereleases return filtered
mpl-2.0
xodus7/tensorflow
tensorflow/contrib/gan/python/features/python/conditioning_utils_impl.py
28
3817
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Miscellaneous utilities for TFGAN code and examples.

Includes:
1) Conditioning the value of a Tensor, based on techniques from
   https://arxiv.org/abs/1609.03499.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib.layers.python.layers import layers
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope


__all__ = [
    'condition_tensor',
    'condition_tensor_from_onehot',
]


def _get_shape(tensor):
  """Return the shape of `tensor`, preferring a static (constant) value.

  Falls back to the dynamic `tf.shape` op output when the shape cannot be
  resolved at graph-construction time.
  """
  tensor_shape = array_ops.shape(tensor)
  # constant_value returns None when the shape is only known at run time.
  static_tensor_shape = tensor_util.constant_value(tensor_shape)
  return (static_tensor_shape if static_tensor_shape is not None else
          tensor_shape)


def condition_tensor(tensor, conditioning):
  """Condition the value of a tensor.

  Conditioning scheme based on https://arxiv.org/abs/1609.03499: the
  conditioning vector is linearly projected to the same number of (non-batch)
  elements as `tensor` and added element-wise.

  Args:
    tensor: A minibatch tensor to be conditioned. Non-batch dimensions must be
      statically known.
    conditioning: A minibatch Tensor of to condition on. Must be 2D, with first
      dimension the same as `tensor`.

  Returns:
    `tensor` conditioned on `conditioning`.

  Raises:
    ValueError: If the non-batch dimensions of `tensor` aren't fully defined.
    ValueError: If `conditioning` isn't at least 2D.
    ValueError: If the batch dimension for the input Tensors don't match.
  """
  tensor.shape[1:].assert_is_fully_defined()
  num_features = tensor.shape[1:].num_elements()

  # Project the (flattened) conditioning to one value per element of `tensor`.
  mapped_conditioning = layers.linear(
      layers.flatten(conditioning), num_features)
  # The linear layer produces a rank-2 result; reshape to match `tensor`
  # before the element-wise add when the shapes aren't already compatible.
  if not mapped_conditioning.shape.is_compatible_with(tensor.shape):
    mapped_conditioning = array_ops.reshape(
        mapped_conditioning, _get_shape(tensor))
  return tensor + mapped_conditioning


def _one_hot_to_embedding(one_hot, embedding_size):
  """Get a dense embedding vector from a one-hot encoding.

  Creates (or reuses, under variable scoping) an `embedding` variable of
  shape [num_classes, embedding_size] and looks up the row selected by the
  argmax of each one-hot label.
  """
  num_tokens = one_hot.shape[1]
  label_id = math_ops.argmax(one_hot, axis=1)
  embedding = variable_scope.get_variable(
      'embedding', [num_tokens, embedding_size])
  return embedding_ops.embedding_lookup(
      embedding, label_id, name='token_to_embedding')


def _validate_onehot(one_hot_labels):
  """Raise if `one_hot_labels` isn't rank 2 with a static class dimension."""
  one_hot_labels.shape.assert_has_rank(2)
  one_hot_labels.shape[1:].assert_is_fully_defined()


def condition_tensor_from_onehot(tensor, one_hot_labels, embedding_size=256):
  """Condition a tensor based on a one-hot tensor.

  Conditioning scheme based on https://arxiv.org/abs/1609.03499.

  Args:
    tensor: Tensor to be conditioned.
    one_hot_labels: A Tensor of one-hot labels. Shape is
      [batch_size, num_classes].
    embedding_size: The size of the class embedding.

  Returns:
    `tensor` conditioned on `one_hot_labels`.

  Raises:
    ValueError: `one_hot_labels` isn't 2D, if non-batch dimensions aren't
      fully defined, or if batch sizes don't match.
  """
  _validate_onehot(one_hot_labels)
  conditioning = _one_hot_to_embedding(one_hot_labels, embedding_size)
  return condition_tensor(tensor, conditioning)
apache-2.0
dustinrb/mezzanine
mezzanine/utils/urls.py
7
4081
from __future__ import unicode_literals from future.builtins import str import re import unicodedata from django.core.exceptions import ObjectDoesNotExist from django.core.urlresolvers import (resolve, reverse, NoReverseMatch, get_script_prefix) from django.shortcuts import redirect from django.utils.encoding import smart_text from django.utils.http import is_safe_url from django.utils import translation from mezzanine.conf import settings from mezzanine.utils.importing import import_dotted_path def admin_url(model, url, object_id=None): """ Returns the URL for the given model and admin url name. """ opts = model._meta url = "admin:%s_%s_%s" % (opts.app_label, opts.object_name.lower(), url) args = () if object_id is not None: args = (object_id,) return reverse(url, args=args) def home_slug(): """ Returns the slug arg defined for the ``home`` urlpattern, which is the definitive source of the ``url`` field defined for an editable homepage object. """ prefix = get_script_prefix() slug = reverse("home") if slug.startswith(prefix): slug = '/' + slug[len(prefix):] try: return resolve(slug).kwargs["slug"] except KeyError: return slug def slugify(s): """ Loads the callable defined by the ``SLUGIFY`` setting, which defaults to the ``slugify_unicode`` function. """ return import_dotted_path(settings.SLUGIFY)(s) def slugify_unicode(s): """ Replacement for Django's slugify which allows unicode chars in slugs, for URLs in Chinese, Russian, etc. Adopted from https://github.com/mozilla/unicode-slugify/ """ chars = [] for char in str(smart_text(s)): cat = unicodedata.category(char)[0] if cat in "LN" or char in "-_~": chars.append(char) elif cat == "Z": chars.append(" ") return re.sub("[-\s]+", "-", "".join(chars).strip()).lower() def unique_slug(queryset, slug_field, slug): """ Ensures a slug is unique for the given queryset, appending an integer to its end until the slug is unique. 
""" i = 0 while True: if i > 0: if i > 1: slug = slug.rsplit("-", 1)[0] slug = "%s-%s" % (slug, i) try: queryset.get(**{slug_field: slug}) except ObjectDoesNotExist: break i += 1 return slug def next_url(request): """ Returns URL to redirect to from the ``next`` param in the request. """ next = request.GET.get("next", request.POST.get("next", "")) host = request.get_host() return next if next and is_safe_url(next, host=host) else None def login_redirect(request): """ Returns the redirect response for login/signup. Favors: - next param - LOGIN_REDIRECT_URL setting - homepage """ ignorable_nexts = ("",) if "mezzanine.accounts" in settings.INSTALLED_APPS: from mezzanine.accounts import urls ignorable_nexts += (urls.SIGNUP_URL, urls.LOGIN_URL, urls.LOGOUT_URL) next = next_url(request) or "" if next in ignorable_nexts: next = settings.LOGIN_REDIRECT_URL if next == "/accounts/profile/": # Use the homepage if LOGIN_REDIRECT_URL is Django's defaut. next = get_script_prefix() else: try: next = reverse(next) except NoReverseMatch: pass return redirect(next) def path_to_slug(path): """ Removes everything from the given URL path, including language code and ``PAGES_SLUG`` if any is set, returning a slug that would match a ``Page`` instance's slug. """ from mezzanine.urls import PAGES_SLUG lang_code = translation.get_language_from_path(path) for prefix in (lang_code, settings.SITE_PREFIX, PAGES_SLUG): if prefix: path = path.replace(prefix, "", 1) path = path.strip("/") if settings.APPEND_SLASH else path.lstrip("/") return path or "/"
bsd-2-clause
pattisdr/lookit-api
accounts/migrations/0009_auto_20170710_1438.py
1
1326
# -*- coding: utf-8 -*- # Generated by Django 1.11.2 on 2017-07-10 14:38 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('accounts', '0008_merge_20170707_1801'), ] operations = [ migrations.AlterModelOptions( name='child', options={'ordering': ['-birthday']}, ), migrations.AlterModelOptions( name='demographicdata', options={'ordering': ['-created_at']}, ), migrations.AlterModelOptions( name='organization', options={'ordering': ['name'], 'permissions': (('can_view_organization', 'Can View Organization'), ('can_edit_organization', 'Can Edit Organization'), ('can_create_organization', 'Can Create Organization'), ('can_remove_organization', 'Can Remove Organization'))}, ), migrations.AlterModelOptions( name='user', options={'ordering': ['username'], 'permissions': (('can_create_users', 'Can Create User'), ('can_view_users', 'Can View User'), ('can_edit_users', 'Can Edit User'), ('can_remove_users', 'Can Remove User'), ('can_view_user_permissions', 'Can View User Permissions'), ('can_edit_user_permissions', 'Can Edit User Permissions'))}, ), ]
mit
Godiyos/python-for-android
python3-alpha/python3-src/Tools/demo/mcast.py
112
2223
#!/usr/bin/env python3

"""
Send/receive UDP multicast packets.
Requires that your OS kernel supports IP multicast.

Usage:
  mcast -s (sender, IPv4)
  mcast -s -6 (sender, IPv6)
  mcast (receivers, IPv4)
  mcast -6 (receivers, IPv6)
"""

MYPORT = 8123
MYGROUP_4 = '225.0.0.250'
MYGROUP_6 = 'ff15:7079:7468:6f6e:6465:6d6f:6d63:6173'
MYTTL = 1  # Increase to reach other networks

import time
import struct
import socket
import sys


def main():
    """Dispatch to sender or receiver based on command-line flags."""
    group = MYGROUP_6 if "-6" in sys.argv[1:] else MYGROUP_4

    if "-s" in sys.argv[1:]:
        sender(group)
    else:
        receiver(group)


def strip_trailing_nuls(data):
    """Return *data* (bytes) with all trailing NUL bytes removed.

    The sender terminates each datagram with b'\\0'; strip however many are
    present.  Bug fix: the original compared ``data[-1:] == '\\0'`` -- a
    bytes/str comparison which is always False in Python 3, so the
    terminator was never stripped.  The comparison must use the bytes
    literal b'\\0'.
    """
    while data[-1:] == b'\0':
        data = data[:-1]
    return data


def sender(group):
    """Send the current time to the multicast *group* once per second."""
    addrinfo = socket.getaddrinfo(group, None)[0]

    s = socket.socket(addrinfo[0], socket.SOCK_DGRAM)

    # Set Time-to-live (optional)
    ttl_bin = struct.pack('@i', MYTTL)
    if addrinfo[0] == socket.AF_INET:  # IPv4
        s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl_bin)
    else:
        s.setsockopt(socket.IPPROTO_IPV6,
                     socket.IPV6_MULTICAST_HOPS, ttl_bin)

    while True:
        # NUL-terminate the payload; the receiver strips it back off.
        data = repr(time.time()).encode('utf-8') + b'\0'
        s.sendto(data, (addrinfo[4][0], MYPORT))
        time.sleep(1)


def receiver(group):
    """Join the multicast *group* and print every datagram received."""
    # Look up multicast group address in name server and find out IP version
    addrinfo = socket.getaddrinfo(group, None)[0]

    # Create a socket
    s = socket.socket(addrinfo[0], socket.SOCK_DGRAM)

    # Allow multiple copies of this program on one machine
    # (not strictly needed)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    # Bind it to the port
    s.bind(('', MYPORT))

    group_bin = socket.inet_pton(addrinfo[0], addrinfo[4][0])
    # Join group
    if addrinfo[0] == socket.AF_INET:  # IPv4
        mreq = group_bin + struct.pack('=I', socket.INADDR_ANY)
        s.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
    else:
        mreq = group_bin + struct.pack('@I', 0)
        s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq)

    # Loop, printing any data we receive
    while True:
        data, sender = s.recvfrom(1500)
        data = strip_trailing_nuls(data)  # Strip trailing \0's
        print(str(sender) + ' ' + repr(data))


if __name__ == '__main__':
    main()
apache-2.0
abhishek-ch/kaggle_facebook_recruiting_human_or_bot
plot_data.py
2
2041
#!/usr/bin/python # -*- coding: utf-8 -*- """ Created on Tue Apr 28 23:15:29 2015 @author: ddboline """ import os import matplotlib matplotlib.use('Agg') import pylab as pl from pandas.tools.plotting import scatter_matrix def create_html_page_of_plots(list_of_plots, prefix='html'): """ create html page with png files """ if not os.path.exists(prefix): os.makedirs(prefix) os.system('mv *.png %s' % prefix) #print(list_of_plots) idx = 0 htmlfile = open('%s/index_0.html' % prefix, 'w') htmlfile.write('<!DOCTYPE html><html><body><div>\n') for plot in list_of_plots: if idx > 0 and idx % 200 == 0: htmlfile.write('</div></html></html>\n') htmlfile.close() htmlfile = open('%s/index_%d.html' % (prefix, (idx//200)), 'w') htmlfile.write('<!DOCTYPE html><html><body><div>\n') htmlfile.write('<p><img src="%s"></p>\n' % plot) idx += 1 htmlfile.write('</div></html></html>\n') htmlfile.close() def plot_data(indf, prefix='html'): """ create scatter matrix plot, histograms """ list_of_plots = [] column_groups = [] for idx in range(0, len(indf.columns), 3): print len(indf.columns), idx, (idx+3) column_groups.append(indf.columns[idx:(idx+3)]) for idx in range(len(column_groups)): for idy in range(0, idx): if idx == idy: continue print column_groups[idx]+column_groups[idy] pl.clf() scatter_matrix(indf[column_groups[idx]+column_groups[idy]]) pl.savefig('scatter_matrix_%d_%d.png' % (idx, idy)) list_of_plots.append('scatter_matrix_%d_%d.png' % (idx, idy)) pl.close() for col in indf: pl.clf() print col indf[col].hist(histtype='step', normed=True) pl.title(col) pl.savefig('%s_hist.png' % col) list_of_plots.append('%s_hist.png' % col) create_html_page_of_plots(list_of_plots, prefix) return
mit
tracierenea/gnuradio
gr-fec/python/fec/qa_ber_bf.py
33
4888
#!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

# QA for the fec.ber_bf block: feeds two byte streams that differ in a known
# number of bits and checks the reported log10 bit-error rate.

from gnuradio import gr, gr_unittest, blocks
import fec_swig as fec
import numpy
import copy


class test_ber_bf(gr_unittest.TestCase):

    def setUp(self):
        # Fresh flowgraph for each test case.
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_000(self):
        # Cause a single bit error out of 8*N bits
        # using streaming mode
        mode = False
        N = 10000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        # Flip exactly one bit in the first byte.
        data1[0] ^= 0x01

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(1., N)  # [numpy.log10(1.0 / (8.0 * N)), ]
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_001(self):
        # Cause a single bit error out of 8*N bits
        # using test mode
        mode = True
        N = 1000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        data1[0] ^= 0x01

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode, 1)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(1., N)
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_002(self):
        # Cause 8 bit errors out of 8*N bits
        # using test mode
        mode = True
        N = 1000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        # XOR with 0xFF flips all 8 bits of the first byte.
        data1[0] ^= 0xFF

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode, 1, -2.0)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(8., N)
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_003(self):
        # Cause a 8 bit errors out of 8*N bits
        # using test mode
        # Exit if BER < -2.0
        mode = True
        N = 1000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        data1[0] ^= 0xFF

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        # berminhistoric=-2.0: the block stops once BER drops below 1e-2,
        # so the sink should see exactly that threshold value.
        op = fec.ber_bf(mode, 10, -2.0)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = [-2.0, ]
        print data
        print expected_result
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def test_004(self):
        # Cause 16 consecutive bit errors out of 8*N bits
        # make sure bytes are only read once.
        # using streaming mode
        mode = False
        N = 10000
        data0 = numpy.random.randint(0, 256, N).tolist()
        data1 = copy.deepcopy(data0)
        # Flip all bits of the first two bytes: 16 bit errors total.
        data1[0] ^= 0xFF
        data1[1] ^= 0xFF

        src0 = blocks.vector_source_b(data0)
        src1 = blocks.vector_source_b(data1)
        op = fec.ber_bf(mode)
        dst = blocks.vector_sink_f()

        self.tb.connect(src0, (op, 0))
        self.tb.connect(src1, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()

        data = dst.data()
        expected_result = self.log_ber(16, N)
        self.assertFloatTuplesAlmostEqual(expected_result, data, 5)

    def log_ber(self, n_errors, N):
        # Expected log10 BER for n_errors flipped bits out of 8*N total.
        # The trailing comma makes this a 1-tuple, matching the sink output.
        return numpy.log10(1. * n_errors / (8.0 * N)),


if __name__ == '__main__':
    gr_unittest.run(test_ber_bf, "test_ber_bf.xml")
gpl-3.0
drpaneas/linuxed.gr
lib/python2.7/site-packages/pip/wheel.py
184
20618
""" Support for installing and building the "wheel" binary package format. """ from __future__ import with_statement import compileall import csv import functools import hashlib import os import re import shutil import sys from base64 import urlsafe_b64encode from email.parser import Parser from pip.backwardcompat import ConfigParser, StringIO from pip.exceptions import InvalidWheelFilename, UnsupportedWheel from pip.locations import distutils_scheme from pip.log import logger from pip import pep425tags from pip.util import call_subprocess, normalize_path, make_path_relative from pip._vendor import pkg_resources from pip._vendor.distlib.scripts import ScriptMaker from pip._vendor import pkg_resources wheel_ext = '.whl' VERSION_COMPATIBLE = (1, 0) def rehash(path, algo='sha256', blocksize=1<<20): """Return (hash, length) for path using hashlib.new(algo)""" h = hashlib.new(algo) length = 0 with open(path, 'rb') as f: block = f.read(blocksize) while block: length += len(block) h.update(block) block = f.read(blocksize) digest = 'sha256='+urlsafe_b64encode(h.digest()).decode('latin1').rstrip('=') return (digest, length) try: unicode def binary(s): if isinstance(s, unicode): return s.encode('ascii') return s except NameError: def binary(s): if isinstance(s, str): return s.encode('ascii') def open_for_csv(name, mode): if sys.version_info[0] < 3: nl = {} bin = 'b' else: nl = { 'newline': '' } bin = '' return open(name, mode + bin, **nl) def fix_script(path): """Replace #!python with #!/path/to/python Return True if file was changed.""" # XXX RECORD hashes will need to be updated if os.path.isfile(path): script = open(path, 'rb') try: firstline = script.readline() if not firstline.startswith(binary('#!python')): return False exename = sys.executable.encode(sys.getfilesystemencoding()) firstline = binary('#!') + exename + binary(os.linesep) rest = script.read() finally: script.close() script = open(path, 'wb') try: script.write(firstline) script.write(rest) finally: 
script.close() return True dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) \.dist-info$""", re.VERBOSE) def root_is_purelib(name, wheeldir): """ Return True if the extracted wheel in wheeldir should go into purelib. """ name_folded = name.replace("-", "_") for item in os.listdir(wheeldir): match = dist_info_re.match(item) if match and match.group('name') == name_folded: with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: for line in wheel: line = line.lower().rstrip() if line == "root-is-purelib: true": return True return False def get_entrypoints(filename): if not os.path.exists(filename): return {}, {} # This is done because you can pass a string to entry_points wrappers which # means that they may or may not be valid INI files. The attempt here is to # strip leading and trailing whitespace in order to make them valid INI # files. with open(filename) as fp: data = StringIO() for line in fp: data.write(line.strip()) data.write("\n") data.seek(0) cp = ConfigParser.RawConfigParser() cp.readfp(data) console = {} gui = {} if cp.has_section('console_scripts'): console = dict(cp.items('console_scripts')) if cp.has_section('gui_scripts'): gui = dict(cp.items('gui_scripts')) return console, gui def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, pycompile=True, scheme=None): """Install a wheel""" if not scheme: scheme = distutils_scheme(name, user=user, home=home, root=root) if root_is_purelib(name, wheeldir): lib_dir = scheme['purelib'] else: lib_dir = scheme['platlib'] info_dir = [] data_dirs = [] source = wheeldir.rstrip(os.path.sep) + os.path.sep # Record details of the files moved # installed = files copied from the wheel to the destination # changed = files changed while installing (scripts #! 
line typically) # generated = files newly generated during the install (script wrappers) installed = {} changed = set() generated = [] # Compile all of the pyc files that we're going to be installing if pycompile: compileall.compile_dir(source, force=True, quiet=True) def normpath(src, p): return make_path_relative(src, p).replace(os.path.sep, '/') def record_installed(srcfile, destfile, modified=False): """Map archive RECORD paths to installation RECORD paths.""" oldpath = normpath(srcfile, wheeldir) newpath = normpath(destfile, lib_dir) installed[oldpath] = newpath if modified: changed.add(destfile) def clobber(source, dest, is_base, fixer=None, filter=None): if not os.path.exists(dest): # common for the 'include' path os.makedirs(dest) for dir, subdirs, files in os.walk(source): basedir = dir[len(source):].lstrip(os.path.sep) destdir = os.path.join(dest, basedir) if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): continue for s in subdirs: destsubdir = os.path.join(dest, basedir, s) if is_base and basedir == '' and destsubdir.endswith('.data'): data_dirs.append(s) continue elif (is_base and s.endswith('.dist-info') # is self.req.project_name case preserving? and s.lower().startswith(req.project_name.replace('-', '_').lower())): assert not info_dir, 'Multiple .dist-info directories' info_dir.append(destsubdir) for f in files: # Skip unwanted files if filter and filter(f): continue srcfile = os.path.join(dir, f) destfile = os.path.join(dest, basedir, f) # directory creation is lazy and after the file filtering above # to ensure we don't install empty dirs; empty dirs can't be # uninstalled. if not os.path.exists(destdir): os.makedirs(destdir) # use copy2 (not move) to be extra sure we're not moving # directories over; copy2 fails for directories. 
this would # fail tests (not during released/user execution) shutil.copy2(srcfile, destfile) changed = False if fixer: changed = fixer(destfile) record_installed(srcfile, destfile, changed) clobber(source, lib_dir, True) assert info_dir, "%s .dist-info directory not found" % req # Get the defined entry points ep_file = os.path.join(info_dir[0], 'entry_points.txt') console, gui = get_entrypoints(ep_file) def is_entrypoint_wrapper(name): # EP, EP.exe and EP-script.py are scripts generated for # entry point EP by setuptools if name.lower().endswith('.exe'): matchname = name[:-4] elif name.lower().endswith('-script.py'): matchname = name[:-10] elif name.lower().endswith(".pya"): matchname = name[:-4] else: matchname = name # Ignore setuptools-generated scripts return (matchname in console or matchname in gui) for datadir in data_dirs: fixer = None filter = None for subdir in os.listdir(os.path.join(wheeldir, datadir)): fixer = None if subdir == 'scripts': fixer = fix_script filter = is_entrypoint_wrapper source = os.path.join(wheeldir, datadir, subdir) dest = scheme[subdir] clobber(source, dest, False, fixer=fixer, filter=filter) maker = ScriptMaker(None, scheme['scripts']) # Ensure we don't generate any variants for scripts because this is almost # never what somebody wants. # See https://bitbucket.org/pypa/distlib/issue/35/ maker.variants = set(('', )) # This is required because otherwise distlib creates scripts that are not # executable. # See https://bitbucket.org/pypa/distlib/issue/32/ maker.set_mode = True # Simplify the script and fix the fact that the default script swallows # every single stack trace. 
# See https://bitbucket.org/pypa/distlib/issue/34/ # See https://bitbucket.org/pypa/distlib/issue/33/ def _get_script_text(entry): return maker.script_template % { "module": entry.prefix, "import_name": entry.suffix.split(".")[0], "func": entry.suffix, } maker._get_script_text = _get_script_text maker.script_template = """# -*- coding: utf-8 -*- import re import sys from %(module)s import %(import_name)s if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) sys.exit(%(func)s()) """ # Special case pip and setuptools to generate versioned wrappers # # The issue is that some projects (specifically, pip and setuptools) use # code in setup.py to create "versioned" entry points - pip2.7 on Python # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into # the wheel metadata at build time, and so if the wheel is installed with # a *different* version of Python the entry points will be wrong. The # correct fix for this is to enhance the metadata to be able to describe # such versioned entry points, but that won't happen till Metadata 2.0 is # available. # In the meantime, projects using versioned entry points will either have # incorrect versioned entry points, or they will not be able to distribute # "universal" wheels (i.e., they will need a wheel per Python version). # # Because setuptools and pip are bundled with _ensurepip and virtualenv, # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we # override the versioned entry points in the wheel and generate the # correct ones. This code is purely a short-term measure until Metadat 2.0 # is available. # # To add the level of hack in this section of code, in order to support # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment # variable which will control which version scripts get installed. 
# # ENSUREPIP_OPTIONS=altinstall # - Only pipX.Y and easy_install-X.Y will be generated and installed # ENSUREPIP_OPTIONS=install # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note # that this option is technically if ENSUREPIP_OPTIONS is set and is # not altinstall # DEFAULT # - The default behavior is to install pip, pipX, pipX.Y, easy_install # and easy_install-X.Y. pip_script = console.pop('pip', None) if pip_script: if "ENSUREPIP_OPTIONS" not in os.environ: spec = 'pip = ' + pip_script generated.extend(maker.make(spec)) if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": spec = 'pip%s = %s' % (sys.version[:1], pip_script) generated.extend(maker.make(spec)) spec = 'pip%s = %s' % (sys.version[:3], pip_script) generated.extend(maker.make(spec)) # Delete any other versioned pip entry points pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] for k in pip_ep: del console[k] easy_install_script = console.pop('easy_install', None) if easy_install_script: if "ENSUREPIP_OPTIONS" not in os.environ: spec = 'easy_install = ' + easy_install_script generated.extend(maker.make(spec)) spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) generated.extend(maker.make(spec)) # Delete any other versioned easy_install entry points easy_install_ep = [k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)] for k in easy_install_ep: del console[k] # Generate the console and GUI entry points specified in the wheel if len(console) > 0: generated.extend(maker.make_multiple(['%s = %s' % kv for kv in console.items()])) if len(gui) > 0: generated.extend(maker.make_multiple(['%s = %s' % kv for kv in gui.items()], {'gui': True})) record = os.path.join(info_dir[0], 'RECORD') temp_record = os.path.join(info_dir[0], 'RECORD.pip') with open_for_csv(record, 'r') as record_in: with open_for_csv(temp_record, 'w+') as record_out: reader = csv.reader(record_in) writer = csv.writer(record_out) for row in reader: row[0] = 
installed.pop(row[0], row[0]) if row[0] in changed: row[1], row[2] = rehash(row[0]) writer.writerow(row) for f in generated: h, l = rehash(f) writer.writerow((f, h, l)) for f in installed: writer.writerow((installed[f], '', '')) shutil.move(temp_record, record) def _unique(fn): @functools.wraps(fn) def unique(*args, **kw): seen = set() for item in fn(*args, **kw): if item not in seen: seen.add(item) yield item return unique # TODO: this goes somewhere besides the wheel module @_unique def uninstallation_paths(dist): """ Yield all the uninstallation paths for dist based on RECORD-without-.pyc Yield paths to all the files in RECORD. For each .py file in RECORD, add the .pyc in the same directory. UninstallPathSet.add() takes care of the __pycache__ .pyc. """ from pip.req import FakeFile # circular import r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) for row in r: path = os.path.join(dist.location, row[0]) yield path if path.endswith('.py'): dn, fn = os.path.split(path) base = fn[:-3] path = os.path.join(dn, base+'.pyc') yield path def wheel_version(source_dir): """ Return the Wheel-Version of an extracted wheel, if possible. Otherwise, return False if we couldn't parse / extract it. """ try: dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0] wheel_data = dist.get_metadata('WHEEL') wheel_data = Parser().parsestr(wheel_data) version = wheel_data['Wheel-Version'].strip() version = tuple(map(int, version.split('.'))) return version except: return False def check_compatibility(version, name): """ Raises errors or warns if called with an incompatible Wheel-Version. Pip should refuse to install a Wheel-Version that's a major series ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when installing a version only minor version ahead (e.g 1.2 > 1.1). 
version: a 2-tuple representing a Wheel-Version (Major, Minor) name: name of wheel or package to raise exception about :raises UnsupportedWheel: when an incompatible Wheel-Version is given """ if not version: raise UnsupportedWheel( "%s is in an unsupported or invalid wheel" % name ) if version[0] > VERSION_COMPATIBLE[0]: raise UnsupportedWheel( "%s's Wheel-Version (%s) is not compatible with this version " "of pip" % (name, '.'.join(map(str, version))) ) elif version > VERSION_COMPATIBLE: logger.warn('Installing from a newer Wheel-Version (%s)' % '.'.join(map(str, version))) class Wheel(object): """A wheel file""" # TODO: maybe move the install code into this class wheel_file_re = re.compile( r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?)) ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?) \.whl|\.dist-info)$""", re.VERBOSE) def __init__(self, filename): """ :raises InvalidWheelFilename: when the filename is invalid for a wheel """ wheel_info = self.wheel_file_re.match(filename) if not wheel_info: raise InvalidWheelFilename("%s is not a valid wheel filename." % filename) self.filename = filename self.name = wheel_info.group('name').replace('_', '-') # we'll assume "_" means "-" due to wheel naming scheme # (https://github.com/pypa/pip/issues/1150) self.version = wheel_info.group('ver').replace('_', '-') self.pyversions = wheel_info.group('pyver').split('.') self.abis = wheel_info.group('abi').split('.') self.plats = wheel_info.group('plat').split('.') # All the tag combinations from this file self.file_tags = set((x, y, z) for x in self.pyversions for y in self.abis for z in self.plats) def support_index_min(self, tags=None): """ Return the lowest index that one of the wheel's file_tag combinations achieves in the supported_tags list e.g. if there are 8 supported tags, and one of the file tags is first in the list, then return 0. Returns None is the wheel is not supported. 
""" if tags is None: # for mock tags = pep425tags.supported_tags indexes = [tags.index(c) for c in self.file_tags if c in tags] return min(indexes) if indexes else None def supported(self, tags=None): """Is this wheel supported on this system?""" if tags is None: # for mock tags = pep425tags.supported_tags return bool(set(tags).intersection(self.file_tags)) class WheelBuilder(object): """Build wheels from a RequirementSet.""" def __init__(self, requirement_set, finder, wheel_dir, build_options=[], global_options=[]): self.requirement_set = requirement_set self.finder = finder self.wheel_dir = normalize_path(wheel_dir) self.build_options = build_options self.global_options = global_options def _build_one(self, req): """Build one wheel.""" base_args = [ sys.executable, '-c', "import setuptools;__file__=%r;"\ "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % req.setup_py] + \ list(self.global_options) logger.notify('Running setup.py bdist_wheel for %s' % req.name) logger.notify('Destination directory: %s' % self.wheel_dir) wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] + self.build_options try: call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False) return True except: logger.error('Failed building wheel for %s' % req.name) return False def build(self): """Build wheels.""" #unpack and constructs req set self.requirement_set.prepare_files(self.finder) reqset = self.requirement_set.requirements.values() buildset = [req for req in reqset if not req.is_wheel] if not buildset: return #build the wheels logger.notify( 'Building wheels for collected packages: %s' % ','.join([req.name for req in buildset]) ) logger.indent += 2 build_success, build_failure = [], [] for req in buildset: if self._build_one(req): build_success.append(req) else: build_failure.append(req) logger.indent -= 2 #notify sucess/failure if build_success: logger.notify('Successfully built %s' % ' '.join([req.name for req in build_success])) if 
build_failure: logger.notify('Failed to build %s' % ' '.join([req.name for req in build_failure]))
mit
sserrot/champion_relationships
venv/Lib/site-packages/prompt_toolkit/key_binding/bindings/page_navigation.py
1
2309
""" Key bindings for extra page navigation: bindings for up/down scrolling through long pages, like in Emacs or Vi. """ from prompt_toolkit.filters import buffer_has_focus, emacs_mode, vi_mode from prompt_toolkit.key_binding.key_bindings import ( ConditionalKeyBindings, KeyBindings, KeyBindingsBase, merge_key_bindings, ) from .scroll import ( scroll_backward, scroll_forward, scroll_half_page_down, scroll_half_page_up, scroll_one_line_down, scroll_one_line_up, scroll_page_down, scroll_page_up, ) __all__ = [ "load_page_navigation_bindings", "load_emacs_page_navigation_bindings", "load_vi_page_navigation_bindings", ] def load_page_navigation_bindings() -> KeyBindingsBase: """ Load both the Vi and Emacs bindings for page navigation. """ # Only enable when a `Buffer` is focused, otherwise, we would catch keys # when another widget is focused (like for instance `c-d` in a # ptterm.Terminal). return ConditionalKeyBindings( merge_key_bindings( [load_emacs_page_navigation_bindings(), load_vi_page_navigation_bindings(),] ), buffer_has_focus, ) def load_emacs_page_navigation_bindings() -> KeyBindingsBase: """ Key bindings, for scrolling up and down through pages. This are separate bindings, because GNU readline doesn't have them. """ key_bindings = KeyBindings() handle = key_bindings.add handle("c-v")(scroll_page_down) handle("pagedown")(scroll_page_down) handle("escape", "v")(scroll_page_up) handle("pageup")(scroll_page_up) return ConditionalKeyBindings(key_bindings, emacs_mode) def load_vi_page_navigation_bindings() -> KeyBindingsBase: """ Key bindings, for scrolling up and down through pages. This are separate bindings, because GNU readline doesn't have them. 
""" key_bindings = KeyBindings() handle = key_bindings.add handle("c-f")(scroll_forward) handle("c-b")(scroll_backward) handle("c-d")(scroll_half_page_down) handle("c-u")(scroll_half_page_up) handle("c-e")(scroll_one_line_down) handle("c-y")(scroll_one_line_up) handle("pagedown")(scroll_page_down) handle("pageup")(scroll_page_up) return ConditionalKeyBindings(key_bindings, vi_mode)
mit
dkillick/iris
lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py
1
5886
# (C) British Crown Copyright 2018, Met Office # # This file is part of Iris. # # Iris is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Iris is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Iris. If not, see <http://www.gnu.org/licenses/>. """ Unit tests for the function :func:`iris.analysis.cartography.rotate_grid_vectors`. """ from __future__ import (absolute_import, division, print_function) from six.moves import (filter, input, map, range, zip) # noqa # Import iris.tests first so that some things can be initialised before # importing anything else. import iris.tests as tests from mock import Mock, call as mock_call import numpy as np from iris.cube import Cube from iris.tests.stock import sample_2d_latlons from iris.analysis.cartography import rotate_grid_vectors class TestRotateGridVectors(tests.IrisTest): def _check_angles_calculation(self, angles_in_degrees=True, nan_angles_mask=None): # Check basic maths on a 2d latlon grid. u_cube = sample_2d_latlons(regional=True, transformed=True) u_cube.units = 'ms-1' u_cube.rename('dx') u_cube.data[...] = 0 v_cube = u_cube.copy() v_cube.name('dy') # Define 6 different vectors, repeated in each data row. in_vu = np.array([(0, 1), (2, -1), (-1, -1), (-3, 1), (2, 0), (0, 0)]) in_angs = np.rad2deg(np.arctan2(in_vu[..., 0], in_vu[..., 1])) in_mags = np.sqrt(np.sum(in_vu * in_vu, axis=1)) v_cube.data[...] = in_vu[..., 0] u_cube.data[...] = in_vu[..., 1] # Define 5 different test rotation angles, one for each data row. 
rotation_angles = np.array([0., -45., 135, -140., 90.]) ang_cube_data = np.broadcast_to(rotation_angles[:, None], u_cube.shape) ang_cube = u_cube.copy() if angles_in_degrees: ang_cube.units = 'degrees' else: ang_cube.units = 'radians' ang_cube_data = np.deg2rad(ang_cube_data) ang_cube.data[:] = ang_cube_data if nan_angles_mask is not None: ang_cube.data[nan_angles_mask] = np.nan # Rotate all vectors by all the given angles. result = rotate_grid_vectors(u_cube, v_cube, ang_cube) out_u, out_v = [cube.data for cube in result] # Check that vector magnitudes were unchanged. out_mags = np.sqrt(out_u * out_u + out_v * out_v) expect_mags = in_mags[None, :] self.assertArrayAllClose(out_mags, expect_mags) # Check that vector angles are all as expected. out_angs = np.rad2deg(np.arctan2(out_v, out_u)) expect_angs = in_angs[None, :] + rotation_angles[:, None] ang_diffs = out_angs - expect_angs # Fix for null vectors, and +/-360 differences. ang_diffs[np.abs(out_mags) < 0.001] = 0.0 ang_diffs = ang_diffs % 360.0 # Check that any differences are very small. self.assertArrayAllClose(ang_diffs, 0.0) # Check that results are always masked arrays, masked at NaN angles. self.assertTrue(np.ma.isMaskedArray(out_u)) self.assertTrue(np.ma.isMaskedArray(out_v)) if nan_angles_mask is not None: self.assertArrayEqual(out_u.mask, nan_angles_mask) self.assertArrayEqual(out_v.mask, nan_angles_mask) def test_angles_calculation(self): self._check_angles_calculation() def test_angles_in_radians(self): self._check_angles_calculation(angles_in_degrees=False) def test_angles_from_grid(self): # Check it will gets angles from 'u_cube', and pass any kwargs on to # the angles routine. u_cube = sample_2d_latlons(regional=True, transformed=True) u_cube = u_cube[:2, :3] u_cube.units = 'ms-1' u_cube.rename('dx') u_cube.data[...] = 1.0 v_cube = u_cube.copy() v_cube.name('dy') v_cube.data[...] = 0.0 # Setup a fake angles result from the inner call to 'gridcell_angles'. 
angles_result_data = np.array([[0.0, 90.0, 180.0], [-180.0, -90.0, 270.0]]) angles_result_cube = Cube(angles_result_data, units='degrees') angles_kwargs = {'this': 2} angles_call_patch = self.patch( 'iris.analysis._grid_angles.gridcell_angles', Mock(return_value=angles_result_cube)) # Call the routine. result = rotate_grid_vectors(u_cube, v_cube, grid_angles_kwargs=angles_kwargs) self.assertEqual(angles_call_patch.call_args_list, [mock_call(u_cube, this=2)]) out_u, out_v = [cube.data for cube in result] # Records what results should be for the various n*90deg rotations. expect_u = np.array([[1.0, 0.0, -1.0], [-1.0, 0.0, 0.0]]) expect_v = np.array([[0.0, 1.0, 0.0], [0.0, -1.0, -1.0]]) # Check results are as expected. self.assertArrayAllClose(out_u, expect_u) self.assertArrayAllClose(out_v, expect_v) def test_nan_vectors(self): bad_angle_points = np.zeros((5, 6), dtype=bool) bad_angle_points[2, 3] = True self._check_angles_calculation(nan_angles_mask=bad_angle_points) if __name__ == "__main__": tests.main()
lgpl-3.0
AdamHull/namebench
nb_third_party/dns/rdtypes/ANY/LOC.py
248
12571
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import cStringIO import struct import dns.exception import dns.rdata _pows = (1L, 10L, 100L, 1000L, 10000L, 100000L, 1000000L, 10000000L, 100000000L, 1000000000L, 10000000000L) def _exponent_of(what, desc): exp = None for i in xrange(len(_pows)): if what // _pows[i] == 0L: exp = i - 1 break if exp is None or exp < 0: raise dns.exception.SyntaxError("%s value out of bounds" % desc) return exp def _float_to_tuple(what): if what < 0: sign = -1 what *= -1 else: sign = 1 what = long(round(what * 3600000)) degrees = int(what // 3600000) what -= degrees * 3600000 minutes = int(what // 60000) what -= minutes * 60000 seconds = int(what // 1000) what -= int(seconds * 1000) what = int(what) return (degrees * sign, minutes, seconds, what) def _tuple_to_float(what): if what[0] < 0: sign = -1 value = float(what[0]) * -1 else: sign = 1 value = float(what[0]) value += float(what[1]) / 60.0 value += float(what[2]) / 3600.0 value += float(what[3]) / 3600000.0 return sign * value def _encode_size(what, desc): what = long(what); exponent = _exponent_of(what, desc) & 0xF base = what // pow(10, exponent) & 0xF return base * 16 + exponent def _decode_size(what, desc): exponent = what & 0x0F if exponent > 9: raise 
dns.exception.SyntaxError("bad %s exponent" % desc) base = (what & 0xF0) >> 4 if base > 9: raise dns.exception.SyntaxError("bad %s base" % desc) return long(base) * pow(10, exponent) class LOC(dns.rdata.Rdata): """LOC record @ivar latitude: latitude @type latitude: (int, int, int, int) tuple specifying the degrees, minutes, seconds, and milliseconds of the coordinate. @ivar longitude: longitude @type longitude: (int, int, int, int) tuple specifying the degrees, minutes, seconds, and milliseconds of the coordinate. @ivar altitude: altitude @type altitude: float @ivar size: size of the sphere @type size: float @ivar horizontal_precision: horizontal precision @type horizontal_precision: float @ivar vertical_precision: vertical precision @type vertical_precision: float @see: RFC 1876""" __slots__ = ['latitude', 'longitude', 'altitude', 'size', 'horizontal_precision', 'vertical_precision'] def __init__(self, rdclass, rdtype, latitude, longitude, altitude, size=1.0, hprec=10000.0, vprec=10.0): """Initialize a LOC record instance. The parameters I{latitude} and I{longitude} may be either a 4-tuple of integers specifying (degrees, minutes, seconds, milliseconds), or they may be floating point values specifying the number of degrees. 
The other parameters are floats.""" super(LOC, self).__init__(rdclass, rdtype) if isinstance(latitude, int) or isinstance(latitude, long): latitude = float(latitude) if isinstance(latitude, float): latitude = _float_to_tuple(latitude) self.latitude = latitude if isinstance(longitude, int) or isinstance(longitude, long): longitude = float(longitude) if isinstance(longitude, float): longitude = _float_to_tuple(longitude) self.longitude = longitude self.altitude = float(altitude) self.size = float(size) self.horizontal_precision = float(hprec) self.vertical_precision = float(vprec) def to_text(self, origin=None, relativize=True, **kw): if self.latitude[0] > 0: lat_hemisphere = 'N' lat_degrees = self.latitude[0] else: lat_hemisphere = 'S' lat_degrees = -1 * self.latitude[0] if self.longitude[0] > 0: long_hemisphere = 'E' long_degrees = self.longitude[0] else: long_hemisphere = 'W' long_degrees = -1 * self.longitude[0] text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( lat_degrees, self.latitude[1], self.latitude[2], self.latitude[3], lat_hemisphere, long_degrees, self.longitude[1], self.longitude[2], self.longitude[3], long_hemisphere, self.altitude / 100.0 ) if self.size != 1.0 or self.horizontal_precision != 10000.0 or \ self.vertical_precision != 10.0: text += " %0.2fm %0.2fm %0.2fm" % ( self.size / 100.0, self.horizontal_precision / 100.0, self.vertical_precision / 100.0 ) return text def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True): latitude = [0, 0, 0, 0] longitude = [0, 0, 0, 0] size = 1.0 hprec = 10000.0 vprec = 10.0 latitude[0] = tok.get_int() t = tok.get_string() if t.isdigit(): latitude[1] = int(t) t = tok.get_string() if '.' 
in t: (seconds, milliseconds) = t.split('.') if not seconds.isdigit(): raise dns.exception.SyntaxError('bad latitude seconds value') latitude[2] = int(seconds) if latitude[2] >= 60: raise dns.exception.SyntaxError('latitude seconds >= 60') l = len(milliseconds) if l == 0 or l > 3 or not milliseconds.isdigit(): raise dns.exception.SyntaxError('bad latitude milliseconds value') if l == 1: m = 100 elif l == 2: m = 10 else: m = 1 latitude[3] = m * int(milliseconds) t = tok.get_string() elif t.isdigit(): latitude[2] = int(t) t = tok.get_string() if t == 'S': latitude[0] *= -1 elif t != 'N': raise dns.exception.SyntaxError('bad latitude hemisphere value') longitude[0] = tok.get_int() t = tok.get_string() if t.isdigit(): longitude[1] = int(t) t = tok.get_string() if '.' in t: (seconds, milliseconds) = t.split('.') if not seconds.isdigit(): raise dns.exception.SyntaxError('bad longitude seconds value') longitude[2] = int(seconds) if longitude[2] >= 60: raise dns.exception.SyntaxError('longitude seconds >= 60') l = len(milliseconds) if l == 0 or l > 3 or not milliseconds.isdigit(): raise dns.exception.SyntaxError('bad longitude milliseconds value') if l == 1: m = 100 elif l == 2: m = 10 else: m = 1 longitude[3] = m * int(milliseconds) t = tok.get_string() elif t.isdigit(): longitude[2] = int(t) t = tok.get_string() if t == 'W': longitude[0] *= -1 elif t != 'E': raise dns.exception.SyntaxError('bad longitude hemisphere value') t = tok.get_string() if t[-1] == 'm': t = t[0 : -1] altitude = float(t) * 100.0 # m -> cm token = tok.get().unescape() if not token.is_eol_or_eof(): value = token.value if value[-1] == 'm': value = value[0 : -1] size = float(value) * 100.0 # m -> cm token = tok.get().unescape() if not token.is_eol_or_eof(): value = token.value if value[-1] == 'm': value = value[0 : -1] hprec = float(value) * 100.0 # m -> cm token = tok.get().unescape() if not token.is_eol_or_eof(): value = token.value if value[-1] == 'm': value = value[0 : -1] vprec = float(value) * 
100.0 # m -> cm tok.get_eol() return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) from_text = classmethod(from_text) def to_wire(self, file, compress = None, origin = None): if self.latitude[0] < 0: sign = -1 degrees = long(-1 * self.latitude[0]) else: sign = 1 degrees = long(self.latitude[0]) milliseconds = (degrees * 3600000 + self.latitude[1] * 60000 + self.latitude[2] * 1000 + self.latitude[3]) * sign latitude = 0x80000000L + milliseconds if self.longitude[0] < 0: sign = -1 degrees = long(-1 * self.longitude[0]) else: sign = 1 degrees = long(self.longitude[0]) milliseconds = (degrees * 3600000 + self.longitude[1] * 60000 + self.longitude[2] * 1000 + self.longitude[3]) * sign longitude = 0x80000000L + milliseconds altitude = long(self.altitude) + 10000000L size = _encode_size(self.size, "size") hprec = _encode_size(self.horizontal_precision, "horizontal precision") vprec = _encode_size(self.vertical_precision, "vertical precision") wire = struct.pack("!BBBBIII", 0, size, hprec, vprec, latitude, longitude, altitude) file.write(wire) def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None): (version, size, hprec, vprec, latitude, longitude, altitude) = \ struct.unpack("!BBBBIII", wire[current : current + rdlen]) if latitude > 0x80000000L: latitude = float(latitude - 0x80000000L) / 3600000 else: latitude = -1 * float(0x80000000L - latitude) / 3600000 if latitude < -90.0 or latitude > 90.0: raise dns.exception.FormError("bad latitude") if longitude > 0x80000000L: longitude = float(longitude - 0x80000000L) / 3600000 else: longitude = -1 * float(0x80000000L - longitude) / 3600000 if longitude < -180.0 or longitude > 180.0: raise dns.exception.FormError("bad longitude") altitude = float(altitude) - 10000000.0 size = _decode_size(size, "size") hprec = _decode_size(hprec, "horizontal precision") vprec = _decode_size(vprec, "vertical precision") return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) 
from_wire = classmethod(from_wire) def _cmp(self, other): f = cStringIO.StringIO() self.to_wire(f) wire1 = f.getvalue() f.seek(0) f.truncate() other.to_wire(f) wire2 = f.getvalue() f.close() return cmp(wire1, wire2) def _get_float_latitude(self): return _tuple_to_float(self.latitude) def _set_float_latitude(self, value): self.latitude = _float_to_tuple(value) float_latitude = property(_get_float_latitude, _set_float_latitude, doc="latitude as a floating point value") def _get_float_longitude(self): return _tuple_to_float(self.longitude) def _set_float_longitude(self, value): self.longitude = _float_to_tuple(value) float_longitude = property(_get_float_longitude, _set_float_longitude, doc="longitude as a floating point value")
apache-2.0
lthurlow/openflow-tcpsnoop
setup.py
1
1114
#!/usr/bin/python #sudo mn --custom setup.py --topo mytopo --test pingall #host --- switch --- host from mininet.topo import Topo from mininet.net import Mininet from mininet.node import CPULimitedHost from mininet.link import TCLink from mininet.util import dumpNodeConnections from mininet.log import setLogLevel ## host1 ----- switch -----/ 10% loss /----- host2 class SimpleTopology( Topo ): "Simple topology example." def build(self,inloss=10): # Add hosts and switches lHost = self.addHost( 'h1' ) rHost = self.addHost( 'h2' ) switch = self.addSwitch( 's1' ) # Add links self.addLink( lHost, switch ) self.addLink( rHost, switch, loss=inloss) def simpleTest(): "Create and test a simple network" topo = SimpleTopology(inloss=10) net = Mininet(topo=topo, host=CPULimitedHost, link=TCLink) net.start() print "Dumping host connections" dumpNodeConnections(net.hosts) print "Testing network connectivity" net.pingAll() net.stop() if __name__ == '__main__': # Tell mininet to print useful information setLogLevel('info') simpleTest()
mit
Southpaw-TACTIC/Team
src/python/Lib/site-packages/PySide/examples/dialogs/classwizard/classwizard_rc.py
1
253010
# -*- coding: utf-8 -*- # Resource object code # # Created: Fri Jul 30 17:18:57 2010 # by: The Resource Compiler for PySide (Qt v4.6.2) # # WARNING! All changes made in this file will be lost! from PySide import QtCore qt_resource_data = "\ \x00\x00\x06\x53\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x40\x00\x00\x00\x40\x08\x03\x00\x00\x00\x9d\xb7\x81\xec\ \x00\x00\x02\xeb\x50\x4c\x54\x45\x00\x00\x00\xff\x00\x00\xff\xff\ \xff\xff\xff\xff\xbf\x00\x00\xff\xff\xff\x99\x00\x00\xff\xff\xff\ \x9f\x00\x00\xaa\x00\x00\xb2\x00\x00\xff\xff\xff\xb9\x00\x00\xff\ \xff\xff\xaa\x00\x00\xff\xff\xff\xb0\x00\x00\xb6\x12\x12\xff\xff\ \xff\xaa\x00\x00\xae\x00\x00\xff\xff\xff\xff\xff\xff\xaa\x00\x00\ \xff\xff\xff\xad\x00\x00\xb3\x00\x00\xff\xff\xff\xad\x00\x00\xff\ \xff\xff\xaf\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\ \xff\xac\x00\x00\xb0\x00\x00\xc4\x47\x47\xff\xff\xff\xff\xff\xff\ \xad\x00\x00\xaf\x00\x00\xb1\x00\x00\xff\xff\xff\xff\xff\xff\xae\ \x00\x00\xff\xff\xff\xae\x00\x00\xff\xff\xff\xae\x00\x00\xf2\xd5\ \xd5\xff\xff\xff\xff\xff\xff\xbf\x38\x38\xad\x00\x00\xff\xff\xff\ \xff\xff\xff\xff\xff\xff\xaf\x00\x00\xff\xff\xff\xff\xff\xff\xaf\ \x00\x00\xb0\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xae\x00\ \x00\xaf\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\ \xae\x00\x00\xaf\x00\x00\xff\xff\xff\xae\x00\x00\xd1\x70\x70\xae\ \x00\x00\xae\x02\x02\xaf\x00\x00\xff\xff\xff\xb0\x00\x00\xff\xff\ \xff\xda\x8c\x8c\xae\x00\x00\xff\xff\xff\xaf\x00\x00\xff\xff\xff\ \xaf\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xae\x00\x00\xff\ \xff\xff\xd3\x75\x75\xaf\x00\x00\xc9\x51\x51\xae\x00\x00\xf4\xdc\ \xdc\xff\xff\xff\xaf\x00\x00\xae\x00\x00\xff\xff\xff\xae\x00\x00\ \xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe6\xb2\xb2\xff\ \xff\xff\xae\x00\x00\xff\xff\xff\xaf\x00\x00\xaf\x00\x00\xae\x00\ \x00\xaf\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd2\x71\x71\ \xaf\x00\x00\xff\xff\xff\xba\x27\x27\xae\x00\x00\xaf\x00\x00\xfa\ 
\xf4\xf4\xd9\x87\x87\xff\xff\xff\xff\xff\xff\xba\x24\x24\xff\xff\ \xff\xb8\x1f\x1f\xff\xff\xff\xf3\xd9\xd9\xff\xff\xff\xb7\x1a\x1a\ \xae\x00\x00\xae\x00\x00\xaf\x00\x00\xff\xff\xff\xff\xff\xff\xae\ \x00\x00\xaf\x00\x00\xcc\x5c\x5c\xff\xff\xff\xb7\x1b\x1b\xb2\x0a\ \x0a\xaf\x03\x03\xae\x00\x00\xff\xff\xff\xff\xff\xff\xaf\x02\x02\ \xff\xff\xff\xb0\x02\x02\xff\xff\xff\xff\xff\xff\xcd\x63\x63\xaf\ \x00\x00\xaf\x01\x01\xff\xff\xff\xaf\x00\x00\xb1\x08\x08\xae\x00\ \x00\xff\xff\xff\xd1\x6d\x6d\xaf\x00\x00\xb4\x10\x10\xe6\xae\xae\ \xae\x00\x00\xaf\x00\x00\xff\xff\xff\xff\xff\xff\xea\xbd\xbd\xfb\ \xf4\xf4\xae\x00\x00\xaf\x00\x00\xba\x22\x22\xeb\xc1\xc1\xff\xff\ \xff\xcb\x5a\x5a\xda\x8b\x8b\xff\xff\xff\xaf\x00\x00\xff\xff\xff\ \xba\x22\x22\xaf\x01\x01\xbf\x32\x32\xc6\x48\x48\xe8\xb7\xb7\xf8\ \xea\xea\xfa\xf0\xf0\xfb\xf2\xf2\xff\xfe\xfe\xb0\x02\x02\xc7\x4c\ \x4c\xb7\x1a\x1a\xb0\x04\x04\xbb\x26\x26\xbb\x27\x27\xb1\x05\x05\ \xbf\x33\x33\xc0\x35\x35\xc2\x3b\x3b\xc2\x3e\x3e\xc4\x44\x44\xb1\ \x06\x06\xb7\x19\x19\xc8\x4f\x4f\xc9\x52\x52\xca\x57\x57\xcb\x58\ \x58\xcb\x59\x59\xcd\x61\x61\xce\x62\x62\xcf\x66\x66\xd0\x6a\x6a\ \xd3\x74\x74\xd4\x75\x75\xd6\x7b\x7b\xd7\x7e\x7e\xd7\x81\x81\xdc\ \x8f\x8f\xe1\x9e\x9e\xe1\x9f\x9f\xe2\xa2\xa2\xe4\xaa\xaa\xe5\xab\ \xab\xe6\xb0\xb0\xe7\xb1\xb1\xe7\xb4\xb4\xb2\x09\x09\xeb\xbe\xbe\ \xec\xc4\xc4\xf0\xd0\xd0\xf2\xd4\xd4\xf2\xd5\xd5\xf4\xdb\xdb\xf5\ \xde\xde\xf5\xe0\xe0\xf7\xe4\xe4\xb2\x0b\x0b\xf9\xec\xec\xb3\x0e\ \x0e\xb6\x15\x15\xfc\xf7\xf7\xfe\xfb\xfb\xfe\xfc\xfc\xb6\x16\x16\ \xb6\x17\x17\xdc\x97\x3c\x09\x00\x00\x00\xb6\x74\x52\x4e\x53\x00\ \x01\x01\x03\x04\x04\x05\x08\x08\x09\x0a\x0a\x0b\x0b\x0c\x0d\x0d\ \x0e\x0f\x0f\x13\x13\x14\x15\x15\x16\x1b\x1b\x1c\x1c\x1d\x1e\x1f\ \x21\x24\x25\x27\x27\x2a\x2b\x2c\x2d\x2e\x2f\x32\x36\x36\x39\x3b\ \x3c\x3d\x40\x41\x44\x45\x48\x4b\x4c\x4d\x4e\x4f\x50\x54\x54\x55\ \x5a\x5c\x5d\x5d\x60\x61\x63\x65\x67\x67\x68\x6b\x6c\x6c\x6d\x70\ \x71\x73\x78\x7c\x7e\x80\x81\x83\x84\x8a\x8b\x8c\x8c\x8d\x91\x93\ 
\x95\x95\x95\x96\x98\x99\x9c\x9d\x9e\xa4\xa6\xa7\xa7\xa8\xa8\xa9\ \xaa\xac\xad\xad\xb0\xb3\xb3\xb4\xb7\xbb\xbc\xbd\xbd\xc0\xc1\xc4\ \xc6\xca\xcb\xcc\xcd\xcd\xd0\xd2\xd4\xd7\xd8\xd9\xdb\xdc\xdc\xdd\ \xde\xe0\xe1\xe4\xe5\xe6\xe7\xe8\xe9\xe9\xea\xef\xf0\xf0\xf1\xf3\ \xf3\xf5\xf6\xf6\xf7\xf7\xf7\xf8\xfa\xfa\xfb\xfb\xfb\xfb\xfc\xfc\ \xfd\xfd\xfe\xfe\xfe\xa0\xb1\xff\x8a\x00\x00\x02\x61\x49\x44\x41\ \x54\x78\x5e\xdd\xd7\x55\x70\x13\x51\x14\xc7\xe1\xd3\x52\x28\xda\ \x42\xf1\xe2\x5e\xdc\x5b\x28\x10\xdc\xdd\xdd\xdd\x0a\x45\x8a\xb4\ \xb8\x7b\x70\x29\x5e\x24\x50\xa0\xe8\xd9\xa4\x2a\xb8\xbb\xbb\xbb\ \xeb\x23\x93\x3d\x77\xee\xcb\xe6\x66\x98\x93\x17\xa6\xbf\xd7\xff\ \xe6\x9b\x7d\xc8\x9c\x99\x85\x14\x52\xfa\x52\x39\x5d\xfa\xf9\x80\ \x28\xc4\x95\x41\x26\x36\x30\x10\xa9\x19\xd9\x78\x80\xc7\x4e\x14\ \xed\xaa\xca\x02\x72\xa3\xec\x60\x25\x96\xb0\x1e\x65\x1b\x33\x70\ \x80\xfa\x36\x09\xd8\x46\x00\xa7\x5e\x17\xbe\xa0\xe8\x68\x19\x96\ \x50\x7d\xca\xee\x68\x02\xae\xb6\x03\x5e\x9e\x7d\x08\xb0\x8e\x02\ \x66\x45\x09\x38\x61\xe6\x02\x79\x05\x10\xf9\x3f\x03\x6e\x2e\x01\ \x25\x47\x2f\x39\xb0\x2a\x34\x90\x0d\x34\x8f\xa2\x7d\x32\x13\xf0\ \xb3\xa0\x68\x2a\x0f\xe8\x84\x22\xbc\x5c\x97\x05\x8c\x95\x80\x75\ \x3c\x0b\xe8\x2d\x81\x73\x66\x16\x60\x92\xc0\xdd\xe9\x0a\xc0\xd7\ \x29\xe0\x36\x0b\x29\x6b\x7c\x37\x05\x90\x8e\x80\xa4\xfd\x8e\xe7\ \x2c\xcb\x2e\xda\xe7\x2b\x1f\xcd\x3e\xa0\x68\x33\x09\x87\x14\x37\ \xc9\xbb\xdf\xbe\x47\xb1\x9f\xb4\x71\x85\x40\xd5\x42\x02\x62\x5a\ \xa8\xfe\xb1\x39\x2a\x37\x0a\x28\x08\xea\xc2\x50\xb4\xa2\x95\x17\ \x70\xaa\x85\xb2\x6d\xc5\x58\xc2\x3c\x94\xed\xc8\xc7\x01\xca\xa2\ \x2c\xb9\x27\x07\xe8\x81\xb2\x9b\x21\x0c\xc0\x6f\x8f\x04\x6c\xaf\ \x87\x30\x80\x60\x14\xe1\x9f\x27\xc7\xaa\x30\x80\xf9\x04\x1c\xbf\ \xf7\x2e\x71\x5d\x03\x60\xb4\x89\x80\x17\xab\xbb\x96\x70\x07\x46\ \x59\x91\x8a\xab\xe1\xe2\x55\xd6\x72\x39\x9c\xfd\xbb\x88\x9a\x32\ \x8f\x6a\x28\x8a\x26\x34\x63\x01\x5e\x16\xa4\x4e\xfd\x6c\xcc\x02\ \x02\x51\xf4\x74\x51\x6a\x16\xd0\x17\xa9\xe8\xc4\x3a\xc0\x02\x96\ 
\x22\x15\x3b\xd7\x9d\x05\x14\x41\xea\xbc\x16\x00\x2c\xa0\x35\x52\ \x6f\xa6\x01\x0f\x98\x48\x63\xb2\x56\x81\x07\xa4\xdd\x4e\x17\xfb\ \x6d\x08\xf0\x00\x7f\xda\xae\x1f\x2e\x0d\xea\xca\x13\xf0\x2a\x52\ \x79\x6a\x4e\x7f\x18\x0e\x4e\xea\x40\xc0\xd9\x08\x30\xb6\x40\x9f\ \x6e\xed\x2d\xac\x04\x7c\xeb\x05\x6f\x25\xe0\xf6\x4c\xe3\x9a\x9f\ \xde\xed\xf3\x20\x50\x94\x39\x08\x65\x8f\xfb\x1b\xf7\x26\xfa\x72\ \x27\x22\x8f\x0a\x18\x8c\xb2\xef\x71\x0d\x8d\xfb\x18\xfb\xf2\xed\ \x6b\x77\x50\x94\xc6\x82\xb2\x67\xe1\xc6\x73\xe0\xa1\xdf\xaa\x07\ \x5b\xb2\xff\xc3\xf7\xc2\x35\xad\xb6\x71\xaf\xa8\xbf\x5a\x42\x47\ \x50\xb6\x16\x45\x37\x12\x46\x82\xb1\xb6\xf6\xe9\x61\xb8\xb7\x1a\ \x30\x25\xe9\xc0\xef\xe7\xda\x50\x47\x4f\xb5\x44\xc4\x93\x3f\xda\ \x80\x93\xda\x1f\x39\x13\x73\xff\x65\xfc\x86\x9a\x0e\xd7\x8c\xcb\ \xf1\xd2\xfb\xc5\x9e\xe0\xac\x72\xc3\x66\x4f\xea\x5c\xcd\x47\xb1\ \x66\x9a\xf3\x6b\x4d\x71\x70\xa9\x02\xa9\x20\x25\xf7\x17\x09\xba\ \x39\x39\xea\xb1\x61\x75\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\ \x60\x82\ \x00\x00\x06\x53\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x40\x00\x00\x00\x40\x08\x03\x00\x00\x00\x9d\xb7\x81\xec\ \x00\x00\x02\xeb\x50\x4c\x54\x45\x00\x00\x00\xff\x00\x00\xff\xff\ \xff\xff\xff\xff\xbf\x00\x00\xff\xff\xff\xcc\x00\x00\xff\xff\xff\ \xdf\x00\x00\xe2\x00\x00\xe5\x00\x00\xff\xff\xff\xe7\x00\x00\xff\ \xff\xff\xd4\x00\x00\xff\xff\xff\xd7\x00\x00\xda\x12\x12\xff\xff\ \xff\xdd\x00\x00\xe4\x00\x00\xff\xff\xff\xff\xff\xff\xda\x00\x00\ \xff\xff\xff\xdc\x00\x00\xe2\x00\x00\xff\xff\xff\xda\x00\x00\xff\ \xff\xff\xdb\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\ \xff\xdc\x00\x00\xde\x00\x00\xe4\x47\x47\xff\xff\xff\xff\xff\xff\ \xdc\x00\x00\xdd\x00\x00\xdd\x00\x00\xff\xff\xff\xff\xff\xff\xdd\ \x00\x00\xff\xff\xff\xdf\x00\x00\xff\xff\xff\xdd\x00\x00\xfa\xd5\ \xd5\xff\xff\xff\xff\xff\xff\xe4\x38\x38\xdd\x00\x00\xff\xff\xff\ \xff\xff\xff\xff\xff\xff\xdd\x00\x00\xff\xff\xff\xff\xff\xff\xdf\ 
\x00\x00\xdd\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xdd\x00\ \x00\xde\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\ \xde\x00\x00\xde\x00\x00\xff\xff\xff\xdf\x00\x00\xeb\x70\x70\xdd\ \x00\x00\xe0\x02\x02\xde\x00\x00\xff\xff\xff\xdf\x00\x00\xff\xff\ \xff\xf0\x8c\x8c\xde\x00\x00\xff\xff\xff\xdf\x00\x00\xff\xff\xff\ \xdf\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xde\x00\x00\xff\ \xff\xff\xec\x75\x75\xdf\x00\x00\xe8\x51\x51\xde\x00\x00\xf9\xdc\ \xdc\xff\xff\xff\xde\x00\x00\xdf\x00\x00\xff\xff\xff\xde\x00\x00\ \xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf5\xb2\xb2\xff\ \xff\xff\xdf\x00\x00\xff\xff\xff\xdf\x00\x00\xdf\x00\x00\xde\x00\ \x00\xde\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xed\x71\x71\ \xde\x00\x00\xff\xff\xff\xe3\x27\x27\xde\x00\x00\xde\x00\x00\xfd\ \xf4\xf4\xf0\x87\x87\xff\xff\xff\xff\xff\xff\xe3\x24\x24\xff\xff\ \xff\xe3\x1f\x1f\xff\xff\xff\xfa\xd9\xd9\xff\xff\xff\xe2\x1a\x1a\ \xdf\x00\x00\xde\x00\x00\xde\x00\x00\xff\xff\xff\xff\xff\xff\xdf\ \x00\x00\xde\x00\x00\xea\x5c\x5c\xff\xff\xff\xe2\x1b\x1b\xe0\x0a\ \x0a\xdf\x03\x03\xde\x00\x00\xff\xff\xff\xff\xff\xff\xde\x02\x02\ \xff\xff\xff\xdf\x02\x02\xff\xff\xff\xff\xff\xff\xeb\x63\x63\xdf\ \x00\x00\xdf\x01\x01\xff\xff\xff\xdf\x00\x00\xe0\x08\x08\xde\x00\ \x00\xff\xff\xff\xec\x6d\x6d\xde\x00\x00\xe1\x10\x10\xf4\xae\xae\ \xdf\x00\x00\xdf\x00\x00\xff\xff\xff\xff\xff\xff\xf6\xbd\xbd\xfd\ \xf4\xf4\xdf\x00\x00\xde\x00\x00\xe3\x22\x22\xf6\xc1\xc1\xff\xff\ \xff\xe9\x5a\x5a\xf0\x8b\x8b\xff\xff\xff\xdf\x00\x00\xff\xff\xff\ \xe3\x22\x22\xdf\x01\x01\xe5\x32\x32\xe8\x48\x48\xf6\xb7\xb7\xfc\ \xea\xea\xfd\xf0\xf0\xfd\xf2\xf2\xff\xfe\xfe\xdf\x02\x02\xe9\x4c\ \x4c\xe2\x1a\x1a\xe0\x04\x04\xe4\x26\x26\xe4\x27\x27\xe0\x05\x05\ \xe5\x33\x33\xe6\x35\x35\xe6\x3b\x3b\xe7\x3e\x3e\xe8\x44\x44\xe0\ \x06\x06\xe2\x19\x19\xe9\x4f\x4f\xe9\x52\x52\xea\x57\x57\xea\x58\ \x58\xea\x59\x59\xeb\x61\x61\xeb\x62\x62\xec\x66\x66\xec\x6a\x6a\ \xee\x74\x74\xee\x75\x75\xee\x7b\x7b\xef\x7e\x7e\xef\x81\x81\xf1\ 
\x8f\x8f\xf3\x9e\x9e\xf3\x9f\x9f\xf3\xa2\xa2\xf4\xaa\xaa\xf4\xab\ \xab\xf5\xb0\xb0\xf5\xb1\xb1\xf6\xb4\xb4\xe0\x09\x09\xf7\xbe\xbe\ \xf8\xc4\xc4\xf9\xd0\xd0\xfa\xd4\xd4\xfa\xd5\xd5\xfa\xdb\xdb\xfb\ \xde\xde\xfb\xe0\xe0\xfc\xe4\xe4\xe0\x0b\x0b\xfd\xec\xec\xe1\x0e\ \x0e\xe2\x15\x15\xfe\xf7\xf7\xfe\xfb\xfb\xff\xfc\xfc\xe2\x16\x16\ \xe2\x17\x17\x66\xee\x72\x60\x00\x00\x00\xb6\x74\x52\x4e\x53\x00\ \x01\x01\x03\x04\x04\x05\x08\x08\x09\x0a\x0a\x0b\x0b\x0c\x0d\x0d\ \x0e\x0f\x0f\x13\x13\x14\x15\x15\x16\x1b\x1b\x1c\x1c\x1d\x1e\x1f\ \x21\x24\x25\x27\x27\x2a\x2b\x2c\x2d\x2e\x2f\x32\x36\x36\x39\x3b\ \x3c\x3d\x40\x41\x44\x45\x48\x4b\x4c\x4d\x4e\x4f\x50\x54\x54\x55\ \x5a\x5c\x5d\x5d\x60\x61\x63\x65\x67\x67\x68\x6b\x6c\x6c\x6d\x70\ \x71\x73\x78\x7c\x7e\x80\x81\x83\x84\x8a\x8b\x8c\x8c\x8d\x91\x93\ \x95\x95\x95\x96\x98\x99\x9c\x9d\x9e\xa4\xa6\xa7\xa7\xa8\xa8\xa9\ \xaa\xac\xad\xad\xb0\xb3\xb3\xb4\xb7\xbb\xbc\xbd\xbd\xc0\xc1\xc4\ \xc6\xca\xcb\xcc\xcd\xcd\xd0\xd2\xd4\xd7\xd8\xd9\xdb\xdc\xdc\xdd\ \xde\xe0\xe1\xe4\xe5\xe6\xe7\xe8\xe9\xe9\xea\xef\xf0\xf0\xf1\xf3\ \xf3\xf5\xf6\xf6\xf7\xf7\xf7\xf8\xfa\xfa\xfb\xfb\xfb\xfb\xfc\xfc\ \xfd\xfd\xfe\xfe\xfe\xa0\xb1\xff\x8a\x00\x00\x02\x61\x49\x44\x41\ \x54\x78\x5e\xdd\xd7\x55\x70\x13\x51\x14\xc7\xe1\xd3\x52\x28\xda\ \x42\xf1\xe2\x5e\xdc\x5b\x28\x10\xdc\xdd\xdd\xdd\x0a\x45\x8a\xb4\ \xb8\x7b\x70\x29\x5e\x24\x50\xa0\xe8\xd9\xa4\x2a\xb8\xbb\xbb\xbb\ \xeb\x23\x93\x3d\x77\xee\xcb\xe6\x66\x98\x93\x17\xa6\xbf\xd7\xff\ \xe6\x9b\x7d\xc8\x9c\x99\x85\x14\x52\xfa\x52\x39\x5d\xfa\xf9\x80\ \x28\xc4\x95\x41\x26\x36\x30\x10\xa9\x19\xd9\x78\x80\xc7\x4e\x14\ \xed\xaa\xca\x02\x72\xa3\xec\x60\x25\x96\xb0\x1e\x65\x1b\x33\x70\ \x80\xfa\x36\x09\xd8\x46\x00\xa7\x5e\x17\xbe\xa0\xe8\x68\x19\x96\ \x50\x7d\xca\xee\x68\x02\xae\xb6\x03\x5e\x9e\x7d\x08\xb0\x8e\x02\ \x66\x45\x09\x38\x61\xe6\x02\x79\x05\x10\xf9\x3f\x03\x6e\x2e\x01\ \x25\x47\x2f\x39\xb0\x2a\x34\x90\x0d\x34\x8f\xa2\x7d\x32\x13\xf0\ \xb3\xa0\x68\x2a\x0f\xe8\x84\x22\xbc\x5c\x97\x05\x8c\x95\x80\x75\ 
\x3c\x0b\xe8\x2d\x81\x73\x66\x16\x60\x92\xc0\xdd\xe9\x0a\xc0\xd7\ \x29\xe0\x36\x0b\x29\x6b\x7c\x37\x05\x90\x8e\x80\xa4\xfd\x8e\xe7\ \x2c\xcb\x2e\xda\xe7\x2b\x1f\xcd\x3e\xa0\x68\x33\x09\x87\x14\x37\ \xc9\xbb\xdf\xbe\x47\xb1\x9f\xb4\x71\x85\x40\xd5\x42\x02\x62\x5a\ \xa8\xfe\xb1\x39\x2a\x37\x0a\x28\x08\xea\xc2\x50\xb4\xa2\x95\x17\ \x70\xaa\x85\xb2\x6d\xc5\x58\xc2\x3c\x94\xed\xc8\xc7\x01\xca\xa2\ \x2c\xb9\x27\x07\xe8\x81\xb2\x9b\x21\x0c\xc0\x6f\x8f\x04\x6c\xaf\ \x87\x30\x80\x60\x14\xe1\x9f\x27\xc7\xaa\x30\x80\xf9\x04\x1c\xbf\ \xf7\x2e\x71\x5d\x03\x60\xb4\x89\x80\x17\xab\xbb\x96\x70\x07\x46\ \x59\x91\x8a\xab\xe1\xe2\x55\xd6\x72\x39\x9c\xfd\xbb\x88\x9a\x32\ \x8f\x6a\x28\x8a\x26\x34\x63\x01\x5e\x16\xa4\x4e\xfd\x6c\xcc\x02\ \x02\x51\xf4\x74\x51\x6a\x16\xd0\x17\xa9\xe8\xc4\x3a\xc0\x02\x96\ \x22\x15\x3b\xd7\x9d\x05\x14\x41\xea\xbc\x16\x00\x2c\xa0\x35\x52\ \x6f\xa6\x01\x0f\x98\x48\x63\xb2\x56\x81\x07\xa4\xdd\x4e\x17\xfb\ \x6d\x08\xf0\x00\x7f\xda\xae\x1f\x2e\x0d\xea\xca\x13\xf0\x2a\x52\ \x79\x6a\x4e\x7f\x18\x0e\x4e\xea\x40\xc0\xd9\x08\x30\xb6\x40\x9f\ \x6e\xed\x2d\xac\x04\x7c\xeb\x05\x6f\x25\xe0\xf6\x4c\xe3\x9a\x9f\ \xde\xed\xf3\x20\x50\x94\x39\x08\x65\x8f\xfb\x1b\xf7\x26\xfa\x72\ \x27\x22\x8f\x0a\x18\x8c\xb2\xef\x71\x0d\x8d\xfb\x18\xfb\xf2\xed\ \x6b\x77\x50\x94\xc6\x82\xb2\x67\xe1\xc6\x73\xe0\xa1\xdf\xaa\x07\ \x5b\xb2\xff\xc3\xf7\xc2\x35\xad\xb6\x71\xaf\xa8\xbf\x5a\x42\x47\ \x50\xb6\x16\x45\x37\x12\x46\x82\xb1\xb6\xf6\xe9\x61\xb8\xb7\x1a\ \x30\x25\xe9\xc0\xef\xe7\xda\x50\x47\x4f\xb5\x44\xc4\x93\x3f\xda\ \x80\x93\xda\x1f\x39\x13\x73\xff\x65\xfc\x86\x9a\x0e\xd7\x8c\xcb\ \xf1\xd2\xfb\xc5\x9e\xe0\xac\x72\xc3\x66\x4f\xea\x5c\xcd\x47\xb1\ \x66\x9a\xf3\x6b\x4d\x71\x70\xa9\x02\xa9\x20\x25\xf7\x17\x09\xba\ \x39\x39\xea\xb1\x61\x75\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\ \x60\x82\ \x00\x00\x3a\x40\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\xa8\x00\x00\x01\x77\x08\x03\x00\x00\x00\x06\x8a\xf0\xc8\ 
\x00\x00\x02\xd9\x50\x4c\x54\x45\xad\xac\xff\xc4\x90\xc4\xe2\x5a\ \x63\xe6\xc1\xd5\xe9\x9c\xa7\xb8\xb6\xfe\xc8\xc6\xfe\xcb\xcb\xfe\ \xbb\xbb\xff\xc0\xbe\xfe\xc3\xc3\xfe\xd2\xd2\xff\xd8\xd7\xff\xdc\ \xdb\xfe\xb3\xb3\xfe\xe2\xe2\xfe\xb0\xae\xfe\xd0\xce\xfe\xeb\xeb\ \xfe\xf3\xf3\xfe\xfc\xfb\xfe\xdd\x06\x08\xda\x22\x2c\xdc\x0d\x12\ \xb7\x8f\xd1\xe0\xde\xfe\xea\x5a\x5a\xdd\x09\x0b\xdd\x19\x1e\xfc\ \xec\xec\xde\x00\x00\xf0\xef\xfd\xe1\x13\x13\xbf\xa1\xde\xbb\xb2\ \xf4\xe3\x23\x23\xe5\x32\x32\xe6\x3a\x3a\xe6\x41\x42\xcc\xc3\xf4\ \xe9\x53\x53\xdc\x14\x1a\xbd\x81\xbb\xec\x6c\x6c\xed\x72\x72\xee\ \x7b\x7b\xf2\x9b\x9b\xf2\xa2\xa2\xd2\x81\xa2\xf4\xac\xac\xf6\xb9\ \xb9\xfa\xdb\xdb\xfa\xf5\xfa\xfb\xe2\xe2\xdd\x10\x15\xbd\xab\xeb\ \xfd\xf2\xf3\xe4\x2c\x2c\xeb\x63\x63\xf0\x8b\x8b\xf5\xcd\xd1\xd6\ \x2b\x3c\xd8\x2d\x3b\xd8\x46\x58\xca\x56\x7b\xd9\x25\x31\xd9\x34\ \x43\xd9\x42\x52\xd9\x6d\x83\xda\x1b\x25\xca\x75\x9e\xda\x31\x3d\ \xda\x52\x64\xdb\x1a\x22\xdb\x85\x9d\xb2\xa9\xf5\xdc\xd3\xf5\xcc\ \x69\x8d\xcc\x84\xad\xcc\x8b\xb4\xbb\xa3\xe3\xcd\x4a\x69\xdd\x50\ \x5e\xb7\xb0\xf7\xdf\x30\x37\xcd\x5a\x7b\xcd\xa1\xcd\xe2\x1a\x1a\ \xce\x71\x94\xce\xc0\xef\xe4\xaa\xbd\xcf\x39\x54\xcf\x40\x5b\xcf\ \xba\xe7\xd0\x5f\x7e\xe8\x45\x45\xe8\x4c\x4c\xd0\xad\xd8\xe9\x6f\ \x74\xbe\x8a\xc3\xd1\x56\x71\xea\xe6\xfb\xd2\x7a\x9a\xc4\xab\xe3\ \xd2\x89\xab\xec\x76\x78\xd2\xcb\xf7\xc5\x83\xb4\xee\x8f\x91\xef\ \x81\x81\xf0\x84\x84\xf8\xcb\xcb\xd3\x44\x5c\xf1\x93\x93\xd3\x53\ \x6d\xd4\x35\x49\xd4\x4b\x62\xd4\x69\x84\xf4\xe2\xe9\xf5\xb3\xb3\ \xe0\x0b\x0b\xd4\xc2\xeb\xf6\xd8\xdc\xf7\xc3\xc3\xd5\x62\x7b\xf9\ \xd4\xd4\xc8\xbd\xf3\xd5\xa5\xc8\xd6\x27\x37\xbe\x9a\xd5\xd7\x49\ \x5d\xd4\x5a\x73\xca\x86\xb2\xcb\x74\x9b\xf8\xc5\xc5\xd6\x39\x4c\ \xce\x80\xa6\xcb\x61\x85\xcd\x9a\xc4\xdf\xa8\xc0\xd3\x9a\xbe\xcb\ \x7a\xa2\xe4\x79\x84\xd4\x4e\x67\xd5\x40\x54\xe6\x73\x7c\xc4\xba\ \xf4\xe6\xc6\xda\xe7\x6a\x70\xe7\xb8\xc9\xc5\x66\x92\xe8\x5e\x60\ \xe8\x84\x8c\xc5\x73\xa1\xcd\x92\xbb\xb6\x9b\xe0\xcd\xb0\xde\xeb\ 
\xad\xb7\xeb\xe2\xf4\xc5\x8a\xbb\xd7\xb5\xd8\xcd\xac\xda\xc5\xa0\ \xd5\xed\xa3\xaa\xcd\xba\xea\xce\x55\x76\xc6\x5c\x86\xc8\x92\xc1\ \xd9\x59\x6d\xf0\xac\xb1\xc8\xb3\xe8\xd9\x87\xa1\xf1\x9e\xa1\xc0\ \xb6\xf4\xb9\x87\xc6\xf2\xbf\xc5\xda\x29\x35\xc2\x9b\xd3\xca\x5c\ \x81\xf4\xe9\xf1\xd0\x6d\x8d\xdb\x4b\x5a\xd0\xa6\xd0\xca\x64\x8c\ \xdc\x3a\x46\xc2\xa2\xda\xde\x98\xaf\xd1\x8d\xb1\xd1\xc5\xf1\xb9\ \xa6\xe9\xd2\xbb\xe5\xd2\x9d\xc2\xcc\xa5\xd2\xde\xb1\xcc\xd3\x74\ \x92\xe1\x3c\x42\xe3\x48\x4e\xcb\x9d\xcb\xe4\x64\x6d\xe2\x4e\x56\ \xc4\x7b\xac\xcb\xb2\xe2\xd4\x31\x44\xbf\x94\xce\xbc\x9c\xdb\xca\ \xa8\xd8\xd5\x93\xb4\xcd\x45\x63\xc5\x6c\x98\xb9\x8e\xce\xd0\x6f\ \x90\xc4\x7f\xb0\xda\x3d\x4c\xb7\xab\xf2\xda\x73\x89\xf4\xc6\xcb\ \xca\x6c\x93\xd8\x78\x92\xd4\x70\x8d\xc3\xb2\xec\xec\x92\x97\xec\ \xd3\xe3\xf6\xde\xe3\xc5\x88\xba\xd1\x86\xa9\xe1\x82\x92\xf8\xd3\ \xd5\xe1\x9b\xaf\xe1\x9c\xb0\xd3\x2e\x42\xd0\xaa\xd3\xdb\x93\xac\ \xdb\xc7\xe9\xe2\xdb\xf7\xc7\x79\xa7\xdb\x61\x74\xe1\xbd\xd5\xd1\ \x46\x61\xe4\x54\x5a\xc9\x7e\xa9\xd2\x3a\x52\xe8\xcf\xe2\xc2\x95\ \xcc\xbd\x3d\xa6\xd0\x00\x00\x37\x22\x49\x44\x41\x54\x78\x5e\x94\ \x5d\xe3\xa3\x6d\xbb\xae\x5f\x5f\x06\xa7\xb5\x8c\x6d\xdb\x38\xb6\ \x6d\xdb\xb6\x8d\x4b\xdb\xb6\x6d\x1b\xcf\xb6\xed\xf7\x17\xbc\x36\ \x0d\x9a\xb6\xf3\xec\xfb\x32\x30\xc7\x39\x9f\x7e\x3b\x6a\x92\xa6\ \x59\x13\x8d\x46\xb3\x21\xd4\x33\x4f\x51\x98\xc7\x5e\xe6\x06\xc2\ \x9f\x1c\x1e\xf3\xae\xeb\xc2\x5c\xf6\xd7\x7e\xd5\x79\x5e\x0b\x55\ \x95\x79\x95\xf6\x2a\xcd\xaf\xa2\xb6\xbd\xe1\x81\xbb\xdd\xc6\x17\ \xd1\x04\xde\xe6\x32\xb7\x7d\x14\x35\x9a\x4d\x0b\xb5\xc9\x48\x0d\ \x4c\xc1\x09\x1f\x44\xb9\x45\x57\xd4\x16\x2a\x80\x74\x6f\x86\x59\ \xd9\x57\x89\x77\x09\x28\xe1\x3b\xa0\xb6\x80\x84\x87\xde\x02\xb6\ \x6b\x91\x9a\x4b\x93\x05\x69\x51\x36\x89\xa3\x8c\x14\xa0\xf6\x0a\ \x26\x00\x68\x2e\x8b\x37\x37\x2f\xe0\x26\x22\x65\xac\x15\x20\x23\ \xa4\xfc\x12\x94\x02\x55\xa1\x14\xa8\x70\x75\x3d\x84\xd3\xee\x87\ \x50\x32\x4e\x07\xd5\x62\xec\x99\x87\x05\x2f\x84\x38\x9d\x0a\xe4\ 
\x9a\xa1\x55\x55\x02\x5f\xe1\x42\xcc\x4c\x22\x7a\x7a\x29\x98\x5d\ \x80\x08\xfc\xc4\x6f\xf3\xd5\xf5\x44\x6f\x2f\x0f\x29\x0a\xbc\x41\ \x52\xef\x11\x53\x73\x50\x51\x44\x59\xd7\x80\x95\x18\x5a\xe5\x16\ \x68\xe5\xae\x4a\x31\x34\xa2\x76\x3b\x33\x44\xf2\xcf\x80\xab\x88\ \xb4\x0b\x00\x01\x62\x2c\xfa\xd0\x9a\x0a\x78\x03\x4a\x78\x31\x59\ \x84\xcc\x49\x03\xba\xce\x45\x4b\xf3\xaa\x26\x9c\xc0\x55\xd4\x4e\ \xf3\x25\x5c\xcd\x32\x10\x37\x7c\x65\x1e\x3f\xb3\x0c\x65\x0e\x60\ \x01\x24\x50\x37\x30\x26\x43\xc8\x50\x11\x7e\xd1\x13\xa3\x6f\x30\ \x47\xf9\x05\x1c\x35\x6f\xa0\x9c\xde\x15\xca\xbf\x34\x1c\xa5\x4b\ \x11\x8b\x3d\x03\x8c\x09\x1d\xed\x76\x01\x30\x1a\x13\x21\x26\x1d\ \x4d\xb9\x27\x23\x6f\xf8\x70\xc2\x17\x9e\x8a\x73\xaa\xdd\x6f\x0e\ \x97\x11\x3d\x19\x93\x98\x3d\x72\x52\xc3\xcd\x58\x53\xdb\xb1\x92\ \x12\x47\xe1\x01\x07\xd5\x15\x83\x6a\x92\xe4\x9b\xca\x3d\x59\x96\ \xba\x47\x48\xbc\x28\xfe\xb2\xdc\xf3\xdc\x09\x1d\x59\x4a\x30\xe1\ \xb7\xaa\xc4\x90\x18\xa1\xe1\x2a\x50\x3b\xd3\x48\xbb\xe4\x9d\x62\ \x1d\x05\x27\xca\x68\xc5\xe8\x41\x3f\x01\xab\x46\x8a\x9c\x25\xbe\ \x16\x06\x65\x85\x1c\x05\xfd\xb4\xf0\x1c\xd8\x5a\x98\x29\x18\xf9\ \xdb\x82\x6c\xdb\x9b\xd9\xda\x15\xd3\x07\x90\xf4\x9a\x4e\x19\x53\ \x4f\x7c\xbd\x05\x0b\x62\x17\x1d\x65\x63\xd2\xc2\xd7\xeb\x52\x69\ \x11\x03\x4c\xb8\x2b\x73\x31\x3a\x7e\x80\xa3\x68\xfd\x9a\xa5\xa2\ \x9f\x11\x47\x09\x6a\xec\x49\xc1\x90\x64\x01\x45\xf7\xe4\x50\xb2\ \xe8\x11\x68\x2e\xa2\x07\x69\x03\xcc\x94\x8a\xb2\x5b\x02\x9e\x66\ \x4a\x43\xb5\x2d\x85\x40\xc5\xdd\xdb\x0b\x17\xa4\x1e\x72\xd4\x17\ \x7d\x8d\x70\x59\xec\xc0\x50\x54\xd3\xca\x88\x9f\x78\xa9\x04\xcf\ \xfe\x29\x63\x96\x66\x4c\x6d\x65\xf9\x5d\xb8\x49\x41\xbb\x91\x1f\ \x25\xac\x42\x80\x17\x48\x61\xcd\x79\xbd\x17\x99\xc3\x0f\x9a\x53\ \x55\x3b\xab\x27\xb0\xf0\x1d\x53\x1b\x31\x02\x47\x05\x25\x3a\x7c\ \x44\x1a\xc5\x25\xca\x92\x88\xa1\x12\x96\x68\xca\x91\x9f\xf6\x8d\ \xbe\x34\x17\xc1\xa3\xf4\x79\x0d\x0d\x9c\x13\xf9\xd0\x8c\x89\x25\ \x2f\xb6\x84\x3a\xda\x0e\x84\x3f\x8d\xc6\xa4\xa3\x12\x40\x09\x70\ 
\xc9\xea\x1b\x8e\x9b\x68\xf4\xa2\xa6\x20\x75\x74\xf6\x39\x30\x14\ \xf8\xa9\x38\xaa\xbc\x53\x66\x6e\x7b\x39\xfd\x7c\xfb\x51\x67\x31\ \x47\xc5\x3d\xbd\x94\x31\x01\x89\x9e\x5a\xa2\x30\x4f\xcc\x3e\x87\ \xa7\x66\xd1\x13\x54\xa2\xca\x3e\x3e\x59\x8c\x62\xf2\x80\x90\x19\ \xba\xe2\x97\x4f\x2f\xff\xbd\xc5\xc5\x1f\x35\x30\xdc\x63\x96\x2a\ \x66\xe2\xcf\x34\xeb\xa8\x8a\x9f\x7a\x40\x18\xe6\x11\x4c\xc4\xea\ \x0b\x1f\x03\x67\x85\x15\x97\x7c\x21\x34\x26\xc2\xea\xd8\x79\xeb\ \x65\x7b\x46\x8b\x8e\x0e\x2b\xfd\x10\x0f\x71\xb2\x23\x8d\xe3\x51\ \xa1\x1e\x42\xb5\x30\x5d\xb4\xe7\xa1\xe4\x98\x94\x3c\xbe\xc0\x44\ \xef\x54\x05\x30\x2b\x01\x59\x66\xee\x3e\x7f\xfd\xa2\xd0\xcd\x4d\ \xe0\xa7\x04\x25\x14\xe2\x27\xdc\x53\xe0\xf2\x99\xa3\x8c\x50\xa0\ \xe2\x2d\x4b\x28\xad\xf6\xc4\x50\x67\xfb\x08\x12\xde\x9a\xb2\x72\ \xf9\x53\x8b\x3e\xad\x19\xa8\xa8\x59\x8c\x3e\x62\x29\x58\x12\x43\ \xed\x21\x4f\x0b\x5c\xf3\x7b\x70\x19\xac\x80\x10\x43\xd1\x42\x2c\ \x49\x67\x23\xd6\x97\x32\x47\x61\x51\x52\x3a\x6a\x9f\xa7\xa7\x14\ \xce\xd1\xfe\x61\x2a\xc2\x4f\xaf\xf5\x9a\xa1\x05\xf2\x14\xf5\x13\ \x6d\x89\x29\x47\x5b\xa2\x74\x84\xd9\x59\x29\xc9\x2b\x8e\x3e\x7d\ \x96\x48\xff\x70\x87\x6f\x76\xcb\x8e\xc3\xb6\x1e\x30\x5f\x57\x19\ \xa0\x19\x61\x9c\x50\x09\x1e\x39\x7c\xb4\xa7\x26\xc5\xf7\x82\x15\ \x90\xe2\x12\x8a\x30\x45\x49\xc5\xe6\x49\xf2\xb5\xc7\x4f\x73\xb9\ \x20\x9f\xc3\xa6\xaa\x7c\xfb\xe2\xe8\x1b\x35\x1a\xfd\xcf\x00\xe6\ \xe6\x85\x41\xab\x32\xea\x7a\xe6\x9a\x4d\xc3\x3e\x02\x25\xb0\x88\ \x36\x4e\xef\xd8\xe6\x65\x0d\x35\x18\x1d\x47\x25\x28\x01\x80\x51\ \xb2\x0c\xaf\x5c\x59\x3d\x46\xf8\x10\x3b\xdf\xf8\xa5\xbb\x3e\x37\ \x35\x7f\xf4\xd4\x68\xb4\xaa\x8f\x2c\x3d\xdf\x02\xdd\x3c\x6c\x64\ \x48\x8d\x06\xc1\x9c\xc0\x47\x74\x34\xe0\xaa\x4b\x96\x15\xd4\x1e\ \x7b\xfc\x84\x31\xb9\x24\xc4\x3c\xe8\x9e\x04\xa5\x76\x4c\xe5\x19\ \x2f\x82\x0b\x72\x7e\xe8\x85\xdc\x39\xa7\x0f\x5b\xa0\xab\xff\xee\ \xd6\xe7\xbf\x7a\xd4\xd9\x97\xbd\x78\xef\xd9\x5f\xfd\x76\xdb\x13\ \xbd\xb9\xd3\xf1\x68\xda\xe1\xcb\x32\x8a\x3c\xed\x31\x37\x73\x90\ 
\xbc\x35\xaa\x1c\xd7\x26\xed\xa0\x6a\x5a\xa0\xaa\x4f\xbf\xa8\x8c\ \xe6\x8e\x19\xc7\xd1\x3f\x5e\x0c\x68\xee\xb8\x73\x55\x56\xdf\xd6\ \xf1\x68\x9b\x34\x94\x96\xd0\xa6\x06\x0a\x2f\x61\xaa\xcf\x51\x5c\ \xe1\x41\x07\x48\x49\x73\xc9\xec\xd0\x9a\x96\x9f\x0c\x30\x98\xa5\ \x9b\xfa\xce\xea\x57\xec\x09\x91\x8e\x56\xe7\xed\x8c\xc5\xae\x63\ \xbc\x50\x47\xa3\x15\x14\x74\x14\x73\x7a\x6d\x4e\x94\xdb\x81\xc3\ \x87\x5b\x17\x4b\x48\xf8\x47\x3b\x89\xaf\x9c\xdd\xbc\xf9\x2a\xeb\ \xdf\x37\xf6\xd1\xea\x6f\x9d\x5b\x0c\x69\x7b\xed\xfb\xa6\xb1\xf1\ \x28\x46\x24\x02\x14\xdd\x53\xc0\xce\xf7\x3f\x7f\xd7\x15\x3d\xa8\ \x92\x60\x18\x9a\xe7\x90\xd9\x6b\xb9\x13\xce\x8b\x00\xe7\xba\x1d\ \xc3\x7e\xb3\x91\x3f\xbc\x7a\xf1\xaa\x7d\x04\xb4\xbc\xf1\xa4\xfd\ \x96\xc7\x7b\x56\xae\x5a\xb3\x6e\x16\x9c\xea\x85\x7d\x01\xda\x65\ \x55\x65\xd2\x05\x08\xe5\x9b\xd8\xea\x71\x05\x7d\xff\xb6\x23\xd7\ \x1b\x0e\xad\x1a\x48\xa5\x04\x90\xe6\x58\xd5\xb9\xe0\xb9\x8b\x9e\ \xbd\xe8\x97\x4f\x38\x8e\xe6\x16\xed\x89\x16\xc0\xed\xc3\x99\x1c\ \xfc\x53\x79\x4b\xbf\xe3\xd6\x79\x78\x17\xa7\xfd\xcf\x17\x07\xfd\ \x4e\x67\xa6\xd9\x7c\x78\xde\x0a\x7f\x58\x8b\x86\x8a\x82\xb6\xbb\ \xda\xa0\x08\x24\xfe\xaa\x95\x09\x60\x6d\x9b\x74\xba\x34\x9a\x5b\ \x28\x0a\xd4\x50\x20\xe7\x98\x7e\x7c\x0f\xaa\xdd\xdc\x29\xe8\x42\ \x0d\x81\x5b\xdf\xda\x5a\x71\xe3\xe7\x3f\x7a\xeb\x09\xa7\x3f\xff\ \xde\xe5\xfe\xe2\xe4\xd1\xb7\x2d\x4b\x97\x01\x50\xb5\x2c\x49\x68\ \x72\x48\xf7\xc4\x1c\xfd\x26\x29\xfd\x68\xf4\xee\x16\x31\x94\xa8\ \x3e\xc3\x5b\xb9\x47\xb3\x05\x09\xff\x5e\x31\xa3\x91\xfd\x19\x9d\ \x73\x23\x85\x79\x4f\x1f\x7e\xcc\x72\x0a\x9b\xff\xf5\x78\xcb\x82\ \x2b\x87\xa5\x30\x94\x0b\x3a\x49\xd1\x7b\x2e\x1f\x82\x7b\x0a\x4a\ \xec\xf3\xa7\x82\x64\x63\x3f\xcf\x61\xbd\x27\x98\xef\x38\x5a\x9b\ \xc5\x8e\x1c\x79\x7a\x46\x64\x31\x73\xff\x04\x1c\x05\x1e\x5e\x78\ \xca\x59\x59\x76\xee\xaf\x8e\x5a\xef\x24\xb1\x73\xe0\x67\x77\x6c\ \x47\x71\x98\x17\x97\x9e\x70\x09\x05\xac\x17\x5b\x30\x93\x6f\xdd\ \xbf\xe8\x94\x54\x60\xe6\xf5\x0d\x23\xc6\xb1\x6c\x19\x78\xf2\x3e\ 
\xad\xf6\x27\x46\x48\x2f\x7c\x0f\xf0\x74\xb9\xc3\x3d\x3f\xc5\xce\ \xeb\xe0\xb0\x6a\x67\x5e\x72\xa7\xd7\xcf\xb6\x02\xca\x72\x77\x84\ \x25\x1d\x27\xfc\xde\xe5\x7f\xbf\x63\xf7\xb0\xff\x1a\x0b\x64\xa1\ \x47\x09\x09\xa8\xe8\xe1\x0e\xc2\xfe\x3f\x5f\x18\xf4\xaf\xff\xa9\ \xfd\x7c\x3c\xc7\x20\xbf\x3c\x4e\xfe\x11\x0f\x39\x8f\xb4\xb9\x61\ \x65\xff\x07\x81\x17\x9d\xda\x3e\x6c\x20\x4c\x0e\xf0\x83\xca\xb3\ \x88\x1e\xee\x66\xe0\x49\x29\xb9\xeb\x39\x47\xfa\x0a\x8b\xe3\xc1\ \x16\xc0\x24\xba\xc2\x0a\x6e\xf6\x85\x61\xbf\x61\x60\x7f\xc0\x02\ \xd8\xd2\xaa\x2a\x84\x9a\xbf\xf1\x83\x5b\xb6\xef\xb8\xf3\x91\x07\ \x06\xfd\xf7\xfc\x0d\xb0\x7b\x50\x5a\x3a\x59\xe1\xbc\x6a\xf7\x00\ \xfc\x3d\x33\xb4\x8b\x0b\x52\x4c\x41\xfd\xa1\xa0\xb5\xbe\xd0\x25\ \xb2\xb5\x73\xb0\xbe\x00\x37\xc9\xe6\xeb\xc7\x7e\x7a\x70\x61\xd0\ \xc9\xf3\x1b\xcf\x3c\xfd\xb5\x20\xec\xd9\x7e\x55\x8a\xd7\xaf\xf2\ \xda\x65\x4d\x7b\x17\x21\xa0\x83\xea\xc3\xf9\x02\x73\xf2\xb6\x07\ \x06\xad\xb2\x2d\x40\x4d\x0e\xda\x25\x63\x4a\xc5\xa3\x51\x94\x07\ \x69\x88\xc3\xca\x75\xdc\x63\xac\x90\x07\x06\xa5\x8f\xb5\xd1\x32\ \xcc\x3c\xf3\x17\xcc\xa1\xf9\x21\x55\xa0\xce\x3a\xff\xd5\x1c\x91\ \x7e\x1d\x6c\xee\xc0\x10\xbc\xd3\x59\x7b\x16\xe7\x6f\x3b\xb8\x79\ \xd3\x6d\x3b\x76\xef\xea\x14\x92\x31\x8b\x41\x71\xf0\xf4\x92\x0e\ \xbf\xc0\xb5\x1e\x55\x14\xa2\x67\xb8\xef\xb7\x40\xf6\xf5\x1c\x44\ \x11\xff\x13\x5f\x52\xb2\xdc\x59\xd4\x50\x26\xfb\x80\xf1\xe4\xd7\ \x3c\x01\x01\xe9\xad\xc7\x38\xe3\xde\xdd\xcf\xc0\x91\xee\xbd\x64\ \x61\x00\xf1\x17\x96\x71\xb9\x92\xdb\x05\xd9\x8b\x7e\xa6\x45\xaf\ \xa2\x12\x4c\xeb\x54\x75\xf4\xbb\xa8\xa4\xc0\x4f\x96\xfe\xf7\x0c\ \x20\x9f\xee\x6e\xb9\xfa\xe8\x09\xa0\x81\x27\xde\xf3\xba\xa7\x70\ \x39\x18\xdd\x36\xcc\x33\x8b\x14\x88\xb7\x46\xb2\x92\x2d\x5e\x72\ \x3b\x4a\xee\xe2\x4a\x89\x4e\x46\x5c\x1e\xfa\x9b\xbf\xff\xac\x4e\ \xee\x46\xa4\xa4\x9e\x96\x5e\x30\x85\x99\xcf\xec\xe6\xad\x77\x5e\ \x08\x4a\xea\x2a\x7a\x2f\x63\xe8\x8c\xb3\x99\x61\xed\x81\xb1\x96\ \xc8\x51\x4f\x49\x55\xe0\x6c\x50\x2b\xa0\xd1\x96\xd8\xc5\x4f\x5e\ 
\xf0\xbe\x8b\x0d\xae\xed\x06\x36\xc2\x64\x25\x05\x98\x44\x6b\x27\ \xc1\xfd\x6c\xde\x3d\xec\xb4\x1a\xc5\x5d\xe6\x7b\xfd\xd0\x19\xd2\ \x72\x8d\x73\xe5\x9d\xc3\x1e\xae\x4c\xee\x41\x69\x03\x5f\xe1\x9b\ \xcd\x9e\xd7\x25\xf3\xf9\x52\x79\xfd\x2d\x9f\x3c\xc6\x09\x0b\x60\ \xf5\x84\xa3\xa0\xa4\xa3\x85\xc2\x07\x7a\xba\x05\xb1\xea\x91\x61\ \xc3\xe6\x4a\x4f\x80\x2e\x1e\x7c\xec\xba\xbd\x46\xf6\xd5\xbc\x44\ \xa3\xa3\x75\x0f\xee\xea\xe7\x94\xdd\x59\xc2\xdd\x06\x45\xac\xa5\ \x13\xe4\x48\x85\xb4\xe8\x9b\x86\x97\x0f\x1f\xb7\x5e\x99\x46\x83\ \xac\x5e\x3c\x29\x21\xad\xcd\x6d\x19\x3a\xbf\x7b\x60\xff\xa3\xfa\ \x8d\x49\xc0\x65\xa3\xac\xd3\x0c\x57\x4f\x37\xe6\x73\xd5\xea\xfd\ \xfb\xd7\xdd\xbc\x75\x61\x57\xab\xa6\xea\x13\x70\x95\x44\x0f\x37\ \x9b\x12\xe0\xa4\x5d\x11\x7a\xd2\x79\x7d\xf3\x55\x41\x54\xbb\x75\ \xc6\x4f\x95\xd1\x93\x8a\x7b\xba\xd1\x65\x6a\x36\x7a\xae\xce\x9c\ \x12\x7d\x7c\xb4\x61\x84\x7f\xc3\xcd\x3b\x87\xcd\x56\xa7\xd3\xea\ \xe5\x54\x25\x13\xc1\x13\x47\xc1\x94\x32\xcd\x52\x85\x34\xad\xa3\ \x88\x53\xe8\xc0\x12\x30\x94\x3d\x29\x2c\xf7\x39\xd3\xbb\x20\x3a\ \xbe\xbe\xce\xdf\x70\x03\xb0\x13\x69\xfd\x42\xcb\xba\xcf\xbc\x97\ \x73\x2d\x4f\x28\xcb\xd8\xf0\x41\x3f\xe1\x42\x94\x8c\xd3\x7c\xcb\ \x5a\x1f\xeb\xe8\x93\x0e\xe7\xfc\xcd\x5b\x1f\x7d\x61\xeb\x95\x50\ \x6f\x19\xfa\x1c\x7d\x0c\x94\xb4\x27\x46\xff\x06\xa7\xc9\x5f\x3b\ \x62\xca\x33\xef\xf5\x9b\x76\x0e\x73\x48\x9b\x21\xab\x57\x48\x7d\ \xbb\xa7\x4d\xc6\x4c\x6b\x28\xd7\x47\xa3\xb5\x9e\xdd\x68\x6f\x19\ \x18\xd0\xd6\xe1\xd2\x4c\xd3\xb8\xa8\x2f\x1b\x54\xab\x86\x45\x41\ \xf5\xdc\x1c\x95\xb4\xe9\xb1\xf4\x3e\x6d\xda\x5b\xd6\xaf\xbb\xfb\ \x71\x93\x7d\xd4\x5c\xc6\xab\xec\x55\x97\x42\x82\x54\xf6\x98\x32\ \x65\x4e\x2c\xfa\x90\x38\x65\xfa\x4d\xf0\x23\x0b\x03\xfb\x5d\x18\ \xa4\xe7\x2d\xee\x79\x74\x48\x69\x93\x5e\xee\x89\x7e\xbe\x07\x02\ \x6a\xc7\xca\x8d\xfb\x96\x8a\x7e\xab\xa8\xad\xcd\xfb\x65\x9d\x40\ \xf2\x58\xc6\xe7\xed\x30\xb5\x19\x2a\xdb\x37\xdd\x08\x28\xc7\xf7\ \x47\x1a\x18\x53\x3b\x87\x16\x26\xac\x4e\xb7\xbc\xef\x81\x5d\xba\ 
\x82\xef\x94\x34\x5f\xfe\xb2\x67\x26\xe7\x4f\x82\xbd\xd0\x4f\xfc\ \x68\xd1\xe1\x5c\xb6\xf1\xb0\x61\xab\x76\x05\x9d\xaa\x74\xaf\x52\ \xa0\x0a\x33\x19\x26\x60\xa3\x5f\x5e\x95\xba\xe8\xf0\xbb\x00\x18\ \x49\xd7\xf0\xbf\x35\x67\x38\x33\x3b\xb4\x3a\x80\x64\x3f\xd4\xde\ \x32\x28\xe9\xec\xbc\x83\x76\x5b\x03\xd6\xa6\x27\xdf\xb6\x6e\xcd\ \x4f\xae\xf9\xf8\xae\xa5\x26\x55\xca\x4a\xfb\x01\x18\xe9\xf2\x4c\ \x8a\xb7\x41\x4b\xf7\xab\xc5\x2e\x85\xbc\x74\x21\x17\x6e\x58\xcb\ \x77\x0c\xdc\xe6\x32\x52\x61\x6f\x66\xe9\xbb\x9e\x65\x75\x04\x1d\ \x40\x67\x5a\xb4\x3a\xcd\x46\x4f\x2a\x7a\x75\x49\xc2\x47\xb6\x2a\ \x8b\xf7\x77\x42\x33\x78\x48\xee\xba\x09\x42\xa0\xea\x42\x2e\xaa\ \xe8\x9d\x1d\x17\x3f\x29\xa4\x00\xf2\x86\x67\xe6\x75\x66\xb1\xb3\ \x9f\xe3\x6e\x98\x6a\xd6\xb0\x46\x54\x0b\x4a\x8d\xb4\x24\xa0\x60\ \xf3\x8c\x30\xf3\xc2\x51\x00\xa9\xb2\xa6\x30\xc2\x7f\x12\x38\xda\ \x81\x22\x3e\xd1\xe5\x27\x3d\xf6\x70\xcf\xc2\xfc\xda\x48\xa1\x9c\ \x9f\xdd\x3a\xec\xf7\x20\xb1\xcf\x0b\x5d\x23\xb3\xbf\x65\x45\x84\ \x20\x2b\x56\x52\x14\xbd\x28\x40\x68\xf4\xdd\x20\x6b\x12\xa3\xa2\ \xc2\xd3\x77\x60\xa5\xee\xcb\xb6\x48\xaf\xb8\xda\x18\xf5\x89\x1d\ \xe3\xf0\xef\x1b\xc9\xc2\x3d\x3f\xbb\x65\xe7\xb0\x3f\xd3\xe0\x30\ \x9f\x8b\x4f\xf6\x26\xa0\x22\x77\x36\x25\x78\x89\x31\xc1\x0d\xec\ \xf4\x63\x12\xe5\x9d\xda\x69\xf7\x64\x45\xbb\x72\x09\x34\xd4\x81\ \xbd\x7a\xbd\xc5\xb6\xaf\x59\x14\xaf\x23\x79\x9f\x83\x20\x11\x25\ \x3c\xb5\x47\x95\x88\xbe\x64\xb0\x0a\x2a\xb9\x27\xe1\xa8\xc7\x54\ \xde\xb9\x83\x8f\x71\x65\xc7\x3f\x84\x32\x4c\x0b\xf9\x59\x7c\xf7\ \xcb\x73\xb0\x00\x0c\x0d\xd0\xcb\x8d\x7e\x1e\xff\xd6\x2d\x3b\x07\ \x4b\x9d\x06\x78\x51\x42\x4a\xdc\x14\xaa\x70\xd7\x8e\xc0\xaa\x45\ \x94\x8c\x09\xe0\x91\xd5\xc7\x1b\xf6\x91\x86\x4e\x73\x84\x0f\xfe\ \xe9\x21\x28\x5b\x7c\xff\x5b\xbd\xc6\x77\x5e\x75\x1c\x26\x18\x0f\ \xbd\x30\xb4\xe6\xf4\xa1\x37\x9a\x64\xb2\xd3\xc0\xfd\x86\x00\xa9\ \x7b\x4a\x2c\xe7\x61\x77\x0e\xa4\x75\xf0\x28\x0d\x45\x33\x02\xb8\ \x98\x85\xa4\x71\xb6\x13\x56\x8f\x57\xe3\x31\x0a\x2a\x24\x36\x19\ 
\x6d\x1d\x36\xf5\x5e\x83\x20\x05\x53\x8a\x38\x5a\xd6\xdc\xf8\xc0\ \x18\x2b\x96\x3c\x00\x25\xd9\x3b\x8e\x66\x71\x3c\xaa\xeb\x8e\xba\ \xb1\x00\x5e\xe7\x2c\x06\xb4\xe6\xf1\x61\x8b\x20\x4a\xcd\x59\xb1\ \xd4\xdb\x65\xc2\xce\x07\x44\x0b\x38\xb5\xc3\xa7\x95\x49\xb4\xd3\ \x92\x4a\xeb\x39\xc2\x8f\x3b\x75\xfc\x12\xd9\xc7\x14\xcc\xd5\xdb\ \x4d\x69\x41\x73\x53\x65\x4c\x96\x22\x7e\x52\x37\x51\x4d\x1a\x0a\ \x38\xc5\x96\xd0\xdf\x67\xa2\xa6\xf1\x16\x23\x2f\x4e\xe3\x77\xee\ \x3e\xb9\x9f\x50\xae\x3a\x68\xec\xbb\x09\x05\xbd\x90\xa7\x92\xd7\ \xe3\x25\x30\x81\x2a\xe4\x28\x1b\x53\x15\x59\x53\x96\xd1\x2a\x0f\ \xfc\x65\xd1\x23\xdc\xe4\x6e\x83\xce\x41\x9b\x6f\xbe\x7b\xe3\xc6\ \x4d\x07\x77\x98\x6a\x52\x93\x3a\xca\x98\x04\xa4\x92\xbd\x90\x34\ \xbe\x40\xed\x21\x82\x09\x37\xb0\x51\xae\x52\x5b\x93\x94\x74\x92\ \xc6\xa4\xa8\x35\x33\x03\xc8\x99\x9b\x0d\xda\x68\xd2\xb2\x67\x94\ \x9a\x70\xbb\xbe\x2a\x45\x49\x85\x9f\x2c\x7b\xbd\xcc\x97\x58\x78\ \xf2\x9a\x4a\xe2\x1a\x7e\x5c\x29\x91\xa0\x44\xc2\x3c\xa1\x9a\x2f\ \x59\x3d\x0b\xf6\xf7\x68\x4d\xd2\x9d\x17\xb6\x40\xf0\x4b\xc1\x14\ \xa2\x08\x3f\x1d\x8f\x2a\xac\x02\x53\x6f\x84\xe6\x82\x8f\x36\xeb\ \xe1\xd2\xed\x8e\x28\x7a\xba\x3c\xca\xbc\x9e\xa2\x44\xbf\x23\x32\ \x51\xfd\xb4\x3d\xff\x04\x38\x83\x5d\x50\x07\x53\xda\x47\xf5\x36\ \x38\x35\x42\x8c\x93\x7a\x55\xd9\x07\x41\xd6\x9a\xa1\xa0\xa4\x6d\ \xe6\x68\x92\xc4\x87\xc6\x05\x88\x40\x4b\x7b\x08\xb7\x30\x17\x48\ \x5f\x70\x52\xd7\x28\x77\x96\x08\xe2\xaa\x62\xab\xb7\xdf\x2c\x76\ \xc2\x9c\xb1\x7b\x2a\x69\xed\x04\xbc\x21\x53\xc7\xaf\x4c\x7a\x9f\ \x09\x75\x94\xf7\xc2\x74\x3b\x11\xf4\x8f\x32\x3e\xe5\x9e\x2a\xb8\ \x4a\x04\x6a\x11\xea\x4e\xd7\x36\xbe\xb3\x52\xf0\x65\x8a\x93\xa2\ \xa4\x09\x6a\x46\x9b\xa1\xb2\x7b\x47\x85\x47\xd1\x51\x7b\xe7\xac\ \xa9\x45\x91\x0a\x49\x80\x91\xc0\x61\x0f\x25\x1b\x7b\x09\xcc\xa4\ \x4e\x52\x88\x4c\x05\x2a\x5f\xd1\xe6\xcd\x34\xea\xa8\xaa\x3a\x02\ \x35\x02\x25\xad\xf1\x45\x1c\xe5\x1e\xe7\x42\x47\xf8\xd2\x39\x9c\ \x68\xca\x04\x80\xe0\x4e\xa9\x40\x96\xb0\x79\xb5\x2d\x12\x17\x72\ 
\xd1\x75\xba\x08\x9f\x37\x1b\xe2\x1e\xe7\x3a\xd6\xce\xdc\x13\x3d\ \xf7\x91\x51\x6c\x02\x97\x47\x80\xd2\x01\x0e\x31\xa2\xf0\x59\x49\ \x35\xe9\x8d\x06\x78\x73\xda\xa4\x76\x6c\x69\x1b\x5c\x50\xc2\x43\ \x70\x75\x76\xc7\x62\xaf\x6b\x95\x2f\x65\x28\xf1\x64\xa3\xeb\x04\ \xde\x02\x52\x73\x54\x59\x53\x51\x30\x4f\x0d\x46\xf3\x08\x4b\x45\ \x51\x2d\x4c\x81\xaa\x71\x02\x36\x88\x4c\x04\x64\x9d\x6a\x77\x2c\ \xd3\x2e\x5f\xdc\x28\xfc\x26\x5a\xde\x84\x40\xea\xc8\x4f\x6c\xd4\ \x02\xdd\x74\x0f\x5e\x8e\xab\xa0\xa3\xd1\xb9\x06\x30\xfd\x9a\x20\ \x46\x59\x28\x94\xc4\xa9\x92\x57\x0a\x3b\x25\x9d\x4f\xd5\x75\x02\ \x5b\xe2\xe6\x51\x92\x7d\x44\x0c\xb2\xce\xfd\x0e\x88\x92\x8c\xc9\ \x57\x51\x78\x2b\xb3\x07\x4b\x02\x37\x1a\xf7\x63\x33\x64\x42\x19\ \xaf\xf5\xc0\x55\x61\xa7\x18\xbc\x94\x1f\x74\xf7\xa0\xbc\x74\x34\ \x4a\x1c\xc5\x64\x04\x81\xc6\x8b\xbd\x4a\xef\xe2\x45\x29\xf6\xa4\ \xa1\xe8\x0b\x7c\xa0\x4e\x86\x15\x7c\xc1\x49\xbf\x04\x98\x48\x15\ \x20\xd8\x39\x55\xe2\xee\x85\x50\x37\xd5\xaa\xc4\xd2\x57\xeb\x53\ \x2a\x5d\x16\x12\x93\x4a\x72\xd4\xf7\xa7\xfc\xa1\xa1\x02\x91\x6e\ \xd6\x31\x4a\xf9\x48\x49\x1d\x10\xa6\x33\x7b\xcc\xed\xd8\xdd\x5b\ \x88\xa8\xa5\x88\x56\x88\xd7\x7a\xf7\x26\xb2\xa0\xd1\xe8\x49\xde\ \x74\x0a\x03\x38\x2b\xe8\x32\x4f\xec\xca\x37\x95\x7e\xd7\x53\x8a\ \xa1\xd3\xe1\x7e\x3d\x8b\x1f\x2e\xe0\xac\x8e\x45\xc1\x99\xe2\x8a\ \x24\xf6\xa4\xe2\x12\xdf\x8c\xea\xb4\xe0\xe1\x8a\xce\x08\x09\x50\ \xe4\x67\xc8\x51\x45\xc4\x50\xca\x97\x84\x72\x56\x50\x8a\xf3\xc2\ \xe4\x8e\xac\x5e\xae\x04\x58\xc1\xe9\x70\xeb\xc5\x9e\x1d\xe9\xa1\ \xdb\xdb\x85\x9f\xda\x96\x30\x22\x09\xda\xdb\x45\x07\x38\xb8\xa7\ \x57\x2a\xc2\xcf\xda\x5e\xe8\x9c\x0e\x45\xe9\x15\x10\xad\xf4\x68\ \xea\xa8\x9e\x80\x13\xff\x87\xa1\x86\x1f\x8f\xb2\x97\xd2\xba\x5a\ \xe2\x8b\x78\x89\x77\x50\x1a\x0f\xdc\x53\x49\x76\x1f\x83\x8d\x9a\ \xb1\x05\xa7\xb8\x27\x87\x91\x20\x36\x24\x70\x16\xa4\xc2\xd2\x42\ \xbb\x52\x11\x3d\xac\x4e\xc2\x4c\xbd\x84\xea\xf5\x53\x3c\x7d\xd2\ \x95\x4e\x8b\xe8\x63\x3d\xa5\x48\x4f\xf5\x8e\x4a\x2c\x2a\x50\x35\ 
\x4e\x0f\x69\x85\xa2\xaf\xb5\x72\xf2\xaf\xc2\x7b\xc8\x46\x9d\xc8\ \x94\x50\xd6\xcc\x51\x9f\x1c\x52\x12\x7b\x1a\x66\x70\x48\xa8\x4a\ \xe4\xa0\x9e\xe0\x43\xb9\x13\x4f\x63\xa8\x0d\xed\x48\x7b\x08\x17\ \xfd\xa8\x40\xad\xa3\x65\x94\x6f\x8d\x95\x71\x92\x13\xad\x62\xaf\ \x9f\x54\x4e\xd5\x8e\x1b\x53\xea\xf8\x45\x41\xbe\x54\x13\xb5\x65\ \x32\x52\xfa\x12\x9c\x64\x4d\x5e\x72\x57\x7b\x6b\x91\x58\x13\xfb\ \xfc\x90\xa5\x11\xa5\x39\xea\x4c\x5e\x4e\x0b\xe1\xe5\x91\xc4\xa3\ \x31\x2f\x45\xf4\x08\x14\x9e\xf8\x80\x98\x68\x67\xe2\x00\xeb\x38\ \x0a\x63\x51\xdf\xe3\x17\x72\xde\x92\x97\x4e\xfc\xc0\x28\x2f\xc2\ \x2b\xa2\x27\xf1\x87\x82\x47\x2f\x9a\xce\xed\xd3\x52\x9f\xe6\x54\ \x44\xa3\x05\x98\x12\x98\x68\xaa\x95\xf0\xe3\x45\x49\x5e\x88\xb6\ \xf2\xc5\xdf\x26\xac\xc4\x4e\x7e\x4f\xa4\x81\x8e\x5f\x99\xe4\xec\ \x4d\x4f\x67\xf5\x12\x34\x49\x5e\x2f\x4b\x68\xec\x45\xeb\x14\x4b\ \xdb\x1c\xe5\x27\xad\xe9\x10\x1c\x85\x5b\x05\x25\xe8\x9f\xe0\x1d\ \x87\xa3\x79\x8d\xa0\x31\x38\xd1\x3a\xca\x58\x2b\x78\x02\x94\x0e\ \x9f\xb2\x7d\x71\xf8\x87\xe4\x68\x4c\x64\x4c\x31\x47\xc9\x90\x88\ \xa7\xf0\x0e\xd9\x2a\xc2\xaf\xf4\x5a\x2f\x2a\x10\x1b\x53\xda\xec\ \xd3\x6b\x3d\xf3\x13\xcc\x1e\xef\x98\x28\xa9\x8b\x32\xd1\xb2\x14\ \xa6\xf2\x86\x7d\x1c\xe6\x89\xe0\x15\xcc\xd8\xe8\xd3\x7e\x54\x83\ \x25\xa4\xee\x75\x8b\x57\xcc\x8b\x30\xea\x42\x49\x29\x77\x2a\x1a\ \x95\xf3\xf5\x78\x4b\xdc\xcc\xfe\x3e\xbd\x17\x1a\x55\x72\xf5\x15\ \x14\xc6\x59\x3f\x51\x0d\x04\xaf\x6c\x83\xfb\x71\x53\x25\x58\x35\ \x57\x05\xa6\x66\xea\xf8\xf3\x4c\xc2\x4e\xc1\x2a\x20\x75\xc6\x9c\ \xf3\x52\x9f\x0a\x4a\x2a\x08\x98\x08\x27\x20\x56\x06\xc5\x62\xd7\ \x09\x09\x92\x62\x29\xfc\x26\x0f\xb5\x08\x44\x85\x14\x50\x22\xd4\ \x5c\x4a\x7a\x3e\xcc\x20\xbb\x2b\xc9\xa2\x38\xb9\xab\x12\xf1\xbd\ \xc0\xa4\x97\xf2\xa5\x63\x8b\x64\x4c\x92\x2b\x93\xcb\x27\xe1\xcb\ \x81\x2b\xf0\x50\xb9\x64\xf6\x01\xd1\x22\x4f\x1f\x1a\x24\x3f\x2a\ \x74\x8e\x65\x1f\xbb\x54\xda\xb8\xd3\x3a\xca\x87\x03\xd1\x90\x98\ \x10\x1f\x97\x1e\x85\xa7\x60\x48\xc2\xd1\x32\x26\x02\xa8\xa3\x91\ 
\x38\x6a\x86\x57\xca\xea\x79\x09\x2d\x84\xab\x3d\x8c\x48\xb4\x31\ \x49\x2e\x4f\xfe\x29\xd4\x51\x0c\x9f\x88\xa7\xc2\xe0\x76\x60\x4c\ \xa1\x6f\x4a\x6f\x8a\xa4\x83\x12\x01\x8b\x28\x1d\x43\xb7\xcd\x9f\ \x7c\x52\xa3\xc8\xd5\x09\x31\x92\x3a\x7c\x85\x35\xe7\xf4\x29\x5b\ \x11\x39\x6b\xa8\xe4\xcc\x41\x40\x0a\x37\xe7\x23\xd3\x52\x1f\x4d\ \xa4\xcb\x54\x24\x5b\xfb\x39\xbb\xcb\xfc\xc1\x46\x78\xda\xb2\xe0\ \x1d\xf0\x42\x2f\xa1\xbc\xc5\xcc\xbb\xa0\x55\x00\x58\x6c\x3e\xb4\ \x7a\x25\xf9\x76\xcc\xd1\xc8\x9c\x0a\x00\x0a\xce\xe9\x58\xb7\x1d\ \xde\xc7\x42\x2e\x9f\xb9\x23\xb4\xbe\x1b\x2d\xf9\x06\xab\xc7\xae\ \x0d\x0f\x29\xaf\xf3\x82\x36\xb2\xfa\x34\xce\x69\x0e\x4a\x84\x7a\ \x00\x16\x39\xfa\x8f\x8b\x96\xce\xdb\xdd\xc7\x14\x14\x80\xfa\xbb\ \x22\x4c\xc2\x4e\xb1\xf6\x38\x28\x01\xa0\x5e\x8c\x1f\xc5\x79\x41\ \xcf\x78\x3b\x88\xf0\xe3\x68\x14\x9b\x89\xfe\xcd\xf2\x73\xd3\xbe\ \xa5\x1e\xc0\x44\xd1\x73\x6a\x1f\xc3\xac\x18\x2c\xbc\x75\x3b\x11\ \x82\x44\x94\xfc\xa1\x84\x3f\xd6\x3f\x05\x82\xc7\x62\x0e\x6f\x86\ \x9d\x64\xb7\xc4\x77\x75\xa4\x88\x5f\xa3\x49\xd5\x29\x37\x2a\xce\ \x5e\xf8\x2a\x6c\x55\xc6\xa4\xf3\xfa\xd8\x96\x22\x8e\xb6\x7c\x96\ \x16\x08\x56\x36\x6d\x6d\xaf\xc1\xca\x81\x3e\xba\x0c\x9a\x29\x0b\ \x53\xa1\x22\x7c\x78\xe8\x5b\x3a\xf3\x62\xeb\x97\xd0\x24\x61\x4e\ \xf0\x95\xce\xeb\xb5\xe8\x01\x64\xcf\xfa\x26\xab\xa2\xbb\x1b\xfe\ \x12\xea\xa5\x4d\xc0\xda\x20\x17\xf1\xfd\x3d\x4a\x1f\x49\xe2\xa6\ \xf1\x79\x3d\x8b\x5f\x9d\x0c\xe4\x63\x42\x1a\x26\x00\xc5\x54\xe4\ \x6a\x77\x84\x4a\xf6\xeb\x83\xe8\x9e\xa9\x0c\xda\x89\xf0\x57\x82\ \x12\xc1\xaa\x33\x91\xb4\x86\x26\x2a\xce\xad\x56\xe0\x9e\xf4\x78\ \xa2\x29\xdb\x42\xdc\x47\x98\xf0\xca\xe1\x85\x2a\x9a\xee\x29\xa0\ \x10\x5f\x93\xb6\x79\xb8\xd3\x05\x9d\xf4\xbc\xa7\x98\xa4\x36\xda\ \x70\x5d\x64\x2b\x87\x80\x53\xf5\xe8\x58\xae\x26\x49\x71\xb3\x4e\ \x54\xc5\x03\x96\x96\x41\x22\xe2\x7e\xc2\x28\x8f\x3a\x20\xe2\x7c\ \x99\xe0\xfe\x40\x94\x14\x91\x3a\x2a\x38\xcd\x47\xc8\xa5\x2e\x92\ \x0a\x51\x41\x5f\x62\x92\x30\x38\x89\x1d\xbe\x72\xa3\xe9\x02\x44\ 
\x11\x8c\xd0\x02\x8f\xbf\xb5\xa3\xfa\x9e\xdc\xb9\xab\x74\xe5\x01\ \xb0\x62\x47\xd9\xd8\x15\x1f\x39\xaa\x1c\x14\xd9\x3c\xde\xb1\x1f\ \xb5\x2a\x1a\xa4\x4b\xea\x94\xed\x1e\xe8\x6a\x2d\x42\x96\xd2\x18\ \x00\x41\x5b\xaa\x28\x9f\xca\x0f\xda\x94\x10\xa0\xd8\x51\xa9\x94\ \x54\xb0\xb6\x63\x8e\x2a\xa3\xd7\x11\x3e\x7c\xd8\xa0\x64\xdd\x40\ \xb8\x89\x48\xbd\xa9\x54\x85\x40\xad\xb0\x43\x4b\xf2\x26\x79\xda\ \x71\x8c\x5f\xa6\xa3\x3c\x0d\x76\x5a\x38\x9a\x36\x26\xc7\xd0\x27\ \xad\xd9\xcf\x0e\x91\x9f\x62\x4e\x32\xb5\x40\x2f\xa1\x25\x3c\x20\ \x74\x6e\x70\x1f\x9f\x30\x87\x86\xaf\x58\x1a\x59\x7d\x0b\x1f\x01\ \x2a\x11\xa9\xeb\x2c\xdc\x3e\x28\xc4\x37\x01\xa1\x8e\x86\x8a\x5a\ \x09\x44\x29\x3a\x56\xc9\x6c\xb9\x8c\x4d\x49\xe7\xa1\x01\xd6\xd4\ \xd6\xb2\xa8\xe8\xd3\x23\x00\x3a\x7b\x3d\x00\x65\x6e\xe2\x71\x60\ \xf2\xfa\x9a\xd2\x85\x27\x5f\xee\x29\x98\xb2\xce\x27\x19\x3a\x0d\ \x79\x7d\x94\x87\x16\x04\xf7\x95\x73\x38\xa5\xc3\xf3\x4f\xcc\xd4\ \x90\x9d\x62\xe8\xfc\x23\xad\x3a\xa4\x9b\x48\x64\x46\xd1\x72\xcf\ \x2c\x8d\x37\xc4\x62\x12\xb0\xc5\x7f\x3b\xa0\x53\xfb\xd8\x94\x04\ \x64\x2e\xf3\xd3\xf4\xa6\xad\x18\x7b\xcc\xd4\xb6\x96\x7c\x7a\x70\ \x5e\xbc\x86\x2a\x87\xdf\x2c\x18\xa2\x38\xd2\xf7\x7d\xfc\x00\x9e\ \x11\x21\x6b\x42\xf1\x53\xef\xa8\x22\xe9\xc3\xaf\xb5\x0f\xad\x54\ \xaa\x9c\x2c\x3e\x01\x32\x42\xaa\xc5\xae\xf3\x7a\xfe\x2d\xd4\x5e\ \x68\xef\x5b\x6e\xb9\xa7\x45\x94\xdc\x13\x3c\x2a\x19\xad\xd0\x9a\ \x58\xe2\x22\xf6\xa4\x67\x2a\x23\x77\x8f\x97\xc3\x9b\x6e\x7e\x11\ \x90\x8c\x92\xfb\x74\x8e\xc5\xe5\x3e\xea\x75\x35\x97\xe6\x25\xde\ \xe2\xf1\xe3\x3a\x2e\x82\x64\xc4\x21\xe9\x13\x18\xf1\xf8\x8f\x74\ \x79\xd4\x71\x15\x97\x7b\x03\x30\x6e\xc5\xd6\x06\x25\x39\x88\x54\ \x20\xd3\x2e\x54\xf3\x53\x48\x2d\x4a\x11\x47\x23\x88\x64\x4c\xae\ \xf2\xf4\x15\x77\xac\x8d\x26\xaa\x68\xac\x51\x44\xaa\xf6\xc3\xd2\ \x2d\x5a\x0a\xb3\x80\x65\xa1\x27\x9a\x72\xa7\x13\x7e\xb4\x47\x65\ \x1d\xae\x8f\xed\x41\x25\x05\xe2\x11\x10\x45\xbc\x11\x5e\x55\x15\ \x3e\xd8\x5f\x10\x28\x28\x1f\x18\x80\x83\x57\xf8\x28\x76\xa6\xe3\ 
\x51\x89\xf0\x9b\xd2\x98\xc9\xfb\x4c\xe6\x17\x93\x51\x50\xd2\x5d\ \x34\x77\xd4\xfe\x00\xce\x31\xed\xed\x95\x01\x08\x0f\xad\xf5\x95\ \x9a\xa0\x25\x7e\x89\xe6\xe6\x69\x76\x2a\x3d\x55\xee\x89\x2d\xde\ \x81\xf4\x5a\x35\x80\x50\x49\x47\x3b\x1b\x7e\x83\x1e\x1b\x3b\x43\ \x2d\xcb\xa8\x15\x1f\xde\x75\xb8\x0f\xae\x7c\x69\x16\x6a\x28\xbd\ \xe2\xf9\xa3\xa1\x31\xe9\xe0\x09\xa4\xff\x3d\x38\xae\xf8\xca\xf7\ \x17\x82\x93\x43\x66\x04\x5b\xe9\x6c\x44\x69\xa9\x6a\x2a\xd1\x79\ \x53\x74\x8e\x55\xc7\xa3\x71\xff\x68\x6a\x9f\xc9\xde\x68\x4e\x38\ \xeb\x61\xcf\x77\x84\x9d\xbc\xd5\x14\xee\x34\x45\xbb\x4c\x71\xda\ \x34\x2e\xca\x53\x21\x1e\x7e\xa4\xfd\x68\x41\x0c\x15\xb8\x60\x4f\ \x37\x2d\xe2\x01\xc0\x0e\x67\x20\xb2\xdb\x90\x27\x12\xbc\xd8\x8f\ \xb2\xa5\x8b\x92\x86\x6b\xfd\x84\x02\x2b\xa4\x37\x1b\xd2\xdb\xf6\ \x54\x1a\x7f\xcc\x01\xbd\x79\x00\xc2\x0f\xba\xf3\x94\x6e\x0a\x5b\ \x4b\xd2\xd4\x80\xb0\x37\x8f\x8e\xb3\xfd\xba\xf1\x68\x33\xa1\xa0\ \xe2\xa2\x78\x36\x11\x9c\x79\x58\xbd\xbb\x0f\x56\x0f\x28\x99\x99\ \x9a\xa5\xf1\x26\xa3\x40\x95\xea\x43\x86\xef\x2c\x53\xec\x1c\x1b\ \x8f\x4e\xf3\x86\x98\xc0\x0c\x92\x3b\x6e\x7b\xfb\xe7\x3f\x3a\x6c\ \xd0\x07\x86\xb2\xf8\x35\x4c\x01\x18\xe6\xca\x71\xa9\x24\xcd\x50\ \xd9\x0f\x39\xd4\x74\x22\x81\x9b\xde\x66\xa2\xf2\x78\x54\x23\x8b\ \xcd\x69\xfc\x1c\x67\xf7\xc2\xf3\xcb\x31\x8d\x8f\x47\xd5\x06\x0e\ \x71\xd2\x37\xfc\x9e\x6e\x7b\xd2\x00\x8b\x78\xdb\x96\x64\xae\x0d\ \x4a\x62\x66\x39\xff\x1f\x99\xbd\xea\x78\x8c\xfb\x47\x63\xe2\x31\ \x4a\x8a\xa7\xb2\x73\x27\x3b\x63\x22\x7e\x4e\xe5\x25\x8e\x4a\xda\ \xbd\x70\x54\x77\x3b\xea\xca\x78\xba\x2d\x53\x8f\x4c\xf4\xb6\x6b\ \xb1\x7d\x98\xa8\x06\x7c\xaa\x0f\x1f\x5f\x7a\xe8\x34\xa2\xc4\x77\ \xe5\xc5\xf6\x19\x3c\x08\x94\x6d\x49\x80\x8e\x8f\x47\xc3\xce\xe1\ \x82\x9b\x72\xa9\x4b\x4b\xb1\x35\x57\xeb\x52\x11\x04\x25\x78\xe4\ \x8a\xa2\xbd\xa8\x40\x8a\x53\x20\xc0\x41\x65\xee\x56\x91\xf3\xf8\ \x99\xe8\x41\xed\xa9\x90\x5e\x22\xb7\xd5\xe4\x71\xb4\x96\xd8\xa9\ \x66\x73\xf2\x4f\x32\x96\x12\x92\xaa\xc1\xe8\x55\x32\x0b\x35\x10\ 
\x11\xa7\x0f\xd5\x73\x4b\xc4\xd3\xe9\x74\x3c\x5a\xa8\xa1\x3f\x21\ \xd5\x61\x54\x02\x4f\x72\x7a\x7b\xca\x43\xb5\xd9\xe4\x65\xaa\xeb\ \x98\xcc\x3e\xb5\x0d\x8e\x97\x82\x2a\x91\xb3\x3e\xc9\x86\x56\x2f\ \x77\x6a\xf9\x14\xb4\x31\xb5\x79\xe6\x30\xcd\xf2\x55\xe7\xd6\xbb\ \x80\xb2\x0b\x34\xbe\xe5\x2d\x91\xdb\xab\x09\xa4\xb9\xbf\x73\x07\ \x4f\xc8\xd4\x60\x82\xb3\x70\x36\x6c\xd2\xe2\x59\xbe\xf6\x46\x94\ \x48\x30\xa1\xc4\xe0\x0c\xe3\xbc\xa6\xee\xd2\x61\xff\xc9\x20\x1b\ \x7a\x84\x37\x0a\x9f\x26\x64\x27\x59\xea\xa0\x92\x8e\x2a\xc1\x67\ \x3c\x55\x03\x60\x66\xda\x41\x75\xbd\xb1\x2f\xe9\x95\xa9\x99\x5e\ \x9b\x74\x57\x01\x59\xbd\x3f\x70\x5a\x3c\x54\xdc\x49\x88\x20\xa3\ \x71\x15\xe9\x69\xbe\xfe\x18\xe7\xe9\xb7\x5c\x1a\xe5\x23\xa0\x9e\ \xda\xe8\x0b\xde\xaf\x8f\x67\x63\x87\x4b\x68\x28\x77\x40\xc9\x9e\ \xb4\xc6\x4b\x9d\xad\x4f\xe3\xec\x32\xd2\x53\x2f\x9d\x32\x15\xaf\ \xcf\xd6\xe9\x31\x4a\xf1\xca\x14\x2e\x4a\x78\xfb\xfb\xf5\x48\x85\ \x7f\x5e\xa0\x44\x9f\x1f\x13\xc3\x25\x8c\xf0\xf6\x8d\x09\x8f\x04\ \x9f\x3c\x32\x40\xef\x68\x06\x76\x9f\xd8\xb7\xf3\xc7\x7a\x2a\xe1\ \x13\x52\x9c\x91\x8b\xac\x8d\xd2\x7a\xf9\xd2\x31\x09\x8f\xc5\x2e\ \x15\x47\x23\xe1\xff\x16\x9c\xf6\xee\x27\xe2\xd1\x68\xb3\x01\xd7\ \x7a\x8a\x48\x7b\x08\x53\x82\x27\xae\xe4\x45\x2e\x2a\xda\xaf\xaf\ \x3d\x98\x99\xaf\xa5\x6d\xb8\x4a\xb1\x24\x1a\xf9\xb2\xc1\xc6\xe8\ \xe7\x0d\xdb\xc9\x41\x00\x7a\x9c\x2f\x13\x32\x94\x81\xaa\x6d\xf0\ \x9c\x57\x50\xbd\x15\x26\x47\x82\x6b\x3e\x65\xcd\xc1\x93\xa7\x9f\ \x40\xb2\x2e\xd1\x14\xef\xee\x3b\x8f\xde\xb8\x7d\x61\xd0\x31\x9f\ \xf1\x68\x05\x0d\x93\x36\x6d\x81\x95\x7a\x40\x32\x2f\x4c\xbc\x82\ \x0a\x54\x9d\x92\x00\xbe\xb0\x40\xc6\x0e\x0a\xe8\xd4\x6b\x8f\xba\ \x36\x8c\xf2\x00\x6b\x33\xcf\x92\x87\xac\x35\x52\x2a\x3c\xf4\x0a\ \x16\xbb\x6e\x27\x92\x43\x18\xda\x37\x55\x5e\xfe\x41\x0e\x2a\xdc\ \x0c\x65\xa0\xe7\x6e\x38\xff\x1f\x4c\x95\xf8\x27\x33\x08\x52\xd6\ \xa6\x36\x7b\x7b\x5d\x71\xd6\xee\x9e\x66\x3b\xf2\xf4\x0f\xe5\xee\ \xd1\x96\x08\xac\xee\x22\xd3\xbd\x1a\x08\x52\x9f\xbc\x62\xfd\xdc\ 
\x70\xef\x68\x84\xc3\xaf\x3c\xab\x77\x77\xd7\xca\x7c\xc3\x17\x0e\ \x3f\xbb\x54\xa2\x6f\x69\x63\x2a\x88\xa1\x05\x5e\xc2\xd2\x9c\xa7\ \xf9\xca\x39\x31\x40\x1c\x9e\x67\x2a\x69\xe6\x4f\x5c\x25\x41\xa8\ \xbf\x2d\x33\x11\x16\xca\x8c\x5b\x20\x1c\x37\x4f\xdd\x70\xd4\xe1\ \x23\xa8\x74\x0a\x43\xdf\xc9\x8d\x05\xe1\xf0\x34\x22\x6d\x4a\x20\ \x79\x75\x78\x59\x53\x45\x27\x42\x69\x8a\x7b\x5c\xcb\x03\xab\xbf\ \x6b\x91\xe9\xc1\x86\x96\x7d\xf7\x0b\x3c\xc4\xeb\x2f\xcb\xb8\x23\ \x57\x5b\x93\x93\xbc\xb0\x15\x89\x4c\x09\xde\x14\x94\x84\x58\x01\ \xe8\xb8\x92\x0e\x99\xfd\x5f\xb9\xe9\xb8\x73\xe6\xbd\xb1\xe3\xa3\ \xec\x76\x7f\xe6\x0d\xf0\xea\x75\xe3\xf1\xc8\x2a\x6e\x26\x05\x05\ \xa4\xb1\xe8\x45\xe6\x45\x98\x89\x94\xfa\xc8\x00\x2c\xa5\x02\x52\ \xbc\xe8\xad\xaf\xb9\x7d\xc7\xbe\xc1\xb1\xa0\xa4\x8a\xa3\x13\xa2\ \x15\x8b\x07\x3a\x8c\x72\x7c\x0d\x3f\x15\x96\x00\x50\xbc\xf3\xc4\ \xe1\x30\xe9\x7b\xc3\x29\x25\xf0\x1b\x85\x24\xf0\x54\x45\x9e\x65\ \x5f\x85\x11\x9c\xda\x9a\x4e\xb5\x03\x18\x57\x6f\x5e\x6d\xde\x93\ \xa2\xa4\xf1\x78\xe4\xa2\x60\xa4\x78\x2b\xd1\x4b\x75\xbc\x8e\x04\ \x0f\xf8\xc0\xa2\x90\xca\x4f\x6f\xd8\x80\x7d\x3a\x82\xd3\xde\x14\ \xe6\xfd\x0e\x28\x69\xcf\x83\xd9\x35\x48\x2f\x38\xec\x8b\x4b\xad\ \x8b\xac\x6e\xec\xce\x14\x47\xd3\xe5\xfb\x18\x66\xa1\x36\x9a\xd8\ \x9e\x94\x83\x2a\x2b\xda\xbc\xa9\x36\x5c\x61\x6c\x17\x66\x77\x57\ \x3a\xb5\x93\xa5\xfe\x5c\x54\xd2\x4c\x8d\xff\x98\xb0\xdf\xd3\xa0\ \xa4\x85\xe2\xa8\x30\x34\x4c\x47\x10\x2a\xa3\xcd\x55\x63\x41\x2e\ \x1c\xd5\x33\x92\x2d\x3d\x7d\xb4\x9d\x01\x36\x1a\xed\xab\x83\x61\ \x15\x7e\x40\x92\xdd\x0b\x33\xb9\xa2\xf1\x69\x5d\x43\x53\xa1\x92\ \x6a\x8c\x85\xdc\x32\x85\xb2\xe7\xf3\x53\x80\x06\xc9\x32\x1f\x60\ \x05\xa4\x97\xd1\xd8\xdc\x86\x8f\x13\x63\x27\x46\xfa\x72\xa7\xa4\ \x99\x72\xf8\xf0\x74\x2f\x85\x3e\x26\xd5\x01\x91\x4c\x46\xa8\xee\ \xa8\x55\x14\x58\x2a\x3a\x0a\x48\x99\x9b\x70\x13\x47\xef\xc3\xa9\ \xc3\xaf\xef\x28\x97\x1f\xcc\x47\xbe\x16\xb6\xae\xc9\x93\xca\xe4\ \x17\x03\xf4\x2d\xb0\x67\x94\x29\x8e\xa6\xcb\x64\xc4\x50\xae\x3c\ 
\x22\x52\xad\xa3\xb9\xea\xc4\x16\x43\x5a\x31\xb9\x38\x37\x85\x03\ \x67\x2b\x07\x72\xc5\x0a\x67\x4c\xa8\xa4\x10\x37\x5b\x25\xbd\x66\ \x46\x80\xf2\x90\x37\x51\x52\xed\xf0\x95\x6a\x8a\xd5\xa7\xdc\x13\ \xf0\x94\x74\x14\xb5\x34\x97\xe3\x96\x78\x55\x2b\x4e\x5b\x7a\x3d\ \xcc\x8b\xca\x0d\xc8\xff\xfd\x8f\xc9\xf5\x46\x63\x8f\x3d\x61\x45\ \x99\x50\xd2\x25\xc8\x44\xbb\x5d\x1e\x50\x04\x61\x49\xa8\xa4\x02\ \x53\x6b\x6a\xaa\x46\xa6\x9a\x1f\x64\x6b\xb9\xd0\xe7\xd6\x79\x44\ \xee\x7b\x47\x4e\x49\xff\x6b\x52\x26\x10\xff\x89\x75\xfd\x02\xf5\ \xe5\x6e\xb9\x67\x86\xe2\x3d\x61\x80\x1a\x25\x1d\x4d\x0e\x26\x3e\ \xf3\x91\x7b\xa7\xe6\x8e\x59\x11\x0e\x4a\x05\x78\x52\x7b\x12\xc1\ \x4b\x4f\x09\x22\xd5\x7d\x3a\xc2\x51\x91\x7e\xbd\x02\x66\xab\x76\ \x2c\x18\xa1\xd7\x03\x50\x42\x7a\xed\xc8\x2a\x69\x4f\x1b\x93\xd1\ \x53\x52\xd2\x2b\xb1\x5f\x60\x73\x09\x1c\xc5\x08\x8a\x48\x4a\x0f\ \x52\xc5\x07\x62\xb8\x1c\x3c\xeb\xe2\x78\xc5\xa3\x3f\x2a\x1e\x39\ \xbe\xfa\x4d\xc1\x6c\xd7\x73\x6a\x51\x51\xe7\x49\x37\x75\xb4\xcb\ \x87\x22\xc9\x67\x20\x73\x92\xa1\xdc\x13\x89\xde\x51\xd6\x51\xc5\ \x52\xd5\x54\x02\x6e\x1e\xf5\x54\xac\x1e\xed\x9e\x07\x7d\xc9\x28\ \xe4\xb9\x75\x77\x6c\xd9\x7a\x60\x19\xb8\x82\x4f\xe5\xa5\xa4\x76\ \xbf\x90\xe5\xbe\x4b\x50\x27\xa6\x3f\x62\x62\x6a\x9f\x56\x3e\x72\ \xbd\xf3\xa3\x71\x43\x11\x7c\x44\x89\x08\x4d\x2a\x81\x07\xf3\x10\ \x86\xfa\x06\x74\xf8\x7b\x9f\xbb\xee\x3a\x54\x52\xc2\x69\x86\xd3\ \xb6\x8a\xba\x5c\x7b\x0e\xc4\xca\x7d\x96\xbc\x53\x52\x59\xee\x41\ \x41\x7f\xf5\x94\x96\xc1\xaa\x8d\x97\x2c\xb5\xda\x10\x38\xc7\x5d\ \x84\xe1\xc0\x69\xed\x9f\xc0\xd7\x03\xd2\xa8\x55\x63\xc3\x87\x8f\ \x36\x4c\x9b\xdb\x55\xc3\x5a\x8f\x63\xec\xb6\x0c\xd1\xed\x67\xf7\ \xc1\xea\x9e\xa3\xe8\x4b\xf2\xa4\x3d\xe4\x27\x5c\x24\x73\xc8\x45\ \x6d\x9a\xd7\xa2\xbe\xa7\xa6\x67\x4f\x85\x84\x4f\xa4\xa3\x31\x4f\ \x81\xab\x2a\xb3\x43\xb8\x9f\x7e\x0a\xa1\xdd\xd9\x03\x96\x1e\x01\ \x38\x37\x01\x4e\x58\x99\xf6\x5a\x91\xae\xe9\x78\x39\x3d\x2e\xf7\ \xdd\x0d\x2f\xbf\xec\xd2\x0d\xb0\x80\x9e\x4d\x9c\xdc\xf4\xa0\x9d\ 
\xbb\x4d\x8b\xd3\x98\x52\x1e\xfd\xc6\x1c\x35\xa0\x98\xa3\x05\x49\ \xbe\xda\xb0\xd7\x02\x7d\xd9\x08\x81\x1e\x9c\xa9\x4a\x56\xd2\xdd\ \x1d\x09\x9f\xee\xb1\x9d\x34\x43\x1e\x96\xda\xce\xac\x92\x4e\xbe\ \xed\x70\xd0\xc9\xa9\x7f\xb7\x48\x4f\x9d\x34\xea\xb1\xd1\x80\x6c\ \x15\xed\x78\x1b\x3c\x79\x6c\x3d\x5d\x2a\xf1\x7b\xb4\x40\xfa\xcf\ \x5d\x67\x1c\xe5\x29\x4d\x03\xf4\xcc\x45\x04\x7a\x55\x1f\xac\xe9\ \x1d\xc0\x99\x61\x21\x61\xde\xe9\xa0\x93\x35\xb3\xb4\xab\x8c\x7b\ \x4b\xb3\x6b\x91\x5e\xdc\xef\xb7\x7a\x89\xd2\xb8\x40\x8c\x5a\xde\ \x14\x54\x04\xa9\xdb\x35\xaa\x57\x2f\x03\x68\xeb\xfa\xd6\xe4\xef\ \xdf\x73\xe1\x95\xab\xad\x63\x77\x4a\x0a\x9e\x74\xf5\x50\x46\xa7\ \xb9\xb4\x6e\xa1\xe7\x18\x6a\x22\x41\x65\xdd\x73\x0b\x2d\xa9\x97\ \xa4\x67\x8d\xfb\x0e\x5f\xd6\x26\x65\xf5\xb9\x1e\x46\xc7\xc6\x84\ \x3c\x5c\x36\x04\xff\x54\xf4\x3b\xdf\x70\x4a\x0a\xd5\x87\xc3\x61\ \x98\x61\x25\x63\x3d\x2d\x03\x47\xc3\xc2\xc0\x5c\x7e\xf6\xdc\x68\ \xa4\xac\xfb\x8e\x85\x41\x09\x6e\x34\x1e\x57\x91\x5e\x42\x8b\x02\ \x1f\xed\x47\xa5\xed\xe9\xe7\xaf\x38\x73\xdb\x45\xaf\x00\x98\xf9\ \x5a\xe2\xc9\xe3\x05\x3a\xd2\x3f\x1b\x81\x92\x8a\x27\xbd\x33\x2f\ \x89\x56\xcc\xc1\xc0\x79\x23\x7a\xf0\xa0\x40\x68\x38\xfb\x96\x5a\ \x39\xcd\x75\x4d\x8e\xff\x48\xf7\xe3\x8a\xdd\x93\xec\x73\xb4\xf9\ \xb5\xdf\x3c\x06\x87\x7b\x9e\x64\xb0\x1a\x03\x32\xdc\x74\xf3\xd0\ \x6b\x4c\x96\x26\x9d\x92\x96\xe4\x49\xd7\x75\x38\x6c\x86\x3c\xf9\ \xe3\x4b\xb6\x36\xbe\x9e\xfe\xfa\x84\x05\x39\x98\x29\x64\x84\xbb\ \x54\xf0\xd3\xed\x44\x5a\xf4\x28\xf8\x86\x4a\x43\x0d\xc1\x04\x6c\ \xa4\xd1\x6b\xac\xfc\xd7\xfe\xc5\x60\x25\x28\x29\x46\x4f\xb5\xb5\ \xec\xb9\x61\xc5\x4a\xba\xb8\xa9\xc6\x40\xef\xb5\x4e\x49\x1a\xd6\ \x83\xbe\xdd\x26\x00\xe7\x5d\x43\x20\x61\xec\xa8\xe0\xec\xf2\x1f\ \xb7\x4c\x9f\xb9\x13\x1b\x22\x1d\x15\xb9\x7f\xe8\xa6\x7b\x26\xbf\ \x72\xf5\x7a\xc4\x88\x19\xed\xda\x3a\x37\xe0\x5e\x8b\x4a\xea\x22\ \xd2\x33\x58\x49\x61\xb9\x37\xb4\xe6\xaf\xf7\x96\x2b\x3e\x7f\xbe\ \x9b\xf7\x7c\xc7\xd0\x85\x7a\x1b\x3e\xf8\xd9\x7d\xfd\x4e\x51\xea\ 
\x02\xa9\xcb\x9c\x20\x30\xd5\x50\xc5\x94\xb4\xf8\x03\x1d\xbd\xfc\ \xfe\x49\x8b\x6e\x13\x95\x31\xf6\x20\xd2\xd9\x86\xed\xd6\xd8\x26\ \x4a\x6a\xa0\xee\x05\x25\x6d\x55\x86\x78\xb9\x1f\xb1\x79\x6f\x1c\ \xb6\x32\x47\x86\xc9\xfe\xc0\xcc\x2e\x2a\xa8\xa3\x64\x07\x44\x9c\ \xd4\x73\x44\x8a\xb4\x76\xd2\x0f\x11\x6e\x7f\x7c\xd7\xe0\x3d\x6f\ \xfe\x21\x00\x78\xc4\xfa\xfc\x4f\x24\x95\xd4\x50\xf9\xdc\x62\x40\ \x07\x86\x33\x3c\xbb\x9f\x07\x93\x65\xac\x9e\xbc\x17\x9a\xee\x24\ \x6b\x0a\x4e\x65\xf9\x05\xe9\xe8\x05\x9e\x23\xb9\x7d\xd8\x6f\xe5\ \x96\x8e\x03\xe1\xcf\x58\xa4\xc7\x83\x92\x82\xd9\xe7\x55\x39\x09\ \x9e\xd4\xc0\xfc\xfc\x5d\x23\x0d\x73\xfe\x8e\x61\x87\x0b\xb8\xe1\ \x0c\x6f\x44\x8b\x08\xe3\xde\xbc\x56\x28\x76\xb9\x00\xa8\xbd\xd6\ \x1e\xcd\x38\xef\x1e\xce\xe4\x48\x5f\x83\x2a\x81\x05\x4a\x4a\x5a\ \xd9\xeb\x0a\x60\xf5\xe6\x1b\xbe\x4e\x7f\x8d\xe9\xa1\xd5\x38\x7f\ \xd3\xce\x00\xa6\x94\xa9\x1c\x37\xd6\xd3\x5e\xdd\x78\x78\x7f\x58\ \x19\x2f\xf4\x81\x01\xa2\xcb\xcf\x79\xd3\xed\xae\xa0\x39\xec\xc8\ \x5f\x42\x70\x07\x49\x8c\x96\xbe\xda\x7e\x1d\xd6\x03\x9c\x1f\x25\ \x2e\x5e\xf8\x7d\xd4\xcf\x87\xf6\x9d\xf6\xee\xdb\xb6\x3f\xfa\xc0\ \xb0\x53\x64\x44\x96\x95\xc0\x50\x01\x0a\x0f\x25\x24\xd1\x96\x18\ \x80\x6c\xc5\x7e\x34\xca\x45\x9a\x2d\xd0\xd3\x4f\x0d\x65\x00\xe9\ \x13\xa0\x09\x7d\x52\xd2\x75\x1d\xbb\x6f\x73\xc2\xbc\x94\xe3\xce\ \x81\xa2\xdd\xcd\x3b\x76\x15\x65\x96\xd7\x54\xd0\x29\x4b\xa7\xa3\ \x6a\xfa\x2c\xf1\x14\x29\xde\x06\x4f\x77\x91\xe1\x1c\x15\x9d\x86\ \x3e\x03\x15\x85\x7e\xce\x54\x43\x50\xdc\xb7\xb2\x7f\x1d\x20\xbd\ \xe9\x8c\xfb\x09\x26\x74\xc9\xfd\xcb\x0f\xb6\xec\x30\x2e\xa8\x51\ \xa3\xbf\xf7\x4a\x25\x24\xf5\x0c\x7f\xd9\x93\x22\x4b\x63\x1d\x6d\ \x79\xc9\xb2\xb6\xfc\x30\x28\x39\x05\x80\x76\xa4\x25\xf7\x9b\xa0\ \xb3\x00\xf4\xc7\x2a\x73\xb8\xdb\xc2\xbc\xca\x0c\x7f\xaf\xf2\x3c\ \x18\x94\x4a\x24\x47\x5a\x44\x4d\x27\x88\xa5\x13\x29\xf7\xa4\xfb\ \x89\xc6\xc7\xa3\x66\xaa\x3c\xb8\xc1\x19\x89\x9e\x8e\x00\x63\xea\ \xd4\x96\x5e\x14\x9c\x53\x8f\x3f\x70\xc4\x66\x53\xfe\x9c\xd1\x6d\ 
\x99\x99\x7f\xc9\x9f\xeb\x10\x1d\x45\xac\x70\x75\x0f\x1d\x94\x14\ \x2a\xb1\x57\x7f\x7c\xd7\x8a\x75\xee\x11\x2e\x3c\x1e\x69\x51\xfd\ \xe7\xae\x9e\x6b\x7d\xf9\x18\xc5\x42\xf3\x3b\x86\x79\xdd\x6c\x24\ \x6b\xe3\xd1\x9f\xb0\x2f\xc7\xcd\x1f\x4d\x6f\xdf\x8c\x3d\xb6\xae\ \x07\xe2\x43\xc6\xb3\xfe\x92\x1c\xe8\x1d\x27\x02\xac\xcf\x0e\x6a\ \xa4\x9b\xd6\x3c\x64\xb2\xa5\x55\x5b\x87\x4b\x05\xef\x87\x6b\xd2\ \xa7\x42\x01\x62\x6a\xfe\x68\xfa\xfc\x4d\x4b\xa7\x76\x02\x50\x7e\ \x18\x28\xfe\xfd\x95\x1f\x3e\xb6\xed\x4b\xcf\xe2\x52\x35\x3b\x68\ \xd5\x4c\xa7\x99\xb9\xd4\x4b\x9d\xc2\x95\xf1\xd3\x27\xac\x43\x46\ \x66\x63\x1b\xb4\xa2\xf6\xf6\x06\x99\x53\x2c\xff\x70\xd2\xd7\xbb\ \x9c\x31\x8f\xfc\x05\xb1\x13\x96\x9c\x2b\xda\x08\xaf\xea\x74\xa3\ \xab\xcf\xd9\x2c\x3a\x6f\xc9\x57\xba\xa7\x24\x92\x7c\xd8\x54\xd2\ \x80\x18\xef\x78\xc4\x27\x01\x46\x27\x2a\x3d\x01\x58\xfa\xab\xe0\ \xf6\x43\x13\xfa\xcf\x2c\x71\xe4\xee\x50\xf3\x47\xc5\x3d\x05\x79\ \x48\xbc\x65\x0f\x4a\x4a\x0c\x1d\xcd\x1e\x66\x56\x29\xcd\x4e\xc0\ \x2a\xfa\x69\x91\x0a\xd4\x4c\xba\xdb\xb3\x8c\x60\xfe\x3f\xe6\x8f\ \xaa\xb2\x93\x7f\xb1\x2f\x65\xa0\xa0\xa4\x77\xbe\x7b\xe3\xca\x95\ \xab\xd7\xdd\xbe\x30\x9c\xe1\x3a\xa9\xae\x39\x3b\x91\xcb\xb0\x71\ \xbd\x85\x63\xc1\xa2\x7b\x4a\xd9\xfc\xb8\x5e\x7c\xa9\x3e\x68\x0d\ \x45\xa4\x7a\x32\x36\x28\xe9\xdd\x9d\xbc\xd1\xe9\x77\x66\x1a\xc1\ \xbc\xe9\x2a\x9a\x3e\x6b\x6f\xe9\x7d\x51\x23\xbc\xc5\xee\x7f\xfd\ \xf9\xa3\x96\x5a\x0a\xa5\x62\xa9\x20\xad\x9d\x27\x9d\xf5\x17\x51\ \x9f\x2a\x2c\xe5\x12\xc9\x66\x7d\xdc\x44\x8a\x5c\xcd\x12\x83\xa9\ \xa4\x2d\x33\x0c\xf3\x30\x72\x4e\xfb\x51\x45\xf5\x33\x26\xd1\xd9\ \x33\x2c\xb8\xfe\x10\x9e\xaf\xaf\x62\xa4\x95\xc6\xa8\x8e\x08\x65\ \x89\x39\x35\xda\xdd\xeb\x29\x6f\xcd\xf4\x36\x53\x02\xe9\x36\x6b\ \x49\x7f\xdb\x90\xbc\x3e\x3c\x80\x81\x30\x65\x76\x7b\xd2\xec\x05\ \x6a\x3c\x22\x55\x4b\x3e\xfe\x2b\x18\x61\xb1\x24\x6d\xfa\xcb\x2d\ \xd0\x2d\x1d\x84\xa9\xaa\xce\xfc\x87\x25\x34\x4b\x45\xf2\xd8\x3a\ \xe8\x9d\x63\x55\xf3\x9e\xb4\xed\x87\xb2\x8f\x39\x4a\xf8\xd8\x8e\ 
\xf4\xd4\xe1\x79\xc8\x39\x5e\xf5\xbb\x57\xdc\x94\xe7\xaa\x3c\xca\ \xc2\xb7\x54\x0b\x57\xd3\xc7\x57\xe5\xe0\x95\x66\x29\x5f\xe3\xfd\ \x68\x32\x67\x6a\x44\x4a\x6a\x3d\xe9\x9c\xcd\x28\x47\x97\x80\xe4\ \x73\x0f\x24\xf2\x95\x51\x8a\xc7\x97\x4d\x26\xb6\x7a\xff\xef\xdf\ \xc4\xcd\x8e\x81\x3d\xa5\x47\x2b\x44\x03\x7f\x04\xec\x13\xdb\x64\ \x6d\x9a\xed\xa7\x8e\x33\x45\x62\xd7\x27\x9a\x32\xf6\xa5\x6c\x4e\ \x91\xdd\x8f\x9f\xed\xa8\xa2\xd1\xa8\xdd\x51\xb0\xd6\xf5\xd4\x68\ \xe4\x25\x79\xa9\x33\x77\x25\xfe\x45\x21\x0d\x56\x4f\xf3\xf5\x54\ \x34\xd3\xf3\x47\xf5\x21\x0c\x7d\x9e\x29\xb2\x25\xbd\xdd\xa0\x6a\ \x64\xcf\x73\x40\x32\x5a\xb3\x7d\x38\x43\x38\x8b\xc4\x80\xbf\x0a\ \xa7\xb8\x07\x6d\x99\xed\xd0\x47\xbd\xe4\xfc\xd1\x43\x8e\xfa\x4a\ \xeb\x67\x6e\xb2\xfb\xd1\x08\xaa\x5a\x3b\x16\x06\xd7\xf7\xf2\xe8\ \x28\x9b\x34\x3a\xa2\x6e\x02\x47\x43\x12\xa4\x40\xbf\xee\xfc\x51\ \xd8\x64\x8a\xfb\xf0\xc9\x43\x31\x4b\x73\x7b\xdd\x74\xe4\xba\x4d\ \x26\xc7\xe8\xcb\xdf\xe7\xfa\xbf\xca\xce\xdd\x55\xb2\xac\x0a\xe3\ \xbe\xea\x3c\x76\x75\xdd\xe6\xf6\xbd\xed\x5c\x54\x1a\xbb\x07\xc4\ \x99\x40\xc5\x69\x1d\x75\x50\x26\x52\x04\x15\x44\xc4\x89\xd4\x44\ \x06\xf1\x89\x18\x34\x46\x62\x20\x9a\x75\x64\x24\x18\x0a\xc2\x80\ \x99\x68\x26\x9a\x75\xa0\xe1\x04\x82\x30\xa1\x88\x7f\x83\x5d\xfb\ \x7c\xac\xc7\xf9\xed\x25\xed\xa9\x5b\x75\x1a\x3a\x59\xac\xbd\xd7\ \xf3\xf1\xad\x1d\x95\x4d\x2e\xc9\x22\x8b\x4f\x78\x5c\x49\xba\x8f\ \xe1\x14\xd0\x44\xc4\x71\x1e\x3e\x81\x9f\xb9\xbe\x7c\xfb\xf6\x69\ \x8e\x86\x49\x1c\x05\xd4\x57\x6b\x40\xf8\x8b\xb8\x79\xd6\xd7\x5e\ \xe1\x8f\x12\xd7\xf3\xc4\x4e\x57\xf1\x34\x8d\x8a\x5c\x76\x32\xe3\ \xec\x32\x14\xbe\xce\x9d\xc3\xc0\x6b\xda\x73\xa7\x63\x97\x4a\x3d\ \xd4\xf8\xa3\x41\x92\x14\x85\x96\x58\x15\x11\xc8\x79\x12\xa5\xf6\ \xc0\x84\xae\x69\xb0\x65\x3c\x77\x65\xc8\xd8\x52\x4e\x87\x1a\x7f\ \x94\xe1\xb2\x17\xc2\xe7\x84\x38\x3d\xc7\x51\x11\x14\x6e\x6d\x2e\ \x94\xa0\xae\xa2\xd6\x31\x8a\x08\xf3\xa6\x04\x44\xe6\x28\xf1\x47\ \x99\x80\xe0\x70\xbd\xde\xa9\xba\x9c\x79\xca\x93\x77\x28\xaa\xd5\ 
\x26\x99\x9c\xaf\x87\x10\xdf\x05\xc7\xb9\xc6\x1f\xa5\xc2\x67\xb0\ \x5c\xef\xaf\xcf\x1b\x65\x06\xb3\x96\x11\x15\xbd\x98\x61\xe5\x90\ \x35\x81\x08\x6b\xac\xf1\x5b\xc8\x3e\x68\x4d\x30\x8f\xdd\x4b\x62\ \x9d\xa5\x9c\xb1\x6e\x24\xd7\xa7\x6b\x9d\x58\xce\xd9\x82\xd2\x74\ \xf4\x27\x78\x79\xb5\xd2\x4f\x8b\x4d\xdd\x32\x71\x0e\x43\x7c\x64\ \xb4\x9c\x31\x6a\xe8\x91\xf2\x91\xd4\xe3\xe4\xfb\xc7\x19\x6a\x39\ \x1d\xce\x82\xbb\xad\x17\x75\x59\x45\x61\x38\xf0\xe0\x98\x9e\x8a\ \x98\x0f\x0b\x86\x6f\x6a\x8e\xa2\xbb\x5d\x74\xc6\xfc\xc3\x65\x98\ \x17\x30\x96\x5a\x5b\x09\x9a\x86\x45\xa5\xc8\xb4\x11\x31\x0c\xde\ \x01\x2c\xb5\x26\x54\x1c\xf5\xb3\x9f\x93\xd6\x8f\xdd\xc3\xa2\x71\ \xce\x87\xcf\xc7\x85\x29\xba\x79\xa2\x6e\x87\x88\xff\xff\x40\xcf\ \x92\xa1\xe2\xa4\x84\xdf\x1f\x0d\x87\x61\x2d\x57\xf2\x47\xf2\x1e\ \x04\x22\xba\x66\xf0\xf6\x24\x4d\x84\xa3\xa2\x7a\x82\x0d\x15\x47\ \x01\x43\x39\xe7\xe5\x5c\x40\xff\xc8\x92\x8f\x87\xe0\xed\xba\xa1\ \xcf\x06\x8f\x9c\xb0\x67\x85\x8f\xdb\x7f\x37\x32\x8f\x49\x8f\x8a\ \xd2\x61\xf7\xb0\xa2\x65\xc1\x64\x6e\xdc\xc5\xa0\x25\x70\x9c\x31\ \x72\x37\x1e\x65\x2b\xa6\x18\x6d\x67\x07\xcd\xa7\x1f\xfd\xdc\xa6\ \x1d\x9d\x4d\x74\xae\x46\x22\x60\xde\x00\x00\x11\x79\x9a\xe3\x25\ \xc6\x4c\xbe\xb8\x3e\x36\x3a\x52\x8d\xba\x7e\x1a\xe2\xe6\xad\xd6\ \x4a\x76\x7e\x75\xa9\x82\xa9\x27\x50\xaa\xfe\x65\x14\x8e\x13\x7a\ \x58\x0d\x6e\x01\x93\xc8\x34\xa1\xcf\x4d\x84\x36\xc5\xa8\xe6\x27\ \x39\x50\xfd\xe3\x6e\x73\x95\x7f\xc8\x60\xa9\xcc\x93\x10\x42\x8b\ \xeb\x3a\x66\xbd\x33\xa4\xab\x34\xa8\x9b\xd0\xd6\x49\xdd\xab\x51\ \x3f\xf6\xed\xcf\xfb\xf1\x2f\x28\x4b\x05\x84\x92\x18\x5b\x2d\x36\ \xa5\xcc\xd3\x84\x6a\xe6\xae\xa3\xbe\xcc\xb6\x87\x11\xf9\xbc\x04\ \xa2\x04\x66\x8a\x8f\xf1\x93\x9e\x67\xc3\xcd\xbb\x94\x5d\x2a\x3a\ \x33\x9b\x06\x04\x03\x30\x15\x12\x7a\xf2\x44\x8d\x9b\x75\x26\x9f\ \x20\x4a\x3a\xf6\xf8\x43\xf5\x94\x9e\x64\xe5\x2f\xf3\x26\x84\xd6\ \x8f\x5f\x93\x77\xda\x1c\x19\xb2\xa3\x4d\xa7\xcf\x11\x46\xfd\x45\ \x86\x22\x0a\xd5\x37\x07\x77\x37\xd5\xba\x0e\x89\x7c\x9c\xb9\xd3\ 
\xa3\x9b\xa9\x37\x2d\x67\x5b\x0c\xe5\x2d\xce\x2f\xc3\xd4\x8b\xbc\ \x3a\x0a\x2d\xb6\x04\xa7\x15\x3d\x9e\x2f\x41\x17\xbe\xff\x13\x5b\ \xf9\x88\x53\x03\x99\xcf\xbe\xa8\x43\x7d\x29\x4f\x92\x57\x49\x21\ \x41\x3a\x2a\xdd\x04\x33\x8f\x4d\x77\x2d\xaf\x91\x9a\x87\x70\x3a\ \x74\x9b\xf5\x74\xd2\xdc\x85\x1e\xca\xd2\x60\xbc\xbe\xc4\xc3\xd7\ \x0c\xa3\x87\xcc\xde\xdf\x1e\x81\xd1\x47\xbc\x74\x81\x0a\x35\x11\ \xf0\xb5\x5e\x7f\x12\x24\xca\x5e\xe5\xf4\x4d\xa8\x35\xc5\x99\x50\ \xfd\xf8\x48\x8b\x1a\xc7\xa7\x9c\x76\xf2\x08\x14\x04\x26\xb6\xba\ \xc5\x87\xb1\x47\x8c\x47\x3c\x7c\xd9\x79\xf3\x9d\x30\x28\x64\x18\ \xde\x6d\xca\x30\x25\x04\x01\x91\x83\xbf\xae\x69\x2c\xd8\xee\x69\ \xa2\x92\x63\xeb\x35\x1e\xfe\x25\x86\xc1\xa5\xa5\xb2\xa1\x6f\x3e\ \x1c\x98\x57\x73\x65\xf4\x7e\x63\xa8\x11\x49\xc7\x99\x00\x10\x22\ \x35\xe9\xd3\x9b\x72\xe3\x95\xd0\x15\x2e\x15\x33\x61\x67\x64\xb3\ \xa5\x67\x50\xf8\x21\x06\xa5\x97\x87\x64\x9e\xf4\x14\x00\x55\xe0\ \x3a\x0d\xf1\xf0\xf3\x9c\x10\xb6\x09\xb5\x16\x6f\x69\x23\xfa\xb0\ \x52\xf8\x1a\xbc\xc2\x93\x72\xf8\x2c\x89\xd9\x35\xa5\x65\x2a\xba\ \x08\xb9\x55\x64\xd2\x37\x0a\x52\xf6\x48\x17\xc7\x7b\x5a\x85\xf7\ \x04\xe3\x74\x91\x6d\x3e\xcf\x7e\x68\x3f\x6f\xd0\x9b\x67\x5a\x14\ \xa5\xbb\x84\xa1\x34\x4d\xb8\xa3\xf9\xf8\x97\x0a\xef\x49\xb2\x24\ \x62\xc7\xb5\xd0\xb1\x1e\x55\x1a\x17\x35\x11\x6f\xd5\x70\x1a\x13\ \xdc\x7c\x9b\xc1\xd1\x12\xef\x49\xc4\x65\x04\x2d\xc2\x94\xf8\x1f\ \x8d\x28\xb3\x8e\x8c\xeb\xb1\x46\xcc\x91\x32\xe7\x12\xda\xd1\x7f\ \x28\xf7\xb1\x7a\xc7\xbc\x38\xd5\x68\x8d\x58\x20\x62\x71\xf0\x36\ \x0c\x2c\xf4\x97\xf0\xb8\xac\x1b\xb5\xdc\x7b\xc3\x12\x23\x61\xfb\ \x81\x3f\x59\xe0\xe1\xcf\x73\x59\xb2\x8d\x14\x37\x61\x2c\x60\x5b\ \xa8\xb3\xb4\xae\x83\xba\x81\x22\x92\x8e\xe7\x9d\x89\x91\x4b\x76\ \xe6\x0b\x90\x59\x6a\x7c\xa5\xef\xec\xed\x0f\x32\xfb\x9c\xaf\xf7\ \x2a\x23\x59\xea\x5a\x34\x52\x59\x03\x53\x01\x26\x37\x90\x9a\xa4\ \x9e\xab\x25\x44\x62\xfc\x5b\x4d\x96\x52\xd3\x93\x47\xcc\x3c\x78\ \x46\x4d\x37\xaa\x2e\xb3\x6a\x0b\xa1\x37\xb1\x57\x5a\xa7\xd1\x36\ 
\x89\xa3\x7e\xfa\xc0\x20\x4c\x29\x47\x64\x47\x51\x0e\xaf\xd1\x89\ \xdc\x75\x66\x70\x27\x69\x0f\x80\x3f\x9d\xa3\x96\x21\x5d\x7c\x5e\ \x3d\x2e\x3b\x5b\x17\xc0\xcc\x77\x22\x89\x36\x8e\x06\x2d\x10\x5a\ \x44\x23\xde\x5b\xc0\xfd\x4c\x21\x56\x86\xb5\x77\xa1\xe7\xe8\x3a\ \x8a\xb6\x94\x28\x7d\xa8\x9e\x4e\xf5\x72\x89\xac\x9f\xb8\x39\x50\ \x93\x81\x53\x26\x72\x75\x8b\xd4\xe2\x9c\xfd\x21\xa5\x74\x10\x2b\ \x9b\x2c\x11\x28\x15\xdb\x2d\x3d\x58\x22\x7e\x96\xee\x67\xb4\xa2\ \x63\xd8\xbc\x18\x82\x72\x43\xb0\x2b\x7c\xe3\x25\x3d\x52\x7b\x71\ \x7f\x3d\x05\x49\x92\x14\x48\xc5\x5a\x53\x28\x7c\xc4\x4c\x5c\xcf\ \x74\x10\x53\x9d\x54\xaa\x52\x5a\x7b\x9a\x50\xc2\xbc\x65\xae\x36\ \xf7\x9e\x68\xea\x33\x82\x52\x86\xd2\x71\xb4\x4c\x2e\xe4\x83\x83\ \x3f\xdc\x82\x91\xf1\xf0\x75\xea\x65\xf3\x4b\xd3\x07\x4b\x9a\xe8\ \x94\x64\x2b\xba\xa6\xdc\x83\x9b\xf9\x25\xd2\x19\x22\x65\x7a\x7b\ \x63\x6c\xe4\x44\x21\xed\x68\xab\xf1\xf0\x93\xc6\x27\x04\x44\x94\ \x79\x56\xc3\x76\x8d\xae\xc0\xcd\x3b\xf1\xd4\x1d\x22\x37\x05\x76\ \x93\x4e\xde\x62\xa7\xd1\x52\x3e\xa7\xaf\x9b\x2a\x7a\xf7\x7e\xec\ \x20\x57\x7a\xb4\xde\x7d\xc3\xdc\xd3\xd1\xd4\x28\x21\x6a\x6a\x2c\ \x9d\x33\x71\xb1\x72\x4b\xae\x8a\xa1\xd8\x64\xfd\x36\xe7\xa8\x7d\ \x50\xb4\x45\x24\x62\xd2\x0f\x8e\xb6\x04\xef\x98\x1b\x9f\x44\x9f\ \x23\xd2\x29\xf5\xc8\x45\x9c\xe2\x2a\xf3\x8d\x75\x83\x16\x92\xa3\ \x49\xec\x47\xf9\xf1\xe0\x2e\x03\x13\x5d\x12\xbf\xae\x4b\x81\x91\ \xea\xae\x5e\x11\x35\x45\x97\x84\x96\xc9\x1c\xd2\xa8\xe8\xf3\x1d\ \xa5\x8a\x02\x7a\xfb\xe2\xc4\xea\xdb\x92\xad\xbf\xb0\x9a\x48\xb1\ \xd5\x96\x7b\x4d\x89\x87\x4f\xc1\xf7\xdc\x63\x34\xf2\xa2\x52\x31\ \x13\x8f\x5d\x2f\xe6\xf3\x12\xb5\xb1\xe7\x2d\xb5\x3d\x39\x95\xe3\ \x94\x0e\xd4\x68\x60\xed\x11\x71\x3d\xe0\xfd\x26\x64\x20\xf4\xb2\ \x95\x91\x74\x49\xa3\xa5\x5f\x90\xc5\xa7\xe8\x1b\x1e\x3e\xc5\x09\ \xc2\xa4\xaf\x3b\x25\x4a\x91\x45\xa7\xc4\x39\xba\x3a\x86\x33\x61\ \xbc\xb1\xcd\x1a\x71\x3d\xa4\x89\x24\x46\x90\x5c\xa2\xa5\xb6\x94\ \xd2\x11\x91\xd0\xa5\xbe\x71\x9d\x5b\x9a\x9c\xa3\x34\xf3\xbc\x9f\ 
\x84\x4c\x4c\xdd\xe3\xfa\xc6\x48\x44\x1b\xac\x0d\xdd\x4f\x25\x26\ \x63\xa8\x5b\x4f\x47\xcd\x23\x7c\x7b\x56\x4d\x14\xa8\xa4\x50\xfd\ \x61\x75\x99\x78\xf8\xd5\x96\xe0\xfe\x07\x27\x2f\x06\x77\x63\x2b\ \x9a\x6a\x22\x5c\x69\x6b\xf2\x34\xdc\xb6\x0e\xb7\x84\xce\x3d\x43\ \x12\xd9\xa5\x28\xf7\xdc\x67\xdc\x82\x28\x65\xfd\xc4\xd0\x2e\x6d\ \xb8\xd4\x2f\x12\x10\x84\xc2\x07\xa5\xb2\xed\x0d\xbb\xac\x27\xb2\ \x74\xd1\x6f\x10\x7a\x56\x6d\x4d\xe9\xb3\x7c\x03\x13\x5a\xf9\xa3\ \xfa\x83\x59\x72\x34\xba\x9c\x18\xf7\x44\x6e\x0c\xf1\xea\x26\xc2\ \x48\x2d\xb6\x09\xf5\x2f\x9e\x21\xfa\x68\x59\x5f\x6e\xc1\x32\x59\ \x85\xd1\xde\x2b\x6a\xa1\x0d\x2b\x1b\xcc\x86\x9a\x19\x85\x7a\x02\ \x33\xb9\x8b\x11\x6d\xae\x26\xf5\x99\xad\xcd\x5c\xbe\x69\xce\x9d\ \x0f\xab\x1d\xfc\x28\x35\x0e\xd3\x44\xe3\xa9\x37\xf3\x79\x1c\xbb\ \x4a\x10\xa4\xf4\x9e\x24\x4c\xc6\x49\x6a\x51\xe3\x69\x94\xaa\x8b\ \x24\x52\xce\x55\xda\xa6\x02\xce\xf7\x08\x5c\x4f\x80\xd4\xd0\x1f\ \x4d\xe4\xce\x11\xf6\xa5\x39\x81\x65\xbd\x56\xa2\x24\xc6\xc2\xcb\ \x63\xf6\x49\x7a\x14\x93\xcb\xc1\xc9\x23\x47\x13\x9d\x1e\x2d\x4f\ \x86\xa7\xb2\xae\xa6\xfa\x09\xdd\x0e\x41\x42\x74\x07\x7e\x16\x99\ \x12\x20\x92\x51\x9e\x1c\x1f\x95\x81\xbd\xfa\x5e\x1c\x83\x90\x86\ \xc9\x55\x54\x08\x9a\x19\x37\x81\xd2\x4a\x3d\x1d\x4b\x7f\xd4\x02\ \x66\x38\x4f\xab\xfe\x42\xe7\x4b\x63\xc5\x16\x23\x18\x00\x6f\xf7\ \x10\x94\x47\x7f\x2b\x49\x92\x91\x6a\x32\x6f\x21\x48\xf4\x9e\xf4\ \x71\xc8\x1f\xc7\xf6\x34\xc1\x87\x36\xc5\x8a\x9e\x65\xc8\x4c\xe6\ \x9e\xf2\x25\x35\x32\x4b\x7f\xd4\x93\x0f\x8c\xee\x44\x69\xd8\x61\ \x0e\x43\x8f\xbd\xf0\x7a\x11\x18\x9d\x52\x3f\x3a\xfb\xca\x1f\xd5\ \x91\xfb\x6f\x7a\x24\x48\x1e\x8c\x30\xa6\x4f\xf5\xda\x05\xf9\x66\ \xb0\x12\xe5\x9b\xa1\x22\x75\x60\x15\xab\xdd\xe8\xdf\xc4\xa1\x74\ \x86\x46\xef\x7e\xcd\xc2\x94\xd4\x3d\x15\x3e\xb1\xe6\x2f\x2a\xa7\ \x64\x06\xde\x53\x1a\x66\x9b\xe2\xc2\xc8\xe4\x3b\x2f\x46\xa9\x40\ \x1d\x45\xab\x9f\x3f\x23\xbc\xe0\xe6\x61\xe7\x36\x1e\x6c\x35\xd5\ \x17\xc5\x9b\x48\x2c\xfd\x51\xdf\x6b\xea\x61\x93\xaa\x77\x2b\x89\ 
\xcc\xb7\x94\x69\x47\x90\x4a\x30\x5f\x17\x27\x16\x6e\x25\xf4\x42\ \x4f\xcb\x3a\x74\xb6\xd5\x5c\xd9\x36\xb5\xe2\x92\xa6\x2c\x3e\x59\ \x5a\xb4\x13\x55\x0b\xf9\x98\xc9\x75\xc9\x97\xec\x4f\x33\x90\xe6\ \xf3\x76\x89\x62\x8a\x11\xd9\xbc\x68\xe9\xf5\x2e\x3d\x7c\x96\x99\ \xce\x5f\x3a\xf7\x41\x98\xd8\xeb\x28\x41\xd2\x0b\x2b\x1b\xf4\x66\ \xad\xc1\x45\x49\xe7\x4e\x0f\xbf\x1a\x63\xac\x23\x11\x3f\xfa\xcd\ \xe7\x33\x5e\xea\x92\x6e\xe5\xfa\x35\x44\xcb\x2c\x85\x22\x2f\x5e\ \x57\x1a\x7c\x6c\x9d\xe5\x50\x3a\x7a\x4e\x9e\xd7\xee\x82\xd0\x7b\ \x76\x4c\xf5\x06\x81\x4d\xaf\x68\x22\x14\x3f\x99\x1c\x4f\xa5\x86\ \xba\x68\x9b\x75\xbd\xe7\xf3\x79\xf4\x22\xb9\x70\x48\x3d\xe1\xbc\ \x9e\xbf\xc5\x66\xae\xc3\x41\x54\x82\xa9\xa5\x30\x9d\xb0\xd3\xd6\ \x19\x0a\x15\x15\xf1\xf0\x27\x75\xea\xa4\x4c\x49\x8e\x41\x1b\x05\ \xa9\x0f\x33\xd9\x34\x38\x69\xd4\x67\x34\x78\x05\xb1\x17\x43\xe3\ \xb1\x1f\xa3\x51\x8a\x51\xe8\x20\x08\x8d\x6d\x2f\x8d\x93\x0d\xa2\ \xf6\x4c\x2c\x2d\xbd\xf2\xf8\x6c\xd0\xaa\x27\x6d\xed\x86\x0e\x02\ \xe6\x14\x87\x92\xcc\x58\xc4\xb1\x1e\x67\x9b\x0c\x73\xd9\xdf\xc6\ \x31\xe0\xdf\xfb\x30\x9b\xd7\xc3\x6e\x1c\x14\x9d\xbe\x68\x65\x9b\ \xb0\x5a\xc0\x49\x15\x71\xbe\x77\x7b\xcd\x2b\x8f\xf2\xf0\x4d\x3f\ \xfd\x83\x28\x65\x0e\xaa\xb0\x4c\xd8\xd5\x21\x99\x02\x3b\xfd\x2d\ \x8b\x0f\x79\xf2\x15\xeb\xab\x46\x2e\xe9\xe3\x4b\x8a\x80\x01\xe1\ \xe3\xab\x85\x9b\x77\x0b\xf5\x7a\x54\xc4\x9c\xd6\xa0\xa4\x72\xfb\ \x70\x6c\x75\xd3\xec\x32\x5c\x3d\xcd\xb1\x99\x65\x2a\x7a\x09\x19\ \x38\x11\x52\x83\x1d\x4f\xd4\xf7\xda\x86\xc0\x1c\xbe\xf5\xe6\x89\ \xab\x1a\x6a\xa1\x40\xe9\xb3\x1b\x07\x4f\x43\xeb\x4c\x8d\x9b\x38\ \x89\x40\xd1\xaa\x1f\xe6\x49\x3c\x18\x41\x12\xd7\x24\x49\x13\xc1\ \x99\x42\xed\xe4\xca\x90\x2a\xf5\xd2\x6d\x36\x63\x9f\xea\x7a\x3d\ \x7b\x5f\x5a\x0c\xed\xf4\xda\x69\x52\x41\x40\xf4\x4f\xc6\x1a\x8f\ \x75\x50\x09\x16\x32\x64\x91\x4e\x17\x7d\xa6\xf2\xf6\xd9\xa7\xf4\ \xf8\xb9\x5b\xd1\x36\x64\xc9\xd4\xee\xa6\x1d\xac\xc2\xc5\xe7\xb9\ \xdb\x86\x43\x56\x6d\x77\x6d\xce\x8c\x42\x8b\xd1\xe5\x0c\xfb\x63\ 
\x42\x64\xed\x99\xad\x71\x93\x94\xfa\x07\xd3\xd9\xb3\xda\x20\xcd\ \xb4\x8c\x43\xbb\xa2\xdb\xf1\x04\x41\xaa\xc6\x05\x4c\xf6\x7d\x28\ \x10\x60\x00\x5e\x14\x03\x99\xd6\xf9\x10\x91\x2a\xb8\xdf\x70\x98\ \x20\xad\xca\x4c\xac\x89\xb5\x9c\x20\x55\x97\x3b\xd5\x93\xad\xeb\ \x00\x44\x0d\x3c\x67\xf8\x4e\xc1\x84\xb2\x55\xa3\xc6\x4e\x63\x3f\ \x11\xb7\x48\xcd\xbb\x59\xf0\x08\x02\x20\x40\x15\x0c\x07\x1a\x6f\ \x19\x34\xf1\xe1\x28\x1b\xf3\xa3\x76\x4f\xb3\x7a\x82\xd4\xcf\xc9\ \x2a\x25\x52\xa5\x45\x25\x46\xae\xeb\xdd\x92\xee\xc5\x89\x4f\x9d\ \xc3\xc7\x0d\x8d\xa4\xb2\x37\x8f\x26\xb4\x3f\xdb\x2b\xf2\x53\x13\ \xd6\x46\x24\xe0\xfd\x4a\x9e\xf2\xe8\x47\xc0\x89\xb9\x83\x30\xf5\ \x0e\xa7\x47\x11\xa8\xb1\xd4\x00\x6a\x56\x27\xd5\xc0\x69\x96\x0b\ \xec\x0b\x85\x13\x5a\xdb\xfa\x5c\x0a\x8d\xfe\xfd\xd1\xbb\x5c\x27\ \xf3\x47\x0d\xbc\x3f\x77\xbc\xad\xc6\xd3\x65\xd1\xaf\x6f\x42\xe8\ \x72\xa4\x3d\xb1\x0b\x36\x2f\xd7\xc7\x2f\x37\x8f\x0f\xc1\x3f\x7c\ \xf2\xca\x8e\x1e\xae\x93\x64\x5e\xd0\x44\x4e\xa5\x33\xd4\x4c\xa8\ \xbc\xfc\xe4\x93\xa2\x7f\xb4\xb8\xa3\x73\xc2\xf8\x42\x72\xdc\xe6\ \x42\xd3\xa9\xcf\xee\x8b\x3a\xa9\x01\x4c\x67\x7f\x4b\xc3\x28\x23\ \x9e\xaa\x12\x7a\x83\xf2\x0d\xba\x74\x8a\x42\x93\xb2\x8f\xec\xd4\ \xf9\xca\x8f\x1e\x3f\x7e\xed\xe5\xc5\xe4\x9e\xc0\x79\x0a\x96\x0c\ \x2b\x13\xe9\xd1\x82\xa3\xa2\xf1\x64\x14\x02\x45\x89\x88\x3a\xa2\ \x90\x64\xb6\x07\x1d\x48\xf5\xd1\x9f\x24\xf9\x62\xab\x29\xd1\xdf\ \xfc\xed\x87\x2f\xba\x72\x02\x53\x73\xb5\x9e\x43\xd6\x05\x4a\x09\ \x98\x69\xce\xa8\xc5\xf5\x70\xf5\x5e\x14\xe0\xca\xbb\x9a\xa1\xbe\ \xf8\xe1\x3f\xf8\xf1\xd3\xff\x7d\xf4\x6f\x07\xad\x40\xc6\xb9\x9e\ \x5f\x16\xaa\x06\xac\x28\x3c\x27\x27\x36\x2e\x0a\x86\xa1\xff\x9d\ \x10\x47\x9f\xff\x96\xd8\x19\x19\xfa\x5a\xc7\x8c\xf9\xe4\x6d\xf9\ \xf5\x00\x2d\x28\xc6\x05\x6e\x52\xe5\xee\x84\x0e\xfc\xed\x27\x68\ \x52\x21\xb7\xeb\x82\x86\x40\x24\x12\xfb\xdb\x77\xfc\xe5\xf9\x8e\ \xe8\x79\x5c\x44\xa9\xcb\xfd\x57\x3b\xa1\xef\xbf\x5a\xcd\x75\x46\ \x02\xdf\xff\x51\x78\xf8\x3c\x7a\x9c\xbc\x0f\xb3\xa5\xde\x3c\x5f\ 
\xd7\xd1\xbf\xeb\x74\xfc\xc2\x73\x1d\xbb\x28\xa0\x92\xe9\x79\x72\ \xc6\xb7\x79\x8a\x09\xd8\xb6\x0d\xac\x66\xf1\x79\x49\x41\x27\x51\ \x35\xd8\x89\x8f\xd5\x5c\x76\x49\x5b\x7e\x0c\x90\xec\x8c\x53\xfa\ \xbe\x2b\xc5\x4c\x6b\x90\xa5\x37\xef\xde\x7b\xf4\xdd\x87\xb7\xb5\ \x4b\x8c\x7e\x5e\x8c\x9a\xa0\x9f\xf6\x64\x9a\xa4\x0f\xbc\x92\xb6\ \x6f\xc8\xf5\x70\x39\x56\x70\x3a\xba\xde\x4f\xe6\x81\x0d\x7d\xfd\ \x9d\x1f\xbb\xba\xdc\x36\xb0\xe6\x98\x9e\x25\xc6\x91\x7a\x2a\xba\ \x35\x8c\xe4\xfe\xab\x73\x9f\x82\x9b\x87\xbd\x37\x42\xcf\xba\xdf\ \x11\xe3\x4f\xe2\x68\x76\xf2\x0e\x86\x3d\x5a\x0e\x36\x80\x4e\xe4\ \xf0\x2f\x6d\xa7\xed\xf9\xa5\xb1\xab\xfd\x74\xfd\x3c\xc7\xe9\x1b\ \x75\x13\x19\xa9\x1b\x94\xca\xe7\xcf\xb8\xa4\x77\x88\x9b\x27\x90\ \x54\x3d\x02\x9e\xad\xf3\xa3\x8c\x99\x9c\x58\x11\xa9\x55\x77\x9a\ \x0d\x3c\xb2\x73\x78\x9a\x94\xca\xcb\x11\x93\xf2\xb9\x67\x2c\xf9\ \x17\xae\xbb\xce\xdf\x91\xd9\xa9\x14\xa9\x22\x16\x4c\x35\x05\x55\ \x97\xc1\x6d\x7f\xbd\x45\x4e\xd8\x10\xac\xbf\xa6\x2b\xd0\x59\xeb\ \xc4\xca\x73\xfa\xec\x59\x0d\xbd\x35\xeb\xe0\xa3\xbd\x17\xa9\x4e\ \x27\x2b\xb6\x64\x29\xfd\x51\x51\x3a\x1b\xb1\x45\xb8\xec\xdb\x57\ \x9d\xa3\x71\xc0\xbe\x43\x40\x7e\xfb\x24\x9d\x5f\x70\x54\x6b\x82\ \x29\xf3\xec\xcb\x24\x62\x81\x4f\x36\x14\xb9\x71\xff\xb3\x70\x39\ \x5d\x52\xb9\xcd\xbf\xef\x26\xe8\xf1\x2f\x37\x56\xda\x82\x01\xad\ \x67\x7a\x72\xff\xfc\xda\xc8\x04\x8e\x52\xe9\x3d\x23\x7d\x5f\x86\ \xcb\x2d\x2a\x7b\xe7\xaa\xb3\xd4\xe5\xfe\x4d\x21\xc1\xfd\x63\x52\ \x30\x62\xa4\x3e\xf9\xd5\xe3\xe7\xee\xfd\xf9\xf2\xec\x40\x77\x3a\ \xd9\xe5\x5a\x4e\x36\x88\xa5\xd5\x76\x26\x90\x1a\x15\x29\x82\x26\ \xa9\xfc\x37\x3e\xb7\x39\x27\x9f\xba\xb3\x4b\x92\x3c\xe9\xf0\xc9\ \xd7\x67\x42\x75\xfa\x50\xf9\x7e\xf0\xd9\xe5\xbb\x55\x8e\x30\xa2\ \x5d\x83\x86\x69\x6a\x50\x4e\xdb\x35\x7d\xf0\x68\xc3\xf0\xbe\x8a\ \x8e\xde\xd9\xcf\xeb\xbb\x3c\xae\x57\x41\xcf\x16\xf9\xd1\xa1\x71\ \x62\xc7\x5b\x6e\x2c\x61\x58\x1f\xb2\xb9\x54\x4e\xda\x6d\x78\xff\ \x6c\xd6\x3f\xfe\xd1\xeb\x5b\xb9\x72\xd7\x57\x06\xde\xfd\xfe\x55\ 
\xc0\xc3\x87\x19\xad\xb2\x4f\xb0\x4a\x44\x6e\xcf\xb4\xb6\xd0\x9f\ \x83\x2b\xaa\xa4\x78\xeb\xdb\x10\xde\xba\x9a\xbb\xca\x0f\xd2\xf4\ \x9d\x8e\x5d\x7b\x5b\x10\xde\x05\x9a\x4a\x91\x1f\x25\x84\x56\x74\ \xf6\x98\xc3\x4d\x3b\xf6\x5d\xea\x57\x31\x75\x3b\xfe\xb7\x77\x42\ \x8f\x4e\xa6\x48\xfd\x57\x87\xf4\x3f\x9e\x69\xce\x33\x62\x7a\xa1\ \x0e\x5a\x83\x50\xce\x9e\x2c\x2b\x0a\x62\x7e\xf4\x91\xa5\x0e\x01\ \x71\xbe\x96\xaf\x6f\xe6\x5e\xc6\xde\x95\xfd\xd9\x14\xfc\xf5\xba\ \x53\x67\xe1\x1d\x2a\x23\xbe\xc6\x1c\x60\x15\xd1\xdc\xcf\x97\x12\ \x25\xe4\x1d\xf3\x2a\x63\x53\xa2\x7e\xf4\x61\x8d\xd4\x06\xa6\x2b\ \xf8\x34\x27\xf5\xef\xe7\x45\xc5\x57\xb6\xbf\xbe\x68\x28\xe1\x15\ \xe5\x1d\xe5\x16\x0c\xc2\xd4\xe4\xfd\xf5\xce\x50\x1d\x7d\x5b\xe5\ \x93\x7e\xed\x5a\x25\x46\x0f\x95\xcf\x8b\xc1\x7e\x7e\x5b\x10\xa4\ \x16\x86\x64\xe7\xbe\x94\xfa\xec\x95\xd8\xfe\x7a\xd1\x28\x6a\xeb\ \xfd\xf5\x59\xdb\x0b\x4f\xa7\x9b\xfb\x7b\x2f\x4d\x39\x41\xda\xf7\ \x59\xde\xbd\x3e\x3a\xa8\x0a\xfd\xe6\x90\x1f\xad\x11\x5d\xad\xb4\ \xb8\xed\xb5\x65\x21\xbc\xa5\x38\x94\xfd\x59\x0e\x3a\xfc\x47\xbf\ \xa4\xcb\xea\x0a\xff\xe9\xde\x89\x0f\x5c\xb7\x8d\xcc\x03\xda\x32\ \x55\xb1\xf7\x4b\x4a\x24\x42\x7f\x72\x60\x8f\x26\x42\xee\xaf\x0f\ \xcd\xd8\x01\xa1\x46\x97\x74\xd9\x59\xd0\x57\xcf\xdb\x3e\xcc\x27\ \x59\x08\xa8\x52\xcf\xdf\x88\x4a\x0c\x0d\x14\x30\x4a\xd8\x5f\x4f\ \xb8\xc4\xad\xd9\xf5\x67\x7d\x33\xe9\xbe\x17\xff\xd5\x4d\x8b\xca\ \x85\xaa\x0a\x38\x2c\x2f\x4b\x98\x88\xf4\x95\xb1\xde\x9c\xa5\xd8\ \x5f\x9f\xf7\x19\x7b\x6b\x73\xfb\x62\xdf\xd4\xb3\xbb\xa4\xcb\xfd\ \x9f\xbe\x72\x3d\x8b\xc8\x43\x17\x7d\xd8\xa4\x7a\xe6\x0e\xf1\x92\ \x1f\x3d\x19\x8a\xfd\xf5\xc0\xa0\x54\x26\xf7\xcb\x7d\x63\xd4\x24\ \x2c\x25\x4f\xe3\x9e\x8e\x9b\x3f\xea\xd0\xd8\x34\xa0\xb5\x7a\xc2\ \x25\x1d\x2e\x62\x8c\x66\x74\x32\xf3\x34\x65\x85\xbf\xa1\x67\xbd\ \xdc\xf7\xb1\x7d\xe3\x13\x93\xc8\x0c\xb3\x96\xf2\x9b\xe5\x90\x70\ \xc2\x1e\x57\xd4\x38\x0a\xa0\x0a\x27\x96\x52\x2f\x65\xaf\x37\xe2\ \x7a\x81\x7d\xb5\x17\xde\xbb\x39\x4f\xec\x26\x32\xf7\x7e\x39\x14\ 
\x08\x10\xfe\xad\x27\x1b\xc8\x54\x91\xea\xd9\x66\xdb\x5f\xcf\xb8\ \xde\x37\xc5\x3e\xe8\xa0\xbf\x1f\x7c\x78\xb2\x46\x52\x2f\x37\x78\ \x1c\xd2\xcf\x7e\x21\xe0\x74\x51\xaf\xd7\x4a\x99\xb2\x5e\xaf\xdf\ \x96\xf2\xa3\x64\xa8\x88\xdc\x08\x5d\x9e\xee\xdd\xbd\xfb\x9e\x87\ \x77\xf6\x33\x2d\x87\x0b\x0b\x99\x95\x2e\xa1\x83\x57\x21\xa8\xd1\ \xbb\x77\x79\x2a\xbb\xde\x9a\x2d\x8d\xcc\xf4\x1a\xee\xc7\x1b\x1f\ \x79\xe5\xa5\xab\x93\xcd\xb0\xe6\xda\x9d\x09\x13\x8c\x68\xbc\xa8\ \xe0\x28\x53\x25\xe8\x7d\x81\x9f\x97\x5b\xf3\xe6\xec\x3a\x6f\x95\ \xdb\x69\xb6\x3e\x67\x3e\x5a\x7b\x84\xe6\xf6\xac\xed\x19\xd7\x57\ \x68\x4f\xe8\x78\xd3\xd1\x63\x44\x2c\x59\xfb\xb8\xc0\x5a\xe5\x70\ \xcc\x09\x6d\x17\x94\xaa\x09\xa1\x1d\xd4\x13\xbd\xd1\xf3\x0b\xf5\ \xfa\xc9\xea\xf5\xc3\xb8\x49\x97\x74\xe9\x6f\x2b\x80\x4b\x91\xaa\ \x06\x2a\x3b\xdf\xdf\x1e\xdc\xb1\x7f\x94\x8d\xae\x27\x48\x3c\x56\ \xdf\x8c\xb3\xf8\x7a\xed\xa2\x7a\x81\x50\x12\xa4\xc4\x9b\xb1\xad\ \x51\x0b\xde\x3d\xfa\x47\x11\xd7\xbb\x22\xd5\x9b\x30\x10\xaa\xd1\ \xbb\xcc\x03\xaf\x42\xca\x29\xf6\x93\x8d\xc8\x5c\xa4\x97\x3a\x8f\ \x2b\xe4\x61\xde\x51\x1f\x0a\x4d\x34\x8a\xaf\x7c\x9a\xb3\x73\x10\ \xd3\x37\x6c\xd8\xef\xa6\x3f\xe3\x64\x02\x94\x0a\x53\x8c\xbb\x7a\ \xfd\x8d\x7a\xf3\x28\x4e\xf2\x45\x3a\xd1\x28\x34\x58\x06\xdf\xc4\ \x29\x7b\xf8\x19\xef\x29\xc6\x4c\x89\x60\xb4\xe1\x47\xc7\x84\x92\ \xcf\x04\xae\x11\xeb\x1b\x04\x81\x9b\x67\x4e\x49\xd1\x3f\x2a\x86\ \xb6\x90\x20\x65\x87\xd6\x52\xb0\x74\x1c\x86\x9e\x50\xbd\x71\x45\ \x0f\xa7\xc4\x4d\xe9\xe4\x0e\x7e\x1e\x10\xd2\x4b\xd8\xfd\xfd\xe3\ \x34\x9a\x30\xd9\xe1\x8f\xd3\x8e\xa0\x53\x1c\x2d\xea\xf5\x43\xa7\ \x44\x31\x9d\xcf\xd7\x4f\xec\x1f\x8d\xa8\x64\xae\x47\xe3\x40\x4b\ \x48\x8b\x2f\x35\x92\xf3\x2e\x5c\x3e\x0d\xc1\x89\x52\xc0\x4c\x4c\ \x32\xd9\x4e\xf0\xb4\x13\x9a\x67\xc4\x78\xf4\xe5\x8c\x98\x0e\xbe\ \x1c\xc1\x18\x91\x59\x78\xa4\x56\x06\xf7\xfd\xf5\x6c\xcc\x14\xa9\ \x00\x9c\xde\xcd\x88\x1d\x46\xb4\xa6\x81\xa6\x1a\x5a\xc1\x13\x79\ \x9c\x67\x9a\xbc\x56\x1b\x00\xc9\x26\x00\x2b\x44\xa9\x4f\x11\x93\ 
\x84\x88\x50\xa9\xc4\x72\x05\x3f\xb1\xc4\x3c\xdf\xd4\xc2\x2e\xb9\ \x63\x92\x39\x2a\xe2\xd4\x51\x26\xb6\x8a\xd6\x74\x45\x87\x20\x84\ \x2e\xf8\xcf\x84\xa5\x43\xa4\x27\x24\x20\x4c\x8a\xe6\x56\xf7\x8f\ \xb6\x15\xe3\x02\xd4\xa2\xba\xa8\x98\x62\x24\x4f\x7d\xb1\x29\x45\ \x89\x71\x7d\x8b\x80\x9e\x04\xcd\x0b\xe0\xf2\xba\x02\x9c\x0a\x15\ \x1e\xba\x1c\xe7\x71\x11\x3c\x4e\x5f\x71\x55\x6c\x76\x44\xeb\xdd\ \x22\x2d\xc3\x29\x0d\x31\x33\xc3\xdc\x95\xaa\x77\xf4\x47\x47\x46\ \xd4\xef\x67\x51\xb9\xf3\xb3\xa7\x61\xd2\x27\xb1\xd5\xef\xa8\x53\ \xba\xa4\xaf\x9c\x27\xb1\x14\xfe\x68\x36\x9f\x85\x22\x65\x92\x0c\ \x91\x08\x2c\x53\xc2\xcd\xf3\x3b\x9a\x06\x6d\xd9\x3f\xba\xec\xfb\ \xb1\x13\x0a\xa1\x42\x26\xf8\xa3\xe5\xaa\x58\x1a\x26\x23\x15\xfe\ \x68\x86\x4e\x6b\x2d\x61\x2b\x2c\xa1\x7f\xd4\xf9\xe9\x98\x64\x19\ \xfe\x45\x49\x12\xfa\xa3\x59\x3b\x5d\x40\x3d\x39\xb1\x28\x8b\x50\ \x43\x21\x57\x82\xfe\x51\x71\xd6\xb9\x19\x7d\xbc\x94\x1f\x5d\xea\ \xfe\xd1\x0b\xa8\xa7\x2a\xa8\xd7\xf1\xc7\x00\x14\xf5\x30\x59\xfb\ \x0c\xfe\xe1\xa3\xe0\xfa\xb8\xd8\x67\x8c\x37\x50\xc9\xfc\x28\xd1\ \x32\x59\xc0\x01\x3f\x89\xea\x49\x50\x32\xfd\x09\x3a\x8d\x8f\x1b\ \x27\xce\xdc\xb9\x09\xc5\xe3\xf3\xab\x1c\x06\x67\x18\xea\xb8\x79\ \xfa\x62\x67\xc3\x1a\x9e\x86\x56\xb2\x43\x46\x4a\xa5\x9f\x57\x3f\ \x5e\x6a\xe0\xba\x0e\x76\x67\x05\xdc\x3c\x22\x78\x63\x81\x5c\x08\ \x41\x45\xa5\xda\x34\xf4\x41\xc8\x4c\x05\x5a\x17\x6d\xe9\x98\x88\ \x64\x8e\xb2\xc9\xbf\x07\x47\x8d\x54\x6a\xa7\xc3\x12\xd6\x70\x16\ \x3e\x7e\xf1\x70\xa3\x2d\x87\x03\x79\x47\xcd\x86\x4e\xe1\x9b\x39\ \xba\xed\x0c\x15\x4b\x39\xd6\x50\x68\x7b\xf0\x14\x63\x42\x99\x4e\ \x68\x7c\xb1\x52\x62\xef\x74\x22\x8b\xbb\x8a\x52\x55\x6e\x30\xdd\ \xb0\x1b\x0c\x1d\xfa\xce\xd5\xd1\x0f\x4c\x93\x28\x06\xad\x31\x49\ \x36\x8f\x51\x9c\x5d\x94\x56\xd8\x50\x15\x16\xb1\xa3\xc7\x98\x09\ \x93\xf4\xac\xc0\xfd\x3c\x7d\x29\x51\x8e\x83\x77\x2a\x9d\xa3\xb6\ \xf4\x88\xa0\x1a\xfe\xa1\xbe\x2f\x39\xfa\x5f\xc1\x2a\xd8\xa0\xc5\ \x51\x93\x08\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x38\xb4\ \x89\ 
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\xa8\x00\x00\x01\x77\x08\x03\x00\x00\x00\x06\x8a\xf0\xc8\ \x00\x00\x02\xc1\x50\x4c\x54\x45\x7f\x00\x00\xa3\x6d\x93\xa4\x75\ \xa1\xae\x96\xd0\xbb\xb2\xec\xcb\x9d\xa4\xd7\xcd\xed\xbb\xbb\xfe\ \xcb\xcb\xfe\xd0\xce\xfe\xd2\xd2\xff\xc8\xc6\xfe\xc3\xc3\xfe\xdc\ \xdb\xfe\xc0\xbe\xfe\xb8\xb6\xfe\xd8\xd7\xff\xb3\xb3\xfe\xe2\xe2\ \xfe\xeb\xeb\xfe\xf3\xf3\xfe\xfc\xfb\xfe\xb0\xae\xfe\xf0\xef\xfd\ \xad\x80\xa3\xf9\xf7\xfb\xaa\x9b\xe0\xad\xac\xfe\xa4\x62\x7b\xbc\ \x7b\x7b\xbd\xb9\xf5\xbf\x80\x80\x91\x31\x3d\xa7\x50\x50\xa9\x53\ \x53\xca\xc4\xf2\x95\x2c\x2c\x96\x40\x53\xcc\x9b\x9b\xcf\xcb\xf7\ \xae\xa6\xf1\x9c\x53\x6d\xdc\xb9\xb9\xe0\xdf\xff\x9c\x5a\x7b\xb5\ \xb0\xf4\xee\xe0\xe1\xb7\x70\x70\x9e\x41\x42\xf5\xec\xec\xbb\x83\ \x8f\x8f\x20\x21\xbe\xb1\xe4\xd5\xac\xac\xce\xa0\xa1\xbe\xa8\xd3\ \xb2\x6c\x72\xd7\xb0\xb0\xdd\xc2\xc8\xed\xe3\xeb\xa9\x76\x99\x8b\ \x22\x2c\xb6\xa9\xe4\x9d\x6a\x98\x89\x13\x14\xb9\x73\x73\xb9\x76\ \x78\xb9\x9f\xcc\x9f\x68\x90\xa3\x81\xbb\xbc\xa2\xcb\xbc\xaa\xdb\ \x9c\x3a\x3a\xa6\x71\x94\xc1\xbb\xf3\xa6\x88\xc2\xc4\x91\x98\xc5\ \xba\xea\xc6\xba\xe5\xc6\xc0\xf4\x95\x4a\x69\xa8\x85\xb9\x96\x30\ \x31\xcb\xaa\xbd\xb5\x6c\x6c\xaa\x84\xb3\xcd\xa3\xaa\xb5\xa0\xd5\ \x8b\x1a\x1b\xd0\xa2\xa2\xac\x5a\x5b\xd1\xac\xb5\xac\x5e\x62\xac\ \x84\xad\xad\x64\x6d\x97\x56\x7d\x98\x33\x33\xb6\x99\xc4\xdf\xdb\ \xf7\x99\x46\x59\x9a\x3d\x44\xe4\xcd\xd1\xe6\xd0\xd2\xe8\xe6\xfb\ \xea\xd7\xd8\xaf\x60\x60\xec\xda\xdb\xb3\x92\xbb\xaf\x8a\xb5\x9b\ \x4a\x5c\xf1\xe3\xe3\xf3\xed\xf3\xb1\x63\x63\xb1\xa3\xe2\xf7\xf0\ \xf0\xf8\xf3\xf3\x8c\x26\x32\xb3\x89\xab\xba\xaf\xe8\x8d\x2c\x3b\ \xb6\x84\x9a\xac\x8a\xbb\xba\xb4\xf4\x89\x1a\x22\xa1\x50\x5c\xcb\ \xc1\xec\xd9\xb3\xb3\x85\x10\x13\x95\x43\x5c\xc1\x83\x83\xa3\x54\ \x61\xc2\xb4\xe2\xb0\x9e\xdb\x99\x43\x52\xb1\x86\xa9\xc7\xa8\xc0\ \xb1\x8b\xb3\xa3\x87\xc6\xb2\x9c\xd3\xa5\x4c\x4c\xa5\x68\x84\xb4\ \x73\x7c\xb4\xa1\xdb\xb4\xae\xf3\x9a\x4c\x62\x94\x45\x62\xa6\x8e\ 
\xce\xd5\xb8\xc5\xd6\xc1\xd5\xb6\x78\x83\xa6\x8f\xd1\xd8\xd3\xf5\ \xb6\x90\xb2\x9b\x57\x78\xd9\xc6\xda\xa7\x60\x70\xa7\x90\xd1\x9b\ \x64\x91\xdd\xcf\xe2\x8d\x30\x44\x8e\x38\x52\xe1\xc4\xc4\xb9\x8b\ \xa3\xe4\xcb\xcb\xa9\x8b\xc3\x8b\x27\x39\xe6\xe1\xf4\x9d\x60\x85\ \x90\x2e\x3b\xea\xe4\xf2\xbb\x9a\xbe\x90\x35\x49\x9f\x45\x4d\x96\ \x49\x64\xa1\x44\x44\xad\x90\xc7\xa1\x4d\x56\xae\x72\x86\x92\x34\ \x43\xa3\x6a\x8d\x91\x2c\x34\xc0\x86\x8a\x83\x0a\x0b\x9f\x52\x66\ \xbc\x93\xac\xc4\x8b\x8b\xc3\x9c\xb0\x89\x16\x1a\xcf\xc2\xe5\xe9\ \xd4\xd4\xb1\xa6\xe9\xb5\xab\xeb\xc9\x94\x94\xa1\x6e\x98\xae\x99\ \xd4\x9e\x58\x73\xd5\xbc\xcc\xbf\xa1\xc3\xae\xa9\xf5\x9b\x55\x73\ \xaf\x78\x92\xc1\xab\xd4\xc1\xb7\xec\x87\x19\x23\xb6\x9b\xcb\xa4\ \x72\x9d\xaa\x6c\x83\xb7\x7b\x87\xaa\x72\x8d\xc4\xa9\xc9\x92\x3d\ \x53\xaa\x98\xdc\xb1\x92\xc1\xc7\x91\x92\xa4\x7a\xab\xba\x95\xb5\ \xe8\xda\xe3\xba\x9c\xc3\xca\xb1\xcc\xba\xab\xe2\x94\x3b\x4c\x9a\ \x5c\x82\xae\x95\xcd\x95\x39\x46\xa2\x59\x6d\xcf\xbb\xd7\xb4\x85\ \xa1\x9d\x63\x8c\xbc\xa5\xd1\x99\x4f\x6d\xa6\x78\xa3\x91\x24\x24\ \x9f\x6f\xa1\x86\x14\x1a\x8e\x29\x35\x0d\x87\x2a\x70\x00\x00\x35\ \xae\x49\x44\x41\x54\x78\x5e\x84\x5d\x53\xb7\x2c\xcd\xb2\xed\xb7\ \x72\xb5\x17\xb6\x6d\x1b\x1f\x6d\xdb\xb6\x8d\x63\xdb\xb6\x6d\xeb\ \xda\xb6\x6d\xfd\x8a\x9b\x19\xe8\x59\x51\x51\x3d\x4e\x76\x65\x56\ \xed\x97\x3d\xe6\x08\x67\x44\x64\xae\x5e\x9e\x0f\x72\x8c\x2c\x4f\ \xc3\x1a\x66\x1a\x7f\xfc\xe8\x28\x75\x2d\x69\xca\xab\x2c\x4a\x19\ \x75\x9c\x35\x3d\x65\x12\x9e\x32\xb1\xa3\x1f\x9e\xb8\x0c\x87\xf2\ \xc4\xa5\x1f\x7e\x3a\x56\xc6\x67\x65\x7c\xe2\x57\x5c\x57\xf2\x58\ \x11\x66\x2f\x1f\x0c\xf2\x01\xd0\x66\x39\x43\x15\xb0\xad\x51\xd2\ \x14\xa8\x61\xa5\x59\x16\x0a\x97\x41\xd6\x34\x02\xb2\x32\xfe\x04\ \x70\xcd\x50\xfb\x01\x5d\x12\x27\x23\x0d\x33\xe0\xec\xf7\x81\x34\ \x0c\x8b\x52\xb1\xf6\x22\xc8\x30\x05\x69\x46\x33\x8d\x38\xe9\xc9\ \x2c\x45\x89\x8c\xf4\x13\x7a\x82\xa2\xb5\x3c\x34\x93\x00\x34\xac\ \x34\x4a\x4b\xd6\x00\x92\xa0\x0a\x41\xe9\xb1\x34\x25\xa2\x3a\xac\ 
\x3d\xa0\x04\x3d\x99\x98\x59\x78\xe2\xa2\x58\x2d\x60\xb0\xde\x30\ \x9f\xa9\x19\x49\x9b\xcc\xa8\x39\xa3\x28\x91\x53\x99\x1e\x66\xe0\ \x3b\x51\xb3\xcf\x28\x01\x95\xb8\x6f\x70\x46\xd6\xc7\x5f\x03\x69\ \x84\x45\x88\x09\x2d\x81\xcd\x0d\x44\x85\x09\x82\x02\x28\x43\x8d\ \x13\x1c\xa7\x85\xc9\x29\xbc\x0f\x0c\x27\xa2\x32\x5a\x82\x89\x21\ \x10\x15\x25\x28\xda\xa5\x4d\x59\x26\x8a\x94\x1b\x42\x42\x99\xf8\ \xc5\x98\x0b\x03\x53\xa5\x94\xe8\x19\x95\x0a\x5c\xe7\x49\x48\x03\ \x38\x61\xbf\xd1\x25\xd6\x23\x20\x35\x14\x8d\x9c\x07\xd4\x8c\x39\ \x4f\x22\xaa\x04\x4d\x31\x3c\x69\x0b\xa1\x6b\x0d\xc5\xa7\x51\x92\ \x74\xd6\x80\x09\x6d\x1a\x8a\x9c\x86\xc1\x7a\xaf\xac\x5f\xa9\x6f\ \x91\x55\xa8\x12\xc9\xa8\xa2\x04\xeb\x55\x4a\x9d\xe6\xab\x61\x4a\ \xe5\x4d\xb3\x28\x9a\xe6\x09\x14\x55\xa4\xa5\xa5\x68\x44\x09\xf3\ \x04\x5d\x02\x58\x50\x73\x08\xa4\xbd\x81\x72\x7e\x00\xd6\x8b\x0d\ \x0d\xdf\x2c\x02\x96\xff\x05\x01\x55\xde\x2b\xd0\x24\x4e\x5a\x09\ \x24\xc1\x34\x4a\xcf\x04\x15\x19\x4d\x48\x8d\xc4\x88\xb6\xb8\xcf\ \x60\xa1\xf6\x50\x26\x36\xa5\xe0\x7c\x2a\x4f\x06\xbe\x03\x27\xa3\ \xb4\x56\x14\x43\x84\x94\x91\xd2\x1b\x28\x85\xa2\x42\xcc\x7e\x18\ \xc0\x6a\xa1\x0e\x41\xce\x79\xca\xd4\x74\x49\xf2\x4a\xed\x00\xdb\ \xf9\xdb\x23\x65\xb5\x67\xe3\x54\x1b\xa2\xf6\x1b\xbc\x6f\xe0\xa4\ \x09\x0b\x4a\x53\xe1\x82\xf5\xa4\x49\x46\x99\x94\xa8\xaa\xf5\x59\ \x1b\x26\xa0\x1a\x82\x12\x32\x75\xa2\x35\x19\xfd\xba\x05\x53\x3c\ \x13\x53\x94\xb0\x0e\x3d\x4d\xa1\xf3\xde\x8e\x02\x66\x26\x1e\x29\ \x63\x55\x92\x91\x33\xd7\xc1\x76\x5e\x5a\x86\x34\x01\x41\xc5\x33\ \xb1\x0c\x60\xb0\x79\x02\x39\x69\xf6\x0d\xd3\xe1\x42\xe9\x31\x2e\ \x54\xb1\x66\xca\xfd\x68\x48\x19\x25\x78\xef\x6d\x13\x7f\x59\xa8\ \x49\xc9\x5a\x6f\xe1\xc1\xdc\xb3\x0b\x25\xa0\x82\xd5\x8e\xee\x98\ \x84\x29\xda\x22\xa9\x38\xfa\x8c\x45\x35\xf3\x40\x4b\xc3\x7a\xb8\ \x50\x98\x7d\x55\x26\xa2\x71\x0b\xab\x78\x51\xa5\x28\x3f\x0d\xae\ \xcb\x2b\x4c\xef\xeb\x19\x69\x98\x50\x7b\xfc\x2c\x44\x88\xa8\x13\ \x52\xe5\xbe\xb8\x4f\x60\x74\xae\x29\xda\x7b\xa6\xe7\xff\xec\x7a\ 
\x92\x11\x12\x56\x8b\x72\xd8\x2d\xa3\x83\xa6\x94\x66\x71\xcc\xf4\ \x29\x53\x65\x2a\xc2\x13\x66\x6a\xd4\x3d\x2d\x8c\x2a\x89\x01\x85\ \x32\x19\x17\x4a\x3f\x26\xe8\xee\x8f\xff\xda\x93\xe7\xf6\x7a\xdf\ \xce\x2d\xf3\x05\x2a\xd8\x0f\xd6\x2b\xe3\xc1\xfb\x8c\x46\xca\x30\ \xdb\xa3\x80\x6f\x22\xb0\x88\x47\x31\x94\xaa\x50\x7b\x8b\xb5\xbc\ \xf8\xcf\x7a\x3a\xee\x4d\x34\xd4\x53\x8b\x84\xe8\xd9\xc6\xa3\x4c\ \x4d\x80\xcd\x04\x6a\x9e\x6a\x00\x9d\xcd\x40\x16\xec\x95\x6c\x08\ \x65\x21\x26\x09\xcf\x40\x49\x82\x89\xc8\x49\xd6\x61\x79\x7a\xaf\ \x31\xce\xaa\x08\xa6\xa3\x28\x2d\x9e\xf5\xc6\xdd\xf3\x68\x00\x4c\ \xc1\x7b\xb0\x9e\x41\x42\x99\x12\x4c\x62\x3f\x13\xb4\x76\xf4\xfc\ \xa5\x9e\x19\xfb\x26\x00\x8a\x40\x94\x40\x3a\x17\x2a\x24\x85\x88\ \x0a\x5d\x49\x40\xad\xbd\x2f\x14\x29\x82\x92\x96\x2e\xa9\xb0\x12\ \xcb\x09\xa9\x25\xe9\xbb\x7a\x76\x5c\x35\x31\xa4\xd4\x37\x7d\xb6\ \x28\xda\x26\x28\x64\x34\xe2\xcc\x32\x13\x8f\x14\xd8\x2f\x15\x85\ \x0b\x9d\x19\xa4\x2e\x24\xa5\x71\x7d\xfd\x93\xa0\xaa\xe0\xbb\xe5\ \xe6\x1d\xf7\x5d\xf7\x44\xf8\x58\x3f\x85\x0f\x05\x48\xeb\xee\xd5\ \xe0\x0f\x80\x15\x5a\xaf\x76\x14\x34\x05\xeb\xc3\xa0\x57\x41\x70\ \x0d\xd8\x5a\x41\xd2\x9b\xc3\xd2\x2b\x7b\xbd\xbf\x2e\x25\x1e\xbd\ \x95\x60\xae\xdd\x3f\x19\xd4\xc1\xa6\x3e\xbc\xef\xac\xe5\x91\x41\ \x09\x62\x1a\x4f\x0f\xd6\x03\x65\x84\x19\x27\xe1\x44\xdc\x5c\x28\ \x52\x0e\xf4\x09\xab\xc0\x2c\x14\x25\xbb\x25\x25\x69\x52\x9f\x76\ \xce\xab\xbf\xd0\x5b\x47\x0c\x1e\x0b\xf7\x77\x11\xce\x49\x4e\x66\ \x34\xcc\x3c\x1f\x26\x80\x09\xa2\x7a\xcd\x37\x14\x65\xc6\xe7\x02\ \x94\x21\xd2\x9a\x41\x44\x05\xa8\x62\x65\xa0\x30\x4b\x60\xfe\xad\ \xff\xd0\xc3\xf8\x5c\xc9\x40\x6f\x27\x05\x7a\xd3\x95\x4b\x9f\xb8\ \xfd\x8a\xf7\x6c\xde\x7e\xfb\x39\xef\x4a\xa0\x4b\x82\x71\x6e\x50\ \x62\xed\xa8\x1a\xd2\x48\x4d\x11\xd3\xa6\x26\x15\xc2\x7a\x46\xe9\ \xa3\x3c\x41\x5a\xef\x8e\x30\x31\x0e\x8c\x12\x1a\x6a\x9a\x30\xbe\ \xb3\xdb\xda\x26\x15\x00\xa0\x5d\xa1\x14\xb5\x66\x54\x79\x4f\x2e\ \x29\x07\x39\x0b\x36\xa3\x08\xf2\x18\xa7\xb5\xa1\xb2\xd4\xf5\xf5\ 
\xbf\x6f\xe1\xec\x1d\x4b\x80\xdf\x73\xe3\x82\xd4\x9b\x51\x81\x68\ \xa3\x27\x63\x47\x33\x86\x29\xac\x6f\x3a\xd0\xac\xc8\xd8\x87\x96\ \x4d\x67\x5f\x20\x26\x49\xb0\xd4\xc9\x33\x0c\x62\xd3\x4d\x67\x1d\ \x78\x22\x7e\x5c\xb3\x38\x64\xde\xdf\xea\x91\x5e\x57\x8b\xda\x43\ \xe3\xbb\xf7\xf5\xb4\x17\xb1\x04\x4d\x89\xf5\x6a\xf4\xe3\x72\xfe\ \xd2\xdd\xab\xc2\x1b\x91\xbd\x4d\x40\x14\x8a\x96\x35\xea\x14\x42\ \xb0\x61\xc7\xf2\xa8\xaa\xd2\x57\xdc\xd3\x5b\xff\xd4\x38\x91\xb1\ \xfb\x63\x9b\x18\xdf\xd7\xbf\xb9\x61\x03\x7d\xdc\x30\x36\xd4\xd4\ \xe7\xe7\xed\xeb\xc3\x24\x9a\x6a\xd8\x1c\xbe\x5e\xb7\x74\x37\xdb\ \x66\x44\x51\xd8\x91\x14\x69\x7d\xf6\xad\x17\x7e\xe4\x94\x3b\xd7\ \xd4\x1a\x3a\x27\xf5\x77\x49\xb9\xa7\xa3\xa2\xa4\x2c\xce\xd6\xf1\ \x28\x41\xa6\x24\xad\x3e\xf9\x95\xf1\x78\x3c\x1a\xe5\x83\x94\x90\ \x4e\x4b\xe8\x3c\x76\x4c\x6e\x5f\xaf\x30\xa1\xf7\x6a\x47\x19\xea\ \xd2\xf3\xca\xa3\xc7\x4d\x9a\x4c\x74\xea\xfa\x5f\x57\x16\xbe\x20\ \x24\x0d\x14\x25\x9b\x74\x63\xf5\xd2\x69\x6f\xfd\xd6\x23\x4b\xa7\ \x2f\x5d\x79\x1a\x12\x4e\xc3\xb4\x0e\xb2\xca\x23\xd9\x1d\x71\x9e\ \x41\x40\x87\x76\xdf\xe4\x2c\xa9\x33\x4f\x44\x4f\x44\x4f\xf9\x87\ \x21\x4c\x37\x0f\x40\x4e\x11\xd5\xbf\x68\x0a\xdb\x6b\x67\xfc\xff\ \x54\x5b\x0e\x7f\x14\xa0\xf2\x78\x7d\xef\xd3\x4f\x6a\x7c\x7f\x2e\ \xbb\xfb\x69\xdd\x54\x79\x65\x3c\x4d\xc5\x29\xac\x77\x9e\x49\x47\ \x10\xcf\x03\x00\x72\xe6\x98\x14\xbf\x90\xa1\x30\x31\x76\xa4\xbc\ \xaf\xeb\x52\x99\x1f\xb3\xbb\xff\xe5\xf0\x79\xf0\xc7\xbb\x93\x7a\ \xf7\x8a\x25\x21\xc0\x9e\x45\xb7\xb1\x77\x76\x74\x5b\x60\xbd\x17\ \x51\x1e\x9a\xcd\x23\x08\xc7\xee\x09\xcb\x1d\x13\x42\xa9\x50\xb3\ \x17\x01\xe3\x8c\x33\xc8\xd2\xdc\xaf\x26\xff\x79\x8f\xf4\x8b\xa4\ \xf6\x2b\x9c\xd2\xff\xa5\x72\x7e\x61\xb8\x10\x96\xe6\x86\x19\x03\ \x5b\x11\xe5\xbb\xb8\x51\x80\xcd\x2e\xfa\xe9\x8e\x3d\x93\x31\x11\ \xf6\xa9\x8c\x02\x13\x1d\x82\x60\xd3\x96\xfd\xcb\xe3\xfb\xbf\x1d\ \x3f\x1f\x48\xc5\xe0\x27\x7f\x04\x24\x3f\xfc\x21\xbd\x5e\x93\x26\ \x09\x51\xd4\x8e\xe3\xd3\x1c\x04\x5d\x58\x20\x80\x08\xf3\x9c\xd6\ 
\x0f\x8c\x6f\x02\xce\x2c\x95\x38\xff\x4e\xfa\x4f\x2b\xa1\x28\xbd\ \xca\x55\x11\xc0\x4d\x27\xa6\xf7\xe7\x45\x50\xaa\x08\x60\x4b\x55\ \x27\xb2\xbf\x2b\xdf\x7f\xdd\x96\xe3\x3b\xee\x7b\xe8\xe4\x64\x72\ \xff\x0b\x1c\x79\xd2\xe6\xce\xc2\x7c\xdb\x63\xcb\x85\x31\xf7\x0b\ \x8c\xb3\x53\xeb\x01\x94\x68\x09\xa0\xc4\x7a\xc6\x99\x6e\x64\x21\ \x0d\x08\x31\xca\xa5\xd7\x1e\xd8\xbf\x1c\xc0\xff\xfd\x23\xdf\xdb\ \xbe\x8e\x50\x8f\x69\xb7\x9c\xd4\x75\x9c\x65\x51\x72\x94\xff\x1c\ \x61\x9a\x92\x6f\xfa\x55\xa0\x3c\xf5\x07\x27\x17\x2b\xda\x88\xd0\ \xf4\x32\x0a\xac\xc6\x85\x7a\x5d\xca\x1a\xa9\xf1\xcd\x64\x49\x49\ \x87\x18\x65\x78\xd2\xbc\xaa\x8a\xf2\x11\xb2\x9a\x6c\xbb\xa7\xb4\ \x07\x09\xcb\x69\xbb\x96\x02\x44\x42\x9c\xbc\xf3\xfb\x11\xd6\x13\ \x93\x80\x93\x2c\xd2\xa6\x03\x07\x5e\x73\xe6\x25\x41\xa0\x46\xa9\ \xc9\x94\x04\xf9\x5c\x30\x7b\x65\x1f\x3d\x59\x75\x02\xca\x38\x65\ \xbc\x9d\x84\x34\x67\x7d\x8f\x93\x23\xe7\x4b\x2f\x7c\x99\xa2\x54\ \x21\xad\xa3\xaf\xff\xdf\x5e\xef\x1d\x97\x53\x78\x7f\xed\xd7\x44\ \xb9\xc7\xa2\xf7\x47\xf7\x4f\xa2\x65\x2b\xc8\xaa\x82\xe7\xf2\x32\ \x32\xea\x5d\x28\xd0\x32\xeb\x81\x95\x06\x84\x14\x14\x25\x5b\x7a\ \xa8\x87\x21\x42\x4a\x16\xea\xbf\x08\xdb\xba\xed\x9b\x9f\x7f\x95\ \x10\xfb\xb3\xd3\x82\x92\x64\x7d\xb5\xfc\x92\xc7\x6f\xa6\xc6\x23\ \x45\x11\x3e\x03\xaf\xa7\x28\x74\x3f\x7b\x78\xf3\xc7\x08\xa9\xb2\ \x7e\xab\x58\x52\x72\xf1\x45\x41\xef\xf2\x39\x8d\x80\x6e\x5a\x7d\ \xe3\x89\x97\x91\x90\xf2\x56\xd9\x19\xd2\x5f\x59\x1e\x04\x94\x0c\ \x95\x3c\x53\xb3\xd4\x34\x33\x4f\xe1\x51\x19\x0d\xb3\x9d\xcd\x73\ \xe5\x86\x8d\xe7\x9d\xf7\xba\x57\xc4\xb0\x26\xe0\x45\xb2\xec\xbb\ \x6c\x49\x69\x6f\xaf\x14\x65\x10\x6b\xf7\x4c\x47\x55\x9e\xde\x4d\ \x42\xca\x99\xfc\xb7\x5a\x98\xff\x7d\xef\x34\xd7\xb4\x23\xe3\xec\ \x2b\x49\x59\x95\x84\xf5\x90\x51\x10\x14\xac\xb7\x35\xa6\x67\x37\ \x63\x87\xb8\x98\x21\xf7\x94\xad\x22\x77\x9f\xb1\xf3\xe4\x5d\x1e\ \xf9\x96\x3b\x1e\x9a\x0e\xca\x30\x2e\xfa\x1a\x51\x6e\xd5\x47\x4e\ \x8b\x44\x6d\x4a\xee\xfa\xe3\xcb\xe3\x72\x96\x72\x8e\xf5\x05\x7a\ 
\x09\xce\x26\x45\x45\x46\x5d\x8a\x0c\xac\x17\xac\xaf\xf8\x8e\x21\ \xc4\x9e\x1c\x42\x5a\x9c\xc7\x42\xca\x51\x49\xa0\xa8\x10\xf4\x86\ \x93\x8b\xe4\xe2\xcf\x3f\x75\xb6\x03\xfe\x50\x40\xfa\xbd\x68\x90\ \x2e\xd8\xb4\x69\xfd\x59\x47\xf6\x4f\xab\x82\x04\x13\x65\xc6\x21\ \x51\x94\xa0\xda\x4d\xa8\xca\xa8\x53\x7b\xeb\x42\x7f\xb3\x67\xc7\ \x8d\xa3\x14\xe1\x5e\xc9\x91\x3a\xe1\x64\xad\x27\x4d\x5a\x3d\xa5\ \x48\xea\x91\x1e\xc6\x89\x3c\x88\xe9\xd2\xea\x87\xa6\x55\x35\x0a\ \x32\x51\x24\x32\xa0\x45\xfa\x21\x39\x7c\x50\x54\xed\xfd\x42\x7b\ \x27\xba\x4d\x5c\x28\xf1\x5f\x70\x62\x3c\xb8\xd8\xc8\x3d\x64\x9b\ \x23\x9f\xd9\xb2\x84\xa1\xaa\xb4\x61\x5c\x96\x6b\x4e\x79\xa6\x87\ \x71\xc6\xe3\x15\x29\x54\x5a\xa0\x20\x46\x04\xc5\x40\x85\x11\x30\ \x69\x51\x5f\xbf\xd0\x99\x71\x96\x71\x54\xd3\x41\x47\x3e\xf3\x99\ \xeb\xde\x46\x5e\x6f\x39\xcd\x80\x94\x85\x94\x08\x4a\x23\x5d\xc3\ \xae\x7e\x73\x70\x4a\x18\x3f\xdb\xfb\xd0\xb4\xd0\xf2\x8d\x24\xa0\ \x0c\x46\xc9\x3b\x6a\x51\xcc\x72\xde\xc8\xa8\x37\xf8\x71\xe6\xd9\ \x41\x8a\x30\x6e\x9e\x2e\x8e\x06\x79\xba\xf5\xdb\x91\x7e\x53\x78\ \xa6\x22\x3b\x9b\x22\xb9\x8a\x71\x92\x8b\x8a\xd0\x31\x36\x1d\xe9\ \xdd\x74\xc9\x03\xd3\x71\x55\xa2\x20\x46\xe4\xac\x81\x92\x9f\x28\ \xa3\x02\x32\x7c\x01\x26\x3d\x5d\xbb\xa6\x6d\xdb\xb6\x21\x01\xf1\ \x55\x52\xe1\xa7\xa6\x03\xb1\xf5\xc1\xef\x7d\x6e\xda\xd8\xd3\x73\ \xc0\xb7\x3a\x5a\x52\x0d\xf5\x8c\xb9\xdf\xfb\xf4\xb8\x18\x57\x99\ \xd6\x42\xc9\xe1\xd7\x60\x3d\x8a\x0d\xc0\x0b\x7d\xc2\xb0\xe9\x07\ \x0c\xe4\xf0\xef\x26\x3d\x9f\xcc\x7c\xd3\xd6\x8f\x7e\x65\xc2\x45\ \x11\x45\xfa\xcf\x51\x1a\xc6\x61\x9f\xf7\xe8\xa9\xbd\x55\x59\xdc\ \xd4\x1d\x7a\xb9\xc2\x3c\x16\x54\xc7\x54\xee\x08\x64\x0d\xd6\x0b\ \x56\xd5\x76\x7d\x68\xf2\x40\x1c\xca\xd3\x85\x79\x62\x99\x28\x77\ \xb5\x3c\x68\x79\xd0\xb0\x08\x54\xe1\xf4\x6d\x9a\x52\xc8\x29\x30\ \x39\xef\xc0\x86\x6f\xde\xf2\x8e\xe3\x8f\x2f\x0e\x0a\x94\xc1\x6b\ \xa1\xa8\xe2\x04\x49\xd5\x81\x0a\x3a\x55\xa7\xbe\xef\x2b\xb0\x96\ \x14\x75\xa6\x01\xa9\xfc\x2b\x27\xad\x40\x4f\x02\xfd\xad\x11\xe7\ 
\xf9\xbb\x4c\x06\x76\x12\x60\xd2\x36\xbf\x1a\x55\x83\x54\x6b\xa1\ \xa6\x66\x9f\xc8\xaf\x49\x51\x29\xd9\xa3\xca\x0c\xc6\x23\xa5\x03\ \xb4\x7e\xbb\x4c\x40\xef\x1b\xe5\xb9\x21\x69\xf8\x2c\x22\x41\x0f\ \xf1\x8e\x19\xe3\x86\x07\x26\x05\x67\x1f\x52\x64\x73\xa4\x76\x23\ \x93\xb3\xa3\x80\xc9\x5e\x9e\x50\x8a\x32\xc1\x92\xa2\xd0\xe4\xd8\ \x8e\x4c\x09\x47\xce\x6f\x22\x9d\x1e\x35\x70\xe6\xf9\x65\xdb\x57\ \x5d\x1a\x53\x24\x87\xd8\xad\x62\xdc\x74\xe3\xf2\x24\x2f\x09\xa9\ \x1d\x35\xff\x98\xeb\xed\xec\x38\x94\x29\x60\x66\x4b\x0a\xbd\xb7\ \xbc\xf7\xfa\x34\xcb\xe1\xff\x02\x09\xde\x38\xe2\x53\xf9\xbc\x2c\ \xe6\x33\x76\x86\xcf\xed\x4d\x90\xb7\x6c\x79\x68\x3a\x1e\x65\x9c\ \x22\x61\xa4\x29\x83\x44\xfb\x03\x98\x0f\xc5\x27\x4d\x52\xd6\x53\ \xf4\x34\xa7\x0e\xae\x30\x69\xfa\x12\xe3\x20\x82\x38\xb8\xd8\x60\ \x3c\x1b\x9f\xa7\xf3\x22\xfb\xd1\x8c\x92\x01\xe4\x64\x94\xab\x25\ \x2d\xc9\xe5\x5b\x7a\x32\x41\xc5\xe2\x03\x25\xa9\x11\x94\xa9\xaf\ \xda\x9e\x98\x7a\x58\x78\x80\xd5\x87\x79\x5c\x07\x7f\x81\xd3\x83\ \xba\x19\xf9\x5b\x46\xb7\x69\x9a\xa7\xe9\x65\x31\x16\x3a\x76\xe4\ \x6f\x26\x8b\xa3\x9c\x73\x10\x0a\xd3\xd7\xc0\x13\xc9\x3a\x93\x75\ \x72\xc6\x89\x78\xcf\x5c\x77\xca\x24\x30\xc5\x46\x79\x3b\xaa\xa1\ \xe8\x46\xde\x16\xfe\xdd\xd6\x2c\xbb\xec\xab\x6f\x3f\x28\x5b\xdd\ \x13\xd3\x28\x04\x97\x7e\xf4\xe4\x64\x1c\x41\xc6\x78\x19\x4e\x94\ \xb8\x6f\x68\x8a\x7c\xb3\x58\x28\x57\x66\x12\x3b\xaf\x20\x55\x99\ \x7c\x7e\xb4\xbb\x41\x2b\x8e\x55\xaa\xd0\xcd\xf8\x69\x9a\x77\x55\ \x45\xc0\x7b\xc9\x94\xa2\xc6\x44\x5f\xa4\x48\x4a\xd1\x12\x7e\x09\ \x7e\x34\x82\x12\x21\xb0\xb5\x50\x48\x29\x70\xc2\x33\xd1\xb3\x55\ \xdc\x0c\xc6\xbe\xe0\x6e\x2c\x4a\x49\x8f\x96\x02\x92\x16\x85\xa9\ \x6f\x26\x67\x84\x0c\x9c\x02\x55\xab\xcb\xfa\xb3\xd4\x04\x5c\x6b\ \xf2\x7d\xf9\xe6\x63\x36\x15\x7c\x78\x3a\x96\xbd\xbd\x1d\xa5\x08\ \xa9\xc1\x89\xa2\x98\x56\x97\x5b\xb5\x70\x54\x42\x15\x25\x87\x51\ \xc6\x8c\x1a\x11\x05\x50\x57\x15\xf9\xec\x55\x8a\xf2\x8e\x9f\xec\ \x99\x8e\x07\x06\x64\x64\x32\xd9\x77\x12\x55\x61\xbe\x11\x50\xc6\ 
\x19\x16\xfd\x91\xe6\x0b\xeb\x51\x06\x67\x9a\xd2\xe2\xcd\x13\x83\ \xec\xd4\x7a\xd3\x57\xf0\x1b\x97\x5c\x73\xcd\xde\x9f\xec\x08\xd9\ \xa4\x4a\x7d\x7d\x06\xa4\xba\x0a\x4a\xb1\xa3\xbe\x6e\xc7\xe0\x4a\ \x9a\x2d\xbd\x47\x13\x21\x33\xde\x34\x11\x82\xac\x1d\x76\x14\xf9\ \x51\x0e\x9c\x06\xd5\xce\xaa\x6a\xc6\x24\x88\xa0\xd0\xeb\x48\x20\ \x1d\x45\x59\x99\x6a\x53\x10\xb7\x6d\x1a\xca\x7a\x6f\x9e\xe0\x98\ \x1c\xdb\x91\x1f\x35\x05\x46\x7a\x78\x47\x6f\x4a\x62\xbe\x89\x2c\ \x4c\x6f\xee\x23\x3d\xc1\x77\x5a\x7c\xdc\xcc\x93\x17\x87\x13\x24\ \x9d\x93\x76\x74\x39\xe7\x0c\x45\x5b\xa8\x91\xab\xd7\x03\xac\x36\ \xe3\x99\x1f\xc0\x02\x2d\x07\x25\x02\xd2\x82\x85\x1d\x85\xf2\xaf\ \xe0\x7a\xbd\x2b\x34\x69\x2a\x8f\xcb\x37\x60\x3c\x06\x68\x4a\xb0\ \x0d\xf3\x99\xf7\xec\x41\xc1\x7a\xd8\x27\x8d\x9e\x34\xd4\x03\x4e\ \xdb\xab\xd1\x59\x15\x19\x18\x9c\x0c\x35\x7c\x13\x46\xcf\xf8\x30\ \xc1\xfb\xc6\xa8\xf5\x45\xe1\xc8\x1c\xd6\xf7\x35\x7a\x02\x40\xe3\ \x40\x81\xd3\x67\x9c\x6d\x0d\x5c\xa1\x6a\x37\xae\xd7\x79\x74\xe5\ \xa6\x62\x49\xd1\xec\xc6\x32\x2a\x54\x05\x41\x4b\xeb\x42\x79\x12\ \x71\x15\x30\x94\xde\xb5\x11\x8a\x32\x71\x44\x02\xb4\xb3\xf2\xa2\ \xd6\xec\xad\x29\xd5\x89\x56\x22\x29\xdc\x83\xaa\xc9\xcc\xd9\x83\ \xa8\x44\xc5\x30\x89\xfd\x8a\x14\x86\xc9\x08\xa9\x4e\x57\x5d\x36\ \x20\x69\x65\xd6\x13\x50\xb5\xa4\xa0\x66\x9c\x8e\xf5\xd0\xfa\xe8\ \xf0\xd1\x44\xe8\x34\x09\xec\xc7\x9e\x29\x31\x30\x31\x7d\x02\xc2\ \xb2\x5f\x0b\x62\xda\xe7\xec\x6d\x13\xd4\xdf\x1b\xa8\x08\x13\x7d\ \x4f\x3c\x1d\xd4\xe1\xec\x65\x38\x6f\x18\xef\xf2\xa3\xcc\x78\xf8\ \xa6\x94\x58\xaf\xed\x59\xb0\x50\xa8\x85\x81\xa2\xbc\x00\x28\xfc\ \x7c\x6d\x8c\x93\xb3\xf8\x0a\x31\xf1\x3a\xef\xeb\xf5\x68\x79\x33\ \xea\xc4\x2f\x6d\xcc\x14\x94\x76\x28\x5c\x87\xb5\xe6\x45\x03\x13\ \x4f\xcd\x3e\xda\x1d\x81\xd2\x6f\x44\xf4\xab\xb3\xd8\xe0\x1b\xde\ \xe2\xc8\x2d\x4e\x70\x5d\x80\x5a\x9c\xe8\x29\x89\x8f\xa5\xe7\xb0\ \x0f\xd6\x4b\x82\x84\x16\x97\x86\x50\x80\x06\x2b\x32\xce\x68\x22\ \x94\x27\x95\xaa\xad\x89\x4a\x4a\x7e\x19\xd6\x17\x66\xbb\x2c\xbd\ 
\x4f\x08\x49\xbc\x17\xa5\x1f\x7d\x1a\xeb\x84\xbe\x17\xb7\xaf\x67\ \xcf\xe4\xdb\xdb\x09\x67\x06\x19\x05\x2d\xad\xa4\x3a\xbe\x87\x07\ \xca\x94\x04\xe4\x5d\xee\x93\x1f\x24\xc5\x13\x63\xee\x8d\x15\xb5\ \xad\x1a\xae\xdb\x91\xbb\xc5\x33\x18\x7c\xab\xf7\x82\x4f\xdf\xfc\ \x82\xd6\xab\x79\xf2\x32\x0a\xb4\xba\x57\xa6\x6f\x2f\xa8\x70\xf9\ \x36\x1e\xb5\x03\xfd\xcd\x99\x97\x51\x90\x35\x55\x74\xa9\xd1\xa3\ \x1a\xa6\x49\x56\x6b\x99\xb0\x9f\x77\x18\xfd\xbe\xde\xfb\xfa\x2e\ \xb4\x71\x69\xf9\x50\xeb\xe8\x49\x54\x5d\x96\xc4\x91\xd4\xb5\x0e\ \xc3\x86\xd2\x62\x15\xc9\x1f\x6c\x98\xaf\x4c\xaa\xf8\xa0\xa8\x0b\ \x4a\x8a\xb8\xfa\x6e\x4c\x75\xa1\x82\x12\x40\xed\x90\xe8\x09\x35\ \x26\x40\xed\x6e\x78\x5b\xd1\xdd\x3f\x4a\x20\x55\xf3\xa1\xf5\x08\ \x4c\x0a\xae\xde\xd0\xce\xd9\x84\xf8\x89\xb2\x1e\xb3\x9d\x77\x62\ \x96\x73\x61\x84\xd4\x3e\x71\xbe\xc9\x24\x9b\x87\x36\x35\xee\xb8\ \xce\xab\xb7\xf6\x08\x99\x71\x5a\x20\xc5\xae\x5e\x95\x49\xe3\x51\ \xc8\xa9\xed\xc3\xc7\x61\x26\x43\x4d\x79\xe4\xe5\x73\x4f\xf2\x44\ \x68\x99\x60\xcd\x54\xa7\x28\x84\x76\x68\x5b\x6d\xce\x18\x6a\x43\ \x6b\x5a\x15\x6c\x9c\x84\x11\x38\x01\xd6\x0c\xa3\x46\x78\x7b\x8a\ \xa6\xba\x46\xe9\x54\x19\x05\x54\xf6\x45\xea\x3b\xfd\x56\x84\x57\ \x06\x5b\xf3\x0b\xc3\x22\xf5\x81\x33\x76\xa1\xde\xdb\xfb\xde\x3c\ \x36\x4c\xcd\x63\x6c\x99\xe1\x3a\x3e\xbd\xa7\x07\x4a\xa4\x49\x8c\ \x98\x82\x9a\x32\x05\x2d\x6c\x3e\x0c\x3e\x2d\x7e\x5f\xef\x3c\x13\ \xc3\xf5\x5b\x50\x21\xa8\x85\xe9\x73\x3a\x25\xa4\xd3\xf8\x7b\x17\ \xe5\x81\xb0\x4e\xef\xbd\x79\x32\x7d\x4f\x30\xa2\x19\x93\x15\xbc\ \x77\xa7\x9a\x64\x5a\xf9\x04\xf3\x91\xd7\x61\x90\x16\x2e\x60\x26\ \xde\x27\xe9\xea\xca\x37\xc6\x8e\x52\xef\xa0\xc0\xc5\xe0\x04\x59\ \x31\xe3\xba\x4c\x4b\x57\x44\xf7\x6e\x6f\x87\xed\x08\x40\x5a\xdb\ \x04\xc5\x9f\xd7\x01\xe1\x07\xe1\xe4\x98\xc4\xb0\xbf\x40\xa0\xd7\ \x86\x09\xe1\xa4\xc5\x9f\x6d\xe8\xc3\xd3\xeb\x6a\xa0\x76\x07\x25\ \xa0\x68\x65\xa5\x54\x39\x8f\xc0\x99\x61\x02\x69\x89\xc0\xd9\x31\ \x5e\xd9\x5e\x93\x7d\x42\x58\x0a\xa2\xca\xe2\x72\x25\x40\xab\x7c\ 
\x6f\x6b\x7d\x6b\x08\x4a\x9c\xc0\x70\x91\x93\x22\x2d\x60\x9c\x7c\ \xea\xb1\x4c\x40\x52\x83\x12\x91\x13\x62\xd1\x44\x20\xfa\x5a\x18\ \xa0\xba\xb6\x4c\x06\xca\x10\xad\x6f\x2a\x79\x16\x66\x17\x2a\xae\ \x09\x3e\x54\xa5\x14\xd9\x92\x76\x80\x07\x39\xb5\x81\xb3\x8f\x97\ \x3b\x3d\x13\xcc\x3d\xbf\x52\xdd\x88\xba\x5c\x49\x81\x93\x0d\xc6\ \xd5\x1b\x51\x85\x57\x72\x4a\x4f\xe0\xd0\xa7\xd1\xd2\xa6\xb9\x40\ \x2b\xbb\x5b\x96\xed\x27\x0c\x7e\x6e\xad\x28\x07\x25\x90\x53\xd4\ \x99\x12\x5e\x98\xe3\x3c\xbb\xe3\xe6\x59\xc1\xa1\xa5\xf8\x5e\x34\ \x7d\x7e\x94\x16\x2f\xa7\x80\x69\xad\xe8\xbc\x13\x2d\x00\x8b\x30\ \xaf\x34\xd1\x93\x2a\x13\x60\x7a\x5d\x72\xe2\x49\xb3\x57\x0d\x2c\ \xcc\x54\x81\x32\x3d\x69\x31\x32\x8a\x04\xa9\x1b\x7c\x88\x8d\xb0\ \x39\x9c\xf0\xf3\xb6\x58\xef\x8f\xaf\xce\x73\xa1\x79\x05\x21\x85\ \xb7\x87\xbd\xf7\x81\x33\xa6\x09\x9f\x6a\x9a\x20\xab\x97\x53\x8d\ \xef\xb1\x5b\x02\x5d\x11\xe1\xe1\xa0\xb5\x09\x9c\x09\xa7\x0f\x47\ \x89\x90\x58\xad\x29\xb5\xb9\xd1\xd4\xee\x43\x15\x62\x39\x67\x63\ \x87\xd0\x49\x00\xfb\x5a\x83\x37\x53\x28\xda\x02\xa7\xe1\x7f\x66\ \x37\xa2\x88\x43\xc3\x37\x12\x79\xc0\x89\x5d\xa8\xb8\x7a\x53\x6c\ \xe8\x13\xf3\x01\x18\x3a\x6f\x63\x92\xee\x3e\xfc\xca\xf0\x3d\x15\ \x8c\x10\x53\x9f\x23\x73\x1b\x7b\x79\xd5\xb0\xf8\x9a\xd2\x49\xb0\ \xf6\x85\xf9\xb6\x5e\x6f\x30\x9a\xca\x9d\xa7\xa8\x69\x76\x84\x2a\ \xe9\x23\x0b\x52\xb9\xcd\x9f\x19\x30\xa0\x51\xa9\xcc\xe6\x1e\x15\ \x31\x5f\xaf\x4f\x3a\x14\xaa\x33\xf7\x34\xb0\x28\xf1\x9b\x01\x06\ \x54\xd5\xfc\x19\x4a\x9f\x7e\x12\x5f\xef\x6c\xa8\x60\xb5\xf5\x7a\ \x07\x53\xa0\x82\x9c\xd0\x7a\x2b\xa2\x78\x29\x35\x37\x86\xfa\xf8\ \xaa\x5c\x83\x7b\x73\x30\xd4\x2a\xbd\xf8\x78\xe3\x9c\x4a\x13\x3b\ \x71\xab\x86\xad\xd7\xeb\xb2\xd2\xe2\x0c\x6f\x77\xd2\x16\xa3\x83\ \xf7\xe9\xc6\x2f\xd3\xc1\x13\xb0\x9f\x51\xf2\x2c\xe0\x99\x8c\x88\ \xaa\x69\xd2\xa9\x58\x35\xed\x88\x82\x18\xc2\x67\xa8\x92\x77\xf9\ \x62\x9e\x2a\x30\x1f\x8c\x17\xb4\x5b\x37\x73\x39\x7c\xdc\x14\x52\ \x41\x29\xd3\x46\xa4\xc6\x3d\x41\xe5\xe7\xd5\xeb\xbd\xd6\xbb\x1a\ 
\x23\x7c\x3d\xf4\x09\x04\x55\xaa\x1e\xe5\x12\xee\x63\xa3\x66\x36\ \x0f\xfe\x93\x26\x06\xec\xa8\x2c\xf4\xc0\xd8\xbb\x7a\xbd\x7e\x43\ \x8f\x90\x18\xf7\xad\xc3\xde\x85\xc2\xd1\xaf\xa2\x8e\xe1\xe5\x09\ \xf6\xa2\x4e\x4a\x4d\x1c\x0a\x3b\x2a\x4f\x93\xf5\x7d\x5f\xaf\xc7\ \x00\x54\xb0\xde\x36\x68\xf9\x34\x09\x9c\x68\x1a\x3b\x4b\xae\x5a\ \x1e\x29\x4c\x94\xeb\x95\xb0\x96\x9a\xb0\xa3\x8a\xd6\x0c\x5f\xaf\ \x87\x4a\x81\xef\xfa\xb6\x5a\x2f\x45\x11\xbb\x15\x81\xce\xc7\x5e\ \x83\x83\x13\x77\x63\x41\x51\xc0\xe2\x5b\xb4\x33\x3b\xaa\x7a\x54\ \xfa\xf3\xf5\x88\x9f\x00\xd2\x15\x1a\x7c\xd1\xd6\x88\xa7\x2e\x02\ \x75\x2b\xf5\xee\x3c\x96\x2b\x44\x28\x94\x4d\x8f\x5a\x01\x80\x6c\ \xaa\x36\x01\x6b\xbb\x5e\xef\x93\x8e\x0c\xd5\xda\x52\x5f\x15\x01\ \xeb\x19\xe9\xef\x92\x71\xaa\x2c\xeb\x51\x5f\x2e\x9c\x8c\xf2\x5b\ \x9f\xba\xfb\x7c\x3d\xcc\x93\x13\x52\xa0\xf5\x9e\xc9\xe0\x6c\x7b\ \x7a\x3a\x3d\x31\x46\xf3\x8b\xb4\x8e\x42\x46\x7d\xcc\xac\x3f\x7d\ \xe9\x30\x81\xb3\xa2\xf5\x5e\x49\x81\x9a\xcc\x38\xf6\x4c\x6d\xa4\ \x02\x35\x7f\x77\x6c\xa3\x9f\x36\x1b\x75\x78\xf8\x44\x1e\x10\x9b\ \x94\x8e\x2b\x36\xb5\xc2\xe6\xc4\xd9\xd0\xee\x4e\x32\x5b\xb2\x85\ \x69\xd2\x68\xf4\x14\x11\x52\x82\x08\x9c\x85\xfa\x28\x0b\x18\xa7\ \xab\xe3\x8b\xbf\x91\x26\xd1\x0f\xd4\xeb\x15\xa7\x05\xeb\xec\xa8\ \xdf\x33\xf9\xf8\xf9\x28\x9f\xf3\x4b\xcd\x29\x56\xdf\x9e\x93\xb2\ \xb3\x47\x4b\x11\x74\xde\xd7\x43\x50\xaf\x57\xbd\x07\xeb\xe7\x1f\ \xb7\xf4\xca\x84\x6b\x3f\xf2\xad\x69\x46\x5d\xad\xe3\x8c\x71\x42\ \x4c\x39\x90\x2a\x3a\x7a\x9e\x12\x6c\x46\x6b\x7f\xf3\x0b\xaa\xe0\ \x1a\x45\x81\x98\x4d\x29\xed\xcc\x38\x2b\xf3\x53\x03\x97\x99\x9f\ \xfd\x5f\x6c\xb6\x9f\x10\xc2\xd4\xb0\x5e\xea\xf5\x5e\x52\xa5\xa3\ \x40\x45\xc0\x2a\xfe\x90\xa6\xb6\x64\x3a\xd6\x03\x9f\x0f\x4a\x8c\ \xb1\x87\x19\x15\xda\xd2\xc9\x8b\x07\xa7\x50\xa5\xd4\x68\xbd\xf3\ \xf5\x4c\x4a\x7e\x9b\xce\x76\xc8\x69\xbf\xa1\xee\x5c\x1d\x01\x4a\ \xb4\x3c\x7a\x3b\xea\x92\xe3\x30\xa3\xf9\xe5\x7c\x26\xf2\xf0\x22\ \x36\x76\x50\xa6\x00\x95\x24\xc0\xb4\x91\x41\x04\x5c\x46\x47\xcd\ 
\x28\x32\xb9\x3e\x6a\x06\xeb\x8d\x07\xa5\xed\x32\x50\xfa\xfb\x89\ \x8e\xca\x59\x0c\x75\xf6\x50\x78\x28\x7b\x6a\x23\x67\x95\x50\x18\ \x51\xb7\xb7\x77\xbe\xde\x27\x72\x7d\xff\x68\xd5\xd0\xfa\xd4\x15\ \x1c\xbe\xaa\x3d\xe2\x83\x34\x85\xc6\x23\x5f\x42\x20\x4d\xe0\x0c\ \x88\xde\xdd\x0f\x55\x93\x6c\xae\x84\x96\x95\x4e\x4c\xb1\x20\x1e\ \xad\x2c\xeb\x61\x47\x53\x39\xcf\xff\xaa\x09\xb4\xbe\x48\x29\x7a\ \x72\x22\x0a\xad\x47\xd5\x0e\x2e\x14\x04\x35\xe6\xb3\xef\xe2\x7b\ \x20\xf5\xc5\x06\xe7\x42\x53\x28\xd3\x2f\xdc\xbc\x97\x0e\x32\x0d\ \x44\xeb\xc9\x34\xa5\x85\xb4\x91\x5a\xde\x9b\xbd\x28\x40\x22\x24\ \x21\xa4\x90\x50\xef\xeb\x51\x17\x69\x83\x85\x8c\x0e\x5c\x86\x8c\ \x66\x96\x6d\x14\x77\xcf\x8c\xe7\x08\xaf\x19\xe5\xa5\xa6\xb1\x00\ \xed\x83\xbc\xd2\x34\x46\x94\xe5\x13\xf5\x7a\x97\x26\x01\x46\x13\ \x38\x07\xde\xd3\xec\xa8\x2e\x0b\xde\x37\x50\x53\x36\xeb\x92\x20\ \x55\x8d\xc2\x40\xec\x6c\xfc\xbc\xa3\x28\x2a\x4c\x88\xa0\xbd\x42\ \x79\xc5\xef\x59\x80\x88\x98\x91\xc7\xcb\xfe\x84\x8f\xb5\x89\xd6\ \x4b\x69\x59\xf5\x08\x68\x6b\xa3\x51\xdd\xf7\x3d\x59\x5f\x6a\x82\ \x7b\x73\x08\x83\x5e\x6e\x73\xd7\x41\x53\x64\xf3\x32\x71\xf7\x47\ \xaa\x54\x98\xcf\xda\xd4\x11\xdc\xd7\xa8\x81\x32\x4c\x9f\xcb\xe3\ \x60\xc4\x86\x79\xbe\x6c\xdb\x7d\x65\xa2\x2b\x88\x65\xd4\xa7\xa1\ \x89\xfc\x0c\x31\x29\xc1\x94\x5a\x53\xe7\x26\x34\x61\x8a\x92\xeb\ \x94\xc9\x87\x06\x90\x17\x23\xa0\x8a\x54\x13\x26\x18\xf3\x4f\x8b\ \xf4\xda\x7d\x64\xa9\x1c\x0a\xd6\xd6\xac\x38\x1f\x8d\x31\x29\x1b\ \x28\x34\xe7\xf1\xbb\xb0\x4a\xaf\x31\x09\xa7\x74\xf4\xe4\x8d\xbd\ \x4c\x09\x38\x09\xa9\x93\x4e\xab\x4f\xbe\xba\xec\xbb\x4a\x22\x55\ \x09\xea\x29\x10\x52\x51\x77\x5a\x0d\x45\x7d\x17\x21\x2f\x35\x3d\ \x2e\xca\xc7\xdd\x9e\x3f\x37\x1e\x85\x1d\x95\x27\x45\x2d\x14\x11\ \x3e\x01\x25\x21\x5d\xff\xa6\x43\x24\x9e\xa6\x74\x8b\x74\x6e\x22\ \x13\x45\x31\xbf\xc1\x23\x6c\x66\x25\xc6\x03\xa1\x8b\x47\x3d\x45\ \xad\x12\xc1\x96\x32\x52\x3d\x4d\xf9\x21\xa9\xdc\x35\xda\x88\x5c\ \xed\xce\x10\x14\x22\x6a\xb7\x4c\x48\x96\x00\x28\xaf\x2e\x1e\x45\ 
\x83\x56\x77\x87\x0e\x4f\x69\x7e\x79\xa3\x00\xbd\x6d\x14\x94\xc9\ \xd2\xb3\x9c\x53\xb0\xef\xa2\x28\x94\xca\x44\x24\xa6\x82\x03\x7c\ \x00\x69\x1b\xb4\xa0\xf6\x30\xf6\xaa\x51\x7a\xb0\x72\xf5\x04\xc5\ \x30\x6f\x9e\x92\x04\x38\xf9\xa3\x2d\xa1\x0c\x0a\xde\xc9\x27\xf0\ \x5d\x3c\x3a\xaf\x5e\xcf\x4c\xf7\x1d\x10\xff\x16\xcf\x3c\xec\xdb\ \x33\x2e\xb4\xd8\x10\xde\x04\xd5\x53\x15\x22\x0a\x9d\xf7\xea\xa4\ \x67\xeb\xbd\x71\xf2\xf1\x28\x6e\xd0\xf2\xd9\xbc\xf8\x12\xdd\x17\ \xb0\xff\x7a\xe2\xbe\xc9\x38\x2b\x1a\x09\x7c\xa3\x4c\x2d\x5d\x42\ \xfc\xe4\xa2\x7c\x98\xa7\x2e\xcd\xf7\xf1\x28\x28\xda\x1d\x39\xbb\ \xb6\x27\x71\xf5\xf3\x65\x14\x05\x46\x5d\x69\x76\x75\x11\xce\x2d\ \x88\xcd\x8f\x47\x73\x9b\x24\x4b\x79\xb5\x5a\x8f\x41\x04\x85\x81\ \xf2\xaa\x64\x3c\x3d\x6f\xef\xc0\xfd\x21\xd2\x63\x50\x7b\x9f\x27\ \xf1\xe4\x5c\x21\x11\xbe\xdf\xd0\xe3\x2e\x1d\x53\x06\x67\x8a\xa2\ \x5e\xef\x7b\x89\xd0\x55\x22\x37\x0e\x03\x25\x16\x93\xc9\xe3\xcf\ \x9f\x1f\x8f\x56\xad\xb3\x22\xec\xde\x19\xaf\xf8\xfa\xcc\x92\x14\ \x62\x5a\xd2\x3f\xac\xaf\xa7\xc9\x42\x0a\x82\xf2\x00\xcb\x91\x23\ \xa3\xb0\xcf\x5f\xf7\xe4\xc2\x51\xc9\x94\x54\xd6\x3c\x19\xbe\x9b\ \x7e\x5c\x8a\xf2\x05\xa9\xab\x82\xab\x07\xc5\xa1\x16\x1c\x08\xae\ \x51\xaf\xf7\xbe\xde\xb0\x7f\x7e\x3c\xea\x7b\x4a\xe0\xef\x6d\x0b\ \x21\x6a\xf5\xb6\x5e\x6f\xbb\x20\xa0\x4e\xa5\x3f\x66\xad\x12\x0a\ \xe6\xfb\x02\x23\xae\x4b\xf5\x09\x08\xb7\xfd\x24\xe9\x0c\xd3\x75\ \xe2\xf3\x52\xb8\xbe\x3c\xc8\xa7\x46\x4f\xa2\xf5\xf3\xce\xd7\x1b\ \x5f\xef\xfb\xf0\x01\xd2\x1a\x7c\x0c\xa8\xbc\x70\xbf\x68\x5f\x97\ \x09\x19\xa5\xd5\x94\xc1\x21\x9d\xb8\xdf\xd1\x07\x4e\xfa\x78\xee\ \x83\xf7\x18\xae\xc7\x19\x03\x38\xa1\xf5\x2c\x9f\xa6\x5e\x5f\xf2\ \xe2\x8f\x36\x98\x0c\xa9\x73\xf5\x4a\xd5\x46\x82\x5c\x41\x02\x63\ \x2b\x93\xcb\x05\xb1\xbc\x4d\x50\x6c\xee\xe4\x4a\x9d\x22\x33\xf7\ \x7a\x02\x61\xd1\x2a\xd7\x1b\x6f\xef\x60\x12\xb3\x91\x23\x0d\x08\ \x69\xb1\x66\xc9\xd7\x99\x7c\x75\x19\x9a\xc4\x0b\x43\xe4\x29\x8f\ \xb2\x5e\x2a\x77\xae\x57\xc3\xd5\xeb\x25\xd2\xc3\xd0\x3e\x6c\x36\ 
\x4e\xf1\xed\x38\xff\xc1\xd3\xaf\xb0\x06\x0a\xf5\x7a\x0c\x25\x24\ \x7d\xc4\x2f\x08\x29\x8b\xa8\xd6\x99\x8c\xd6\xab\x12\xb9\x7a\x7d\ \xc0\x6d\xf8\x8e\x32\xb8\x37\x50\x42\xce\x4f\xd3\x65\x27\x65\xd7\ \x35\xf3\xe1\xb1\x15\xb1\x2c\x6b\xb5\x12\x21\x8d\xab\x75\x26\x5f\ \x0f\xa3\xc5\xd5\xeb\xad\x74\x9a\xd4\x38\x28\xab\xb4\xa4\x37\x5d\ \xb7\xf8\x83\xdc\xd0\xd4\xd6\xeb\x81\x15\xdd\x59\x7a\x9c\xa9\xc8\ \x54\xe9\x71\x62\xc4\xf6\x95\xf0\x9d\x54\x73\xeb\xf5\xc0\x8b\xd4\ \xb8\xc2\xb4\x29\xb2\x2b\x22\xd0\x6f\xde\xa5\xfa\xb4\xc2\x9b\x27\ \x98\x52\x65\x3d\x88\x9a\x05\x94\x71\xe2\x46\xf4\xce\xb6\xcc\xba\ \x95\x22\x87\x84\x22\x26\x51\x8a\x12\x68\x6f\xf0\xf9\x3a\xbd\x0b\ \xc6\x1d\xbe\xde\x3a\x26\x46\x8a\x03\x8c\xe4\xef\x35\x35\xae\x51\ \xb3\xf6\xe1\x03\x66\x82\xb7\xf8\xfb\xda\x79\x26\x01\xaa\x77\xcc\ \xf3\xe3\x62\xbd\x15\xbd\x63\xc7\xf7\x8f\xef\x72\x7d\x4f\x83\xf9\ \xfd\x8e\xb8\x7e\x14\xca\xc4\x24\x45\x1f\xbe\xad\x82\xf2\xca\xb9\ \x71\xa7\xef\xa8\x86\x09\xd2\x8b\x77\x7d\xbe\x79\x2d\xba\xd6\xc1\ \x93\x2a\xed\xab\x31\x85\xd6\x0f\x9a\xea\x94\x32\x31\xf5\x95\xb5\ \xc2\xe6\xd4\xee\x42\x11\x94\x80\x9a\x6a\x4b\x6b\xb9\xac\x02\x37\ \x95\x08\xb3\x59\x3e\x77\x07\x90\xf1\x62\xc5\x63\xa3\xbe\x6f\xc3\ \x1f\x76\x27\x20\xaa\xbc\xeb\x70\x83\xdc\x9a\x07\x92\x82\xa2\x08\ \x9e\x3b\x2b\x22\x04\x51\x6f\xd6\xf0\x09\x88\x7e\x5c\xae\xdd\xac\ \xa7\xce\xc7\x96\xf1\xba\x0d\xb9\xf8\xf6\xe7\xaf\x48\x6c\x4f\x89\ \x17\xd0\x94\x4f\x04\xd3\x84\x3a\xb1\x8c\x0a\xc0\x02\x6a\xef\xa0\ \xea\x20\x88\xae\xbf\x9d\xd8\xdf\xb8\x3c\x6f\xbf\x9e\x1a\x00\x21\ \x2f\x3e\x87\x2f\xb8\x1b\x1b\x17\x8a\x2c\x2e\x86\x22\x05\x48\x90\ \x54\x6e\x56\x80\xde\xe3\xa0\x90\x9e\x69\x00\xd2\x12\xc4\x44\x16\ \x3f\x08\xe5\xab\x01\xf4\x78\xde\xb7\x61\xf3\x3f\xf6\x74\x3c\x96\ \xb4\x33\x25\x95\xcb\x37\x66\x34\x5a\x91\x73\x01\x8a\x3a\x98\x6a\ \xec\x81\x12\xc2\x59\x8b\x95\x47\x7f\xc1\xc5\x04\xe4\x9e\x1e\x09\ \xa9\xb9\x76\x78\xe5\xb9\x20\xf6\x75\xb9\xeb\x1a\x77\x22\xca\x12\ \x8a\xab\xb1\x0b\x5a\x94\xa2\xa6\x0f\xdf\x79\x7b\x60\x25\x6d\x52\ 
\xa8\xa0\x68\x18\xd7\x7e\xe0\xc0\x8e\xa7\x27\x94\xca\x5e\x34\xac\ \x5f\xb9\x04\xa0\x0f\x8e\xda\x14\x35\x26\x1f\xe7\x99\xb0\x1b\xa1\ \xa0\xb4\x68\xe6\xc6\xad\x80\xa6\xba\xa9\xaf\xe5\xd6\x61\x04\xce\ \xfe\xa2\x92\x3e\x3d\xf1\x26\xc2\x2f\xd1\xed\x86\x14\x97\x20\x70\ \x5e\x17\xad\xfd\xea\x0b\x28\xd3\x89\xeb\x47\xdd\xbe\x1e\x09\x1d\ \xc6\xcc\x8c\x27\xa7\x94\x15\x85\x6d\x1b\x86\x73\xc2\x10\xb8\x12\ \x94\xbc\xf8\x22\x45\x7a\x48\x8e\x0a\x4e\x89\xee\xde\x4b\x42\x9a\ \x5a\x9d\xff\xad\xb3\xef\x7d\x7c\xb1\xba\x90\x85\x74\x61\x06\x15\ \xcd\x2f\x66\xe0\xcc\x65\x26\x38\xc5\x7f\x4a\x49\xa4\x2c\x70\xaa\ \x0d\x5c\x97\x6c\x33\x53\xf4\x9d\xab\x82\x9d\xfc\xfe\xd8\x18\xfd\ \xbe\x50\x54\x0d\x3e\xdd\xbc\x35\x6a\x02\xd5\xe7\xe3\x54\x7d\x4f\ \xe7\xc9\xa8\xed\x7e\xd0\x02\x8e\x70\x3e\x43\xdc\x2c\xb5\x26\x37\ \x12\x5e\x03\xce\xeb\xf5\x02\x60\x44\xf9\x52\x65\x8a\x00\x13\xf1\ \xa0\xeb\x70\xd1\x3c\x33\x1e\xc9\x51\x08\xa9\x95\x51\x8b\x35\x55\ \x17\x0a\x19\x15\xa5\xb7\x94\x94\xd4\x38\x30\xd6\xba\x0d\x79\x54\ \x80\xbe\x2f\x67\x98\x8a\x93\x40\x92\x52\x45\xdf\xf9\xef\x6a\x49\ \xa1\xf9\xfc\x5b\x58\x78\x4f\x00\xfa\xcc\xd4\x50\xd4\x85\xf8\x80\ \x2b\xa9\xf1\x82\x63\x67\x68\x3f\xf6\xf5\xce\xda\xcb\x66\xf9\x1b\ \x02\x74\xf5\x68\x16\x94\xf4\x31\xd5\xdb\x7f\x1c\x42\x6a\xfb\x9b\ \x17\x16\x4e\x87\x25\xc5\x76\x79\x00\x90\xe8\x28\x41\x12\x42\x29\ \x4a\x53\x8c\x13\x81\xe5\x37\x46\x8d\x2a\xe3\x4b\x5f\x96\xeb\x63\ \x26\x33\x65\x7a\x89\xb0\x69\x88\x47\xbc\x37\x42\xaa\x50\x99\xa2\ \x1f\x84\x90\xce\xef\x29\x81\x94\x42\xeb\xd5\x90\xc6\x51\x32\xe0\ \x8e\xa3\x6c\x38\x24\xb2\xa6\x1a\x7f\x80\xac\x4f\x14\xd2\x7f\xfa\ \xc6\x33\x3f\x0b\xdf\x6f\x58\xda\xdd\xd7\xab\x20\x58\x9b\x3e\x45\ \xb7\x1b\x5a\x65\x62\x8a\x92\x90\x3e\x31\x32\x5b\x91\xaa\x6a\xb3\ \xdd\x1c\x6a\xa1\xa7\x60\x88\x92\x79\x40\x25\xd4\xed\x96\x25\x1e\ \x89\x9f\x74\x7d\xf2\x8e\x2c\xf9\xab\xc6\x85\x8a\xbf\xa3\xd1\x28\ \x9f\x0c\xfc\x12\x0b\xa9\x6f\xd6\x58\x58\xf9\x69\xb6\xa4\x6f\xf9\ \x44\x88\x5f\x36\x0f\x29\x70\xf6\xaa\x84\xba\xc8\xcc\x3e\x21\x3d\ 
\x6a\x4e\x89\xb5\x46\xd3\xd5\x27\x7d\xaa\xf4\xef\xb4\xf7\x15\x1e\ \x13\xef\xc4\x3d\xd9\x6c\x49\x33\x90\x13\x8d\xae\x24\xa4\x5f\x57\ \x51\xaf\x11\xe1\x9b\xae\x71\x34\x37\x67\x7a\xc5\xbc\xf0\x9e\x21\ \x16\x90\x4d\xc0\xd5\xea\x72\xad\x5e\x34\x32\xf6\x82\x63\x3d\x3b\ \x5e\x5e\x0a\x49\xfb\x43\x15\xd2\x9d\x4d\xe9\x8c\x6c\x0f\x60\xdf\ \x12\x76\x4e\x18\xd7\x8c\x7b\xa8\x82\x3b\x6b\x2f\x66\x94\x70\x66\ \xda\xa3\xa3\x77\xe9\xe8\xde\xbe\x9d\xd2\x61\xaf\xc4\x40\x77\x01\ \xde\x86\xb5\x5b\x0e\xdf\x76\x06\x7d\x1e\x29\x84\xf5\x11\xf0\xa7\ \xc8\x92\xc2\xd9\xd3\xa5\xae\x1f\xfc\x52\xeb\xb2\xf2\x53\xf7\x8c\ \x18\xa8\xab\x34\xb5\x94\x09\x96\x09\x59\x3c\x6c\xf2\x8c\xbd\xaf\ \xd3\xf2\xca\x55\xbb\x98\xa4\x9f\x57\x98\xab\x1f\x5b\xae\xb2\x32\ \xe1\xab\xaf\x36\x8d\x59\xfb\x89\xaa\x46\x48\xf9\x3a\xdf\x3f\xff\ \x82\x45\x79\xd5\x35\x57\x4f\x46\xc3\xee\x7a\x3d\xc3\x74\xa9\xf1\ \xc2\x16\x43\x8d\xd6\x27\xfc\xf9\x4e\xbe\x75\x7e\x42\x57\xd1\xad\ \x11\x9c\x47\xa6\x03\xb1\xa4\xef\xa6\xfb\x0e\x23\x49\xc5\x94\xbe\ \x28\x96\x14\xe5\x9b\x68\xea\x01\x72\xef\xcd\xfb\x17\x2b\x57\xaf\ \x77\xd6\xd4\x50\x94\x7d\xbd\x3f\x2d\x80\x91\x28\xb2\xde\xe7\x52\ \x0a\x48\x99\x83\x67\x4e\x73\xf6\xf3\x72\x01\xfe\xbe\x51\x40\x28\ \x82\xba\x40\x0a\xb6\x33\xec\xf3\x6e\xff\xc6\xe6\x73\x57\x46\x8a\ \x5e\x31\x03\x79\x78\xff\x64\x67\x3a\xa4\x5b\x3d\xb5\x5e\x3f\x98\ \xd3\x98\xe9\x8b\x0d\xa0\x66\x13\xf0\x9a\x47\x9e\x8b\xaf\xa5\xc6\ \x5f\xbd\x08\x40\x45\x48\xf7\xdc\x8f\xb0\xe4\x3f\x63\x27\xcd\x34\ \xc1\x1f\xe6\x22\x2b\x74\x40\x64\xf2\xfd\x91\xa2\xdb\x9e\x21\x4a\ \x3e\x35\xe1\xbd\xe8\xfc\xde\xbc\x14\x40\xfd\x5e\x44\x3b\xc9\x70\ \xc1\x5f\x51\x5e\xfb\xdb\xc1\x50\xbe\x90\x07\xa0\x2f\x2a\xd0\xf5\ \x13\xa2\x28\xfd\xfb\x8e\x69\x8a\xd8\xe9\x1c\x8a\x54\x88\xf7\x71\ \x0e\x87\x46\xb9\xb7\xe4\x24\x01\x1b\xc7\xe3\x9d\x01\xa4\xbf\x93\ \x0c\x01\x54\x96\x81\xf9\x0c\xd5\x6e\xec\x0b\x45\x89\x1b\xfe\x96\ \x5e\xc5\xd0\xc6\x01\x68\x11\x0a\xe6\x37\xed\x23\x21\xe5\xdd\x1d\ \x79\xd1\x29\xc2\x3c\xde\xd6\x3d\x95\xf6\x69\xbc\x77\x57\x4b\xbb\ 
\x4f\x56\x6c\x43\x17\x16\x16\x3a\xb6\xcb\xce\x3a\x29\x35\xe9\x87\ \xe4\x78\x21\x50\x2d\xd2\x8b\xf4\x0a\xdf\x65\x2e\xe0\x8f\x47\x3f\ \xa5\x7b\x37\x33\x4a\x91\xac\xa3\xbe\x99\x1a\xcd\x2f\x44\xc0\x49\ \x04\xfa\x4b\x86\x96\x91\xf2\x6b\x9f\x9e\x24\xac\xf9\x61\xf8\x23\ \xc1\xb8\xa0\x86\x67\x6a\xf4\xa9\xb5\x05\x65\x98\x87\xce\x7b\xf8\ \x8d\x17\xde\x59\x16\xf1\x57\x28\x35\x7e\x31\xe3\x10\x8f\x75\x7d\ \x4b\x55\xce\x84\xf4\x5e\xec\x98\x77\x13\xf1\x97\x4b\x71\xf3\x00\ \x79\xe6\x2b\xa3\x4c\xaa\xd1\x0f\xc3\xa5\xc6\x5d\x1f\xbe\xad\x2e\ \x2b\xef\x81\x76\xeb\x23\xeb\xe4\x7e\xe1\xed\x11\x6b\x7a\x6b\xa0\ \x26\xdd\x7a\xfe\x45\x89\x4a\x6a\x2a\x98\x8f\xeb\x44\x2d\xe9\x86\ \x91\x6e\x98\x78\x9f\x7c\x7c\x1c\x4d\x93\x01\x39\x1e\x15\x62\xf4\ \xc9\x37\x85\xa5\x41\xcd\x6d\xb8\x55\xc3\x67\xf0\x81\x35\xb3\x3a\ \x7f\xb4\x79\x39\xf7\x1f\x93\x00\x7c\x72\x72\x30\xc2\x59\xd4\x90\ \x74\xbb\x58\x52\x15\xd2\xde\xde\x52\x02\x3d\xe6\xf6\xf2\x20\x9a\ \xfb\x25\x92\xdf\xb3\x5e\xf9\xf4\xe2\x28\x4d\x9a\x95\xf0\x05\xa2\ \x28\x80\x76\xfe\xa5\x16\x45\x87\x56\x57\x18\xa8\x43\xcf\x6e\x3f\ \xf5\xe8\xf9\x56\xae\x6e\xe1\x0d\xc9\x76\x12\xd2\x22\x29\xf1\x37\ \x30\xee\x4d\x69\xaf\x2c\x77\x2f\x3e\xbb\x3b\x79\xe9\xf5\xbb\x58\ \xed\x0e\x4c\x6b\x32\xa1\xef\xbd\xee\x7d\x8f\x8f\x03\xc8\x7e\x03\ \x25\xc2\x3c\x73\x3b\x72\xcb\x3c\xf9\xed\x08\x28\x7a\xe9\x39\x07\ \x89\x36\x7a\x43\xa5\xbe\x1f\xcc\x23\xd2\x25\xd6\x1f\xa9\x37\xec\ \x66\x4b\x1a\x63\xbd\xf2\x0f\x7b\xed\xb1\x77\x5a\x69\xd7\x63\x42\ \xfe\xa9\xe3\x7c\x58\x97\xd6\x9b\xba\x9d\x6f\x77\xe3\x94\x5e\xba\ \xf1\xd4\x1e\xc6\xc1\xb5\x0f\x4c\x26\x5f\xbc\x7a\x1f\x01\x78\x20\ \x02\x7d\x8e\xf5\x47\xa3\x92\x18\x7f\xbe\x6d\x91\x0c\xd4\xb5\xee\ \x5a\xf0\xe9\x4e\x89\xf4\x13\x7f\x09\x25\x8e\xb4\xb8\x23\x03\xdd\ \x27\x1b\x04\x24\x3c\xd3\xd1\x1e\xc6\xda\xe9\x62\x45\x96\xff\x3b\ \x14\x84\x57\x51\xf7\x4f\xe5\x4b\xfb\x19\x29\x01\xed\x2d\x07\x98\ \x6f\x45\x92\x49\x18\xb1\x76\x3a\x22\x9c\x0c\x4f\x36\x79\x18\x9d\ \x17\x7d\x6d\x73\x49\x32\x75\xa0\xfa\x03\xd2\xcb\xaf\x82\xf7\x98\ 
\x56\x85\x8c\xcd\xe4\x20\x23\x49\x29\xd8\x98\xca\x95\x44\xec\x38\ \xd7\x2e\x5d\x5f\x2b\xbe\x7b\x64\x03\x75\xe3\xf2\x38\x27\x98\x26\ \x87\xcf\x13\x52\x1a\x9e\x28\xa2\xc0\x89\x5a\xa8\x27\xaa\xd8\x7b\ \x68\xd2\x6b\xf7\xae\xed\xb1\x9f\xa9\x52\x35\xf7\x67\x53\xff\x73\ \xa5\x42\x7a\x2f\x97\x1c\xbe\x35\x13\x91\xf7\xeb\xd7\xf4\xae\xab\ \x0f\x1c\x3e\xb1\x67\x3a\x2a\x24\x05\x81\x3f\x19\x4b\x20\x5d\xd9\ \x76\x41\x4a\x62\xe6\x08\x46\x3b\xc2\x47\xb1\xde\xdc\x04\x50\x55\ \x2f\xe3\x90\x2d\x55\xaf\x54\x5c\x44\x94\x1b\x15\x22\xa4\x1b\x76\ \x96\x65\x91\x2c\x35\xd2\x71\x8f\x12\x1d\x57\xef\x58\x4e\x93\xb2\ \xa8\x93\xa1\xfa\x27\xa1\x28\x85\x7a\x40\xea\xae\xcb\xc4\x30\x32\ \xea\xcf\xdf\xcc\xcc\xd3\x56\x7a\x13\x7f\x77\x8c\xe0\x41\x4b\x02\ \x1a\xed\x67\x41\xc2\xb8\xfe\xd9\x6b\x4f\xf9\xfd\x1e\xc6\x43\x77\ \x2d\x6d\xd9\xf1\xd4\xe2\x28\xab\x4d\x52\x87\x86\xb4\xe9\xe8\x03\ \x90\xdd\x2d\x5a\xdb\xa8\x55\xa3\xb3\xd4\x94\x3a\x83\xbf\xc4\x40\ \xc9\x8f\x12\x45\xdf\x4c\x7f\xa5\x61\x1c\x85\xf4\x0f\x7a\x4d\xb3\ \xb0\x85\x60\x7f\x66\x39\x5d\x13\x34\xad\x91\xcb\xa3\x85\x07\x71\ \xde\xd7\x96\x2d\x3e\x6b\x47\xfd\x31\x5b\x98\x28\x13\x3e\x1d\x22\ \x2d\x07\x45\x0b\xf2\xa5\x8f\x8d\xc8\xe6\x93\x62\xa9\xdb\x3f\xb9\ \x2e\xb0\x7b\x71\x27\xe7\x1c\xeb\xda\x24\xf3\x84\xaa\x8d\x7b\x7f\ \xfa\x8e\xf3\xb0\x4d\xa0\x68\x17\xe7\x81\xd3\x8c\x92\xd3\x5e\x7a\ \xdf\x74\x4a\x0e\xf1\xe5\x93\xbc\x8c\xe3\xf2\x7f\x11\x94\x74\x8d\ \x7e\x52\xe5\xd2\x9d\x87\xb6\xb7\xa1\xb9\xd1\x95\x81\x26\x5d\x7f\ \x6b\x1d\x34\xf5\xf5\x7a\x4f\x54\xa8\x15\x00\xf3\x7e\xe8\x4f\xf9\ \x4f\x06\xde\xc9\x34\x3c\x31\x2e\x65\x7c\x98\x3c\xc0\x1d\x87\x27\ \x93\x4c\xfe\x2c\xbc\x1f\x80\xe9\xee\x7d\x41\x46\xcf\x1d\x0c\x5d\ \xe1\x32\xce\x2e\x22\xb1\x64\x7d\x56\xac\xe2\xae\xa5\x03\xba\x13\ \x7e\x62\x5c\x61\xcf\x34\xf8\xe8\xc9\xc5\xc9\x28\xd3\xb4\xb8\x69\ \x72\x16\xe6\xa3\x67\x1c\x50\x9d\xc1\x9f\xd3\x91\x5b\xe5\x08\x9e\ \x53\xdb\xad\x61\x39\x4f\x42\x6a\xc7\x6d\x93\x11\xce\xac\x9b\x14\ \xbe\xf0\x5d\xd8\x0e\xa8\xee\x26\xc2\xc4\xdd\xe0\x0d\xdf\xe4\xef\ 
\xd2\xf1\xe6\x1e\x09\x5d\xa8\x7d\xde\xc6\x79\xcd\x74\x94\xba\x0c\ \x44\x78\x11\x50\x73\xe5\x30\x14\x5f\xcf\x31\xa1\x29\xd7\x88\xa8\ \xa7\x27\xfa\x47\x6d\xbd\x5e\xa7\xd7\xa5\x34\xdf\x6e\xe3\x8b\xfb\ \xa6\x23\x86\x98\xa2\xff\x41\x29\xaa\xcd\x44\xad\x1a\x93\xb0\x1e\ \xa7\x83\x1c\xeb\x65\xb5\x48\xfd\x91\x60\xa7\x44\x26\x72\xe6\x48\ \xf3\xe6\x63\xbd\x7b\xee\xd9\xb0\x76\xff\x72\x55\x94\x85\xbb\xe1\ \x8d\x50\xb2\x8c\x72\xe9\xd6\xda\x51\xd3\x44\x98\x00\xad\x3f\x19\ \x4a\x0b\x4e\x36\x18\x6a\xba\x14\x04\x04\x15\x96\xf4\x92\x51\x3a\ \xa0\x3f\xfc\x47\x76\xdf\xe4\xc8\x78\x36\x65\xb4\xf6\x77\x95\xa0\ \xe3\x4d\x86\x7e\x18\x62\xfa\xb3\x22\xf0\xf4\x96\xf7\xc0\x88\x51\ \x90\x4b\x1f\x93\x15\xc5\x5d\xe3\xb6\xb6\xac\x09\x52\x1c\xb8\x83\ \x6f\xb2\x76\x34\xce\x3e\x50\x6a\xe2\x11\x54\xb5\x8d\xae\x1d\xda\ \x94\x42\x4c\x2d\xda\x22\x5a\xd2\x1b\x26\x05\x0d\x45\x9a\x5a\xa8\ \x8d\x12\x63\x69\x19\x4f\xab\xc2\x44\xe8\x64\xd8\xef\x49\xe9\x6f\ \x22\xb4\xe7\x2c\x9d\x6b\x2a\xe3\xf2\x30\x05\xf5\xb9\xa0\x2c\xdd\ \x1d\xde\x5c\xb8\x53\xb0\xd0\x7a\x7b\x86\x51\x2d\x3d\x90\x7a\x17\ \xea\xa2\x27\xd0\xd3\xc7\x4f\x5e\x48\x7f\x8f\x42\xe7\x91\xa6\x21\ \xd0\x58\x80\xa1\x76\x54\x7e\xfe\xcc\x1d\x9a\x72\xfd\x31\x46\x14\ \x70\xa0\x4f\x5e\xeb\x53\x2c\xc2\x7e\x7f\x41\x0d\x0b\xe9\x9d\xa7\ \xac\x7a\xb3\xb0\x9e\x50\xda\xcb\x89\x94\xf5\x65\xe7\xa1\x16\x6d\ \x7e\xe9\xf2\xf5\xf0\x9d\x9e\xa2\xb6\x71\x58\x57\xa5\xa3\xbc\xd1\ \xab\x83\x24\xf2\xd5\x8c\xd4\x8f\x5a\x86\xf3\xf7\x4d\x3b\xda\x77\ \x67\x6f\x0c\x5e\x50\xd3\xdc\xde\xee\xf2\xcd\xfc\xe9\x39\x8f\xbc\ \x62\xdc\xcb\x05\xa0\x4e\xed\x13\x75\xa2\xa0\x64\xd7\x4d\x84\x08\ \x43\xbd\x25\x75\xd7\xd1\xa1\x23\x97\xa7\x33\xfa\xce\xd9\xaf\x31\ \x29\xc2\x49\x4b\xeb\x13\xd0\x33\xac\xe8\x1d\xef\xf0\xa0\xb2\xb2\ \x75\x82\x2a\xf9\x4b\x5f\x86\xad\x30\xaf\x72\x59\x5c\xd5\x7c\x93\ \x78\x6a\xd0\x73\xdf\xe1\xe5\x9d\x0c\xd4\xf6\xe8\xf8\x6b\x52\x95\ \xfd\x43\x7b\xe4\x92\x1e\x47\x51\x1f\x35\xc3\xd7\x57\x4e\xe5\x9b\ \x09\x08\xf3\xb7\xac\xff\x83\x30\x52\xc2\x28\x44\x73\x5e\x46\x71\ 
\x54\xdd\x5d\x4a\xe5\x6e\xee\xa7\xe1\x5d\xa8\xe2\xf4\xd7\x7a\xce\ \x2f\x35\xd8\x9b\x67\x29\xd3\xfc\xec\x1b\xd6\x9f\xb5\x63\x79\x3a\ \x1e\xa4\xb3\x44\xee\xff\x37\x76\x36\xad\xb6\x1d\x45\x18\xbe\xd1\ \xab\x6b\x7f\xac\xbd\xd6\x59\xd7\xbb\xb9\xde\x13\x50\x07\x5e\x41\ \x49\x40\x74\x20\x38\x88\xa0\x01\x11\x27\x89\xa0\x44\xf1\x6b\x92\ \x20\x19\x29\x82\x1f\x71\x60\x06\x8e\x1c\x38\xf0\x27\x38\x53\x34\ \x19\x38\x0a\xfe\x80\x8c\xfc\x05\x01\x41\x32\x50\xfc\x15\xe6\x74\ \xd7\x5e\x4f\x57\xbf\x55\x6b\xdf\x3e\x7b\x9f\x73\x86\x45\x77\x57\ \x75\x7d\xbd\x6f\x09\xa4\x6d\x70\x16\xa0\x13\xd4\x31\x16\xe8\x2d\ \x85\x1e\x59\x89\x00\x20\xd0\x42\x9f\x14\xdd\xb0\x4a\xfb\xe0\x41\ \x15\x92\xd9\x12\x09\x59\x85\x4e\x17\x98\x61\x2c\xe0\xf4\xe5\xe4\ \x5b\x1b\xda\x4d\xb2\xa6\xbd\xdd\x67\x73\x6c\x47\x91\x92\x9d\xad\ \x2b\x18\x81\x51\x8f\x5c\xb0\xcb\xa3\x5d\x51\x2b\xd5\x83\x15\xd1\ \x14\x99\x42\xee\x34\x91\xab\x45\x5b\x28\xfe\xa0\xa1\x3b\xb2\x2a\ \x99\x8e\x48\x6b\xa2\x29\xf5\x2c\xc4\x3f\xf6\x90\xf6\x72\x2a\x55\ \x85\xe2\xeb\x63\x92\x1a\x65\xc9\xed\x73\xf8\x5e\xcc\x2a\x9a\x9a\ \xa6\x5e\xe3\x57\x4c\xd3\x2c\x2a\x05\xda\x4e\xba\x1d\xb9\xa3\x44\ \xf5\x48\xca\x00\xb9\x75\xba\x21\x62\x0a\x17\x04\xf6\x89\x7b\xca\ \x66\xfa\x99\x0d\x4c\x42\xe8\xb7\x14\x6e\x0d\x01\xb0\xb2\x9c\xa0\ \xc2\x96\x39\x56\x08\x86\x69\x92\x37\xa3\x0e\x59\x5f\x3e\x8a\x5b\ \x07\xd1\x14\xb2\xfe\x84\x2c\x00\x66\x47\xb1\x4f\xf2\xd6\x77\xef\ \x27\x0d\xe3\x91\x3e\x79\x81\xb9\xa1\x4a\xe6\x4c\x02\x42\x19\x13\ \xf9\x46\xdc\x8e\x8a\x05\x0f\xcd\x13\x72\x72\xf8\xc4\x76\x81\xcd\ \x47\xab\xc8\x3c\x39\xfe\xee\x61\x96\xfc\x83\x28\xbc\x8c\x3d\x92\ \xc2\x88\xea\x13\xd2\x16\x7d\x2f\xa2\x0a\xda\xb2\x01\x09\xc6\x7e\ \x09\x18\x21\x21\x20\x05\xd5\xa0\x3b\x0a\x5b\x41\xaf\x4d\x7d\x8e\ \xcc\xda\x85\xa1\x23\xcb\xfc\x3c\x30\x77\xfc\x76\xfe\x68\xc3\x94\ \x99\xf2\x8f\xaa\xd6\x73\x49\xdd\x35\x45\x5c\xb7\x56\x49\x19\x24\ \xd5\x8e\xe6\x02\xc2\xca\x93\xaf\x0e\x29\x89\x07\xf5\xf3\xb2\x85\ \xa0\x27\x54\x1e\x07\x5f\xa9\x9c\x4d\x99\xd8\xd1\x9e\x97\x8a\xa3\ 
\x57\x62\x2a\xc0\xe0\x8c\x39\x74\xea\xbe\x49\x94\xaa\x3e\x89\xaf\ \x84\xea\xcc\x86\x0a\x1c\x30\x5d\x2a\x92\x0e\x1e\x67\x8b\x21\x95\ \x67\x29\x99\x81\x91\x6f\xe9\x75\x7c\xfd\x8e\xc9\x0d\x32\x4f\x06\ \xe4\xb2\xea\xfc\x80\xb8\x42\x8c\xcd\x3e\x22\xaa\x52\x92\x09\x3e\ \x2c\xc7\xd7\x17\x61\x2f\x9b\x79\x10\xde\x61\xb0\xa1\x29\xdf\x93\ \xba\xa4\x4b\x8b\x15\x22\x60\x42\x4a\xa1\x44\xbf\x8e\xaf\xb7\xec\ \xbd\x94\x6c\xab\x94\x34\xe4\x06\x31\x13\xa0\xf0\xee\x8a\x22\x2f\ \x96\x34\xe1\x1f\xe5\xae\x2e\x80\xac\x43\x7c\x3d\x68\x4b\xc9\xe9\ \x15\x4d\x32\xc3\x1f\x3c\xf5\xe0\xea\x95\xe8\xcb\x65\x9c\xc5\x3a\ \x21\x2a\x1f\xcc\x53\x8c\xaf\x27\x00\x65\x4b\x57\x64\x35\x58\x11\ \xe9\x1b\x1f\x1a\xc5\x4f\xa3\x91\x35\x5c\xe2\x9b\xf2\x8f\x7a\xc6\ \x02\xc4\x54\x30\x63\x67\x9f\x7a\xac\xc8\x18\xa2\x97\xb1\xa3\x91\ \x2a\xa1\x48\xa2\xf7\x19\xff\x68\x82\xaf\xa7\x81\x30\xec\x80\x30\ \x5d\x32\xc8\xe5\x31\x47\xad\x23\x35\x0b\x6d\xf2\xa0\x70\x30\x22\ \xc2\x3f\xba\x85\xaf\xe7\x97\xb3\xa3\x7e\xbe\x80\xe8\x12\xd8\x45\ \xec\xa8\xc8\x88\x7d\x8a\x9d\x3c\x9f\xd8\xe9\x76\x74\x0b\x5f\x2f\ \x6c\xd3\x15\xc6\xc6\xfd\x54\x6d\x52\x43\xaa\x53\xa4\x70\xf2\x52\ \xfe\x51\xdd\xd1\xd8\x3c\x91\x76\x3a\xe8\x6b\x0f\x7f\xbf\x5a\x27\ \xaf\xee\xd0\xbf\x2c\x28\x3f\x31\xd3\x36\xff\xa8\x32\x68\xdd\x64\ \xf8\x7a\x35\x4d\x3c\x4f\x0e\xd9\xa0\x7b\x89\xa1\x52\xbd\x07\x13\ \x1e\xf3\x8f\x2e\x82\x0b\xbd\x8e\xaf\xc7\x6b\xc6\x79\xe2\x5d\xb2\ \xdf\xba\xb3\x3c\xf6\xa2\xf7\x68\x51\xca\x3f\x9a\x95\x18\x73\x7c\ \x3d\xaf\x13\xac\xc3\x20\xc1\x81\x30\xf6\xd6\x89\x0f\x82\x32\x1a\ \x3a\xcf\x95\x64\xfc\xa3\xd7\xf1\xf5\x09\xdb\xb8\x32\x16\x0c\x6e\ \x47\x11\xd2\xbf\x4d\x20\x6d\x49\x43\x39\x19\x73\xfe\xd1\xd3\x35\ \x7c\x3d\x0b\x0a\xd2\x11\x0f\x4a\xd5\x1d\xa5\x4f\xea\xb6\xc4\xf6\ \x3a\xf4\x48\x11\xcc\x78\xf8\x57\xf0\xf5\xe5\x83\xb0\x63\xcb\x3a\ \xdc\xc9\x29\xa1\x88\xb8\x79\x38\x79\xc2\x46\x17\x33\x3e\x51\x5d\ \x3e\x65\xf8\xfa\xb0\xcf\xf5\x68\xc2\xa2\x4c\x89\x26\x11\xd6\xcb\ \x02\x5f\x1f\xb3\x62\x13\xdb\x6b\xda\x31\xc7\xd7\x6b\x52\x07\x5a\ 
\x85\xf5\x2b\x03\x8d\x45\xe7\xf5\x61\x6a\xe4\x65\x09\x6d\x3f\x8b\ \x44\x6e\x88\xaf\x07\x6e\x8b\x88\xf6\x3f\x6c\x99\x6e\x61\x41\x75\ \xe4\x15\x0a\xa5\x15\x7b\xf6\x14\x9a\x79\xc9\x3d\x6d\xe1\xeb\x19\ \x84\x50\x17\x47\x0f\x5b\x26\x79\x71\x0d\xf0\x9a\x25\x49\x12\xf4\ \x09\x31\x91\xb6\x1d\xc2\xb9\x8d\xaf\x4f\xde\xfa\x31\x65\x28\xaa\ \x82\x21\x6a\x08\x05\xef\x08\x9f\x9e\x8e\x7f\xf4\xf6\x2a\xbe\x3e\ \x19\x10\x0d\xe7\xb4\x2e\x94\x49\x3d\x7c\x1d\xc1\xfb\x94\xfc\xa3\ \xd7\xf1\xf5\x88\x39\xd6\x0f\x14\xc9\x86\x1b\x91\xe5\x67\x72\x65\ \x44\xbe\x5a\x6f\x50\xfe\x51\x84\x4e\xf1\xf5\x07\x18\xb4\x5c\x80\ \x67\xd2\x9a\x88\xfe\x71\xa2\x1a\xe2\x42\xd1\x60\x63\x79\x9b\xae\ \xf1\x8f\xe6\x50\x36\x2a\xe0\x46\xa8\x81\xa4\xe8\xfe\x51\xe8\x0a\ \xe4\x7d\x92\xf8\x0e\x63\xef\x72\xce\xd7\xf8\x47\x29\x31\x26\xf8\ \x7a\x26\x33\xf9\xa8\xde\xe6\xdc\x31\x4d\x4a\xa7\xd7\x63\xa2\xc2\ \xe5\xa7\xb4\xd8\x7f\x39\xff\x28\xc1\x1d\x06\x9f\x00\x84\x1d\x45\ \xd4\x51\x20\xeb\x5e\x48\xd8\x1c\xc5\x8b\x72\x42\xfa\x51\xd6\x29\ \xff\x68\xee\xe6\xb1\xe0\x24\xdb\xa3\xf5\x58\x53\x62\x26\x2d\x85\ \xea\x70\xcb\x51\xc3\x7a\xe4\xcc\x38\x72\x23\xfe\xd1\x80\x00\xa2\ \xda\x51\xdb\x53\x84\x84\x7b\xd2\xb8\x4a\xdc\x08\x56\x2d\x88\xe6\ \x47\x8f\x42\xd9\x52\xbe\xa7\x04\x82\x71\xea\xb6\x73\xbf\x87\xfb\ \xe3\xd0\xe7\x75\x72\x98\x50\xfd\x10\x86\x28\x53\x89\x67\xcf\x0a\ \x1f\xfb\x96\x48\x69\x03\x5f\x7f\x01\xd8\x83\x13\x41\xcc\xe6\xd8\ \xa3\x34\xd9\x80\xb4\x81\x09\xf5\xe2\xf2\x1b\x09\x95\x7f\x54\xf1\ \xf5\x6a\xf2\xed\xe8\xfb\xb3\xf7\x13\x9a\xb4\x70\x47\x58\x2f\xe6\ \xc9\xf4\x28\xa8\xdf\x0c\x79\x63\x26\x51\xa8\x89\x99\xe2\xeb\xf7\ \xba\x9d\x8c\xba\xd3\x01\xd1\xae\x6c\x0b\xc0\x1a\x74\x3d\xd1\x9d\ \xbb\x00\xd2\x49\x18\xbc\x4c\x37\x31\xbe\x1e\xa7\x44\xf6\x15\x6a\ \x4f\x65\x2b\xc0\x77\xa2\x3d\xcb\x67\x9c\x39\xf8\x84\xe1\x2d\xb9\ \xa3\xf8\x79\x62\xf3\xd9\x52\x3f\x6e\x3b\x87\xaf\x32\x7e\x53\xed\ \xe8\xdc\xcc\x10\xa3\x6b\x3c\x0d\x45\x58\x4f\x8d\xaf\x97\xe7\xc9\ \xc4\xcc\x20\xc1\xe5\xeb\x58\xbe\x26\x47\xde\xbf\xb8\xae\x71\xcd\ 
\x38\x73\xec\xdb\x06\x1f\x2c\xf8\x7a\xf2\x92\x25\x73\x23\x8d\x8f\ \x9d\xa4\x45\x46\xb6\xd2\x75\xb6\xb7\xf9\xfb\xb9\x4f\x3c\xf1\x41\ \x9f\x32\xa7\x64\xa7\xf8\xfa\x4e\x9b\x50\xa6\xf0\x8a\x32\xb1\x85\ \xc1\x7c\xed\x1d\x75\x53\x06\xe2\x61\x2d\xf6\xc1\xc7\x07\x78\x25\ \x9e\x53\xd4\xf3\xc4\x8e\x3a\xe6\xbc\x60\x98\xb1\x9a\x51\x74\xa9\ \x2e\x2c\x68\xc7\x44\x48\x10\x9a\xb0\xb7\xe7\xf8\x7a\x74\x1e\xfb\ \x24\x05\x1c\xa4\x15\xdf\xc4\x98\x08\x69\x7c\x72\xbc\xd8\x02\x12\ \x42\xca\x65\x91\x28\xf4\x14\x46\x76\x72\xf4\x08\x8b\x98\xa2\xf6\ \xd8\x51\x98\x54\x58\x45\xe3\x7d\x0e\x7f\x50\xea\x51\xfb\x2f\xc3\ \x8a\xb0\xc0\xd7\xfb\x58\x04\xd3\xc4\x0f\x02\x73\xf8\x75\x2b\xe1\ \x79\x34\xbd\x72\x7c\x15\x4e\x46\x79\x45\xa9\x31\x0b\x83\xd6\x4d\ \x8c\xaf\xc7\x3a\x89\xa7\xc7\xd1\x87\x73\xee\xca\x9a\x64\x06\x06\ \x7e\xbe\xcc\xb5\x55\xe0\x55\x0e\xb7\x44\xa3\x28\xd6\xb3\xad\x44\ \x76\xeb\x8f\xef\x19\x77\xf3\x99\xb8\xa0\xee\x09\x75\x1e\xbe\xef\ \x73\x25\x99\x9b\xf3\xe1\x67\xf8\xfa\x68\x2b\x19\x1a\x29\xec\xed\ \x2d\x6f\x9e\xce\x35\x75\x43\x0e\xab\xcc\x01\xf1\x6c\x92\xc3\xdf\ \xc6\xd7\x7b\x07\x1f\x0b\xda\x19\x27\x64\x85\x37\xcf\xcf\x3c\xe3\ \xf0\xcd\x69\xae\x60\x3b\xa5\xc3\xc7\xde\xb3\x36\xf0\xf5\xf6\xe5\ \xec\xdd\x4a\x95\x1e\x65\x12\x97\x64\xb6\xdf\x0b\x3f\xb3\x28\x93\ \x7d\xc8\xe2\x67\x7c\xf8\x8a\xaf\x57\x7f\x74\x64\x5f\x51\xfb\x89\ \x69\xeb\x8d\x5f\xca\x20\x0c\x2e\x29\xf1\xb2\x9a\x27\x0d\x42\xf3\ \xf9\xf5\xdb\xf8\xfa\x91\xbe\xa7\xc2\x8f\xab\xed\xa3\x13\x75\x11\ \xb6\xd5\x5d\x52\x57\xb3\x4b\x4e\x5e\xfa\xc9\x6e\x25\xae\x8f\xf1\ \xf5\xda\xaa\x01\xf1\x6c\x3c\x4d\x4a\x03\x26\x3d\x7a\x64\xf5\xa2\ \x6e\xd7\xeb\x73\x7c\xfd\x6e\x0f\xe7\x2c\xe3\x8c\xd5\xc9\x23\xa5\ \xe3\xcc\x92\x6f\xc8\xf5\x6c\xf8\x45\x9f\xd4\x8a\xea\x24\x46\x9d\ \x5f\xaf\xf8\xfa\xbd\xb8\x7a\x8e\xca\x55\x75\xaa\x6a\x7d\x5f\x11\ \x25\xe9\xe8\x5e\x7b\xc5\x06\x02\x0a\x8e\x76\x34\xc7\xd7\xab\x4b\ \x42\x68\x6f\xdf\xee\x65\xba\x44\x23\x38\xa6\xed\xc2\x1f\x9d\x3b\ \x7f\x34\x8f\xf0\xd4\xc3\x57\x34\x1b\x62\x6e\x9b\x27\xb8\xf3\xcc\ 
\x86\xc2\xe3\xac\x61\x72\x5c\xbc\x21\x45\x66\x9f\xed\x7a\x7d\xab\ \xf5\x7e\x36\x38\xde\x28\x5e\xbe\x8c\x5d\x9e\x68\xce\xe2\x09\x95\ \x45\x70\xa7\x0b\x9f\x64\xd9\xe4\xc3\x6f\xb5\x5e\x0d\xa9\x7b\xeb\ \x35\xe5\x8c\x87\x3f\xb4\xc9\x52\x15\x90\x70\x19\x4b\x9a\xcc\x6c\ \xb8\xed\xf8\xf0\xc3\x51\x08\xbb\x36\x91\x7b\x84\x7e\x74\x54\xb5\ \x67\x49\x26\x4f\x94\xc9\x61\xee\x74\x54\x8b\x1e\x3c\x5a\x1f\xe3\ \xeb\xe3\x72\x28\x6f\x93\x32\x68\x91\xbc\x67\x2f\x35\x3b\x4e\xbc\ \xa4\x45\x26\x01\x38\x08\x1f\x7e\x8c\xaf\x4f\x1a\x5f\x98\x79\x84\ \xdf\x5c\xfd\xbc\x2a\xe2\xe4\xa0\x96\x62\x48\x17\x18\x0b\x36\x07\ \x1a\x2b\x1f\x7e\x8e\xaf\x97\xc4\x73\x11\x96\x9f\x78\xfe\x49\x55\ \xfc\xde\x3a\xe1\x2d\xd3\xde\x8e\xf2\x83\x6a\xc8\xb5\x9e\xe5\xf1\ \xf5\x28\x13\x05\x3b\x52\x10\xb5\x20\x26\x64\xe3\xf8\xf8\x2e\xad\ \xb7\xac\x6f\x3d\xde\x53\x94\x79\x92\xfd\xbc\xd5\x7a\xbd\xe2\xeb\ \xf5\x8e\xc2\x3a\x1d\x28\xfe\xd4\x06\xcc\x7a\x47\x81\x34\x30\x2a\ \x18\x95\xf7\x61\x68\x7f\xf8\xd4\xeb\x63\x7c\x7d\x18\x2a\x37\xb9\ \xa7\xa3\x97\x72\x98\xf0\x46\xd0\xa6\xa8\xce\x98\x03\x58\xb3\x29\ \xc1\xdb\xf8\x7a\x5b\xea\x3d\x51\x60\x74\xc3\xb9\xa4\x75\x78\x0c\ \xcc\x13\x08\x56\x9f\x7c\x22\x89\x2f\x4a\x2f\xf5\x7a\xe2\xfa\x48\ \xce\xa6\xa5\x24\x9e\x69\x3b\x55\x8d\xaf\x5a\xb4\x62\x43\xe9\x28\ \xd1\x66\xc7\x00\xd3\x12\xbd\xa2\x32\xbf\x5e\xeb\xf5\x87\x5e\x4a\ \xbe\x91\xb0\x36\xf8\xaa\xe2\x81\xa7\xbc\x2d\xd3\xa1\x2e\x63\xd8\ \x7a\x3e\xbf\x9e\xb0\x8e\xc8\x49\x56\x3f\x6c\x9d\x3a\x38\x4f\x28\ \xda\xef\x82\x11\x92\xb8\x33\x9d\x25\x12\x87\x6a\x6d\x39\x9f\x5f\ \xcf\xe9\x87\x99\x32\x24\x0d\x0a\x77\xae\xed\x69\xf2\x31\xe8\x52\ \x23\x27\xd0\xe0\x9a\xd0\x63\x3f\x7d\x5b\x89\xcc\xaf\x37\x01\xa3\ \x1d\x1d\x11\xf0\x78\x64\x47\xb5\x7f\x94\x87\x14\x8b\x8f\xb0\xbe\ \x55\x43\x79\x1d\xa9\x8c\x64\x7c\xf8\xfa\xd8\xdb\x57\x63\xbc\xe3\ \x31\xf6\x48\x10\x51\x03\x3b\x90\x57\x46\xfc\x81\x83\x2f\xea\xf4\ \xb4\xf5\x7a\x8f\x61\xf5\x7b\x6a\xed\x3a\x91\x8c\x48\x6a\x2f\xbe\ \x3f\x7a\x86\xb2\xb1\xa7\xf3\x90\x95\xc1\x89\x9c\xf2\x7a\xbd\x05\ 
\x77\xa2\xf3\x48\x7b\xf4\xbd\x3a\x5a\x67\xb2\x2b\xca\xd1\x0b\x58\ \x04\x84\xd8\x80\x94\x00\x58\xd1\x27\x66\x83\x07\xd3\x25\x08\x98\ \xa5\x99\xc8\x3e\xcc\xb9\x0b\xdb\xc9\xc6\x15\x66\x2d\x34\xf3\x94\ \xc3\xb2\xc7\xe9\xea\xd1\x6b\x4d\x4c\x22\x50\x7f\x09\xdc\x63\x0f\ \xe6\xc6\x8e\xbd\xc8\xaa\x3b\xca\xe3\x04\x7e\x99\x52\x18\xe9\x87\ \x64\xd8\x19\xf5\x7a\xbc\x3c\x1a\xc7\x15\xdb\x20\x73\xee\x48\x90\ \x18\x24\xbc\x27\xab\x30\xd3\x04\x9d\x4a\x6c\x45\xf1\xa1\x94\xfe\ \x43\xd1\xb6\x80\x58\x3d\x4e\x88\xa4\x53\x3e\x97\xed\x52\x0b\x99\ \x78\x9a\xc0\x58\xf7\x75\x50\x11\x96\x3b\x2a\xe5\x1b\x89\x97\x70\ \x4b\xe4\xec\xed\x57\x6b\x45\x11\xd5\xde\xf9\x89\x70\x69\x05\x85\ \x03\x0e\x64\x8e\xd0\x1c\xbe\xf5\x18\x7c\x85\xad\x37\x7b\x8a\xd2\ \x67\x75\x26\x9d\x1b\xa8\x6f\x3d\xe6\x74\xc2\x3e\x61\x46\xcb\xd7\ \xfc\x66\x9d\x72\xc7\x9e\xea\xfc\x7a\xa4\x24\x58\x0a\x10\x77\xa3\ \x26\x75\x1a\xcc\xdd\xc0\xae\xe2\xe8\x4d\xbd\x94\x33\xe9\x31\x9d\ \x6c\xa9\xe0\x0b\xc1\xd7\xfb\x85\xb9\x2f\x29\x09\xd6\x88\x32\xa5\ \x13\xee\x7c\xbc\x3c\xf1\x42\x31\xdb\x32\xe6\x80\x78\x36\x68\x2a\ \xda\xe2\xc3\x17\x0a\x4a\xcd\xe1\x73\xf4\xbd\x8f\x37\xc1\x3a\x8c\ \xb1\x0f\xa1\x6c\xb6\xb9\x71\x04\xba\x28\xbe\x3e\x8c\xeb\x31\xf7\ \x87\xd0\x71\x8e\xb5\x7e\x6a\xae\xa8\x09\x8a\x3a\xa1\xf5\xeb\x0f\ \x88\x60\xcd\x8d\xcb\x12\x12\x4a\x21\xa7\x41\xf3\x47\x37\x05\x41\ \x94\xde\xc4\x2c\xa2\x32\x5c\xa2\x88\xea\x41\x77\xe4\x47\xa3\x6d\ \x15\x35\xca\xd1\x37\x88\x9b\x5d\x00\x24\xf5\xe5\xfa\xfa\xb1\x93\ \x9f\x2c\x1c\xd1\xa2\xed\x42\xe1\x4e\xd3\xa3\x32\xe0\x32\xe7\xc8\ \x45\xf1\x99\xcb\x26\xba\xe4\x8d\x53\x2b\x2e\xbb\x39\x51\xb0\x35\ \x39\xa9\xd7\x03\xb2\x57\x65\x52\x31\xf1\x9e\xc2\xa2\x18\xcb\x67\ \x1d\xe3\x7a\x3d\x83\x10\x6c\x43\xcd\x71\x9e\x92\x27\x94\x08\x54\ \x23\x26\x56\xc6\x87\x6f\x1f\xea\xa1\x7e\x80\x1c\xb2\x2a\xe1\xb4\ \xb2\xa9\x90\x7c\x42\xe5\xbd\x53\xa2\x4b\x04\x25\x53\xa2\xe6\x89\ \x8a\x6d\x14\x8c\x60\xf1\x11\x16\x40\xc3\x1b\x9f\x7d\xf2\xe4\x1b\ \xf7\xab\x9c\xb0\x68\x45\x3d\x3a\x14\xc5\x00\x5e\xc5\x37\x14\xc0\ 
\xc0\xa9\x8f\x42\x9c\xa8\x72\xfe\x48\x28\xe3\xce\xee\xdf\x2b\xeb\ \x2f\xc5\x1d\x2d\x1f\x77\xf4\xdf\x7c\xe9\xcd\x97\x8b\x26\xcd\x92\ \x1c\x97\xa8\xbe\xc3\xd7\x4b\x36\x0f\x71\x23\x98\x90\x3f\xf8\xde\ \xd5\x1b\xc6\xdf\xdf\xab\xeb\x23\xa3\x79\x79\x45\x54\x9b\x81\xf1\ \x7c\x61\x04\xfc\x77\xa1\x4d\x4c\x30\xd6\xd8\xd2\x7e\xc5\x7c\xf8\ \x76\xe4\xf6\x4d\xeb\xf5\x9a\xd1\xfb\xc5\x85\xdd\xf1\x51\x51\x25\ \xbf\xa3\xcf\x54\x92\xe1\x93\xd9\x4e\x53\xa8\x6e\x57\xd3\x82\xd8\ \x21\x63\x9c\x0e\x71\x2d\xbe\x5e\xbf\x8a\xca\x25\xfd\xda\xf7\xdf\ \xfa\x74\x1d\xb0\x33\xc8\xcc\xab\x1f\xdd\x33\x66\xf7\x36\x66\x1a\ \xe2\xa2\xed\x76\x92\x4c\x93\x0f\xb6\x0e\x54\xee\xc8\x38\x33\x3c\ \xd0\xf9\xf8\x87\x8f\xc2\xe3\x6c\x9a\x64\x9d\x2f\xff\xaa\xdc\x85\ \x67\xd7\xfb\xa0\x6f\x68\x24\xe8\xf5\xf9\xf5\xfa\xdc\xfb\x1c\x3e\ \x27\x8f\xc1\xff\xdb\x85\x6c\x7c\xe8\x02\xbc\x8f\x17\x52\xa3\x42\ \x3f\x5d\x58\x32\x87\x5e\x46\x9f\x2b\x61\x5d\x9f\x5f\xdf\xc7\xf5\ \x5a\x10\x43\x4c\x6b\x29\x18\x2b\xa7\xeb\xeb\xc7\xa2\x4b\xbe\x57\ \xe3\xfe\x97\xff\xfc\x70\x37\xd3\x03\xa1\xfe\x7d\x9e\x1f\xdd\x9e\ \x5f\xaf\x3a\x2f\xf5\x7a\xdd\x51\x23\x4b\x9e\x6c\x4f\x7b\xa3\x3f\ \xcf\x02\x69\xd1\x0c\xc4\xd6\x5b\x0f\x6c\xfd\xee\xcb\x6c\x4b\x87\ \x67\xb2\x23\x67\x28\xe3\xc8\xc6\xae\xa8\xb0\xdf\x14\x82\xad\x22\ \xa4\xd8\xd2\xc5\x66\x49\x2d\xf6\x9a\x46\x31\x13\x9f\x56\x50\x6a\ \x8c\xfb\xf2\x29\x7f\xee\xe4\xdd\xfb\x3e\x32\xa4\xb5\xe4\xbd\x10\ \xaa\xac\xec\x1f\x77\x97\xf4\x1f\xe7\x6e\x86\x18\x84\x44\x8c\xeb\ \xd0\xb6\x12\xe2\x7a\x96\x8b\x42\x29\x31\xc2\x46\x46\x0d\x5c\x3b\ \x08\x99\x6e\xa9\x47\x3f\x7d\xa8\xb0\xeb\xed\x64\xac\x29\x72\x56\ \x92\x5c\x62\x11\xc5\xdf\x5c\x2d\xda\xf2\x34\x61\xa3\x92\x44\x2e\ \x1b\xcb\x2a\xde\xd3\x7b\x65\x44\xcb\xa9\xa4\xca\xb1\xa4\xf3\xfa\ \xb7\xca\xc9\x0c\xce\x84\xa4\x46\x47\x21\xa8\x61\x0a\xbb\x9e\xf4\ \xfd\x64\x1d\x1b\x96\xb7\x69\xfa\x5f\x99\x62\xf1\xe4\x7d\x0b\x46\ \xea\x5a\x2c\x14\x99\x7e\xf7\xb3\x32\x5c\xc2\xec\x3d\x3b\x4a\xa0\ \x1c\x2b\x13\x5b\xea\x61\x8c\xcc\x42\xd0\xc8\x3e\xec\x72\xc6\xd1\ 
\x7b\xc6\x5e\xd2\x3f\xed\x70\x9d\xcb\x9a\x3f\xfc\xe2\x07\xd4\xba\ \x5f\xda\x17\x3a\x4a\xf6\xd3\x61\xc3\x42\x60\x0b\x50\xb6\xc8\xe0\ \x87\x78\x26\xd7\x5d\xd2\x63\x58\x87\xea\x36\x7f\xe2\x53\xc6\xa0\ \xfb\xa8\x33\xa4\x7f\xac\xcc\xfd\xfb\x9a\xc5\xcd\xd1\x0d\x51\x89\ \x31\xe9\x71\xb6\xed\x54\x3c\x13\x47\xaf\x1c\x10\x83\x45\x77\xd3\ \x1b\x95\xa3\xff\xbb\x8f\x07\x3f\x16\xbe\xcc\xef\xf8\xeb\x79\xb4\ \x4b\xea\x5d\x12\x0f\x63\xd3\x3b\x1a\x61\xeb\x09\xeb\x38\x7a\x69\ \xd4\x22\x72\x12\x24\xdb\x1f\x0a\xa7\xfc\xe7\xce\x37\x93\x77\xf2\ \x5f\x2e\x9c\xfe\x0f\xe7\xcb\xd2\x70\x24\x62\x2c\x40\xeb\x91\x54\ \x19\x5d\xb3\x0e\x7c\x5c\x3d\xdf\x4e\x50\xc2\xd1\xc2\x51\xfb\xda\ \xc3\x5d\x51\x25\xdc\x92\xca\xe3\xff\xea\x83\x8b\x90\x72\x49\x1b\ \xca\x44\x0d\x45\x58\x32\x9a\x49\x6b\xa1\xf4\xe1\xfb\x6c\x33\xe2\ \x16\x8b\xf4\x76\x11\xf4\xd0\x45\xa1\x73\x75\x03\xce\x87\x99\x2d\ \x1d\xb6\xc8\x5f\x34\xb8\xf3\x3b\x6a\xd4\x2f\x1c\xbd\x80\xaf\x8e\ \x61\x9f\xc6\xb0\xde\xd3\xf7\xea\x73\x6f\x2e\x3e\xeb\xbf\x77\x63\ \x75\xcf\xf6\x34\x95\x2f\x42\x3a\x9c\x48\x8c\xb9\xe3\xe8\xf7\x9e\ \x9d\xe8\x70\x79\x9a\xc8\x8b\x96\xdf\x9a\x71\x46\xd4\x3a\x81\xb1\ \x3e\xf7\xc0\x05\xa6\x9a\xd0\xf9\x4a\x19\xa5\xd0\xdc\xd0\x39\x76\ \x4b\x78\x9b\xf0\x9e\xa2\x54\x89\xdb\x4e\x09\xf0\xa9\xd7\xdb\xff\ \xca\xe8\x3b\xbd\x79\xa7\x4c\x67\xfc\x51\x63\x78\x7b\xf1\x8e\x9d\ \xbc\xb9\xa2\xb3\xc4\x4b\xe8\x7c\x5e\x62\x84\x9e\xc8\x54\x89\x1e\ \x67\x28\x4a\xbc\xc6\x7b\xa7\x84\x01\x72\xd3\x3f\xab\x4f\xda\xb7\ \x3d\xbd\x6f\xf3\x4f\xea\xc1\x9b\xa0\x2c\x5f\x6d\xd0\xe0\x4e\x5a\ \x5f\xaa\xab\x27\x58\x70\x8c\xfe\x51\xf3\xa3\x53\x5b\x69\x9a\xbe\ \x5e\x2f\x69\x75\x9d\x79\xeb\x9f\xfb\x80\x10\xf4\x3c\xb6\x47\xef\ \x02\x3b\xef\xe0\xe7\x5d\x3a\x18\x27\x25\xa6\x52\x4c\x53\x92\xc7\ \xb5\xee\x2c\x0b\x9c\x3a\x3a\xd7\xe7\xb8\xa2\x28\x53\x67\x4a\x13\ \x56\x8d\x38\xe1\x0c\xa8\x6d\x6f\xb2\x82\x07\xf6\x20\xeb\x84\xf0\ \xa9\xf8\xa4\xff\x39\xfb\xdc\x78\x09\x44\x5f\x7b\xb0\x8e\xe1\x64\ \x43\x11\x31\x88\x45\x6e\x5d\x63\x01\x62\x62\x49\xc3\x77\xc9\x1e\ 
\x23\xcb\xe1\x53\x5f\x1e\xec\xf0\xad\x24\xfa\xf7\x62\x31\x77\x1d\ \x79\xda\xed\x2f\xdf\x39\xef\x6d\x0c\x27\x73\x25\x70\xee\xed\x77\ \xc6\x3f\x9a\x00\xaf\x76\xa0\x6d\x25\xe5\xcc\x7e\x76\x8f\x93\xd9\ \xa7\xe1\xa5\x8b\xa0\x1d\x64\xe0\x74\x53\xe0\x42\xb6\xe2\x17\x94\ \xd3\xef\x94\x09\xf1\x72\x7c\x3d\xe6\x9e\xc9\x81\x81\xcd\xb7\xce\ \xac\x17\x8a\x4f\xf2\xd6\xe3\x63\xc7\xe7\x59\x65\x34\xad\xc7\x3c\ \x21\x63\x18\x2c\xd3\x44\x88\xa8\xbc\x4c\xda\xe3\x3c\x12\xd7\xdb\ \xa0\x7d\xaf\x49\x40\xee\xc6\x7b\xd5\x79\x7a\xe8\x4f\x9e\xd9\xbb\ \x68\x7d\xca\xe6\xab\x5d\xe3\x91\x12\x01\x10\x91\xfb\x59\xe4\xc5\ \xbf\xeb\x14\xca\x10\x0d\x95\xe9\xf9\xf1\x09\x31\xdd\x00\x14\xc6\ \xad\x0b\x9e\x09\xac\x88\x94\x6f\x42\x1c\x23\x95\x5b\xcd\xe4\xea\ \x9c\x3b\x22\x26\xb3\xa6\xef\xde\xc9\xf9\xf3\x8f\xb5\xa0\x01\x3b\ \xfa\x22\xe7\xe2\xcd\x53\xd8\x98\x29\xa3\x10\x0e\x5d\x78\x47\x8f\ \x8e\x43\x83\x8f\x1a\x8b\x50\x11\xdd\xad\x95\x06\xab\x2e\x3f\xff\ \xab\x77\x5e\x7f\x74\x1a\x46\x6f\x9c\x96\x15\x7a\x65\x82\x72\xf2\ \x2e\x54\x0e\xe3\x7a\x38\x28\x95\x08\xc0\x36\xd3\x07\x23\x00\xd9\ \x34\x52\xa6\xdd\x71\xa8\x26\x8b\xe5\xc0\x0d\xee\x8e\xca\x0a\xac\ \x93\xd9\x51\x4c\x29\x09\x7c\x84\xed\x72\xf7\xc0\x59\xec\x8f\x0a\ \x0a\xee\x86\x2d\xf5\x09\x1d\xf6\x94\x30\x54\x77\xd5\x1b\x7c\x1d\ \x17\x4a\x0c\x9a\x06\x22\xf6\xc3\x3b\xca\xd1\x2b\x6e\x7d\x6c\xd1\ \x22\xeb\x2d\x5d\xea\xd9\x3b\xfa\x51\xa9\xd6\x2f\xb1\x79\x02\x76\ \xb7\x07\x0d\x8e\xa4\x0e\xd3\x40\xcb\x9b\x13\x75\x6a\xfa\x74\x82\ \x2b\xca\x80\x1e\x3d\xfa\x67\xb5\x01\x66\x1b\x69\x6b\xf0\x7a\x53\ \x27\x0f\x5f\x1d\x31\x53\x31\x26\x18\xb6\xa7\xa0\x7b\x14\xad\x2f\ \x3b\x5a\xbe\xea\x38\xa3\xf2\x89\x53\xa2\xe9\x92\xbd\x3c\xa1\xf8\ \xf9\x75\x73\xa5\x68\x3b\xd5\x73\xa6\x20\xde\xc2\xae\x30\xf9\xb6\ \xa7\xbc\xf5\xe2\x36\x2b\xab\x06\xe3\x8c\x7d\x58\x6f\x1a\x9f\xb4\ \xbe\xf8\x3d\xa5\x1d\x9b\x36\x2d\xf4\x49\x86\x98\xdb\x9e\xb2\xa5\ \x51\x7e\x54\xe3\xfa\x28\xf5\x44\x35\x4c\xc0\xb6\x88\xaa\x0c\x5a\ \x0e\x22\x16\x15\xeb\xd9\xd0\xbc\x5e\xaf\x95\x70\x85\xad\x63\x48\ 
\x3d\xf0\xea\xa0\x05\x46\xc5\x5b\x4e\xbc\xa0\x40\x9a\x5a\x7a\xaa\ \x75\x34\xd7\x56\xbd\x9e\x4e\xa2\x25\x05\x0c\x28\x24\x58\x8b\x37\ \x98\x52\x9c\xbc\x76\x01\xbd\x91\x46\x42\xd4\x09\x7c\xbd\x6c\x6b\ \x95\x8f\x0b\xa0\x43\x7a\x34\x4b\xe2\xc9\x15\x50\x78\xb5\x4f\x8d\ \xe5\x74\xdd\x79\x32\x46\x0a\xdf\x69\xc8\xea\xf5\x4b\x52\x0e\xb5\ \xb7\x3e\xc5\xd7\xb3\xa9\x31\xbe\x5e\x82\x26\x85\x8a\xe0\x93\x98\ \xa8\x36\x4d\x26\x64\x55\x91\x2c\x2e\x3b\xaa\x16\x9f\x17\x4a\x63\ \x7a\xad\xd7\xab\xac\xd0\xd5\xe8\x53\x6f\xea\x04\x2e\x54\x18\x4a\ \x36\xfd\xd1\x7c\x7e\xbd\xc2\x57\x3b\x88\x75\x44\xf9\xd3\x1e\xbc\ \x1c\x3e\xa8\x9b\xd9\x89\xa9\x59\x67\xbd\xa3\x58\x28\xc5\xd7\x2b\ \xa3\x4a\x06\xbb\x63\x2b\x51\x25\x7e\xd1\xaa\xe1\xe1\xd5\x82\x67\ \xca\xb0\x22\x87\x78\x11\xd9\x13\x2f\x6b\x2b\xb6\x89\xaa\x2d\xce\ \xd0\x3d\x89\x29\x75\x97\x33\xc6\x33\x51\x63\xee\xb2\x79\x27\x73\ \xa0\x76\xd1\xfc\x7a\xd4\x9e\x0d\x15\x30\xb8\x13\x11\x28\x5b\xf0\ \x32\x0d\x34\x8d\x67\xe6\x9e\xb7\x7e\xdb\x29\x21\x49\x4a\x17\x61\ \x5a\xaf\xf7\x0b\x29\x95\xe7\x4b\x47\xca\x00\x69\xda\x86\x0c\x10\ \x2e\xc7\x53\xd9\x0e\x1a\x86\x6a\xbd\x5e\x7a\xdb\x31\x50\xad\x94\ \xd0\xcc\xb7\xf7\x33\xae\xd7\x9b\x90\x5a\x15\xf1\xf5\x5a\xc1\x33\ \xe9\x56\x86\x50\x5b\xb8\x1f\xc8\x93\xcb\x0d\xcd\x40\x18\xca\xec\ \xa9\x99\x12\x13\xf3\xe6\x3a\xbe\x9e\xce\x66\x14\x5f\x2d\x69\xf6\ \x7e\xfa\x36\x7c\xeb\xc3\x8f\xf0\x4c\xf4\x40\x2c\x92\xcd\x53\xcc\ \x3a\xa0\x16\x89\x42\x15\xcd\xc2\xff\x7a\xf4\x55\x6e\x7c\xbc\x22\ \x9d\xd6\xeb\xf3\xfc\xa8\x92\x50\x9e\xbc\xac\xf6\x89\xbc\x12\x57\ \xaf\xd7\x57\x69\x90\x93\x77\x03\x26\x1c\x07\x61\xf9\x72\xec\x69\ \x10\x2a\x13\xaf\xd0\x22\xf1\x9e\xd0\x26\x93\x50\x8f\x7f\x32\x0f\ \xdf\x3e\xf6\x57\xa7\x9d\x65\xe4\xed\xa8\x52\xde\x9b\x97\x95\x18\ \x0f\x57\xfa\xdd\xd4\x1f\x65\x4f\xd9\x4e\x3f\x52\xc6\xab\x92\xd0\ \x7f\xc4\xad\x64\x6e\xb0\x29\x87\xce\x0c\x94\x80\x24\x17\xcb\xe4\ \xed\x13\xb8\x50\x3b\xf7\xf6\xfc\x17\x99\x71\x88\x1d\x8d\xb8\x15\ \xb2\xc6\x02\xe5\x7f\x00\xc6\xe8\x29\x48\xf1\x9e\x42\x5a\x8d\x96\ 
\xcf\x95\x3d\x45\x58\x48\xd1\x7b\x3b\x0a\x54\x40\x6f\x28\x64\xbe\ \x1c\xb9\xbc\x4e\x38\x25\xa3\x00\xc4\xec\x2f\x42\x76\xfe\xd3\x06\ \x27\x3a\x24\x94\xc2\x9d\x16\xad\x7e\x9c\x31\x22\xea\xd1\x63\xee\ \x13\xb2\x2f\xc7\x8d\x8c\x4e\xc5\xbc\xe8\xc3\x2c\x08\xb1\xed\x59\ \x8c\x88\xa9\x96\x74\xe7\x61\x42\xa0\x18\xd9\x4a\x99\xc0\xaa\x3c\ \xe3\xa3\x03\x5b\x7a\x9e\x12\x62\x11\xec\x53\x38\xa9\xe5\xff\x47\ \x8a\xa8\x96\xa8\x5a\x04\xd8\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ \x42\x60\x82\ \x00\x00\x0f\x6b\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x01\xf4\x00\x00\x00\x4b\x08\x03\x00\x00\x00\xb1\xe3\x85\xac\ \x00\x00\x00\x42\x50\x4c\x54\x45\xd5\xd5\xff\xf6\xf7\xff\xda\xdb\ \xff\xef\xef\xff\xdf\xe0\xff\xe7\xe7\xff\xea\xea\xff\xea\xeb\xff\ \xeb\xeb\xff\xdf\xdf\xff\xf7\xf7\xff\xfa\xfb\xff\xf3\xf3\xff\xd7\ \xd7\xff\xff\xff\xff\xe6\xe7\xff\xdb\xdb\xff\xe3\xe3\xff\xd5\xd6\ \xff\xf2\xf3\xff\xee\xef\xff\xe5\xe6\xff\x96\xf6\xba\x85\x00\x00\ \x0e\xe4\x49\x44\x41\x54\x78\x5e\xed\x5d\xd9\x92\x24\x37\x08\x1c\ \xa4\x3a\x8f\xbe\x66\x76\xff\xff\x57\x0d\x24\x14\x52\xd7\x4e\xd8\ \x0f\x23\x47\x6c\x84\x52\x05\xd5\xf6\x9b\x9d\x83\x0e\x20\x55\x1f\ \xf7\x1f\x45\xc7\x7e\xdf\x77\x76\xf7\xed\x4e\xf7\x6d\xdf\xb7\x3b\ \x0f\xc1\x2e\x8e\xb6\x63\x3b\x88\x87\x81\x32\x2d\x94\xf9\xf9\xca\ \x05\x96\x3c\x2f\x6a\xcb\x3c\x09\x1e\xd3\x43\x9e\x71\x1c\x1f\x8c\ \xd7\xe3\x35\xb2\x29\x56\x19\x70\xc3\xb0\x0e\x6c\x6b\x5a\x13\x3f\ \xcf\xe7\x73\x65\x4b\x29\x3d\xd3\x2d\xdd\x6e\x1f\xec\x3f\x3e\xf0\ \x30\x7e\x94\x80\x8e\x1d\x9e\x84\xf5\x8d\xf9\xdf\x95\x6d\xf1\x02\ \x12\x53\xde\xc1\xbc\x72\x9e\x29\x33\xc0\xfb\xaf\x99\xc9\x16\xd6\ \xf3\x22\x98\x95\x72\xc6\xfc\x98\x9d\xf4\x51\x59\x07\xed\x41\xbd\ \x93\x9e\xd6\xf5\x29\x8e\xc9\x06\xe9\x37\x19\xcf\xf4\x71\xbb\x81\ \xf0\x06\xa4\xf7\x30\x87\x17\xe3\x58\xdf\xc5\xf1\x6b\x33\x10\xf3\ \x6d\xd1\x2e\xc8\x94\xd9\xd3\x42\xcb\x97\xc5\xfa\xfc\x4b\x1c\xa2\ \xfc\x8f\x91\xfe\x1a\x65\x54\x94\xaf\xaf\xd7\x60\xd1\x9e\x56\x01\ 
\x07\x79\x5a\x11\xe9\x37\xe6\x3b\xdd\xf0\xdc\x3e\x38\xda\xd9\xb5\ \x21\xbd\xc7\xfa\x2e\x41\x2e\xb1\xce\xcf\xae\xe3\xee\x91\x7e\x90\ \x71\x4e\x94\x65\x1c\x59\xc3\x3d\x30\xcf\x12\xe7\x32\x9c\xf4\x49\ \x49\x9f\x10\xe9\xc6\xbb\xd1\xbe\xca\x33\x30\xe9\x80\x44\x7a\x7a\ \x1a\xe1\x6e\xe9\x79\xbb\xd9\x14\xaf\x9c\x37\x20\xbd\x83\x84\x6d\ \x21\x9f\xd8\x8c\xf3\x3d\xd6\x74\xe7\x1c\x91\x9e\x17\x3a\x98\xf8\ \xaf\xe0\x9c\x1f\xb0\xee\xd3\x3b\xc6\xc8\xcc\x0b\xe9\x55\xa4\xaf\ \x6c\x03\x48\x1f\xd8\x56\x5d\xd3\x95\x78\x7e\x40\xba\x32\xfe\x94\ \x18\x67\x28\xeb\xb7\x9f\x26\xbd\x63\x87\xdb\x09\x3b\x38\x59\xd5\ \x85\x79\x00\x6b\x3a\x06\xf6\x71\x6c\xf9\xf0\x35\x3d\x38\x9f\x75\ \x76\x8f\x48\x9f\x46\x79\xc6\xc7\x0b\xac\x57\x91\xce\x9c\x8b\x07\ \xf1\xe0\x5c\xe7\x77\x4c\xef\x29\xdd\x94\xf7\x60\x9d\x9f\x1f\x26\ \xa0\x53\xbe\xcb\x20\x9d\xde\x39\xde\x69\x33\xec\x14\x6b\x7a\xec\ \xde\x25\xda\x63\x4d\x37\xcc\xc5\xee\x1d\xd0\xa9\x5d\x4d\xf6\x71\ \x3a\x04\x60\xdd\xa6\xf7\x97\xd0\xad\x90\x30\x07\x12\x66\x77\x9b\ \xdf\x99\xef\x26\x6b\x7a\xe7\x1c\x26\x5e\x28\x67\xf3\x75\xdd\xd7\ \x74\x1e\x94\x6d\x8e\x97\x20\xc7\x16\x1e\x98\x84\x70\x7e\x62\xf7\ \xce\x78\xf0\x33\x8a\x8d\x16\xe9\xfc\xc4\xee\x5d\x23\x7d\x78\xad\ \x80\xed\xde\x13\x3b\x90\xce\xb4\x4b\x98\xa7\x9b\x20\xf9\x9a\x3e\ \x01\xf3\xbc\x00\x59\xc7\x9c\x27\xfb\xb5\xe4\x1a\xf8\x67\x6c\x41\ \xf8\xb1\x5d\xe8\xc2\xe3\x50\x6c\x78\x9d\xab\x57\x3e\xb6\x12\x98\ \xf3\xf8\xa1\x8d\xa3\x00\xef\x4d\xd7\x40\x71\x9b\x9c\x71\xe5\x8d\ \xb5\xb1\x0d\x3a\x0a\xd2\x03\x33\x3f\xe0\x5c\x80\x3f\x82\x80\x10\ \x2c\x3e\xfb\x5f\x2a\xce\x1e\x01\x61\x1b\x9e\x0e\xda\xc1\xfe\x76\ \xc1\x5d\x6c\x07\xe5\x4c\x36\x69\x3a\x83\xdf\x08\x16\x62\x93\x7f\ \xe3\x53\x66\x03\x74\xd2\xa7\xe0\x3b\xcb\x5b\xa9\xce\xf3\xac\xaf\ \x12\x87\xbd\x41\xb8\xc4\x37\x58\x3f\x00\xda\xd8\xf0\xd6\x41\x1b\ \x50\x6c\x65\xee\xf8\x67\xb8\xfb\x1d\x26\xf1\x4d\xf8\x4d\x4e\xb9\ \x1f\x79\xf1\x6a\x86\x4e\xfa\x24\x9c\xcf\x59\xdf\xba\xb6\x30\xa6\ \xe0\x5d\x7e\x21\x75\x28\xac\xf3\x0f\x7e\x1f\xfa\xeb\x08\xe4\x63\ 
\xf3\x41\xf8\x4d\x94\x83\x74\x65\x9b\x30\x9b\x6b\xc4\x13\x29\xf5\ \x32\x24\xb4\xf5\x07\xf1\x60\xd7\x82\xec\x8e\x9f\xdd\xc8\x75\xec\ \x18\xd8\x98\x60\xc7\xe2\x3b\xb9\xcc\xbf\x62\xa7\xa3\x20\x2c\x95\ \xb4\x2c\x6c\x88\x2f\xc6\x82\xcd\xdc\x54\xec\xde\x1f\x23\x36\x72\ \x91\x9b\x79\xd8\xde\x5d\x73\xef\x76\x46\x5f\x93\x3d\x29\x79\xee\ \xfd\xa6\xc7\xb6\x27\xdb\xc7\xf3\x23\x35\x4d\xc3\x76\xea\xc9\x6c\ \x43\x36\x2e\x56\x38\x9c\xd3\xe9\xf0\xdd\x3b\x61\x8f\x54\x16\x5b\ \x2c\x33\x63\xa4\x33\xcf\xec\x41\x7a\x9c\xd3\x1d\xab\xda\xc0\x76\ \xee\xde\x93\xba\x74\xe6\xde\x8d\x79\x1c\xd9\x5a\x64\xe4\x3a\x34\ \x2d\x63\xb9\x77\xec\x4f\xad\xe6\x42\x1b\xc3\xcf\xe9\x9e\x7b\x17\ \x4f\x4c\xbd\x63\x0e\xe2\x05\x73\x04\xfa\x58\xe5\xde\x03\x5e\x65\ \x53\x73\xe2\x53\xe4\xde\xc5\xf0\x30\x9a\xe4\xde\x3b\x88\x0d\xe7\ \x0e\x75\x98\xd9\xeb\x73\x7a\x99\x7b\xb7\xf3\x4f\xce\x14\xbc\x6b\ \x8d\x6d\x2e\x48\x7f\x48\x72\xa6\xce\xbd\x3f\xca\x44\x2c\x58\x2f\ \xcf\xe9\x4c\x38\x22\x5d\x89\xe7\xa3\x3a\xd0\x22\xf7\xde\xb1\xbb\ \x27\xf1\x1b\xdb\xbf\xd4\xd3\x17\x1e\x5f\x19\x83\xf1\xeb\x2d\x0f\ \x3b\xcf\x73\x44\xfa\x03\x69\x58\xb1\xd7\xeb\xf7\x6f\x4b\xcc\xac\ \x2f\xa7\x7c\x18\x12\xc2\xbc\x5a\xd3\x51\x5d\xfd\x3c\x59\x6f\x44\ \x7a\xe7\x5d\x52\x4c\x36\xb3\xff\x5b\x3d\x9d\x07\x18\x07\x9c\x73\ \xcd\x92\xd5\xd3\xfb\x14\x4d\x14\xec\x02\xeb\x8a\x42\x1b\xa3\x5c\ \xd3\xad\xca\xa6\xc9\x38\x75\x40\x9b\x8d\x5c\xcf\xbd\x47\x94\x5b\ \xce\x69\xfb\xae\x9e\x4e\x19\xb9\x77\xe1\xbc\xce\xbd\x83\xf7\xa8\ \xa7\x8f\xbe\x7b\x37\xc6\x7f\x47\xc5\x45\x06\xca\xaa\x8a\x54\xaf\ \xe9\xfc\x28\xe3\x9f\xc5\xfc\xfe\xd9\x8a\xf4\x1e\xea\xc8\xbd\x7f\ \x5f\x4f\x07\xef\xc8\x7a\x44\xac\x4b\x80\x2b\xe3\x58\xd4\x27\xc7\ \xe8\x4d\x14\x0f\xc5\xef\x32\xd4\x6d\x78\x3d\x3d\xbd\xd2\x1a\xeb\ \xb9\x45\xfa\x1a\xab\x7a\x13\xd2\x3b\xe9\xc4\xb6\xe9\x60\xae\x7d\ \x8e\xa7\xa2\x9e\x4e\x45\x0e\xb3\x9e\xdd\x79\x78\x2d\xfd\x0c\xf4\ \xc7\x3c\x71\xa4\xb3\xe9\xb2\xce\x28\xf8\x1e\x94\xef\x24\xbe\xae\ \xa7\x6b\x7b\x1c\xea\xe9\x4a\xbb\xae\xe7\x0d\xce\xe9\x1d\xbb\x97\ 
\xd9\xb0\x77\xd7\xf4\x32\x69\xfe\x71\x83\xed\x79\x23\xcd\xcd\x90\ \x4c\xed\xb4\xa0\x8f\xc2\x76\x72\x53\x16\x4c\x1a\xe9\xec\x8d\x74\ \x9f\xe0\x1f\x4a\x3a\x12\x33\x30\xc1\x60\x47\x36\x85\xae\xea\x5e\ \x4f\x47\x59\x35\x25\xdf\xc9\xa5\x14\xc9\x99\x09\x98\x97\x05\xa9\ \xd8\x09\x96\xc5\x66\xf5\xfc\x3b\xe7\xd3\xc8\x72\xb2\x07\xbf\x49\ \x72\x49\x3a\x55\xa9\x3f\xc4\x67\x36\xf6\xba\x72\x65\x92\x42\x4b\ \xde\xd9\x9d\x6f\x2a\x4c\xba\xc8\xf8\xd9\x50\x6c\x81\x69\x2d\xda\ \xff\xc7\x01\xf4\x5f\x4b\x2e\x7d\x92\xf9\x6f\x08\xd2\x03\x59\x4d\ \x93\x04\xd1\xb7\x55\x62\xa1\x6c\x75\x60\xab\xb5\xc9\x0b\x20\x31\ \xf5\x36\x93\xc1\xd3\xa5\xbe\xe6\x53\x1f\xce\x34\x9e\xb7\x44\xc7\ \xc9\xc9\x3f\x42\xa7\x31\x7a\xc1\xe5\x5c\x4f\x72\x59\x4f\xbf\x02\ \x89\x05\x78\x79\x88\xad\x00\xc8\xb6\x2a\x9b\xfe\x0c\xc6\x81\xa8\ \xb6\x59\x9b\x01\xc8\x16\x92\xc1\xba\x2d\x90\x0d\x2b\xeb\x9d\x74\ \xd0\x3e\x63\x76\xcf\x66\x5e\x63\x8b\x19\x7e\xc1\xbc\x8e\x5f\xf2\ \xdb\x6a\x6c\x19\xb3\x3c\x3b\xab\xad\x09\xeb\xa8\xb1\xed\x79\xdb\ \x48\x3c\x83\x48\x1c\xaa\x6c\x62\xcc\xb5\xfe\xc2\xec\xce\xcf\x2e\ \x6f\x79\x29\xf9\xf4\x57\x31\xde\xab\x6c\xbd\xfb\x1d\x7f\xcd\xf6\ \x37\xee\xbb\x9a\x83\x34\x12\x32\x91\xac\x89\x1a\x25\xb4\x2c\xe7\ \x3e\x6e\x16\x6f\x3b\x39\x17\x3b\xa8\x63\x58\x72\x46\x53\xb0\xfc\ \x00\xb1\x75\x4f\x2b\x80\x8c\x1c\x12\xb1\xc8\xbd\x27\x6d\x8c\xc4\ \xd6\x1d\xfe\xa7\x09\xe8\xe7\x35\x14\x5d\x4c\xd1\x84\x07\x59\xb9\ \xb2\xe2\xa2\xf8\xbe\xef\x7d\x5e\xea\x1e\x39\x7e\x81\x74\x10\x2e\ \xa3\xca\xbd\xaf\xea\xa2\xd8\xa2\xa7\xf4\x90\x35\x99\xb4\x29\xb5\ \xca\xbd\x77\xf8\x1e\x14\xbb\xd2\x6d\x8f\xce\xc8\xa8\xa7\x1b\xbe\ \xeb\x7b\xaf\xb5\x6c\x28\xaf\x22\xc8\xd5\xd5\x0a\x17\x88\x5b\xd8\ \xa2\x9e\x1e\x5a\x36\x61\x3f\xa1\xbe\x26\x68\x58\x65\xeb\x7d\xef\ \x42\x3c\x1e\x64\xe3\x2e\x7d\xef\x60\xfe\xda\xf7\xbe\xe4\xfc\xa7\ \x6e\xd8\xc7\xe4\x2d\xd0\x4c\xf7\x58\x74\xc3\xae\xa7\x94\x6d\xb8\ \xd4\xd3\x43\xdd\xf2\x2c\xfb\xde\x3f\x7f\x9a\xf4\x9e\x9c\x71\x1d\ \x9b\x98\x32\x4e\x45\x3d\x9d\xa2\x9e\x0e\xfc\xb1\xef\xdd\xa2\xbc\ 
\x10\x30\x22\x23\xe7\xd9\x19\x70\xee\xb8\x94\xd3\x8b\x1e\xe8\xf4\ \x34\x2d\x5b\xd9\xf7\xde\x86\xf4\x1e\xea\x5a\x65\xd3\x73\xc9\xf7\ \xf5\x74\xf0\x1e\x7d\xef\x45\xe7\xf1\x82\x11\x79\x58\x61\x1c\x91\ \xfe\xe2\x17\x5b\x10\x0e\xc7\xf6\x4d\x3d\xfd\xc6\x3f\xa2\x87\xa2\ \x59\xa4\x77\xd2\x23\xd7\xe4\xeb\xb9\x81\xe8\x40\xfe\xdd\x38\xcf\ \x59\x1d\x31\xc3\x91\x7b\x77\xc1\x43\x31\xbd\x83\x74\x8b\x74\x4d\ \xc5\x7e\xaf\x4f\x4f\xb5\x3e\x1d\xcd\x52\x1f\x09\xe2\xf4\x56\x6b\ \x7a\x5f\xd1\x49\x4d\x13\xcc\xf7\x4d\x5d\x28\x18\x7d\x3d\x07\xe7\ \xe2\xa1\x23\x10\x03\x42\x9d\x5e\xd6\xd3\x99\x79\x44\x7a\x74\xce\ \x5c\xf5\xe9\x6b\x7a\xaf\xa7\x0b\x70\x60\x0b\xce\x9b\x91\xde\xbb\ \x67\x78\xa0\x96\xee\x1d\xfe\x78\xd9\xba\xee\x9b\x77\x21\x9b\x34\ \xb3\xe9\x98\x8b\x1e\xb9\xd9\x54\xab\xde\x23\x87\xc3\xfa\x6b\xbc\ \xe8\xd3\x11\xed\x86\xd7\x59\x4f\xf7\x9e\x48\xb8\x36\xfa\xf4\x0e\ \x57\x66\x10\x8a\x07\xbb\x86\x7a\x71\x4e\xcf\x16\xe5\xfa\xc6\x31\ \x5d\x94\x03\xba\x94\x13\xb4\x6c\xfa\x7b\x42\x39\x7d\x3a\x31\x5a\ \xe3\xcc\xf8\xba\x34\xc3\x96\x7c\x87\x6a\x15\xe2\x45\xc6\x13\xe9\ \x19\x7d\x3c\x37\x53\x15\x5c\x26\xcb\xbd\x87\x96\x6d\xae\x94\x0e\ \x53\xf6\x2e\xbe\x85\x2c\x0b\x4b\x8b\xe9\x1b\xe4\x8d\x5f\x50\x36\ \x65\x36\x7d\x18\x59\x3d\x40\xa1\x70\x21\x8a\x53\xec\xee\x55\x36\ \x79\x21\x4c\x5c\xec\xb0\xc3\xfd\x18\x3a\x2e\xa4\x33\x72\xa1\x65\ \x9b\xc5\x09\xf9\x05\x0e\xa9\xc0\xa1\xd6\x42\x78\x65\xdf\x8c\xea\ \x1b\x75\x35\x2b\xb8\xf0\x6f\x4f\x43\xd6\x40\x51\x8d\x1d\x66\x41\ \x71\x62\xca\x2f\x89\xc3\xef\xff\x05\xbd\xca\x16\x5a\x36\xb6\x77\ \x7c\xd9\xfa\x43\x47\xc6\xc8\x11\xdf\xa8\xa3\x97\x2a\x46\xd4\xd0\ \x0f\x04\x75\x09\x21\xd9\xb5\x6c\xc4\x86\xcb\x59\xc4\xe0\x37\x17\ \x2f\xb6\x97\x36\x75\xd2\x01\x68\xd9\x30\xec\x55\x01\x5a\x36\x0c\ \x53\x2a\x1f\x00\x65\xe5\x9b\x0e\x44\x78\xde\xf3\x81\xdc\xa3\x91\ \x4e\xae\xf3\x61\xb8\x96\x4d\x6c\x27\xaf\xa7\x8b\xe9\x10\x6c\x6c\ \x7f\x89\x6e\xb5\x57\xd9\xfa\xfe\x9d\xde\x7b\xe4\xbc\x6d\x08\x19\ \xb9\x4c\xe7\x39\x3d\x17\x19\xb9\x5f\x93\x9f\xd3\x97\x8b\xc2\xc5\ 
\x1b\xe4\x90\x7a\x67\xe7\xb0\xbc\x3b\xbf\x79\x58\xdf\xbb\x6f\xe6\ \x90\x8e\x93\xf4\xbb\x0b\x9b\xc4\x5a\x90\xde\x13\x72\xea\xd0\x08\ \x76\x07\xeb\x40\xd9\xf3\x5e\x9f\xd3\x81\x5f\x9e\x7b\xcf\xf5\xe5\ \x81\xd1\xf7\x7e\xd9\xbd\x0f\xc3\x4b\x86\xe6\xe2\xe2\x1e\x39\x88\ \x5b\x40\xba\xeb\x5b\x8c\xf5\x56\xa4\xf7\xbe\x77\xbb\x5d\x6a\xdb\ \x91\x9c\x8b\xdc\xbb\xf1\xfe\x76\x4e\xbf\xf4\xbd\xcf\xcb\x9c\x6b\ \x59\x93\x0b\x18\x85\x77\xa8\x56\x1d\xab\x8f\xb8\x73\x86\xf3\x71\ \x48\xc3\x2a\x6e\xb8\x68\xa8\x75\x95\xad\xe7\xde\x37\x3f\xa7\xa3\ \xfb\xd9\x73\xef\xbe\xc3\x29\xce\xe9\x39\x38\x8f\xbe\x77\xf6\x0a\ \x50\xce\x66\x7d\xef\x3c\xca\xc2\xea\xea\xc9\x77\xc7\x79\x8f\x5c\ \x3a\xef\x9c\xb9\xdd\xd8\xb7\xd3\xb2\x75\xec\x30\x72\x95\x0b\x69\ \x8c\x57\x0a\x97\x4a\xcf\x26\x5b\x62\xfa\x53\xdf\xfb\x8c\x84\x5c\ \x5c\x34\xe4\x55\x36\x49\xc1\x56\xb4\xf3\x03\x7d\x3a\xac\xae\xa7\ \x43\xbc\x28\x9c\xab\x42\x5d\xa2\xbd\xcb\x9a\x9a\xb0\x8e\x7b\x53\ \x38\xca\xd9\x61\x4d\xc7\xa8\x14\x2e\x5e\x56\xfd\x22\xeb\x7a\x0f\ \xe6\x17\x10\xef\xa4\x3f\x66\xb6\xa8\xb2\x5d\x72\xef\x43\xe8\xd3\ \x87\xa1\xca\xbd\x3b\xe9\x89\x4d\xa8\x17\xba\x3f\x1b\x92\xde\xa7\ \x77\x35\xe5\x1a\x3e\x04\x8c\x88\x72\x31\x88\x17\x2b\xc6\xab\xdd\ \xbb\x47\x7a\x90\x2e\x0e\x1b\x39\x36\x60\x88\x82\xfa\x90\xde\xeb\ \xe9\xb0\xe4\x8a\xd5\x67\x2b\x01\x63\xa7\x9c\xe4\xb5\xfb\x9a\x8e\ \x19\x1e\xe6\x33\x3b\x19\xe7\xf4\xc5\x00\xe7\x62\xe0\x3c\xd6\xf4\ \xe9\x84\x57\xd9\x94\x70\x71\x8e\xe1\x1b\x7d\xba\x13\x0e\xf9\x22\ \xc7\xfb\x53\x58\x6f\xd4\x18\xd9\x49\x47\x05\xc1\xef\x7b\x67\xaa\ \xb5\xcd\xfb\x04\xf2\x56\x27\x4c\x41\xe0\x88\xce\x99\x7a\x23\xe7\ \xdd\xb0\xa5\xa4\xc9\xf5\xe9\x75\xeb\x4c\x4a\xa5\x3e\xfd\x5d\xa9\ \xdc\x6a\x23\xd7\xa7\xf6\xb8\xef\x9d\x4c\xcb\xb6\x6d\xea\xf7\x7c\ \x88\xa1\x0f\x9a\xb4\xf9\x99\xc4\x84\x68\x8d\xf4\x49\x09\x57\x35\ \xdb\x3c\x95\xc9\x19\x36\x24\x67\xae\x0a\x75\xe1\x3d\x79\x0f\x74\ \xac\xe9\x02\xbb\x94\x80\x17\x73\xa3\xdd\x0a\xea\x55\x1a\x56\x9c\ \x6b\x1e\x50\xe1\x43\x65\xad\xf4\x4b\x16\x67\x35\x36\xbb\x4b\x8e\ 
\x32\x65\x54\xd9\x16\x68\xd8\x8e\x83\xd8\x23\xf7\xb4\x6f\x6c\xc8\ \xc2\x8b\xdd\xed\xbf\xff\xd4\xb2\x6d\xa4\xe9\x2b\xcf\xbb\xb3\xdb\ \xed\xb4\xcb\x1e\xef\x86\xe8\xb9\xf7\xa9\xa8\xb9\x54\xb9\xf7\x39\ \x97\x20\x1e\xd9\x55\x4d\xae\xb1\xa6\x23\x40\xa1\x64\xb3\x93\x29\ \xbd\x5f\x22\xe8\x1a\x36\x7e\xe3\x89\x2a\x1b\x3f\x74\x76\xa0\xec\ \x1e\x42\x2d\xd0\x4b\xab\xc0\x1c\x7c\x9b\x3c\x3e\xc3\x3b\x48\x2d\ \xda\xf4\x85\x74\x33\x52\x03\xe2\xc6\x48\x63\x7d\xab\x81\x3c\x15\ \xf8\xd6\x41\xe2\x2c\xd6\xd1\x4b\x4a\xec\x83\xfb\x06\xe8\xa4\x23\ \xd6\x31\xc3\xcf\xa8\xb3\x99\xca\x46\xcd\xfd\x22\xb6\xe0\xd7\x71\ \x2c\xaa\x62\x5b\xec\xc6\x48\xb1\x83\xb2\xdf\x03\x4c\x6a\x1b\xd4\ \x6c\x0a\xbc\xa1\x63\x93\x37\x33\x0e\x2d\x5b\xa8\x95\x77\x36\xd0\ \x2e\xa0\xbf\x81\xed\x5e\x65\xeb\xd8\x31\xb0\x7b\x67\xc3\x2e\x86\ \x47\xde\x37\xca\x3b\x99\x92\x0d\x5a\x36\xb1\x42\xcb\x06\x5b\xf2\ \x6c\x11\x08\x20\x25\xf7\x50\x03\x5e\x27\x92\x9f\xd8\x8a\xdc\x3b\ \x76\xef\xb0\xdb\x99\x7d\x6f\xaa\x70\xe9\xa4\x17\x97\x0d\x6d\xdb\ \x7b\x95\x6d\x0b\xad\x03\xf2\xee\x97\xbe\xf7\x45\x5d\x34\x46\x8a\ \xb1\x03\xe9\x2f\x3c\xd7\x7a\x8b\x21\xc9\x03\x97\xe2\xc8\x26\x8f\ \xa2\xe9\x8d\x91\xbd\xef\x1d\x37\x99\xdb\x5e\xf5\x92\x7b\x37\x44\ \xff\x51\xd5\xf7\x7e\xd5\xb2\x8d\xd6\x19\xf9\x3a\x2f\x0f\x54\x0c\ \xe6\xd0\x04\xeb\x61\x5e\xdd\x2e\x65\xca\x55\xb4\x45\xb6\x8e\xf4\ \xde\xf7\xce\x5c\xbb\x6d\x97\xdc\x3b\xa2\x5d\xb7\x45\xc2\xf9\xa5\ \xef\x3d\xbf\xab\x56\x47\x41\x7c\x96\x8d\x3d\x08\x1f\xca\x48\x8f\ \x7a\x3a\x90\x54\xe2\x82\xc1\x87\xf5\x76\xc9\x99\x9e\x9c\x11\xd0\ \xee\x79\x58\xda\xdf\x72\xef\x31\xbf\x53\x16\xa3\x6a\x7e\x9f\xdf\ \xb5\x6c\xe3\x24\x0e\x78\x08\xe3\x9a\x88\xf5\x50\xff\x8d\x46\x0a\ \xe3\x7d\x88\x7a\x3a\x48\x7f\x5a\xd3\xcc\x67\xfa\x4c\x6d\xd7\xf4\ \xfe\xe5\x1e\xbb\x47\xae\x54\xb7\xa8\x95\x51\x4e\xc6\x37\x46\x5e\ \xa8\xe0\xfc\xa2\x65\x1b\x2d\xd2\x79\x44\xa0\x87\x98\x0d\x64\xa7\ \x61\x88\x7a\xba\x91\x8e\xa2\x2a\x87\xfc\xfa\xe9\x71\xde\x88\xf4\ \xbe\x77\x8f\xc6\xee\xcd\x39\xb7\x48\x0f\xce\x11\xe9\xc6\x79\x66\ 
\xbb\xce\xef\xbe\xa6\x8f\xe7\x8d\x91\xf2\x44\xa0\x5f\xd4\x0e\xd7\ \x7a\x3a\xae\x92\xfb\xe4\x50\x17\xdf\x3e\xd2\x7b\x79\x15\x92\x65\ \x0c\xdf\xbd\x57\x5a\xb6\x7c\x78\xb4\x57\xf7\xbd\x97\xdf\x65\x63\ \xaa\xc5\xf3\x1b\xbb\xf7\xd0\xa7\x03\xab\x8d\xaa\xca\x66\x4d\x72\ \xc8\xbe\x4b\xa4\x33\xeb\xb7\xf4\xd9\x32\xd2\xfb\x7d\xef\xe8\x93\ \xb3\x1b\xd3\xaa\xdd\x3b\x86\x21\xfb\x70\xcc\xd5\x77\xd9\xae\xf7\ \xbd\xbf\x34\xd2\xe5\x16\xe8\xd7\xb5\x9e\x9e\xd6\xe1\xaa\x4f\xc7\ \x4e\x8e\x29\x57\x7c\xf2\xf8\x79\xd2\xfb\x17\x1d\x00\x66\x1b\xba\ \x0d\x21\x9e\xde\x3b\x62\x61\xc0\xe5\xbe\xf7\xea\x26\x0a\xc1\x58\ \xde\xf7\x3e\x62\xfb\x0e\x0c\x55\x3d\x3d\xbe\xaa\x0c\xc2\x15\xd0\ \xa7\xcb\xec\x0e\xd6\x2f\x5a\xb6\xf2\xbb\x6c\xfa\xf6\x17\x0f\x87\ \x7f\x82\x42\xdf\x4b\x3e\xb5\x6c\x8e\x0d\x2f\xa2\xd0\xb2\x1d\x3c\ \x02\xa5\xce\x01\xfa\x3e\xfc\xf0\x0a\x1b\xf2\xb0\x3c\x14\xbb\x3d\ \xcd\xd0\x0b\x2e\xf5\x77\xd9\xa0\x8e\xbf\x02\xb5\x37\x5c\x14\xe9\ \x5a\xb6\x12\xae\xba\x87\x96\x8d\x30\xaf\x01\xd1\x2f\x86\x14\x65\ \x7c\x97\xed\x4e\x20\x5f\xa3\x03\x34\x6f\xff\x57\xee\xbd\xcb\x9a\ \x2e\x5a\xb6\x77\xde\x43\xcb\x46\xf2\xdb\xdb\xf6\x05\x54\x7d\x97\ \x0d\x5a\xb6\xe3\xd4\xb2\xed\x88\x71\xc0\xd6\xba\x1d\x65\xd5\xb8\ \x31\x92\x47\x68\xd9\x4c\xb9\xda\x94\xf9\x4e\xfa\x5c\x7d\x97\x8d\ \x6d\x62\xe7\xd5\x36\xab\xad\x55\x5a\xb6\xc5\xbf\xba\x69\xdc\xe7\ \xf8\x2e\x1b\xb4\x6c\x5b\x68\xd9\x72\x11\xed\xe4\xbf\xfc\xbb\x6c\ \x88\x70\x92\xf8\x0f\x9a\xb7\xbf\x25\xd2\x7b\x95\xad\x0b\xd9\xd8\ \x61\xde\x52\x49\x5b\x2c\x6f\x94\x91\x7b\x8f\xef\xb2\xe9\x7d\xef\ \x84\x62\x75\x68\xd9\xe2\xbb\x6c\x51\x71\x81\x96\x0d\x1f\x77\xb8\ \x1e\xd9\xce\xf6\x67\x6c\xe4\xd2\x93\x3d\xa4\x0e\xa8\xb4\x7d\x40\ \xdd\xf4\xd1\x44\xd6\xd4\xb1\x63\x47\xaa\xe9\x19\x14\x5c\xc2\x68\ \xb7\x9d\x3b\x1d\xe5\x37\x5c\x16\xca\xec\x4b\x2d\x9b\x7f\x97\x2d\ \xee\x0e\xf4\x4f\x74\x79\xcd\xa5\xbe\x7e\xc4\x53\xb0\xce\x79\x9c\ \xd3\x71\xdd\xfb\x07\x3f\xad\xab\x6c\x9d\x78\xcf\xbd\x13\xfb\x6f\ \x73\xef\x38\x05\xb9\xd2\x41\x49\xff\xd3\x77\xd9\x46\x65\x1d\x55\ 
\x36\x8b\xf4\xf1\x22\x6b\x7a\xc5\x05\xff\xea\x93\x67\xe4\x20\x58\ \x85\x6b\x15\xe9\x3d\xf5\x1e\x7d\xef\xdb\xbf\xe6\xde\xe3\x6b\x4d\ \x6a\x90\x34\xa9\x03\xe9\xd5\x97\x1d\xe2\x9c\x7e\xf9\x5a\x13\x5b\ \x80\x59\x8f\x2a\x5b\xd9\x03\xfd\xf1\x4c\xcd\x32\x72\x5d\xcb\x26\ \x5e\x0b\x6c\xca\x39\xbe\x24\x1d\xb9\xf7\xc8\xc9\xa1\xb4\x5a\xde\ \x0d\x3b\x63\x78\xeb\xfb\xc9\x39\x3b\xdc\x22\x67\xa3\x84\x7e\xc6\ \xe3\x25\x6e\x4d\x6a\x18\xa8\xac\x6a\xc9\xa5\x90\xa7\x37\x8b\xf4\ \x1e\xed\x68\xf0\xd4\x04\x84\x67\xa2\xea\xdc\xfb\x21\x86\xd4\x3b\ \x3f\x8c\x9a\xf7\xc5\x3f\xd1\x65\x60\xc2\xdd\x5c\xe5\x02\xac\x1e\ \xe9\xf8\x16\x9f\x73\xfe\x4c\xd6\x31\x25\x79\x77\x95\xb7\xa4\x96\ \xaa\xd5\x4e\xb8\x18\x34\x2e\xec\x91\x79\x02\xeb\x00\x73\x4d\xca\ \xb8\x07\xfb\x42\x84\x63\xb1\x60\xae\xd7\xf4\xd0\xb2\xd9\xb7\xd9\ \x4c\xbe\xe8\xac\xaf\x70\x2e\x70\x19\x10\xe9\x4f\x1e\xab\x92\xee\ \x84\xd7\xdd\x52\xff\x00\x0e\x5e\x0c\x4e\xab\x94\x1d\x32\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x58\x32\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\xdc\x00\x00\x01\x7c\x08\x06\x00\x00\x00\xa4\x31\xd5\xdb\ \x00\x00\x57\xf9\x49\x44\x41\x54\x78\x5e\xec\xdd\xc1\x4b\x55\x69\ \x18\xc7\xf1\xdf\xb9\xd7\xeb\x95\x0b\x95\x61\x10\x3a\xad\x24\x50\ \x50\x08\x12\x0a\x71\x56\x21\x83\x25\x48\x10\x11\x2d\x6c\x51\x11\ \x42\x81\x04\xdd\x6a\x46\xbc\xaf\x60\x50\xa3\xb6\x28\x89\x20\x72\ \x71\x21\x5c\xb4\x90\xac\x08\x93\x36\xcd\xb4\x72\x06\x23\x62\x86\ \x1a\x37\x52\xdc\xc8\x82\xb1\xc2\x6c\x91\xf2\xf4\x6c\x82\x10\xbc\ \x48\x59\x69\x7e\x3f\xf0\xe5\xf9\x0b\x7e\xbc\x8b\xb3\x38\xfa\x11\ \x00\x00\x00\x00\x00\x00\x00\x00\x82\xf4\x8b\xb7\x43\x79\x00\x31\ \x7d\x11\x9c\x94\xd6\x06\xa9\x5f\xd2\xed\xa8\xa0\xa0\x45\x5f\x0f\ \x18\x1c\xaf\x5a\x52\x7a\x68\xd2\x9e\x9f\xb6\x6e\x1d\x4a\x4f\x4c\ \x3c\x30\xb3\x94\xe6\x01\x14\xe8\x73\x30\xb4\x94\x49\xbf\x4b\x3a\ \x1c\x4f\x24\x26\xea\xbb\xbb\xfb\x6a\x5b\x5b\x9f\x4a\xba\xeb\xbd\ \xd3\xa2\x01\x18\xdb\x16\xef\x91\x67\xbd\x15\x15\x7f\x4c\x8e\x8f\ 
\x9f\x36\xb3\xc3\x5e\x99\x16\x07\x80\x43\x52\x22\x48\x1d\x19\xe9\ \x7d\x47\x2c\x36\x39\x7c\xfc\x78\xd6\xcc\x82\xb7\xdd\x2b\xf8\x64\ \x90\x45\xde\x21\xaf\x72\xb7\x14\xf7\x3b\xe4\x1d\xd3\xc2\x00\x08\ \x52\xa5\x37\xe2\x59\x4f\x69\xe9\xdf\xb9\xd1\xd1\xb3\x66\x76\xd4\ \x2b\xd7\x1c\xbf\x4a\xeb\x83\x34\x15\xa2\xe8\x49\x46\xea\x0c\x92\ \xf5\xd5\xd5\x75\x69\x41\x00\xc6\x76\xd0\x9b\xf6\x01\x4d\x0f\x34\ \x37\x5f\x9d\x9d\x9d\xed\x30\xb3\x5d\x5e\x91\xe6\x71\xa7\xad\xad\ \xcb\x5f\xc1\x99\x20\x59\x67\x32\xf9\xf6\xbf\x5b\xb7\xba\x42\x08\ \x31\xe5\x07\xc0\x87\x76\xfd\x4c\x49\xc9\xfd\xb1\xe1\xe1\x73\x66\ \x76\xc2\xab\x56\x1e\xed\x52\xcd\xa9\x54\xea\x5e\x90\x6c\x4e\x37\ \x95\x1f\x00\x33\xab\xf1\x82\xd7\xec\xad\x56\x7e\xca\x48\x17\x7d\ \xa4\x53\x17\xaa\xaa\xfe\x0a\x92\x5d\xae\xad\xbd\xdf\xdf\xd4\x74\ \x63\x60\xdf\xbe\x1e\x33\x4b\x28\x2f\xf0\x59\x00\x8f\xbd\x97\x51\ \x14\x3d\xd1\x02\x1c\x18\x19\xe9\x2e\xdb\xb4\xe9\xc5\x9b\x5c\x2e\ \x71\xbe\xbc\xbc\x26\x59\x5c\xfc\x7a\xef\xe0\xe0\xa0\xa4\x42\x6f\ \x95\xf7\xbf\x56\x2c\x44\x5a\x54\x30\xb3\xa4\xa4\x6a\x6f\x2c\x5b\ \x5f\x9f\xdd\xd8\xd0\x90\xfb\x39\x9d\x3e\xa2\xaf\x0e\x60\x7c\xdb\ \xbc\x16\x7d\x13\x00\x83\x8b\xb4\x52\x01\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\ \x47\x1f\x80\x8c\xd4\x16\xa4\x57\x7e\xc3\xca\x18\x1e\x62\xfa\x6e\ \xb0\x7a\xc3\x86\x7f\x24\xad\x89\x45\x51\x26\x23\xdd\x0b\x52\x8b\ \x96\x13\x30\xb8\x20\x15\xfe\x26\x95\x6a\x19\x98\x7a\xfe\xfc\x48\ \x2c\x1e\x9f\x29\xdd\xbc\xf9\xdf\xd4\xba\x75\x25\x51\x14\xf5\x06\ \x69\x67\xbb\x54\xb7\x5b\x8a\x6b\x29\x02\x32\x52\x3a\x48\x5d\x92\ \x22\xbf\x97\xbc\x67\x5a\xe2\x82\xd4\xe4\xd9\x95\xc6\xc6\x3f\xcd\ \x2c\x5c\xdb\xbf\x7f\x28\x48\xf6\xb1\x8c\x94\xd5\x52\x04\x7c\x68\ \xef\x2a\xa0\xaa\xca\xda\xf6\xbe\xa2\x82\xd9\x1d\x60\x8c\x9d\x63\ \x8d\xdd\xdd\x39\xea\x98\x63\xd7\xd8\x8d\x88\x08\x16\x12\x62\x61\ \xa0\x98\x01\x82\x8d\x05\x82\x01\x8a\x8a\x58\x58\x18\x08\x62\x22\ 
\x29\x20\xa1\xf2\x3f\xef\x5d\xfb\xac\x75\xbf\xfb\xe3\xcc\xe0\x10\ \xd7\x99\xf7\x59\xeb\x59\x87\xb3\xcf\x61\x3e\xef\xb7\x79\xef\xde\ \xfb\x8d\xe7\x5d\x96\x23\xc7\x7a\xf9\x47\x7a\x08\xd7\xaf\x1b\x6b\ \xd5\xa2\x3f\xe2\xc2\x3a\x6a\x68\x79\xc1\xc9\x60\xa0\x79\xae\x5c\ \x51\xd1\xaf\x5e\x99\x7b\x5b\x5a\x56\x82\x81\x85\xad\x2e\x5a\xf4\ \xbd\xfb\x82\x05\x07\xb6\x36\x6e\xec\x4b\x46\xb7\x2c\x67\xce\xea\ \x42\xd7\xc0\x60\xc4\x85\x85\x35\xb4\x2e\x53\xe6\x95\xb2\x3a\xec\ \xeb\xd1\xc3\x7b\x47\xdb\xb6\xdd\x74\xd4\xe0\x9a\xc0\xb8\xbe\x90\ \x41\xed\xea\xd0\xc1\x17\xab\x5b\x07\xdc\xdb\xd2\xfd\x15\x5b\xdb\ \x3d\xb8\x1f\xb3\xae\x6a\xd5\xeb\xa6\x2a\xd5\xd7\xd7\xfe\xfe\xad\ \x31\x5e\x07\xdc\x6d\x22\x44\x4f\xc1\x60\xe8\xc0\x8a\xf1\x13\x56\ \x87\x5b\xd2\xd8\xb4\xf9\x93\xd0\x41\x5c\xb5\xb5\x5d\x67\x55\xaa\ \xd4\x9b\x35\xa5\x4a\x85\x5a\x97\x2d\x5b\x1b\x06\x95\xb4\xbe\x7a\ \xf5\x07\x30\xb6\x29\x66\xfa\xfa\x6d\xc9\xf8\xb6\xb7\x68\xe1\x8f\ \xfb\xe6\x78\xd6\x50\x63\x9b\x69\x2f\xb2\x12\x0c\x76\x9a\x7c\x11\ \x22\x57\x76\x7d\xfd\x6c\x3f\x75\xe9\xe2\x9a\xa7\x58\xb1\xb0\x7c\ \xa5\x4b\xbf\x1d\xe4\xea\xba\xb9\xd7\xf6\xed\x9b\xe6\x44\x44\xe4\ \x12\x3a\x88\x26\x33\x66\x6c\x9d\xf1\xf2\xa5\xed\x8c\xe7\xcf\x1d\ \x12\xa2\xa3\xcd\x31\x94\x23\x6f\xc9\x92\x71\x31\x61\x61\xee\x5f\ \x13\x13\xad\xb3\xe5\xc8\xf1\xa9\xfb\xa6\x4d\x1e\x18\x7f\x96\xb7\ \x44\x89\x10\x01\xe4\x29\x5e\x3c\xac\xec\x2f\xbf\xe4\x81\x11\xaa\ \xe7\x2c\xcb\x9c\x2a\x0c\x06\x56\x82\xd9\xe0\xc2\x0d\x35\x6a\xf8\ \xaf\xc8\x9f\x3f\x1a\xf7\x5d\xc0\xbe\x60\xa5\xbf\x58\x1d\xab\x61\ \xd5\x70\xc4\xb5\x49\x16\xfc\x9b\x73\x81\xb5\x2e\x59\x59\x55\xd9\ \xd3\xa5\x8b\xb7\xf3\xc0\x81\x47\xb0\x82\x8d\xa3\x95\xcc\x75\xe8\ \x50\x77\x3c\xeb\x25\x57\xf0\xf2\x34\x76\x68\xd0\xa0\xf3\x18\x5b\ \x02\x16\xc1\xbd\x33\x9d\xf9\xc0\x49\x59\x11\xbf\x63\xb0\xc1\x15\ \x02\x73\x7a\x2c\x5c\x38\xe0\xc6\x96\x2d\x8e\xb8\x2f\x2b\xfe\x1c\ \xe4\xcd\x9c\x06\xc6\xe3\xac\x14\xbb\xbd\x79\xf3\x5f\xb3\xf0\xdf\ \x5e\x18\x1c\x06\x1a\xc3\x89\xe2\x44\xc6\x65\x5f\xaf\xde\x5d\xdc\ 
\xe7\x95\x06\x57\x8b\xc6\x5c\x7e\xfb\xed\x30\xc6\x72\x90\x81\xad\ \x2c\x58\x70\xab\xc6\x36\x73\x8a\xc8\x22\x30\xd8\xf0\xf2\xc9\x3f\ \xde\xc2\x7f\xb2\xaa\x95\xc5\xca\xe0\x8e\x6b\xca\xca\xc2\x85\x03\ \x9e\x5f\xb8\x60\x8b\xf7\x47\xea\xc0\x2a\x9d\x13\xac\xe3\xbf\x7d\ \xfb\xf6\x9b\xdb\xb6\x59\xe2\x3e\xbb\x00\x4c\x84\x68\x4a\x86\x75\ \x74\xec\xd8\x3d\x8b\x85\xa8\x0d\x03\xf3\xa1\x7b\x72\x12\x9d\x9a\ \x31\xe3\xd8\xfd\x23\x47\x86\xcb\xcf\xe3\x02\x56\x14\x3a\x04\x06\ \xc7\xeb\x86\x82\x91\xe4\xa4\xd8\xdf\xb3\xe7\x91\xe4\xc4\x44\x33\ \x18\xdb\x6f\x60\x5e\x1d\x59\xa9\xf5\xc0\xa6\xe0\x10\x30\x87\x34\ \xb8\x8e\x64\x60\x70\xb0\x3c\xc1\x35\x69\xa9\x4a\x15\x77\xb0\x5f\ \x3f\xe7\xe4\x4f\x9f\xe8\xdf\x3e\x09\xec\x62\xae\xaf\xef\x2b\x57\ \xbb\x8f\xb8\x56\x11\x59\x09\x06\x63\x81\x10\x45\x60\x68\xea\xed\ \x9a\x45\xee\xdc\xcf\x03\x9c\x9c\x36\xc2\xc8\x16\x81\x0d\x85\x8e\ \x61\xa1\x10\xc5\xb0\x92\x55\xd7\x58\x91\x27\x2a\xdb\xc7\x35\x25\ \x4b\xde\x7c\xe1\xed\x4d\x2b\xf2\x02\xb0\x11\xa8\x12\x80\xb9\x81\ \xc1\xdb\x55\x85\x0b\x47\x38\x0d\x18\x70\xea\xd1\x99\x33\xd5\x44\ \x16\x82\xc1\x61\x83\x2e\x30\x36\x8a\xd1\x7d\xdd\xf6\xcb\x2f\x67\ \xe2\xc2\xc3\x2d\x28\xd6\xa5\xc3\x81\x71\x17\xf0\x33\xb8\x19\xdc\ \x44\x31\x3b\x9c\xed\x62\xdd\xe7\xcf\x77\xa5\x8c\x14\x70\x10\x98\ \x4f\xe3\xfd\x1a\x60\x0a\x1c\x2f\x57\x2e\x2d\x5f\xbe\x6d\x65\xa1\ \x42\x37\xe4\xd6\xd3\x09\xf4\xa1\x2d\xe9\xb7\x52\xe0\x44\x7a\x81\ \xc1\x98\x0d\x4f\x3a\xfd\xc1\x82\x5f\x91\x89\xf2\xe6\xea\xda\xb5\ \xdb\x60\x64\x26\x60\x2b\x50\x67\xc3\x21\x5e\xa6\xa6\xf5\x90\x29\ \xe3\xaf\xac\x6a\xcb\xf3\xe6\x7d\xf3\xe1\xf1\xe3\xd5\x30\xb2\x59\ \x60\xf5\x54\xb6\xc9\xb3\xe9\xbd\x0d\xd5\xab\x3f\x96\x86\x9a\xb2\ \xb2\x48\x91\x29\xab\x8a\x16\x35\xa1\x71\x32\xd8\xd4\x82\xe6\x78\ \xe6\x09\xee\x98\x2f\x44\x01\xc1\x60\xfc\xd3\x6d\x19\xa5\x4e\x81\ \x29\xc8\xda\xb8\x1c\xf9\xe2\xc5\x4a\x0a\x2a\x83\xa5\x7f\x10\xc7\ \xcf\x78\xf2\xb6\x5a\x95\x2e\xfd\x86\x56\x66\x04\xca\xa7\x61\xcc\ \x40\xa4\x02\x78\x59\x3d\x14\xe3\x34\xcb\x91\x23\x96\xae\xa7\xa7\ 
\x4f\xdf\x82\xb4\xb0\x17\xb8\x4f\xda\xdd\xa9\xd3\xe5\x8b\x16\x16\ \xbd\x35\xd2\xca\x72\x92\x91\x2d\xd3\xd7\xf7\xc1\xf6\x3a\xc8\x71\ \xe4\x48\x03\xf1\x0f\xc1\xe0\x6d\xa4\xc1\x8a\x02\x05\xce\x21\x4c\ \xb0\x53\x6e\xc3\xba\x82\xd9\x7f\x20\x6f\xab\x0a\xac\x9f\x94\x90\ \x30\xef\x99\xa7\xe7\x3a\x72\xa4\x7c\xeb\x73\xc2\xe0\x12\xd4\xe9\ \x6c\xdd\xbb\x7b\xef\xea\xd4\x49\xed\x3c\xc1\x99\x2e\x0a\xce\x95\ \xc4\x2b\x36\x36\x8e\xf2\xbc\xd7\x5f\xbe\x7f\x17\x2b\x5e\x10\x38\ \x30\x39\x39\xb9\xdf\xdb\x80\x80\x35\xf4\xbf\x23\x18\x8c\x74\x58\ \x29\x86\x83\x33\xc1\x8a\x3f\xf0\x67\x30\x00\x5b\xff\xd9\xca\xec\ \xbe\x70\xe1\xcc\xf3\x26\x26\x87\xf0\xce\x62\x38\x55\xae\x92\xc1\ \x2d\xcd\x96\xed\xf3\xc5\xe5\xcb\x77\x63\x6c\x9a\xe6\x79\xcf\xa1\ \x79\xf3\xd1\x58\xd5\xc2\xe9\x1d\x18\xea\x41\x3c\x6b\x9c\x5e\xdb\ \x6b\x06\x1b\x9c\xfe\x7f\xe4\x73\xd6\x04\x27\x82\x86\x70\xae\xdc\ \x81\xb1\x7d\xf1\x5a\xba\xd4\x09\xf7\x33\xc0\x02\x5a\xef\x56\x76\ \x19\x32\xe4\x2c\x19\x9c\x53\xff\xfe\xb6\x72\x9b\xfa\x3d\x60\x30\ \x18\x2f\x7d\x7d\xdb\xf9\x6d\xdd\xba\x43\x3a\x58\x0a\xa5\xb2\x05\ \x2d\x89\x95\x2d\xc6\xa6\x5c\xb9\xd0\xcf\x49\x49\x73\x28\xd8\x2e\ \xfe\x11\x18\x0c\x4e\x71\xeb\x07\x16\x55\x9c\x47\x32\x0b\x65\x32\ \x98\x1d\x3f\x6f\xc7\xf5\xeb\xad\x9d\x3b\x1d\xf0\x4e\x3d\xf1\x37\ \x20\x43\x0c\xa7\xf1\x7b\x7f\x51\xee\xc4\x60\xb0\xf3\xa8\x20\xb2\ \x50\x02\x64\x78\xe0\x21\xf8\x65\x73\x83\x06\xb7\x60\x6c\xe3\x40\ \x95\xd6\xbb\x55\x64\x22\xf7\x75\x32\x30\x70\xa0\x34\xb8\xea\x66\ \x7a\x7a\xef\xe4\x7f\x63\x9c\xf8\x36\x18\x0c\x46\x62\x6c\xec\xe8\ \xe3\xe3\xc7\xbb\x21\x4c\xf0\x91\x8c\x06\x75\x77\x4f\x8e\x4f\x9e\ \xdc\x42\xbb\x6a\x02\x8c\x90\x31\xcb\x30\x7a\x4f\x72\x86\xa9\x91\ \x91\x01\xb2\x58\x3e\x58\xe4\xc9\x13\xfb\xca\xdf\xbf\xb7\x60\x30\ \x18\x7f\x59\x06\xd4\x2d\xfc\xe9\xd3\x15\xbb\xda\xb7\xbf\x46\x4e\ \x95\x25\xb0\x43\x13\x21\x7a\x6b\x18\xdc\x51\x8c\x27\x5c\x34\x37\ \xdf\x47\x61\x94\x88\x67\xcf\x56\x91\x61\xca\xca\xf4\x93\x60\xca\ \xa9\x69\xd3\x8e\xe1\x59\x27\xf1\xb7\xc0\x60\xb0\xe1\x15\x07\x87\ 
\x3f\x3e\x71\x62\x23\xca\x7f\x6e\xed\xed\xd9\x73\x94\x90\x40\xcc\ \xee\x93\xad\x91\x51\x88\xf4\x6c\x56\x03\xeb\x86\x3d\x7e\x3c\x1e\ \x21\x04\x75\x20\x7d\x4d\x89\x12\x2f\xe1\x64\x99\x9b\x76\xcf\x2f\ \x83\xc1\x86\x57\x15\xfc\x03\x9c\x24\x24\xb0\xdd\x8c\xa2\xac\x96\ \x57\x01\x01\x86\x1a\xef\x65\xc7\xf9\x2f\x8e\xb6\x99\xe4\xf9\xfc\ \xfe\x00\x39\x83\xc1\x46\x97\x4d\x33\xf3\xc6\xbe\x6e\xdd\x63\xb4\ \x92\xc1\xc0\x76\x2d\x10\xa2\x90\x2c\xd0\x9d\x45\x63\x70\xb2\x04\ \xe0\xdd\x09\xa0\x8a\xb6\xa1\xe4\xf5\x04\xef\xe0\x99\x03\x1c\x2a\ \x55\x45\xda\xc0\x60\x30\x9e\x79\x78\xb4\x44\xce\xe9\x33\x0d\x67\ \xc9\x07\xba\xc2\x3b\x99\x18\x72\xf5\x2a\x05\xc8\x8d\x70\x3f\x55\ \x66\xb1\xc4\x99\x65\xcf\x1e\xa9\xfe\x59\xa5\x7a\x89\x6b\x5a\xf3\ \x53\x19\x0c\x5e\xf1\x12\x3f\x7d\x1a\xeb\x63\x6d\xbd\x77\x4f\xd7\ \xae\xc7\x51\xa1\x40\x86\x94\x02\x7d\xcc\x3b\x78\x36\x80\x92\x9d\ \x71\x9f\x00\x6f\xe5\x0b\xd4\xe3\xad\xc5\x98\xe9\xdd\x7d\xfb\xb6\ \xc2\xf0\x92\x60\x80\x81\xb2\x0a\xbd\xb1\xf8\xdb\x60\x30\xd8\xe8\ \xb2\x83\xed\xc0\xb1\xa8\xab\xdb\x4b\x62\x4c\x6a\x51\xdd\x7a\xf5\ \x5a\x2b\x2a\xd1\xa4\x0e\x2d\xab\xd1\xcb\x83\xf5\xf7\x76\xeb\xe6\ \xa5\xa1\xad\x92\x2c\x6b\xef\xd2\x08\x06\x83\x8d\xaf\xe4\xc7\x77\ \xef\xcc\x7c\xd7\xad\xdb\x99\x98\x98\x58\x1f\x06\xd5\x01\x4c\x39\ \xd0\xa7\xcf\x49\xa1\x01\x24\x4b\xef\x95\x82\xb6\xd7\x8e\x8c\x1a\ \x75\xf2\xd9\x85\x0b\x8d\xc4\x77\x81\xc1\x60\xa3\x2b\x00\xf6\x04\ \x7f\xfa\x43\x08\x7d\xac\x78\x51\x38\xd3\xd1\x2a\xd6\x9f\x72\x32\ \xc1\xc1\x38\xc3\x45\xd3\x78\xec\xfb\xf7\x54\x02\x35\xea\x7d\x60\ \x20\xad\x86\xe3\xb1\xda\x1d\xc1\xf5\x28\xae\x53\x70\xcd\x2f\xd2\ \x06\x06\x83\x81\xf3\xdd\x52\x25\x2e\xa7\x49\x0f\x63\x63\x17\x18\ \x5b\x9b\xf1\x42\xe4\xa0\x94\x30\xe9\x58\x89\x05\xe3\xe5\x3b\x81\ \x7f\xd3\x9b\xc9\xca\xcb\x0c\x06\xad\x6e\xa4\x1c\x56\x6f\xe4\xc8\ \x6d\x93\xee\xdc\xb1\xe9\x64\x65\xb5\xab\x6c\x93\x26\x9e\x02\x28\ \x58\xbe\x7c\x48\x1b\x33\xb3\x2b\x64\x8f\x25\x85\xf8\x4d\x25\x44\ \x17\xbd\x9c\x39\x13\xbb\xad\x5f\xef\x32\x3f\x3c\xdc\xba\xd9\xec\ 
\xd9\x47\x55\xd9\xb2\x55\xcc\xa1\xa7\x77\x38\x35\xa5\x68\xf2\x70\ \x82\x79\x05\x83\xc1\x50\x8c\x42\x8c\x90\x4e\x91\x9b\xeb\x6b\xd6\ \x1c\x1c\xe0\xec\x3c\xca\xba\x5c\xb9\x07\xd4\x6c\xe4\x9e\xb3\xf3\ \x16\x52\x8f\x96\xef\xed\xc5\x16\xf3\x8b\x65\xc9\x92\xef\xe8\xd9\ \xaa\x22\x45\x0e\x23\xab\x65\xb0\xcb\xd0\xa1\xe7\xe8\xf7\x2d\x8b\ \x15\xeb\x22\x24\x34\x2a\x19\xde\x81\xd6\x72\x88\xc1\x60\xf8\xf9\ \xf9\xe5\x80\xcc\xfa\x61\x24\x35\xc7\x29\xdb\x48\x18\x54\xfc\x99\ \x59\xb3\xa8\xca\x7c\xb4\x90\x80\x5a\xd8\x0e\x7a\x16\xe4\xe5\xb5\ \x16\xc9\xd2\xc7\xf0\x7e\x34\xe9\x64\xae\x2a\x54\xe8\x3c\x8d\xef\ \xef\xd3\x67\xb9\x14\x3d\x1a\x88\x7b\x5f\xf0\x14\x8d\x5f\x58\xb6\ \x0c\x17\x4d\x30\x18\xec\x40\xe9\x0c\xe5\xb0\x55\xc7\xc6\x8c\x39\ \xe9\xd8\xba\xb5\xdb\x03\x57\x57\x8a\xc5\xcd\x01\x4b\x08\x89\xfd\ \x7d\xfb\xf6\xa2\x95\x0d\x4e\x94\x47\x4f\xcf\x9d\x1b\x42\xc9\xd2\ \x3b\x5a\xb5\xba\x80\x31\xb5\x92\x18\x56\xc6\x59\x02\x30\x33\x30\ \x68\xa9\x04\xce\x51\x81\x10\xf3\xe8\xf8\xf1\x25\xe2\xff\x83\xc1\ \xe0\x50\x01\x38\x00\x9c\x0d\x8e\x00\xf3\x69\x3d\xcf\xed\x3a\x6c\ \x98\x1b\x19\x1d\x18\xb1\x3c\x7f\xfe\x9d\x3e\x56\x56\x66\x8f\xdd\ \xdc\x36\x62\x75\xbb\x84\xe7\x7d\xe4\x7b\xf9\xd7\x56\xaa\xf4\x9c\ \x3c\x9e\xa4\xbd\x82\x77\x93\x60\x7c\x56\x73\x85\xc8\x27\xd2\x04\ \x06\x83\x8d\xb2\xd1\x75\x7b\xfb\x9d\xd0\xc8\xbc\x2d\x0d\x29\x05\ \x2b\x9e\xa7\x4c\x96\xfe\x49\x9e\xf5\x06\xd0\x38\xe4\xda\x2f\x04\ \x9e\x39\xb3\x61\x5d\x95\x2a\x8f\xe4\x19\xd1\x5d\xa4\x19\x0c\x06\ \x1b\x5d\x6d\x70\xf6\x87\x27\x4f\x56\xe3\x2c\x77\xf4\xd4\x1f\x7f\ \x2c\xc3\x7d\x2e\x45\xd6\x8f\x64\xfa\x48\x39\x8c\x02\xeb\x18\xff\ \x19\x9c\x0a\x91\xde\xdd\x57\xd7\xad\x5b\xf1\x5f\x28\xff\x51\x89\ \x74\x06\x83\x21\xa5\xf7\x2a\x82\x75\xc0\x62\xe0\x41\x15\x82\xe4\ \x14\x2c\xc7\xfd\x81\xa6\x33\x67\x1e\xeb\x6c\x63\x63\x87\x31\x2f\ \xf9\xee\x2f\xe0\x4f\xa0\x2b\xc6\xe2\xc5\x3f\x07\x83\xc1\x90\x1e\ \xca\x14\xc7\x36\x6d\x2e\x07\x04\x04\x64\xa4\x62\x18\x83\xc1\x08\ \x0c\x0c\xd4\x27\x99\x07\x19\x5e\x78\x01\x03\x5c\x8f\x6b\x8f\xef\ 
\x6d\x9f\xbc\x58\x88\x9f\xf1\xfb\xfd\xbe\xb1\x53\x63\x30\x18\x90\ \x58\xef\x7a\x62\xf2\xe4\x93\x56\xa5\x4a\xdd\x57\x9a\x8f\x98\x08\ \xd1\xf6\x3b\x83\xf2\xbb\xa5\xf1\x7a\x53\x20\x5d\xa4\x0a\x06\x83\ \xcf\x78\x1d\xc1\x45\xd4\x15\x28\xf8\xca\x15\x2b\xdc\xb7\x11\xdf\ \x81\xb3\xf3\xe7\xb7\xa3\xec\x16\xd4\xe9\x05\x7e\x08\x0c\xfc\x93\ \x24\x69\x06\x83\x8d\x2e\x87\x14\x2c\xea\x00\x96\x15\xdf\x81\xa5\ \x7a\x7a\xfb\xd4\xc2\xb6\xbb\x76\x39\x90\xf0\x91\x48\x3b\x18\x0c\ \x06\x55\x8f\x93\x4a\xb4\xb1\x10\xe5\xfe\xe4\x9d\x26\xe0\xd7\x0d\ \x35\x6a\x3c\x7c\x70\xe4\x88\x89\xf3\xc0\x81\x7a\xe2\xbb\xc0\x60\ \xb0\xc1\xed\x04\x53\xcc\x72\xe6\xdc\x2e\x52\x87\x0a\x06\xa9\x76\ \xbe\x28\xc4\x7d\x30\xae\x15\xb9\x3c\x47\xfd\x7f\x8e\xa0\x6e\x9c\ \xad\xc4\xdf\x00\x83\xd1\xc1\xca\x6a\x7d\xd5\x3e\x7d\x2e\x0f\x3d\ \x75\x2a\xe4\x1b\x06\x39\x5c\x85\x55\xd0\xb0\x55\x2b\xff\xee\x1b\ \x37\x3a\xa0\x74\x68\x97\x5e\x8e\x1c\xc5\x54\x7a\x7a\x67\xfe\xf3\ \x29\x62\xb2\xea\x37\x65\x75\xb1\x62\x4b\x71\xcd\x6f\x2c\x44\x25\ \xc1\x60\xfc\x75\x5f\xbc\x31\xe0\x20\xcd\x71\x2a\x72\x85\xb1\x15\ \xa7\x9e\xed\x10\x3a\xfa\x10\xfb\xe1\x03\x55\x9a\x0f\x04\xfb\x61\ \x4b\xa9\x2e\x03\xb2\x2a\x57\xae\xe3\x7f\x75\x5b\x50\x1f\xac\x02\ \xc6\xa2\x36\x2a\x34\x36\x2c\x6c\x1e\x8c\xcf\x0b\xf7\x97\xc4\xf7\ \x81\xc1\x81\x73\xd2\xcf\x4c\x02\x53\x3c\x97\x2c\x71\xa6\x7e\xed\ \x42\x02\xe9\x62\x97\x28\x21\xfa\xe5\x8d\x1b\x03\xfe\x73\x5b\x4a\ \x18\x15\xb9\x79\x6f\x82\x77\x85\x4a\x65\x80\x65\xdf\x6d\x7f\x8f\ \x1e\xe5\x54\x18\xaf\xde\xb7\x6f\xc4\xf7\xf4\x25\x63\x30\x0c\x9b\ \x35\xdb\xae\x9f\x3f\x7f\x98\x00\x9e\x9e\x39\x63\x84\xcb\x15\x50\ \x98\x08\xd1\x3d\x39\x3e\xbe\x65\xa5\x4e\x9d\xae\x97\x6d\xd8\x30\ \x42\xfc\x17\x61\x5b\xb1\xe2\x75\xe5\x40\x6b\xf7\xd3\x4f\x77\xb0\ \xba\x7d\x42\xb0\xf3\x39\xfa\x56\x1b\xcb\xd2\x8e\xb4\x82\xc1\x5b\ \xcd\xd2\x09\xb1\xb1\x8b\xa8\x1b\xd0\xb9\xb9\x73\xad\x97\xe5\xcb\ \x57\x15\xab\xde\x1a\xf0\x31\xfa\x9a\x47\x47\x86\x84\x58\xe0\x9d\ \xc2\xff\xa5\x6d\x64\x5e\x18\x96\x19\x29\x3b\x21\x46\x92\x00\xbd\ 
\xfa\xd7\xdb\x5b\xb4\xb8\xa9\x18\xde\x8e\xd6\xad\xfd\x92\x92\x92\ \xfe\x89\x94\x1a\x83\x8d\x2e\x37\xd8\x03\x9c\xba\xb2\x48\x11\x92\ \x5e\x4f\x56\x4b\xb2\xd7\xaf\x7f\x1f\x59\x2c\x9d\xff\x6b\xe7\xb6\ \xf2\xa8\x83\x8a\x52\x0c\xec\xfa\xc6\x8d\x3b\x0f\xf6\xed\x7b\x82\ \xee\x91\x0d\x10\x41\x7b\x6c\xe8\xd7\x57\x10\x40\x2a\xcd\x00\x2f\ \xa6\x55\xbd\x97\xc1\x7d\xdc\xef\xbb\xb8\x6c\xb4\xab\x54\x29\x08\ \x2b\xdc\x27\xc7\x6e\xdd\x4a\x8a\xff\x1a\x76\x77\xea\x74\x52\x31\ \x38\x74\x65\x89\xc3\x35\xc1\xb6\x7c\xf9\x67\x5f\x92\x93\x97\x46\ \x04\x05\xad\xc2\x92\xdf\x44\xbb\x3c\x88\xb4\x0c\x29\x45\x67\x79\ \xbe\x7c\xcb\x44\xda\xc0\x60\xa3\xab\x02\x4e\x8f\x0b\x0b\xb3\xc0\ \xf5\x67\xf1\x5f\x02\xb5\xa7\xc5\x0a\x97\x60\x63\x64\xf4\xf2\xda\ \x86\x0d\xbb\xc8\x88\xd4\x8a\x4d\xc5\x8b\xbf\x7b\x78\xe4\x48\x67\ \x18\x5b\x03\xb0\x80\x3c\xe8\xb6\xc4\xaa\x16\x86\xe7\x4b\xae\x6e\ \xdb\x56\x02\xda\x16\xeb\xe3\xc3\xc3\xe7\xcb\xae\x2e\x69\x05\x83\ \x65\xda\xeb\x81\xb9\xff\x6b\x6e\x5b\x1f\xea\xa4\xf9\xf4\xec\xd9\ \xf5\x10\x9b\xd9\xa7\x6e\x08\xd1\xa4\xc9\x9d\x75\x95\x2b\x07\xee\ \xeb\xd1\xa3\x9a\xb6\x4c\x1a\xb6\x99\xb7\x95\x52\x8d\xc3\xa3\x46\ \x0d\x24\x83\x14\x7f\x0f\x0c\x06\x03\xc1\xc8\x63\xe7\xe6\xcd\x73\ \xf1\xdf\xb1\x63\x11\xb5\xb5\xc5\x56\xf2\xf1\xe7\xcf\x9f\x17\xc1\ \x90\x16\xa7\xd6\xae\x36\x29\x2e\x6e\x0a\x49\x65\x2f\xd3\xd7\x8f\ \xbd\xb0\x6a\x55\x75\x91\x66\x30\x18\x5c\x76\x31\x17\x86\x67\x4b\ \xba\x84\xb2\xb5\x51\x6d\x30\x0f\xa8\x12\x5a\x80\x03\xe5\x0f\x5a\ \xe1\xc8\xcd\x8b\xe7\x5d\xff\x4a\x01\xf8\xff\xab\xf4\x32\x18\xbc\ \x97\xce\xfb\xf0\xe8\xd1\xd2\xef\x1f\x3e\x5c\x8d\xfb\xc1\x5a\xde\ \xc8\x6e\x14\x37\x59\x20\x44\x11\xfc\x5c\x18\xfc\xb0\xba\x48\x91\ \xd7\x49\xf1\xf1\x0b\x14\xa1\x19\xad\xf7\xf3\xcb\x74\x9e\x3a\xb2\ \xeb\x66\x12\x78\x05\x1c\x25\xfe\x07\x0c\x06\x1b\x5e\x4b\x50\x5f\ \xeb\x8c\x37\x57\x66\x76\x87\xe3\x7a\x01\x4c\x81\x5a\xd3\x6e\xbc\ \x97\x5a\x38\x40\x85\xe7\xa1\x14\xd4\xc4\xf5\x2d\xc4\x43\x43\xe0\ \x01\x75\x86\x78\x68\xb0\x3c\xf7\xed\x07\xff\x24\x53\x86\xc1\x60\ 
\x43\x2c\x4f\xde\x4b\xe8\xcc\xbf\x92\x61\x83\xc4\x8b\x2b\x56\xac\ \xc3\x78\xaa\x86\xb3\xa3\x4d\x9b\xe9\x66\x39\x72\xc4\xd3\xbb\x36\ \xe5\xca\x85\xa2\xee\xc9\x22\x3e\x22\xc2\x74\x67\xbb\x76\xd7\x69\ \xcb\xba\xaa\x54\x29\x43\xf1\xa7\x60\x30\xd8\xe8\x1a\x20\xbd\x6b\ \xc1\x89\x89\x13\x4f\x5a\xe4\xca\x15\xa7\xac\x56\xa9\x09\xc9\x5c\ \x30\x33\xeb\x4a\xc1\x72\x0a\x9a\x23\x73\x85\xf4\x2f\x92\x70\xee\ \xb3\xfa\x9c\x98\x38\x98\xe2\x7a\x30\xd4\xa6\xe2\x2f\xc1\x60\xb0\ \xd1\xe5\x02\xbb\x86\x3f\x7f\xbe\x62\x77\xc7\x8e\x57\xd0\xf0\xe1\ \xbe\x5d\xe3\xc6\xf9\x53\xcd\x0e\x87\x36\x7d\xe0\xa9\x53\x1b\x9e\ \x79\x7a\xae\x83\xa2\xef\x7d\x6c\x2d\xa3\x97\xe7\xc9\x53\x82\x94\ \x7c\xd3\x96\x93\xc9\x60\xb0\xe1\x15\x03\x87\x81\xa6\x60\x03\x19\ \x10\xa7\xfc\xb8\x91\xb8\x76\xa6\xd5\x6f\x67\x87\x0e\x57\xe4\x3b\ \x95\xc0\xc9\xc9\x9f\x3e\x99\x29\x67\x3e\x3c\xef\x03\xfe\x1d\x01\ \x1a\x06\x83\x1b\x32\x42\x65\x37\x0c\xdc\x8b\x9b\xcd\xa0\xbf\xcc\ \xf5\x1a\x05\xee\x04\x4f\xa2\xb9\x5f\x6c\xcf\xad\x5b\x2f\x60\xf8\ \x2c\xde\x7b\x46\xef\x65\x37\x30\x70\xc2\xf5\x1e\x0c\x2d\x27\xae\ \x6b\x41\x4f\xfc\x3c\x55\xde\xa7\x05\x0c\x06\xe3\x43\x50\x50\x93\ \xc3\x23\x46\x9c\x93\xce\x92\xaf\x6b\x2b\x54\xf0\x4a\x4d\x4c\x86\ \x3c\x94\xd8\x8a\x3e\x91\x2d\x6e\x93\x70\xfd\x4c\x5b\xd0\xb4\xc7\ \xea\x18\x0c\x96\x4f\xeb\xf9\x36\x20\xc0\x92\x4a\x7b\xd4\xed\x8f\ \x84\x88\xa1\x3c\x4d\xad\xf3\xdd\x18\x59\x96\x11\xe0\x36\x65\x8a\ \xd3\xc6\xda\xb5\xcf\x90\x81\xc2\xf8\xce\xa7\x3d\x5c\xc0\x60\xb0\ \xe1\x95\x02\x7f\xbf\x7f\xe8\xd0\xe6\x5d\x1d\x3a\x78\xfa\xda\xdb\ \xd7\xd5\x88\xcf\xe5\x07\xdf\xae\x28\x50\x20\xec\x53\x74\x34\x9d\ \xe9\xc6\x83\x13\xa1\x59\x7f\x83\x8c\x10\xc1\xf4\xef\x6c\xe2\xce\ \x60\xb0\xe1\xd5\x02\xa7\x83\xad\xe4\xca\xe6\x46\x01\x71\x30\xe5\ \x82\x85\xc5\x41\x8c\xb7\x10\x12\xd8\x62\x5e\xa7\xd8\xde\x9b\x7b\ \xf7\xda\xfe\x45\xdd\x5e\x3d\xea\x2d\xbd\x48\x88\x32\x42\x57\xc1\ \x60\xc8\x7c\xc6\x1a\x22\x0b\xb1\xa6\x74\xe9\xb9\x54\x4d\x4e\x06\ \x77\x74\xf4\x68\x27\x18\x9c\x9e\x92\x36\x46\x63\x7b\xbb\x76\xf5\ 
\x21\x23\x94\x63\xa5\x61\x58\xcb\x65\x56\xca\x0a\x6c\x4b\x2b\x2b\ \x5d\x5c\x40\x2a\x1d\x8a\xa5\x32\x21\xa1\x8b\x60\xb0\x97\xb2\xb0\ \x10\x8e\x29\xea\x9c\x46\x91\x65\x75\x47\x73\x5e\xbd\x3a\x33\xc6\ \xc7\x67\xa3\x61\x8b\x16\xfe\x30\xac\xc0\x79\x2a\x55\x6e\xea\xaa\ \x89\x47\x36\xf0\x60\xc6\x74\xdf\xb2\xc5\x0b\x3f\x07\x60\xac\x46\ \x8a\x10\xf7\x55\x42\x2c\xc2\xfd\x10\x70\x21\x3e\x2c\x95\x03\x35\ \x6e\x6b\x6a\x7a\x59\x3f\x6f\xde\x78\xfd\x7c\xf9\x12\xc7\xfb\xfb\ \xc7\x08\x5d\x05\x83\x7b\x7b\x6d\x69\xd0\xe0\x32\xa4\x13\x1a\xe1\ \x67\x5f\xb0\x4a\x16\x6d\x33\x0d\xe5\xb9\x6d\x36\x32\x51\xfa\x2a\ \x15\xe6\x07\xfa\xf6\xbd\x28\xcb\x80\x94\xda\xbc\x78\x94\x0b\x1d\ \xa4\x4a\xf3\x1b\x9b\x37\x3b\xa2\x8a\x21\x0a\x63\x1f\xa4\xb2\xaf\ \x22\xb5\xd6\x5a\xe8\x10\x18\x6c\x68\xd6\x30\xac\x33\xb8\x86\x41\ \xfb\xef\x03\x94\x91\x56\x20\xd3\x83\xee\xbf\xec\xef\xdb\xb7\xbb\ \x0e\x24\x49\xab\xdc\xa6\x4e\x3d\x80\x7f\xd3\x47\xa8\x82\xbd\xc4\ \xbd\xfe\x42\x21\x4a\x90\xc7\x72\x4b\xc3\x86\x77\x65\x13\xf8\x5f\ \xc0\x8e\x77\xf6\xec\xb1\x92\x5e\xcf\x14\x92\x7d\x40\xad\xde\x4c\ \x9d\xaa\x30\x67\x30\xe0\x09\x9c\x43\xee\x76\x30\x05\x8d\x13\x02\ \x77\xb6\x6f\x6f\x4f\x3f\x23\x73\xdf\x9b\x14\x6f\x75\xc4\xb1\xd2\ \x02\x3d\xa3\x97\x45\xbf\x7a\x45\xfd\xa4\xa9\xf4\xa7\x28\x98\xb2\ \xad\x69\xd3\xdb\xb8\x2f\x28\x24\x7c\xd7\xad\xab\x40\xe3\x64\x74\ \x10\x9f\xd9\x8c\x67\x35\x85\x2e\x80\xc1\xa0\x73\x1a\x0e\x3f\xa5\ \x1c\x9a\x37\x27\x83\x4b\x59\x59\xa8\x50\xa4\x86\x38\x50\x2c\xfe\ \xb8\x49\xb4\xa5\xa4\x0e\x79\x33\x0b\x83\x4d\xe4\x8a\xa5\x5a\x53\ \xb2\xe4\x53\xf9\x45\xf1\xab\xc6\x67\x9a\x01\xa6\x6c\x6f\xd5\xea\ \x26\xde\x1b\x25\x75\x58\x6a\x62\xcc\x02\x2b\xb6\x2d\x38\x08\xcf\ \x0d\x44\x16\x80\xc1\x06\xe7\x4c\xe2\xad\xb8\x46\x61\x95\x7b\xf7\ \xf1\xcd\x9b\xe5\x36\x86\x86\xa1\x8a\xd1\xad\xab\x56\xcd\x45\xe8\ \x30\x7c\xac\xac\xa6\xe0\xcc\x16\x43\x46\x07\x43\x7a\x09\xde\x94\ \x5f\x16\xf1\xef\x1f\x3c\xa0\xe2\xd8\x92\x18\x9b\xa0\xac\xde\xca\ \x15\x63\xcf\x32\xdd\x13\xcb\x60\xb8\x8e\x1c\xd9\x84\xfe\x38\xc9\ 
\xb8\x20\x5f\x17\x63\x57\xa5\xca\x49\xb5\xdb\xbd\x7b\x77\x9f\x33\ \x33\x67\x1e\x45\x5d\xdb\xec\xbf\x30\xd8\x8a\xf3\x85\x28\x90\x85\ \x2b\x5e\xfe\xd7\x37\x6f\x2e\x77\x1d\x36\xec\x2c\x54\xc4\x7c\x10\ \x4a\x20\xe3\x4b\x39\xd8\xbf\xff\x05\xca\x64\x21\x0d\x4d\xf0\x33\ \x6d\x2f\x4f\x4e\x9e\x7c\x0c\x46\x68\x89\xeb\x09\xbc\x97\x04\xc7\ \xca\xbb\x4c\x4d\x11\x63\x30\xb0\x25\x9b\x48\x06\xb6\xa9\x4e\x9d\ \x87\xca\x76\x92\x64\xef\x20\x77\xb7\x01\xab\x43\x6f\x70\x24\xa8\ \x47\xb1\x39\x8a\x65\x81\x6d\x35\xb7\x63\x58\x29\xa2\xc1\x77\xe0\ \xb8\x2c\xde\x66\x0e\x07\x17\x1e\xe8\xd3\xe7\x22\x0c\xe9\x0b\x09\ \x86\xba\x0e\x1d\x5a\x56\x26\x3c\xa7\x90\xb3\x85\x3e\x17\x9c\x28\ \x87\x5f\xf8\xf8\x0c\xf7\x58\xb4\xc8\x49\x76\xfb\x99\xfb\x27\xab\ \x7f\xc1\x74\x4b\x1f\x63\x30\x28\x03\x03\x86\x12\xb9\xb2\x60\xc1\ \xd7\x10\xd8\x5c\xb6\xae\x4a\x95\x40\xfa\x63\xa5\x2c\x0e\xc8\x97\ \x7b\x0b\x09\x18\x59\x7b\x25\xeb\x43\x32\x82\xce\x41\x02\x70\x68\ \xd6\x6c\x25\x8d\x61\x3b\x7a\x5c\x07\xce\x77\x2a\xb0\xe9\xe3\x13\ \x27\x36\xc2\xa3\xe9\x14\xf7\xe1\x43\x19\xd4\xd5\xa9\xbf\x50\x2e\ \x2c\x5b\x76\xf0\xf0\xf0\xe1\x27\x11\x5e\xf8\x48\x9f\xd9\xba\x6c\ \x59\x7b\x1a\xdf\xd2\xa8\xd1\x71\xa1\x01\xd2\x61\xa1\xab\xdc\x86\ \xc6\x4a\x99\x88\xc3\xe9\xb1\xfd\x64\x70\x28\x60\x12\x0c\x2c\xf1\ \xf6\xde\xbd\xdb\x76\xb5\x6f\xbf\x86\x0c\xc7\x69\xe0\x40\x2f\x6a\ \x82\x1e\xfb\xee\xdd\x42\xea\xdf\x85\x31\x43\x90\xe2\x5a\x9f\xa0\ \x39\x79\x6c\x4f\x97\x2e\x9e\x48\xad\x0a\xc5\x6a\x91\x48\x86\x88\ \x6a\x6e\x7f\x64\xfc\x27\x85\x5e\xbb\x66\x83\xf7\x8b\x0a\x1d\x80\ \xec\x29\x3d\x16\xac\xb4\xa9\x61\xc3\xaa\x30\xba\x24\x54\x93\x07\ \x3f\x3e\x7d\x7a\x14\x9d\xeb\xb6\x36\x6e\xec\x4d\xdb\x4c\xa9\x24\ \xe6\xa4\xb1\xa2\xb5\x93\x67\x3c\x7a\x9e\x02\xa3\x3c\x8f\x90\x83\ \x1b\xc5\xf9\xf0\x79\xe3\x30\x36\x58\x7c\x2f\x18\x8c\xdb\xbb\x77\ \xe7\x09\xf6\xf1\xb1\x8c\x78\xf1\x62\x1a\x65\xea\x63\x7b\x15\x82\ \xae\x25\x26\x32\xaf\xb1\x29\x48\x5d\x4d\x17\x92\x21\x5e\xb1\xb5\ \xdd\x83\xfb\xb9\xe0\x64\x64\xf8\xaf\x86\xd1\x45\xe0\x5c\x94\xa4\ 
\x36\xd2\x01\x03\xbc\x30\xde\x5f\xe8\x28\x60\x54\xce\x6a\x75\x68\ \x95\xea\x3d\x7a\xd8\xad\x0d\x70\x71\x59\xf0\xc0\xd5\xd5\x7e\x43\ \xcd\x9a\x8f\xde\x3d\x78\xb0\x50\xc3\xe0\x72\xa2\xda\xfc\x8c\xb2\ \x92\xe3\xcb\xc5\x3b\xfa\xf5\xeb\x99\x24\x78\x8b\xad\xf7\x6b\xec\ \x04\xce\x08\x06\xe3\x1f\x6e\xc1\x5a\x45\x44\x44\x14\x40\xf0\xd8\ \x3e\xe4\xea\x55\x5b\xad\x9e\x00\x02\x5b\xb0\xcd\x64\x54\x21\x57\ \xae\xcc\x53\xa4\x0e\x70\x35\xd8\xd7\xb3\xe7\x49\x1a\x87\x87\x30\ \x22\xf6\xfd\x7b\x53\x8c\x15\xd0\xe1\xcf\x59\xf3\xea\xda\xb5\xbb\ \xe1\x7d\x55\x87\x10\x88\x70\x10\x5d\x87\x6c\xdf\x2c\x0a\x94\x0b\ \x0d\xec\xea\xd8\x71\x3f\x19\x1b\x04\x90\xde\x4b\xc3\x0b\x45\xf9\ \xd0\xb0\xcf\x49\x49\x33\x65\xc5\x7a\xba\x39\x59\x18\x2c\xee\x3a\ \x50\x5b\xc8\x15\x4e\x86\x25\xd2\xa9\xb2\x5f\x73\x1c\x4e\x88\xa3\ \x34\x7e\xde\xd8\xf8\x10\x7e\xa7\x8d\xd2\xc6\x4a\xc6\xbb\x0c\x75\ \xd1\xe8\xc0\x05\x2f\x2e\x5d\x5a\x0b\xc7\xca\x99\xcd\x3f\xff\x7c\ \x28\x32\x28\xa8\xa0\xa6\xc4\x9f\x4c\x82\xfe\x88\xad\xe4\x8b\x2f\ \x5f\xbe\x2c\xf5\x5a\xba\xd4\x09\x8a\xd1\x61\x30\xce\xa3\x52\x9f\ \xa5\x0a\xa8\x27\x32\x0a\x0c\xc6\xbd\x03\x07\xca\x91\x9a\x96\x8c\ \x73\xed\x01\x49\x45\x79\x37\x98\x82\x66\x8c\xc1\x48\x9b\xa2\x55\ \x82\x44\x5c\xe7\x81\x51\x1a\xb1\x2e\x5f\x13\x21\x1a\xe8\x60\xd1\ \x6b\x6d\xa9\x9d\x62\x0c\x56\x95\x95\x11\x0b\x64\x29\xcf\x2e\xfa\ \x9c\x74\xa6\xc5\xb3\xfa\x60\x4b\xac\x82\xc6\x58\xdd\x4c\x32\x6d\ \x65\x63\x30\x5e\xf9\xf9\x4d\xb7\xaf\x5b\xf7\x01\x9c\x0f\xb1\x64\ \x50\x4a\xda\xd4\x3d\x67\xe7\x2d\x74\xde\xc3\xfd\x68\x1a\x43\x0e\ \xe6\x4b\xac\x1e\x87\xe0\x94\xb8\x88\x77\x3f\xe2\x1d\x8a\xef\xb5\ \xd0\xd1\x15\x3d\x9b\xc6\xaa\x16\x89\xeb\x67\x99\x97\x79\x8b\x1c\ \x2e\x1a\xef\xe5\x03\xab\x89\xcc\x02\x83\x41\xe7\x33\x70\x02\x69\ \x44\x92\x7b\x9d\x8c\x8b\x74\x24\xb1\x9d\xb4\x95\x5b\x31\x7f\x1a\ \x83\x91\x25\xef\xeb\xde\xdd\x1d\x8e\x86\xd9\x10\x73\xb5\x27\x3d\ \x4a\xc4\xc3\xee\x09\x1d\xc7\x93\x73\xe7\x66\xdb\xd7\xab\x77\x97\ \x3e\x03\xc5\xef\xac\xcb\x95\x5b\x4a\xd2\xeb\x22\x0b\xc1\x60\xa3\ 
\xd3\x03\x0d\xc1\xa6\xd7\x37\x6d\xda\x8c\xb3\x5d\xb0\x43\x8b\x16\ \x2b\x05\x80\xac\x8d\x08\x4a\x07\x43\xd8\xe0\x12\x85\x0b\xc8\xd1\ \x00\x6f\xdf\x34\xc4\xb8\xfc\xc8\x10\x4f\x4e\x9a\x54\xe8\x1b\x61\ \x89\xa1\xa0\x2b\x38\xdd\x58\x88\x0a\x59\xf8\xd9\x4a\x82\x13\x6e\ \x6c\xd9\xe2\x88\x4a\x84\x57\x32\xf5\xcb\xeb\x3b\x53\xe5\x4a\x4b\ \xd9\xf6\xb5\x60\x41\xf1\x4f\xc1\x60\xc8\xf3\xcd\x1c\x70\x81\x00\ \x2c\x4b\x94\xb8\x8d\x56\x54\x09\xf7\x0f\x1f\x9e\x1b\x74\xf1\xa2\ \x1d\x02\xe8\xfe\x72\xdb\x99\x40\x4a\xca\x2f\x7d\x7d\x9b\x68\xf6\ \x1d\x00\xed\xf0\x7c\x83\xcc\x79\x0c\x92\xe7\xc3\x38\x70\x4c\x16\ \x7b\x6c\xeb\x43\x2d\x7a\x1e\xa5\xb4\x1d\xe8\xd7\x6f\x07\xee\xf5\ \xbf\x27\x19\x1c\xab\x7f\xb4\xdc\x7a\xa7\x5f\x1e\x2a\x83\xfb\x2e\ \x2b\xa1\x80\xb3\xf3\xe6\x4d\x40\x70\xf8\x33\xb6\x99\xe1\x28\x5c\ \x9d\x1a\x17\x11\xf1\x07\x4a\x64\x76\x22\x55\xec\xe5\xc6\x5a\xb5\ \x1e\x6b\x86\x1a\xd0\x2b\x40\x1d\x60\x97\xfd\xc0\x43\x48\xf0\x95\ \x32\x43\xd6\x94\x28\xf1\x4a\x8e\xb7\xd2\x81\xcf\xd5\x49\xd6\xd7\ \x95\xf8\x0e\x83\x1b\xa0\x7c\xbe\xad\x8d\x1a\x5d\x26\x47\x8d\x48\ \x67\x30\xd8\xf8\xf2\xa1\x3a\x7c\x1b\x9c\x26\x31\x4a\xea\x17\xb6\ \x9c\x2e\x38\x1b\x59\xbe\xbd\x7b\x77\x0d\x29\x73\x69\xbc\x5b\x4f\ \xb3\x1a\x61\x99\x81\xc1\x53\xa7\xfe\xfd\xc7\xc5\x86\x85\xcd\x25\ \x37\x7c\xd0\x85\x0b\x6d\x7f\xe0\xca\x0b\x03\xac\xd2\x41\x74\xbe\ \x95\x95\xe9\x94\x40\xad\x52\xf2\x32\x97\xe0\x92\x5e\x5b\x67\x06\ \x1b\x5d\xe5\x84\x98\x18\xe3\x0b\xe6\xe6\x07\xd0\x7a\xd8\x8f\xce\ \x72\xc8\xcc\x58\xac\xdd\x88\x43\xca\x96\xa7\xc0\xb9\xe2\x7d\x62\ \xf2\xe4\x93\xd8\x8a\xaa\x4b\x6b\x90\xc9\xd1\x4a\x26\x4b\x37\x14\ \x1a\x30\x11\xa2\x2b\x55\x24\xfc\x20\xa9\x72\xc6\x64\x68\x48\x93\ \xbb\x46\x57\xb4\x57\x3e\x22\x8d\xad\x15\xf8\x42\x9e\x0d\x3f\x81\ \x83\x04\x83\x91\x2e\x59\xfb\x72\x3b\x96\xf8\xf1\xe3\x32\x5c\xa7\ \x08\x80\x3c\x7e\x24\x87\x20\x63\x5e\x4f\xe1\xbd\x8c\x44\x61\xab\ \x39\x9e\xf7\x08\x7f\xfa\x74\xc5\xb9\xf9\xf3\x0f\x45\x05\x07\x37\ \x48\x65\xc5\xc8\x4b\xc9\xc3\x60\x1c\xf5\x0a\xd7\xf5\x44\x70\x19\ 
\x34\x0f\xf2\x34\x31\xd9\x41\xc6\xe5\xf6\xc7\x1f\x8e\xb8\x5a\x80\ \x9f\x35\xb6\xd1\xef\xb0\xcd\x36\xc7\xfd\x1c\x70\x98\x5a\xf5\x9d\ \xc1\xf8\xa7\x31\x2e\xd9\xf5\xc6\x48\x1a\xce\x66\xf9\xcd\x7e\x9e\ \x56\x80\xb3\x73\xe6\x1c\xc1\xb3\xf6\xf2\xdd\xa2\xe0\xaf\x60\x3d\ \x0d\x43\x1b\x8d\x77\x7f\xc7\x35\x9b\xcb\xb0\x61\x53\x91\xab\x79\ \x8a\x12\x91\x85\x8e\x42\x26\x3d\x1f\x24\xfd\x97\xbb\x07\x0e\x6c\ \x45\x1c\xd2\x54\x86\x18\xa2\xa5\x91\x3d\xc1\xd9\x35\x11\x69\x70\ \x1f\xa3\x43\x43\x57\xc4\x7e\xf8\xb0\x04\x0e\xa5\x00\xb9\xe2\x5d\ \x4c\x57\x75\x34\x06\x63\x7f\xaf\x5e\x3f\xe3\x9b\xff\x81\x52\x6b\ \x77\x7a\xfa\xf4\x03\x81\x81\x81\xdf\xf4\x00\xe2\x9d\xb3\x72\xfb\ \x75\xf3\xe8\xef\xbf\xff\x2e\x93\xa6\x4b\x0a\x1d\x84\xb1\x10\x46\ \x52\xfb\x32\xc5\xb6\x42\x85\xe7\xc8\xbc\x19\x6b\x9a\x2d\xdb\x22\ \x25\x39\x00\x09\xd0\x67\x50\x97\xe7\x4e\xf7\xe4\xfd\xc4\xe7\xe8\ \x0c\xe6\x83\xca\xf4\x5e\x59\x91\xe0\xef\xd8\xad\x9b\xfa\xb3\xc9\ \xde\x79\xff\x1c\x0c\x0e\x9c\xe3\x0f\x71\x9a\x97\x99\x99\x13\x55\ \x19\x48\x63\xba\xaa\x34\xef\xd0\x06\x64\x1e\xda\xec\xee\xdc\xf9\ \x2a\xbd\x87\xd2\x9f\xd3\xba\xae\xb8\x75\x72\xea\xd4\x15\x38\xaf\ \x86\x93\x01\xc1\xdb\x6a\x8d\x1a\xc1\xaa\x0e\x4d\x9b\xde\xba\x6e\ \x6f\xbf\xf3\xed\xbd\x7b\x2b\x11\x1a\x89\xc5\x19\x35\x04\x29\x62\ \x73\x94\x50\x83\x55\x99\x32\xd3\xe9\x7d\x5a\x11\xe9\x9c\x8b\x9f\ \x3b\x81\x51\xe0\x22\xda\x7a\x8b\xf4\x00\x83\xfb\x81\xa3\x95\xb0\ \x89\xcb\x90\x21\x1e\x88\xbf\xc5\xac\x2e\x51\x62\xd0\x37\xde\xcd\ \x69\x5d\xa6\x8c\x1f\xc5\xf0\x9e\xb9\xbb\xdb\xe2\xbe\xa0\x8e\x7f\ \xb6\xba\xf4\xb9\x50\x06\xe4\x86\xaa\x84\xc9\xb8\xcf\x85\x78\xe4\ \x02\xa4\xc4\x2d\x5c\x91\x2f\xdf\x3e\x72\x0a\xf9\x3b\x3a\x6e\xc7\ \xf8\xcf\x42\x02\xdb\xcb\x7e\x64\x70\x90\x7b\x30\xa5\xf3\x2d\x56\ \xc9\x6d\x74\x2f\x57\x76\x57\x91\x5e\x60\x70\x08\x01\xec\x4b\xd9\ \xf8\xb8\x8e\xd4\x38\x07\xd5\x07\x0d\x14\x39\x73\xa5\x1e\x0d\xef\ \x0c\x49\xe5\xcc\x94\x53\x47\x3f\x57\x1f\x70\x84\xfc\x0c\x0f\xc8\ \x89\x02\x7e\x41\x6c\xf2\xb6\x14\xb4\x55\x69\x7c\x86\x66\xea\xf3\ 
\x5d\xd1\xa2\x8f\x70\x8d\x57\xb6\xa4\x47\xc7\x8e\x3d\x7d\x7a\xe6\ \x4c\x1b\xac\x8a\x4e\x18\x3b\x07\xd6\x11\xe9\x04\x06\x77\xcd\x29\ \x22\xcf\x41\x95\x14\x25\x2d\xa9\xfc\xfc\x88\x1c\x0e\x10\xa3\xb5\ \x50\xde\x51\x40\xed\xac\xe4\x1f\xf2\x65\xbc\xa7\xb3\x49\xc5\x27\ \xa6\x4c\xe9\x8b\x34\xb1\xe7\x64\x48\xc8\xbc\x79\x06\x63\x2b\xa7\ \xe5\x68\x69\x42\xcf\x64\x48\xe4\xaa\xdf\xd6\xad\x9b\x64\xbd\xdd\ \x14\x22\x62\x9a\xe7\xe8\xb9\xec\x87\xb7\x5c\xa4\x27\x18\x0c\xe8\ \x8d\xac\x32\xcf\x95\x2b\x42\xd9\x56\x39\x0f\x1a\xe4\x49\x21\x86\ \xd4\x1a\x34\x22\x76\xf7\x9a\xde\x81\xe7\xef\x24\xae\x5b\xc1\x66\ \x3a\xb8\xe2\x95\xc6\x99\x6d\xb6\xc7\xc2\x85\x2e\xa7\x67\xcc\xd8\ \x47\xdb\x64\x0a\x8f\x28\x31\x45\x18\x11\x49\x37\x24\xdf\xda\xb5\ \xcb\x41\x1a\xda\x68\xb0\xb2\x12\x40\x47\x22\x78\x94\x65\xc9\x92\ \xef\x50\xa5\x71\x0f\xe7\x5e\x3f\x8d\x26\x2b\xe9\x72\x9e\x65\xf0\ \x8a\xd7\x10\xce\x12\xb3\xfd\xbd\x7b\x53\x69\x4f\x32\x79\x33\x61\ \x50\x5b\xb4\x65\xec\x70\x3f\x58\x51\xe1\x5a\x53\xaa\xd4\x13\x2a\ \x01\x92\xba\x9a\xed\x74\xd0\xe8\x72\x82\xed\xc0\x3f\xc0\x22\x24\ \x27\x0f\x26\x83\xc7\xe4\x96\xd9\x47\x6e\x35\x8d\xb4\x3e\xe3\x28\ \x7a\xee\x36\x6d\xda\x71\x38\x9b\x66\xa3\x37\x9e\x39\x85\x59\xf0\ \x7b\xb7\x68\xab\x0a\x76\x11\xe9\x00\x06\x1b\x5d\x41\xf0\xd7\x17\ \xde\xde\x6b\xe9\x9b\x5d\x86\x11\x96\x6b\x26\x06\xc3\xb8\x82\x2d\ \xf2\xe4\x89\x80\xe4\xf9\x72\x5a\x19\x42\xaf\x5f\xb7\x81\x87\x30\ \x12\x46\xfa\x46\xe8\x38\x8e\x8d\x19\xf3\xd3\xfa\x6a\xd5\xfc\x94\ \x55\xdc\xf9\xd7\x5f\x3d\xde\x3c\x7d\x5a\x3c\x95\x8c\x95\x1b\xa4\ \x15\xfa\xe1\xc9\x13\x63\x7c\xfe\x3d\xf8\x6c\x41\x26\x42\xfc\xb2\ \xa9\x6e\xdd\x29\x18\x8f\x91\xdb\xef\x2d\x22\x9d\xc0\x60\xc3\x2b\ \x0f\x4e\x24\xa1\x1f\xe8\xa5\x50\x31\x28\x89\x19\xcd\x07\xf7\x91\ \x11\xba\x2f\x58\xe0\x8a\xe7\xdd\xc1\x22\x60\x05\x78\x34\x5f\x50\ \xed\x5d\x74\x74\x74\x11\x1d\xff\x5c\xb9\xc0\x71\x14\x2e\xc0\xea\ \xfc\x56\x1a\xde\x32\xa1\x01\x32\x2c\x1a\xa7\xb0\x02\x44\x9d\x14\ \xe3\xa4\xea\xfa\x09\xc8\x57\x6d\x4d\x2b\x3b\x09\xf6\x42\xc6\x7d\ 
\x33\x25\x18\xe0\x59\xe1\xf4\x12\xb3\x65\xb0\xc0\x51\x03\x19\xa3\ \x32\xc0\xd6\xf1\x85\x2c\xe7\x49\x40\x42\xb4\x89\x92\x85\x8f\xb1\ \x36\x34\xbe\xab\x53\x27\x5f\x8c\xd5\xd4\x4c\xb5\x82\x83\xa5\x36\ \x9e\xe5\xd7\xc1\xcf\xd5\x10\x65\x40\xf3\xcf\x9b\x98\x1c\x7a\x74\ \xfc\xf8\x58\xad\xed\xe4\x6e\xe5\x73\x92\x87\x13\x61\x85\x70\xba\ \x0f\x38\x74\xc8\x04\xd7\x4b\x38\xf7\x25\xdd\x3b\x78\x90\x2a\xed\ \x17\x82\x25\x31\x16\xa8\x08\xf1\xa6\x9b\x68\x2d\x83\xf1\xfa\xf6\ \xed\x11\x14\x0c\xa7\x32\xa0\xed\xcd\x9b\x2f\x55\x32\x33\x60\x50\ \xb7\xa9\xe9\x08\xb4\x34\x57\x91\x6b\x7e\x81\x10\x85\xb0\x12\x9e\ \xd0\xe8\x21\x10\x87\xab\x83\xae\xad\x02\xa4\x7c\x26\xb3\x4e\xfa\ \x0a\x09\xca\x17\x85\xd1\x24\x28\x15\xf5\x70\xb8\xec\xb5\xfb\xe9\ \xa7\x67\x54\x85\x6e\x5f\xbf\xbe\x2b\x8d\x1f\x1b\x37\xee\x38\xad\ \x92\x4a\x32\x00\x2a\x2d\x86\xad\x2c\x50\xe0\xb5\x5c\x09\xfd\xd3\ \xb3\xd3\x2b\x83\x65\x1e\x86\x42\xbe\x61\x05\xae\x23\x61\x48\xb3\ \xc1\x68\xd9\x92\xf8\x34\x6d\x31\xe5\xf9\xc7\x8b\xc6\x50\x02\xe4\ \x8b\x1c\x4c\x67\x6c\xc5\xee\xc8\x34\x2b\x7f\x5d\xcf\xe0\x80\xc1\ \x8c\x00\x53\xcc\xf5\xf5\x3f\xd1\xb6\x93\x1c\x26\xa4\x23\x43\x4a\ \xd8\x14\x3e\x40\xd2\x33\x75\x03\x9a\xaf\x99\x08\x40\x5b\xea\x67\ \x9e\x9e\x6b\x94\x7e\x78\x44\xda\x7a\xcf\x16\x22\x8f\x48\x07\x30\ \xd8\xf0\x2a\x81\x46\x96\x65\xca\xd4\x41\xb6\x86\xba\xae\x6e\x77\ \xc7\x8e\x54\xe8\x99\x5b\x4a\xf4\xa5\x10\x21\xf8\x1a\xee\xbe\x70\ \xa1\x3d\xc6\x17\x28\xb9\x8c\x88\x6d\xcd\xd5\xf5\x6c\x9c\x8b\xcb\ \x97\xef\x78\x78\xf4\x28\xc5\xe4\xa6\x20\xdf\xb2\xa1\x62\x44\x28\ \xe6\x7d\x47\x89\xcf\x18\xaf\x2a\xb4\x00\xa3\x3c\x44\x5e\x5d\xd2\ \xde\xdc\xd1\xb2\xe5\x0d\x32\x52\x8b\x02\x05\xea\x89\x74\x04\x83\ \x0d\x2f\x2f\xea\xef\xe6\x9d\x98\x38\xf1\xa4\xaf\x9d\x1d\x95\x02\ \xa9\xc8\x5d\x2e\xdd\xed\x57\xd6\x56\xa8\xf0\x48\x7a\x39\xdd\x2f\ \x5a\x58\x8c\xa2\x2d\x9a\x4d\xf9\xf2\x57\x7e\x00\xdd\x98\xa6\x52\ \xc6\x2f\x17\x0c\xed\x37\x29\xce\x94\x44\xa2\x4c\xa9\xc5\x25\x69\ \x0b\x49\xab\x1f\x19\x9a\xdc\x6a\x8e\x4a\x4e\x4c\x34\xc3\xb5\x8b\ 
\x4c\x19\x3b\x44\xff\x9d\xf4\x28\x03\x62\xb0\xd1\xe5\x01\x7b\x82\ \x63\xc0\xec\x94\xb9\x4f\xcd\x48\x50\x00\x7b\x3f\xe1\xe3\xc7\x99\ \x30\xb4\xfd\xf0\xea\xbd\xa6\xd8\x17\x9d\xfd\x36\xd7\xaf\x7f\x8f\ \x3c\x7b\x3f\x50\x05\x79\x71\x24\x42\xfb\x9e\x9a\x31\xe3\x98\x0c\ \x8c\x67\xd3\x4e\x02\xa0\x8a\x0a\xc4\x2b\xe3\x94\x5e\x78\x02\x90\ \xa5\x51\xf9\xe8\xf7\xf1\xb9\xdf\xc8\x55\xd2\x1b\x6c\x28\xd2\x13\ \x0c\xc6\x8e\xd6\xad\x3d\x94\x95\xed\xfc\xe2\xc5\xa3\x29\xa9\x18\ \xf1\xae\x53\x08\xa2\x7f\x22\xaf\x20\x6d\x3d\x7f\x30\xa5\xb4\x51\ \xb2\x3c\x29\xbf\x46\x0e\xe6\x00\x5a\xb1\x70\x1d\x0b\xa6\x1c\x19\ \x35\xea\x2c\x7d\xf1\xa4\x62\xb0\xe5\xb1\xb2\x27\xae\x2e\x52\xe4\ \x03\xae\x9f\xc8\xeb\x09\x62\x38\x9d\xc0\x60\x24\xc5\xc5\xfd\xb2\ \xbf\x4f\x1f\x92\xeb\x53\xd7\xa8\x61\x65\xbb\x8f\xb3\x90\x23\x32\ \xf7\xad\x92\x3e\x7d\xfa\x23\x8d\x2b\x4c\x13\x92\xf0\xcb\x6a\x67\ \x0b\xad\x56\x1a\x41\x71\x27\x8d\x15\xeb\x2d\x82\xfe\x6f\x49\xda\ \x82\x56\xfb\x54\x02\xe8\xea\xb3\x1d\x6d\x47\xf1\xf9\xad\x6d\x8d\ \x8c\x14\x99\x87\x99\x22\x1d\xc1\xe0\xf8\x5d\xf7\x37\x77\xee\x58\ \x51\x3f\x38\xfc\x41\x3e\x87\xd1\xc5\x1e\x19\x37\xae\x59\x5a\x15\ \x94\x61\x70\xeb\xa4\xd7\xef\x09\xae\x65\x85\x0e\xe0\x5d\x40\x40\ \x3d\xac\xd8\xe7\x69\xc5\x22\xe3\x59\x5f\xb5\xea\xd3\x07\x87\x0f\ \xf7\x48\xe5\x6c\xd7\x9a\x9e\x3b\xb6\x6e\x7d\x5d\x36\xd5\x2c\x47\ \x0d\x59\x90\xa5\x13\x8b\x5c\xd4\x0f\xe9\x1e\xb3\x63\xb0\xa8\x11\ \x38\x08\x5c\x2c\xf5\x55\xd2\x1c\x9f\x82\xa7\xf3\xa0\xe2\x25\xdc\ \xdf\xbf\x7f\x43\x1d\xca\xcf\xec\x47\x2b\x16\xa4\x1c\xee\x4a\x4d\ \xcf\x68\xe5\x7c\xa6\xc4\x27\x31\x76\x0b\xc5\xbb\xb1\xef\x1f\x3e\ \x5c\xad\xc8\xfd\xe1\xaa\x8f\x33\xe1\x65\x32\xc4\x8b\x56\x56\xe5\ \x04\x83\x91\x41\xe9\x54\x75\xc0\x7c\x22\x0d\x40\x88\xe1\x67\x45\ \x3a\x81\x88\x9e\x78\xbd\x84\x04\x62\x0b\x14\x60\x2f\x92\xc5\x86\ \x57\x0e\x1c\x77\x6b\xe7\x4e\x07\xc8\xb4\xdf\x70\x1e\x38\xb0\x8b\ \x52\x2b\x08\x8e\xd7\x88\x4f\xf6\x10\x1a\xc0\x0a\x77\x9d\x3c\xb6\ \x58\x29\x5b\x68\xac\xe4\x75\xc0\xb1\xd4\xf5\x35\x2b\x56\x72\x06\ 
\x43\xa5\x4e\xa3\xd2\xd3\x4b\x40\xeb\xe5\x37\xd4\xe1\x95\xdc\xf5\ \x1a\x75\x6c\x4f\x65\x2b\xe2\xb1\x3a\xb0\xe2\xd5\x03\x67\x82\x43\ \x28\xd8\x2d\x6b\x05\x93\xd5\x67\xbb\xd8\xd8\x45\xd2\x49\xa4\x18\ \x56\x49\x6c\xaf\xe3\xa8\x31\x0b\xc6\x1b\x4b\x1d\x4d\x27\x25\x2b\ \x47\xa3\xfe\x6e\x3d\xeb\xab\x30\x32\xd3\x1d\xff\xab\xd2\xd9\xd5\ \xa6\x5c\xb9\x17\xa4\xc3\x12\x1b\x19\x59\x0f\x63\x4b\x28\xc4\xa0\ \xac\x7a\x72\x05\x2c\xad\x0b\xe2\xae\xb2\x6f\x9d\x6a\x7d\x8d\x1a\ \x9b\x28\xdf\x12\xfc\xb2\xb6\x52\x25\x8a\xc1\x15\x97\x45\xbe\x15\ \x60\x48\xf7\x29\x23\xc5\x7f\xfb\xf6\xed\xb1\xb1\xb1\x94\x87\xe9\ \x02\xa6\x20\x91\xfa\x36\x72\x3a\xd7\x93\xbe\x0a\xad\x96\x8a\x4c\ \x3b\x15\x03\x8b\x0c\x06\x83\x8d\x2d\x37\xf8\x02\xf1\xbb\x70\xea\ \xec\x6a\x9e\x3b\x77\x28\xf5\xc7\x43\x96\x8a\x9f\x14\x35\x8a\xa5\ \x2b\x4a\x65\x1e\x52\x73\x47\xbb\x4a\x95\x1c\xa4\x01\x1e\x05\x4b\ \xeb\x80\xe1\xb5\xa2\xfe\x0e\xf4\xef\x93\x86\x93\x40\x82\x4d\x60\ \x38\xdd\xbb\x0e\x1b\x46\x92\x83\x63\x95\x2f\x95\x6d\x4d\x9b\x5e\ \x91\x81\xf2\x81\x60\x5b\x70\x02\x54\xd6\x2e\x2b\x5f\x28\x14\x3c\ \x17\x19\x04\x06\x1b\xdb\x32\xac\x02\x6f\x34\x3b\xbb\x92\xe2\x96\ \xb2\x9a\x41\x51\xf9\x2c\x0c\x4c\x2d\x65\x4e\xc6\x86\xe7\x5d\x1f\ \x9f\x3d\x5b\x8f\x56\x0d\x99\x11\xb2\x55\x47\xce\xad\x15\xc1\x49\ \x74\xbe\x43\xbf\xbb\xab\xe8\xe2\xfa\x00\x32\x0e\x3e\x3e\xd6\xd6\ \x3b\x65\x95\x41\x71\x0a\x2b\xa0\xa4\x29\x3a\x2e\x2c\x8c\x8c\xad\ \x81\xd0\xc0\xd9\xb9\x73\x27\x4b\x63\xa4\xdc\xcd\x46\xf8\x79\x15\ \x8c\x6f\x65\xba\x56\x5e\x30\x18\xab\x8b\x17\xef\x85\x00\xb9\x3a\ \x21\x1a\x5b\x2b\x7f\xe7\xc9\x93\xf3\xae\x2e\x5a\xf4\x1e\x09\xb8\ \x5e\xb5\xb3\xdb\x75\xd1\xdc\x7c\x1f\x3d\x43\x7f\xf3\x4b\xe4\x01\ \x55\xf2\x1f\x91\xcd\x1f\x83\xc2\xd2\xa7\x90\x54\x98\x86\x7b\x15\ \x75\x08\x52\x84\x6c\xb3\x58\x8c\xb7\xa1\xd4\x48\x31\x95\x9c\x04\ \x1a\x51\xe5\x04\xf8\x15\xf1\xc9\x6b\xb8\x6f\x23\xab\x2e\x7a\x51\ \xa5\x85\x52\xb5\x80\x8a\x0c\xef\x88\xe7\xcf\x57\x92\x54\x04\xbe\ \x48\x76\xcb\xd5\xf2\x2d\x38\x3a\x5d\x3e\x17\x83\x41\xee\xf3\xb0\ 
\xc0\xc0\xe5\x08\x94\xfb\x22\x24\xf0\x1c\xa5\x31\xc5\x92\x93\x93\ \x7b\x51\x85\x42\x5c\x64\xe4\x0c\xac\x6c\x41\x1a\x32\xed\x85\x84\ \x04\x0a\x42\x83\x71\x5e\x7a\x41\x2b\x05\x79\x09\x11\x6c\x56\xb6\ \x73\xbe\x60\x71\x1d\x91\xa0\xcf\xaf\xd1\x58\xc4\x80\x56\x65\xfb\ \x9f\x7f\x0e\x20\x2f\x2e\x25\x7d\xcb\x2c\x94\x30\x3c\x9b\x48\x06\ \x28\xd3\xe5\x3a\xcb\xec\x1d\x2b\x32\x38\xc4\xef\x94\xd5\xfe\x86\ \x89\x10\xcd\x45\x3a\x80\xc1\x46\x57\x0c\x1c\x2e\x57\x84\x26\x58\ \xdd\x5a\xe2\xfc\x16\x08\x27\x84\xab\xb6\x4c\xbb\x02\xa4\x4e\xdd\ \xa6\xea\x04\xd4\xa5\xcd\xc5\x3b\x21\x60\xca\x8a\x02\x05\xa2\xf0\ \xbb\xef\x56\x97\x2c\x49\x92\x7f\x8b\xa5\xe8\xad\xce\x00\x2b\xf2\ \x45\x69\x3c\xa6\x02\x70\x5f\xb4\x68\x35\x79\x36\x15\x83\x52\x56\ \x3b\xba\x92\x21\xe2\x33\xbe\x87\xe6\xcc\xf2\xc3\x23\x46\x9c\xc3\ \x39\x36\x5e\xae\x78\xfb\xd3\xeb\x73\x31\xd8\xf0\x4a\x83\xd9\x48\ \xc2\x0f\xf5\x69\xc1\x64\x44\x38\xcf\x25\xbd\xbc\x7e\x9d\x82\xe8\ \x39\x85\x06\x2c\x8b\x17\x3f\xaf\xd1\x7e\xeb\x39\x2a\x18\xf6\x24\ \xc6\xc5\xd1\x7b\xa6\x10\x02\x32\x53\x0a\x61\x41\x13\x5d\x51\xdd\ \xba\x7b\xf0\x60\x1f\x48\x38\x84\x49\x03\xdb\x75\x70\xc0\x80\x09\ \xa8\xb2\x50\x1b\x21\x2a\x2d\x6e\xec\xed\xd6\x4d\xe9\x0b\xb1\x96\ \xc6\xce\xcc\x9a\x75\x14\xfa\x31\xbd\xae\xae\x5f\x3f\x1d\x15\xf7\ \x96\x78\x27\x88\xc6\x97\x66\xcf\x3e\x5f\x30\x18\xe9\x79\x16\x8a\ \x8f\x8c\x9c\x87\x34\xb1\x73\x70\x3e\x44\x1d\x1e\x39\x72\xa2\x90\ \x50\x56\x00\x6a\xb9\x8c\x7a\xb5\x48\x24\x4b\xef\xa5\x3e\xe8\x32\ \x2e\xd6\x04\x2c\x05\xbd\x95\x81\x64\x70\x08\x38\x2b\xbd\xf3\x26\ \xea\x4a\xed\x5d\xe4\x8b\x17\x73\x51\x41\x11\xa0\x19\x87\x93\x1a\ \x9a\xa6\x60\x2b\xd2\x00\xc5\x58\x92\x74\x08\x25\x52\x58\x01\xbc\ \x15\xe4\xe5\x55\x9e\x1c\x2e\xb4\xaa\x53\xf3\x4d\xbc\xab\x68\xce\ \x0c\x4a\x97\x32\x20\x06\xd7\xde\x81\xbd\xc0\x25\xe0\x10\xd9\x46\ \xeb\xac\x5c\xb1\x36\x83\x29\x54\xfc\x29\xe5\xf0\xea\x83\x7a\x1a\ \x09\xc4\xa7\xc9\xab\x89\xf2\x19\x4b\x92\x3b\x8f\xfb\xf0\x81\x24\ \xce\xab\xd1\xaa\x82\x6b\x7d\x1d\x28\x6f\x1a\xf5\xcc\xc3\x63\x1d\ 
\x2a\x0e\x4e\x6f\x6f\xd1\xc2\xeb\xb1\x9b\xdb\x46\xa4\xc2\xd5\xa1\ \x95\x98\xfe\xed\x38\x8f\x26\x53\xa3\x4c\x88\xda\xbe\x24\xc3\x83\ \xcc\x85\xc7\x33\x3f\xbf\x02\xb8\x3f\x49\x5e\x50\xfc\x7e\x73\x8c\ \x57\xc1\xbb\x89\xd2\x68\x2f\xa5\xd7\xe7\x62\xb0\xe1\x15\x07\xf3\ \x51\xf5\x00\x9c\x27\x17\x95\x55\x01\xab\x58\x08\xe4\x10\xa6\xa4\ \x52\xaf\x46\x5a\x92\x5f\xb6\x36\x69\x72\x27\xf2\xe5\xcb\xfe\x38\ \x37\x39\xa1\xf1\x07\x69\x4e\x36\x83\x63\x25\x8e\x9e\x49\x21\xdb\ \xdc\x59\xec\xcd\xac\x0d\x8e\x00\x67\x81\xc3\x61\x30\x7b\x49\x98\ \x48\x6d\x60\x9d\x3a\x5d\xc1\xd8\x38\x68\xaa\xdc\xa0\x15\x5e\x3a\ \x8c\x0a\xca\x2f\xa0\x89\x20\x79\x38\x4f\x92\x61\xd2\xbb\x94\x9d\ \x23\xb3\x55\xb6\xa5\x5b\x0c\x8f\xc1\x80\xf7\xb2\x3f\xc5\xea\xa0\ \xaa\x15\x49\x46\x07\x4f\xe5\x65\x52\x56\xd6\x2a\x8f\xb1\xa6\x67\ \x97\x57\xae\xdc\x8f\x70\xc3\x75\xa9\xad\xf2\xc1\xcb\xcb\x2b\x3b\ \xa4\xfe\x9c\xe4\x7d\x92\x59\xce\x9c\x7d\x85\x0e\x01\xd9\x2a\xbd\ \xb0\x65\x54\x9f\xef\xd0\x3a\x3a\xd8\xa1\x55\xab\xb6\x57\xd6\xad\ \x6b\x4c\x1a\xa0\x41\x97\x2f\xf7\x25\xcf\xab\xac\xd5\xcb\x81\x03\ \x5c\x01\x5a\xdd\x70\xe6\xa3\x50\xc9\x6c\xca\xce\xd1\x48\x13\x8b\ \x02\xe7\xa4\x57\x2f\x08\x06\x77\x03\x6a\x43\xd9\x28\x48\x1c\xf6\ \x44\xce\xa5\x1f\xae\x05\x34\x33\x56\xb0\x8a\x45\x22\x46\x17\x8f\ \x95\xe1\x2d\xad\x66\xb2\x9c\x26\xd0\xdb\xd2\xb2\x1e\x56\x82\xb7\ \x64\xac\x48\x8a\x5e\x43\xd9\xff\x26\x42\x34\x92\x0d\x2b\x7b\xe8\ \xc0\x67\x2b\x83\xcf\xb5\x84\x8c\x87\xce\x6e\xb4\x62\xe1\x4b\xc1\ \xf1\xe1\xf1\xe3\xbf\xe1\x33\xdd\xa3\x15\x4d\x68\x00\xa9\x62\xd3\ \x92\xe2\xe3\x4d\x4f\x4e\x9f\x5e\x1d\xde\x5c\xea\x02\xfb\x0a\xbd\ \x12\x76\xc0\x99\xf4\x4a\x1a\xde\x3a\x91\x1e\x60\x30\x64\x7c\x6b\ \x00\x68\xaa\xd9\x9f\x5c\xa9\xc6\x96\x0e\x87\x4f\x54\x9b\x47\xf7\ \x94\x36\x05\xaf\x27\x15\xc8\x26\xcb\x33\xd0\xaf\x60\x71\xd2\xd0\ \xc4\x7b\x8a\xd7\xf0\x2c\xae\x35\x74\x40\x25\xad\x7f\xe8\xb5\x6b\ \x36\x70\xac\xdc\x26\xc7\x8a\x92\x47\xea\x3a\x62\xc4\x4e\x5a\xdd\ \xb4\x4a\xa1\x6a\x49\x39\x42\xa5\x35\x57\x1f\x74\x03\x7a\x4a\x7a\ 
\x2c\x81\x67\xce\xcc\x90\xe7\xda\xf4\x03\x83\x65\xfc\xb4\xb6\x93\ \xb7\xa4\x77\x32\xf6\xee\xfe\xfd\x5b\xdc\xa6\x4c\xd9\x43\xf7\x48\ \xb1\x7a\x2d\x73\x1a\xdd\xe4\x19\x28\xbb\x92\x7c\x1c\x15\x12\xb2\ \x64\x43\x8d\x1a\x8f\xa5\xa1\x26\xc1\xf0\xec\x68\xcb\x96\xc5\x86\ \x67\x08\x8e\xa7\x84\x67\xcb\x12\x25\x42\xd0\x70\xf2\x25\x44\x6c\ \xe7\x69\x87\x46\xb0\x42\x37\xd0\x68\xcd\x35\x0e\x8e\xa2\xf6\x64\ \x9c\x07\xfb\xf5\xbb\x80\xfb\x0c\x4d\x82\x66\x30\x54\x9b\xea\xd5\ \x5b\x84\xf3\x4f\x08\x5c\xe8\x76\xf4\x07\x8b\xad\xa5\xbd\xb2\xe2\ \xc1\x79\x72\x07\x4e\x96\x85\xda\xad\xb8\x7c\xb7\x6e\x2d\x8b\xb3\ \x53\x38\x55\x27\x6c\xa8\x59\xf3\xbe\x92\x14\xad\x23\x55\xf4\x3f\ \x23\x75\x6d\x2e\x55\xd2\x93\x27\x56\xa3\x9b\x4f\x79\x29\x45\x7f\ \x99\x1c\x26\x74\xce\xa3\x5a\x3d\x8c\xe7\x47\xee\xe9\xb1\xf8\xa8\ \x28\x53\x5a\x2d\xff\x8b\x89\xb9\xad\x44\xa6\x81\x21\xc3\x03\x8b\ \x65\xd6\x4a\x4e\xc5\x69\x42\xc6\x14\x11\x14\xb4\x4a\x91\x65\xd7\ \x5a\x15\xcd\xe9\x1d\x6a\xcd\x8c\xe7\xf3\x6e\x6c\xde\xec\x18\xe0\ \xec\x4c\xa1\x88\x1c\x18\x37\xcc\xea\x74\x2a\xaa\x18\x07\x9b\x69\ \xa4\x88\x6d\x92\x2b\xb1\x1b\x7d\x36\x3a\xcb\xd2\x36\x54\xe3\xfd\ \x51\x60\x13\xf1\x5f\x04\x26\x3c\x48\x5d\x0f\x25\xc4\x2c\x91\x99\ \xe0\x6e\x40\x7a\x52\xae\x41\x1d\x8f\x93\x7d\x01\xba\xa5\xa6\xb0\ \x45\x6d\xb7\x90\xc1\xf1\x04\xcf\xa7\x4b\x29\x85\xc6\xb2\x6c\x86\ \xaa\xb8\x15\xb9\x87\xc3\x60\x45\xa1\x03\x38\xfa\xfb\xef\x35\x90\ \x6f\x7a\x5b\x59\xb9\x91\xf6\xe5\x16\x19\x14\x44\xaa\xd0\x0c\x2a\ \x2d\x41\xae\x9f\x72\x3e\x98\x27\x32\x1b\x6c\x7c\xad\x43\xae\x5e\ \xb5\x95\x62\xad\x7a\xd2\xc8\xf2\xa6\xa6\xb0\x85\xe7\xd5\x85\x16\ \xd6\x55\xab\xb6\x40\xd9\x92\xca\x7e\x78\x4b\xc1\x6c\x3a\x10\x38\ \x9f\x78\xc5\xd6\x76\x0f\x3c\x92\xef\xff\xe9\xdf\x16\xad\xe2\xa4\ \x92\xf6\xaf\xe8\x06\x44\x5b\x14\xc8\xa9\x2d\x41\x90\xf6\x15\xf6\ \xda\xf1\x9c\x82\x93\x25\x01\xe6\xc6\x60\x41\x69\x6c\x0d\x29\x7e\ \x05\xba\x82\x23\xc9\x90\x90\x95\x7f\x4d\xe9\x7b\xae\x40\x31\x4c\ \xc4\xeb\x5e\xa3\x10\x36\x9c\x56\x48\x5a\x05\xa5\xe1\x1d\xd0\x91\ 
\xcf\xf5\x0b\xc9\x39\x50\x5c\xf2\x99\xbb\xfb\xd0\x7f\x60\x70\x13\ \xa5\xd1\xbe\x06\x7b\x89\x1f\x1d\x81\x81\x81\xfa\xc8\x62\x7f\x09\ \x46\xc5\xc7\xc7\x1b\x8a\x2c\x03\x83\x82\xc1\xeb\xab\x57\x77\x85\ \x21\x7d\x96\xc2\xb5\x5f\x42\x7c\x7c\x6c\xa4\xc2\x96\xf6\xbb\x2b\ \xc0\x14\x28\x49\x1f\xa0\x98\x5d\x42\x74\xf4\x4c\x1b\x23\xa3\x97\ \x34\x66\xa6\xaf\xdf\x56\x47\x56\xf0\xdc\x60\x57\xb0\x9b\xf8\x4e\ \xe0\xef\x72\x86\x6c\xd5\x15\x87\x52\x20\xf2\xf2\x66\x47\xb8\xa4\ \xaa\xf8\x51\x81\xe5\x7a\xb8\x74\x49\xbb\x6b\x1e\xda\xe5\x81\xbc\ \x3d\x29\x55\x89\x4c\x03\x83\x62\x55\x08\x26\x6f\x5a\x5b\xb1\xa2\ \x7a\xc5\x82\x27\x33\x1c\xf3\xd0\x5b\xcb\xd8\x2a\xd2\x16\x12\x95\ \xe6\x81\x78\x7f\x1a\xa8\x47\x84\x33\x65\xb6\xf4\x78\xee\x11\x3a\ \x86\x34\x0b\xe7\x6a\x97\x01\xa1\xbb\xed\xa7\xc8\x48\xb3\x55\x45\ \x8b\x5a\xca\x3a\x3d\x0a\xa9\x94\x14\x3f\x12\x60\x4c\x75\x31\x71\ \xaf\x90\xf5\xf0\x9e\xe4\xbf\xa9\x3f\x99\x54\xa7\x5a\xa7\x04\x34\ \x25\x1f\x64\xaa\xe1\xb1\xd1\x55\x43\x88\x60\xfa\x05\x0b\x8b\x83\ \xa8\x47\x7b\x07\xe7\xd6\x63\x2d\xcf\xa5\x7a\x15\x24\xc3\xd4\x14\ \xb2\x45\xc8\xe1\x17\xf5\x36\xb4\x55\x2b\x2f\xa1\x85\x99\x42\xe4\ \xca\x2a\xd5\x2d\x5a\x95\x40\x6f\x70\x56\x5a\x52\xba\x94\x32\xa0\ \x73\x73\xe7\x1e\xa6\x7a\x43\xf0\xe7\x33\xc6\xc6\xe5\x10\x4e\xf9\ \x28\x53\xe0\xae\xfc\x68\x5b\x98\x14\x8d\x0f\xd4\x42\x4e\xa6\x09\ \x8d\x61\xa2\x9f\x21\x13\xe2\xb0\x43\xf3\xe6\x3e\xd4\x43\x1a\x13\ \x1c\x83\x67\x43\xa8\x3c\x43\xfb\x2c\x41\xab\x24\xf7\x1e\xcb\x90\ \x1e\x02\x2d\x60\x78\x8b\xa0\x31\xb2\x8c\xb6\x95\x58\xe9\x3a\x53\ \x81\x27\x98\xe2\xd8\xb6\xad\x2f\xc6\x46\x08\x40\x3b\x93\xc5\xd3\ \xc4\x64\xb7\xbc\x2f\x0c\x5a\x91\x4c\x9e\x3c\xdf\xbd\x04\xcd\xb3\ \x22\x8f\x31\x3c\x28\xa8\x29\xbc\x97\x54\x25\xee\x87\xcf\xd1\x5d\ \xfc\x05\x94\x32\x20\x29\xd2\x94\x80\x1a\x3b\x63\xdc\x1b\x98\xe7\ \xc9\x33\x84\xc6\xd0\x5e\xf9\x26\x52\xc5\xac\x7e\xa4\xbe\x10\x02\ \xe9\x39\x3e\x54\x56\x0f\x35\x27\x12\x86\xc9\x2e\xcb\x2f\xc2\x20\ \x91\x16\x14\x17\x1e\x6e\x21\xf5\x2e\x66\x51\x79\x06\x2a\x95\x95\ 
\xda\x2d\x17\xad\x49\xee\xa3\x31\x99\x43\xd9\xf1\x92\xfe\x3d\x04\ \xc0\x06\x60\x0e\x72\xa4\x28\xf9\x97\x48\x0c\xbe\xfc\xe4\xcc\x99\ \x72\x1a\xbb\x95\xaa\x78\x16\x8c\x79\x8a\x46\x3d\xda\x54\x12\xa6\ \xc5\xbd\x5a\xde\x0e\x5b\x32\x7f\xa8\x2b\xbb\xc2\x6b\xf8\x50\xae\ \x0c\x1e\x52\xfc\x47\x95\xc9\xe5\x4c\x7d\x82\x2e\x5d\x9a\x8c\x6a\ \xf9\x03\x32\x4e\x57\xed\x4f\x16\x83\x33\x38\xc3\x52\xc7\x57\x17\ \x38\xf5\x94\xfe\x07\xcf\xc0\x97\xa4\x9e\x86\x2f\x21\xaa\x31\x1c\ \x0c\xe6\xfa\x91\x26\x73\x04\x6d\x4b\xa0\xb8\x3b\x9f\x0c\x8e\x26\ \x49\xc9\xed\x93\xf2\x68\x2a\xf9\x5e\xc9\x2d\x8d\x1a\xf9\xd1\xb3\ \xcb\x96\x96\xf6\x14\x03\xd2\xf4\x48\xa1\xc8\x72\xa7\xa2\x6d\x41\ \xfa\xf5\x22\xc3\xc0\x52\x0f\xa4\x13\x89\x06\x1c\x21\x72\xc5\x0a\ \xc6\x75\x31\xae\x33\xc1\x70\x72\xb2\x5c\xb1\xb1\xd9\x23\x7b\xa0\ \xef\x00\xbf\xee\xeb\xd1\xc3\x53\x4a\xde\xcd\x02\x17\xa1\xda\xfc\ \x38\x6d\x47\x65\xdb\x65\xcb\xac\x48\x82\x06\x47\x5d\x30\x33\x9b\ \x86\xa4\xe6\xcb\x30\x20\x5b\x18\x53\x41\x8d\xed\xf2\x7c\xdc\x87\ \x6a\x94\x01\x8d\xa7\xae\xaf\xe7\xe6\xcd\x3b\xac\xfc\x8d\x91\x11\ \x62\xbc\xcd\x8f\x3a\x89\xb5\xc1\x4e\xb2\x6e\x4b\x85\xf3\xdc\x6b\ \x7c\x03\xc5\x52\x1e\x9c\xd0\x00\x56\xc1\xf9\x94\x29\x4e\xab\x9e\ \xd0\xc2\x55\x5b\xdb\x7e\x08\x2b\x7c\xc6\x61\xfd\x09\x19\xaa\xc8\ \x48\xb0\xd1\xd5\xc4\x36\x73\x16\x4a\x79\x5c\xa1\x1e\xa6\xfe\xe6\ \x97\x5e\xbc\x77\x1e\xc6\xc6\x4e\xe4\x44\xb1\xaa\x50\xa1\x8a\xcc\ \x61\xa4\x9d\x8b\x31\xf8\xb3\xfc\xdd\x1c\x60\x2b\x6c\xed\x9e\xcb\ \x2c\x10\x0b\x5c\x0d\xc0\x1e\x54\x46\x94\xd9\x7f\x77\x30\xa4\x89\ \x3b\xdb\xb7\x5f\x4c\x7d\xc8\xc1\xb1\x52\x9a\xa2\x03\x0c\x4b\x1d\ \xbf\x43\x0e\x69\x60\x4c\x4c\x4c\x31\x0a\xf2\xdf\x77\x71\x19\x84\ \xb3\x6c\x22\xa5\xc6\xd1\xe7\x97\x89\xd2\x3f\x3e\x4e\x4e\x99\xb2\ \x8c\x14\x7a\xe9\xcc\x86\x89\x18\x06\xe6\x54\xfa\x46\x83\x26\x60\ \xf9\x54\x3c\x9d\xc7\x68\xf9\x0f\x3c\x75\x6a\x03\x9e\x57\x11\x19\ \x0a\x86\x34\x9c\xb6\xe0\x22\xca\xcb\x24\x91\x57\xd9\x9c\x64\x36\ \x48\x4d\xf8\xd5\x01\x71\x19\x30\x57\x14\xa0\x55\x42\x02\xca\xd1\ 
\x9e\xf4\xc7\x0c\xc3\x9d\x61\x9a\x2d\xdb\x60\x69\xb4\x8f\xc0\x6e\ \x59\xf1\x39\xa2\x42\x43\xa7\x38\xb4\x6c\x39\x4a\x59\xc5\x63\xde\ \xbe\x35\x3d\xd8\xbf\xff\x85\x8d\xb5\x6b\xdf\x80\xb7\xd6\x90\x02\ \xe7\xa0\x0b\x1d\x7f\x64\x56\x4e\xed\x7f\xd5\xb6\xe5\xa6\x83\x83\ \x03\x56\xba\x48\x45\x77\x50\xf9\x06\xd2\xce\x06\x27\x28\x2d\x7d\ \x77\x76\xe8\x40\xcb\xff\x30\x91\x99\x60\xc3\x33\x90\x3a\x93\xed\ \xe5\x35\x87\x00\xb0\xdb\x50\x17\xb9\x7e\x78\xfa\xf4\x77\xb9\x4d\ \x5b\x2f\xdb\x6b\xf5\x11\x40\x62\x6c\x6c\x5d\x59\x3a\x54\xd3\xd9\ \xd9\x59\x0f\x19\xff\xea\x2d\x9c\xe4\x29\x3a\x0f\x66\x45\xd9\x0f\ \x98\x5f\x23\xfd\x6d\x20\x68\x0a\x79\xc2\x89\x8a\xbe\x0a\xe2\x8c\ \xc1\x18\x1b\xfd\x6f\x9c\xc8\xaa\x28\xb7\x58\xe8\x63\x65\xb5\x67\ \x43\xf5\xea\x57\xb0\xcc\x5f\x74\x1e\x34\x68\xd4\x37\xdc\xbd\x01\ \xd8\x02\xc4\x84\x3f\x7f\xbe\x82\x8c\x55\x64\x39\x18\x76\x95\x2b\ \x8f\x97\x32\x0f\x27\x70\xcd\x86\x3e\xe6\xbd\x51\x6f\xa7\x7c\x81\ \x7a\xac\xab\x55\xab\x1a\xcd\xb1\x9c\xc3\x0e\x34\xee\xd0\xb4\xe9\ \x2d\x68\x93\x78\x63\x77\x93\x2c\xb5\x48\x6c\xc0\x82\x3a\xa0\x1b\ \xa3\x77\xdd\xde\x7e\x1b\x1c\x79\xea\x12\xa6\x7d\xdd\xbb\x77\xfe\ \xd7\xd6\x72\x81\xed\x64\xd2\xac\x29\x38\x41\xb6\x3d\x9a\xa6\x44\ \xfa\x31\x31\x7f\xd0\x64\x9d\x98\x3c\xf9\x24\x9e\x77\xd1\x32\xc6\ \x2a\xe0\x92\xcc\x6f\xe1\xc4\x88\x8c\x8c\x2c\x48\x32\x08\xd2\xc0\ \x36\x5f\xb6\xb1\xa9\x89\x04\x62\x73\x94\xf9\xc4\xc9\xe2\xd7\x3d\ \x02\x90\x5e\xe9\x7b\x38\xff\xc5\x7e\x78\xfc\x78\x15\x39\xd1\x48\ \x3c\x08\x67\xf1\x07\xca\xee\x86\x6a\xef\x74\xc1\xcf\x80\x32\xa0\ \x79\x4f\xcf\x9e\x5d\x4f\x0e\xa1\xff\x4c\x21\x25\x09\x7c\x52\x4c\ \x47\x29\x84\x04\xc3\x2d\x8b\x15\x7b\x85\xd5\x90\xbc\x9c\x06\x5a\ \x06\x37\x5a\x7a\xd2\xc2\x33\xbf\x84\x84\xf1\xfe\xf1\xe3\x21\x90\ \x66\x7f\x20\xe7\x20\x19\x46\x76\x47\x29\x97\x91\x4e\x07\x32\xb6\ \x29\x32\x9b\x25\x7e\x57\xa7\x4e\x5b\x70\x7f\xcb\x75\xe8\xd0\xbe\ \x57\xac\xad\xd7\xd2\x38\x7a\x10\x90\xb7\xba\xb4\x94\x3d\x6f\xa6\ \x03\x65\x40\xad\xc1\x32\xe2\xbf\x84\x80\x43\x87\xe6\xc3\x1b\xa9\ 
\x8e\xe7\x10\x31\x51\xbe\x71\x51\x51\x8d\x05\xa0\xa5\x69\x51\x0c\ \xe7\xc0\x68\x78\x3c\xa3\x1e\xba\xb9\xd5\x56\x14\xab\x44\xa6\x81\ \x9b\x4e\xc2\xb0\x46\x52\xb6\x0a\xbc\x92\x01\x58\xc5\xde\xa3\xee\ \x4e\xe9\xfd\x5d\x4f\x06\xc5\x3f\x20\xc1\xe1\x3d\x2a\xce\xdf\x2a\ \xf3\x79\xb0\x6f\xdf\xc1\x70\xa4\x2c\xc1\xbc\x45\x43\xc6\x6f\x35\ \xe9\x68\x6a\x24\x10\x3b\x83\xe5\x45\xa6\x82\x27\xb2\x2c\x38\x05\ \x5d\x57\xf6\x6a\xa8\xf4\xde\xd2\x3e\x68\x63\xcc\x98\x9e\x9d\x9d\ \x3d\x9b\x64\xc0\x3b\xe0\x7e\x88\x9c\xb4\xe3\x78\xb7\xb2\xc8\x2c\ \x70\x35\x42\x2b\x70\x2e\x68\xfa\xe5\xcb\x97\xa5\xd8\x96\x75\xa5\ \x6d\x26\xe6\xe3\x08\xcd\x07\xc5\xed\x5e\x5c\xbc\x68\x2d\x9b\x95\ \x04\x50\x43\x92\xa0\x6b\xd7\x4a\x42\x10\x96\x9a\x76\xf4\x54\xf2\ \x19\x29\x06\x46\x71\x3e\xca\xe1\x04\xcd\x33\x3f\xb3\x88\x27\xb2\ \x09\xbe\x2d\x17\x51\x0b\x5b\xea\x8f\x86\x6f\xca\x45\x42\x82\xb4\ \x07\x69\xeb\x09\xd5\xaa\x60\xec\xbd\xe7\x90\x77\x13\x8d\xdc\x0d\ \x10\xff\xb9\x2e\xcf\x10\x0e\x22\xb3\xc1\x6a\x62\x65\xc0\x1a\x88\ \xdd\x55\x86\xe1\xa8\xbf\x28\x29\xde\xfa\xdc\xd3\x73\xc9\x95\x6d\ \xdb\x0a\xc3\x69\xe2\x25\xd5\xc2\xca\x83\x2a\x59\x36\x94\x43\xa9\ \x38\xa7\xfa\xb6\x87\x47\x8f\x6e\xc2\x4a\x99\x40\xf7\x32\x38\x3d\ \x8c\x33\x8b\x32\xbf\xfc\xa2\x07\x32\x19\x96\xe2\x3a\x4d\xe3\xfc\ \x36\x0d\x4c\xf1\x5e\xb3\x66\x1f\x6d\x5f\xe4\x58\x36\xa4\x18\x3d\ \x83\xb7\xec\xd3\xeb\xdb\xb7\x4d\xc9\x08\xff\xa2\x97\xf6\x90\x8c\ \x29\xa6\x64\xbc\xb9\x7b\x77\xbe\x63\x9b\x36\x37\x68\xc5\x02\xa9\ \x70\x75\x59\x64\x70\xf0\x28\x64\xb1\x8c\xa7\x46\x8d\x9a\x3d\xbd\ \x57\x16\x2d\xda\x0a\x95\x24\x27\x68\x57\xb3\xbb\x63\xc7\x69\x4a\ \xe3\x49\x1c\x17\x94\xdd\xcd\xd5\xac\x11\x77\x65\xf5\xe1\x72\x42\ \x02\x67\x80\x59\x34\x19\x68\x60\xb1\x03\xe3\x2a\x4d\x27\xca\x91\ \x91\x23\xcf\xd2\x56\x45\x29\xbf\xc0\xd8\x54\x70\x19\xd8\x47\xc9\ \x76\xc0\x24\x2a\xe2\x3a\x7e\x19\xd2\x41\x94\xe7\xab\x28\x38\x92\ \x02\xe3\x24\x5b\xa7\x91\x0b\x7b\x93\x44\x68\x0f\xfe\xfa\x6b\x7d\ \x2d\x6f\xf5\xbc\x97\xfe\xfe\x95\xa9\x8f\x39\x56\xc5\x48\x74\xcc\ 
\x59\x16\xec\xe3\xb3\x9a\x12\x24\x50\x00\xfb\xf4\x43\x60\x60\x56\ \x36\x65\x64\xb8\x9b\x9a\x1a\x62\x3b\xf9\x9a\xb2\x4f\xe8\xcc\x26\ \x35\x37\xa2\xa8\xd4\x44\x1e\xda\xf3\x48\x09\x35\x75\x6c\x48\x21\ \x4d\x38\xe5\x73\x7a\x59\x58\x34\x91\x93\x19\x14\x7c\xf7\x6e\x21\ \x91\x51\x60\xc3\xab\x0e\x4e\xa7\x3e\x01\x50\x89\x0e\x53\x92\xa2\ \xbf\xa1\xaf\xb2\x40\xb3\x35\x17\xce\x74\x3b\x71\xaf\xee\x0d\x2e\ \x53\xc7\x18\x59\x29\xa1\x16\x7a\xe3\xc6\xac\x1d\x2d\x5b\xde\x80\ \xd1\xdc\xa6\x4e\x32\x34\x59\xc8\xf5\x73\x21\x85\x27\xe9\x66\x7e\ \x01\xa3\x8a\x87\x36\xa3\xd3\x8d\x2d\x5b\x1c\x0f\xfd\xf6\x9b\x3b\ \xa5\xee\x60\x8c\xae\xbb\xe9\x7d\x1a\x97\xad\x6f\x33\x16\x7c\xbe\ \x6b\x19\xfb\xe1\xc3\x12\xea\xf3\x16\xfd\xf2\xa5\x39\xee\x0b\x0b\ \x0d\xe0\x70\x4e\x9e\xca\x18\x9c\xc9\x5f\x50\x22\x31\x1d\x07\x70\ \x96\x73\x46\x89\x90\x3a\xb9\x18\xd4\x91\x73\x1c\x9f\xef\xc6\x91\ \x67\x0c\x1d\x58\x6e\xd2\xb7\x21\xca\x81\xae\xcb\x46\x0f\x2d\xc0\ \x14\xa7\x81\x03\xbd\x70\x3f\x07\xec\x0c\x0e\x73\x19\x3a\xf4\x1c\ \x8d\x4b\x8f\xd9\x5d\x0a\xb6\x67\xea\x64\x72\x19\x50\x5f\xb0\x83\ \x5c\xd1\x0a\x82\x1e\xd8\x75\x2c\xc4\x75\x2f\xf8\xf5\xf6\xee\xdd\ \xdb\xf0\xbc\xae\x7c\xbf\x21\xb8\x04\xe4\xe6\x8b\x3a\x98\xb1\xd2\ \x9a\xce\x0b\x61\x0f\x1f\xce\x25\x83\x53\xd2\x89\x8e\x8d\x1b\x47\ \x85\xaf\x79\x85\xc4\x89\x29\x53\x46\x4a\x2f\x66\x12\xb4\x3c\x48\ \xcd\xca\x48\x64\x09\x18\x54\x21\x8e\x80\xf8\x55\x65\xbb\x0f\x39\ \x07\xea\x11\x37\x56\x33\xf1\x58\x87\x8d\x8d\xb7\x98\x60\x5d\xb0\ \x23\xa8\xa2\x06\xed\xd8\x92\x24\x52\x03\x3f\x63\x21\x8c\x34\x62\ \x77\xa6\x34\xb9\x07\xfa\xf6\xbd\x28\xa5\xb0\x95\xec\x96\x31\xe0\ \xb8\xcc\xd5\x5e\x64\x60\xfb\x38\x98\xd4\x00\xa8\xa8\x55\x1d\x46\ \xc8\x9b\xf7\xdc\x3f\xf1\x1c\xd3\xf9\x3c\x8b\xba\xbd\x32\xe0\x6a\ \xde\xaf\x3e\xb3\xa9\x54\xe1\x94\xa1\xae\x28\xf5\xe2\xf0\x1e\x05\ \xc1\x18\x2a\x98\xa4\x2d\xcd\x0c\xa5\xcc\x5e\xf2\xab\x6c\x5a\x68\ \x20\x32\x0b\x5c\x06\xd4\x1e\x5e\x49\xb3\x03\x7d\xfa\x5c\xb4\x2a\ \x5b\xf6\xea\x29\x3b\x3b\x7d\xf1\x1d\x20\x43\x05\xdf\x4b\xe1\xe1\ 
\x4e\x22\xd3\xc1\x93\x59\x83\xb2\x1c\xec\xeb\xd6\xf5\x43\x6c\xee\ \xa5\x52\x82\xe1\x3e\x7f\xbe\x2b\xd5\x48\x51\xfe\x25\x8d\x91\x5e\ \xa6\xc7\xe2\xc5\xfb\x29\xf0\x8a\x73\xdd\x2d\x29\x1f\xe7\x2a\x32\ \x1b\xac\x16\xfd\x2b\x68\xfa\x4f\x6a\xd1\x10\x52\xb8\xab\x7c\x71\ \x66\x76\xed\x1d\xc0\x90\xdb\xcc\x79\xe4\x58\x51\x34\x15\x91\xe5\ \xfe\x44\xea\xe6\xaf\x93\x06\xf7\x19\x5b\x9a\x08\x97\xdf\x7e\x5b\ \x85\xac\x95\xc9\x70\xc0\xf8\xd3\x7b\xd8\x8e\x36\x14\x99\x0e\x6e\ \xc3\x25\xbe\x13\xca\xb9\x7d\x5b\xd3\xa6\xb7\x4f\x4c\x98\x70\xfc\ \xc1\xb1\x63\x1d\x65\x8b\xe6\x35\x99\x5f\x91\xc0\x13\x59\x04\x15\ \xcb\xc3\xd0\x7a\xf7\x04\x5a\x19\x39\x91\xdb\x19\x07\xf6\xed\x64\ \x70\x7e\x0e\x0e\x3b\xd6\x56\xaa\xa4\x68\xd5\x7b\x6f\x6b\xd6\x4c\ \x1d\x32\x80\x4c\xbb\x89\xc8\x74\x30\x4c\x84\xe8\x89\x79\xf8\x2d\ \x2d\x29\x5d\x4a\xdd\xa4\x46\x19\xd0\xb0\x4f\x1f\x3f\xb6\x35\xcf\ \x95\xeb\x8c\xa2\xaa\x9c\x35\xe2\xae\x5c\xbd\xdc\x05\x9c\x07\x96\ \xda\xda\xa8\xd1\x1f\x32\x8d\xe8\x34\x56\xb6\x29\xd8\x52\xee\x46\ \x3a\x51\x08\x8d\x11\xcf\x9b\x98\x58\x88\x4c\x07\x83\xb2\x83\x90\ \xc0\x40\x3d\xc8\x2f\x50\x87\xd6\xbf\x69\x70\x53\xe5\xae\x24\xee\ \x40\xbf\x7e\xb6\x8a\xe3\x05\x9d\x5f\xcf\xd2\x2e\x06\xbb\x9a\x87\ \x07\x7a\xf7\x6e\x29\xb2\x0e\x8c\xa8\xa8\xa8\x42\x1b\x6b\xd5\x7a\ \xac\xc8\xba\x39\xf5\xef\x3f\x07\x2d\x6f\x17\xa3\x3b\xcb\x71\x2a\ \x27\x81\x4c\xc0\x08\x91\x55\xe0\x2f\xc6\xa1\x7e\xdb\xb7\x4f\x41\ \xf8\x86\xb6\x84\x8e\x14\x18\xff\x13\x63\x2b\x4c\x35\x91\x90\x2c\ \x0f\x43\xb7\xa0\x70\x9a\x4f\x29\xdf\x37\x1e\xd7\x2f\xf8\x62\xbd\ \x2b\xcf\x86\xfd\x44\xd6\x82\x91\x10\x17\xd7\x17\x92\x7d\x97\xa0\ \xd6\x14\x27\x9d\x25\x61\x50\x15\xde\x01\x77\x35\x4d\x50\x23\x91\ \x95\x60\xc3\xab\x02\x8e\x77\x19\x32\x64\x2e\xe6\xe5\x26\xc9\xdb\ \x69\xca\x99\x53\x5e\x2c\xc6\x4c\x35\xcb\x80\x3e\x45\x47\x9b\x41\ \x5c\xf8\x1c\xaa\x4b\xe2\x69\x4c\xa3\x59\xe3\x4f\xa0\x8e\x84\x0b\ \x38\xed\xa8\x0f\x89\xd2\x42\x4d\xd8\x09\x89\xb6\xfe\xd6\x86\x86\ \x3b\x31\x56\x42\x67\x64\xd2\x58\x1d\xba\xe9\xa7\xa8\xa8\xc9\x28\ 
\x4a\x5e\x4d\xb9\xb0\x8a\x50\x11\xae\x45\xb1\x33\x51\xcb\xdd\x41\ \xb8\xf5\x63\xd8\xe3\xc7\x73\x65\xef\xf3\x5e\x6e\x53\xa7\x1e\xd7\ \x6a\xd6\xa8\x63\xe0\x89\x2d\x0d\x76\x55\x0a\x29\x53\xe9\x26\xc3\ \xc8\xfa\x9e\x70\x3d\xb1\x5a\x4d\xc6\x39\x9b\x9a\x6c\x34\x11\x40\ \x88\xaf\xef\x14\x4a\xeb\xa3\x78\xab\x5d\x95\x2a\x56\x18\xcf\x0f\ \x16\xc6\xca\x16\x4a\xaa\x70\xc8\xdf\x34\x95\x9e\x4f\x5d\x05\x17\ \xbe\xea\xf4\x04\xf1\xfc\x94\x02\x47\x82\xcd\x15\xef\x33\xf8\xfb\ \x2b\x3f\x3f\x6b\x1c\x03\x46\x63\x05\x7c\x08\xaa\x8f\x07\xd4\x37\ \x0e\xcf\xfe\x6d\x2a\xdd\x0c\x92\x02\xa0\x8c\x15\x4c\xf4\xef\xb8\ \xf6\x21\x29\x76\xea\x1a\x84\x31\x43\x4a\x35\xe2\x8a\xe5\x8c\x91\ \xb7\xd3\x4e\x72\x00\x6b\x22\x94\x33\x1a\xdb\x4b\x75\x19\x10\x14\ \x00\x2e\xfd\x4b\xcf\x6d\x0c\xc4\x7f\xb6\xd2\x24\x7f\x83\xb1\x94\ \xaf\x29\x32\x03\x6c\x88\x95\x69\x1b\x49\x8e\x93\xd3\x33\x66\x58\ \xfd\x4b\x2b\x41\x18\xd8\xd2\x74\xa2\x0e\xaf\xd4\x01\x08\x7d\x10\ \xec\xd1\x48\x62\x13\x0e\xee\x5b\x0e\x0d\x1e\xbc\x0d\x9a\x2c\x56\ \x51\xc1\xc1\x15\x05\x20\xeb\xf4\xea\x53\x70\x17\x46\x58\x49\x64\ \x08\x38\x5b\x05\xec\x07\xce\x00\x73\x89\xff\x10\x54\xff\xa5\xb3\ \xdf\x85\xa5\x4b\x6d\x60\x68\xd3\x2b\x74\xe8\xb0\x7f\xa4\xbb\xbb\ \x0b\x86\xf5\xc1\x9c\x60\x76\xd0\x7b\x89\x4a\x55\x06\xd1\x58\x3b\ \xdc\x6b\xe6\x0a\x5e\x04\x07\x98\x09\xf1\x41\xa4\x0d\x0c\x06\xeb\ \xae\x20\x2d\xcc\xdf\x4c\x4f\x2f\x51\x5b\x1f\x13\x2b\x5a\x53\x92\ \x7f\x23\x49\x88\x3d\x5d\xba\xb8\x5c\x5a\xb1\xc2\x01\x9a\xf6\xe7\ \x65\xec\xef\xde\x77\x74\x0e\x65\x30\x18\xc1\x97\x2f\xf7\xa4\x6d\ \x25\xb2\xd6\xbd\x35\x57\x78\x18\xdb\x63\x64\x4d\xc4\x40\xc3\x9e\ \x62\x7c\xf3\x65\x66\xfc\xa8\x6d\x4d\x9a\xa8\x73\x38\x91\x21\xf1\ \x9b\x48\x3b\x18\x0c\x0e\x2b\xa0\xfc\x47\xad\x08\x86\x92\xa0\x2e\ \xb2\xc0\xb5\x92\x54\x18\x3b\x49\x52\x70\x9a\x7a\x1e\xc1\xbe\xbe\ \xbd\x65\x9f\x32\x17\x8d\x96\xbe\x07\xd3\x70\xbe\x63\x30\x38\x88\ \xee\x6d\x65\xb5\xfb\xdd\xc3\x87\x9d\xa5\x11\x8d\x07\x53\x1e\x1c\ \x3e\xbc\x46\xbb\x3f\x02\xda\xf4\xe6\xa3\x67\xdb\x5b\xb6\xf4\x16\ 
\x00\xb6\x97\x76\x74\x0f\x26\x80\x4b\xff\x5e\xf5\x33\x83\xc1\x46\ \xd7\x04\x6c\x2c\x00\x53\x3d\xbd\x6e\x64\x44\xf0\x5a\x9a\xa4\x92\ \x78\x3b\x40\x4a\xc3\x39\x09\x00\xdb\x51\x1f\xe4\x01\x26\xa1\x1d\ \xd4\x63\x64\x4b\xc4\xa4\xb1\x33\x10\x83\xc1\xf0\xda\xb0\x21\x2f\ \x95\x8d\x58\xe4\xc9\xf3\x8a\x82\xe4\x5a\x05\x93\x11\xa8\xd9\x8a\ \x89\x0c\x09\x99\x8d\xfb\x6a\xe0\x57\x2a\x80\x85\xb1\x2e\x46\x22\ \xae\x39\xae\xa4\xad\x69\x8d\xf1\x2b\x7f\xd1\x5d\x86\xc1\x60\x90\ \xf7\x91\x3c\x94\xd0\xdb\x57\xf7\xbc\x86\x07\xf3\x2a\x0c\x88\x8c\ \x6b\x9d\x6c\xf1\x14\x45\x7a\xfa\xd4\xc0\x9d\xc6\xc0\x94\x7b\x07\ \x0e\x6c\x46\x1b\xdc\x89\x10\xd4\xb9\x67\x63\x68\xd8\x6e\x55\xb1\ \x62\x23\xb1\xda\xc5\x4a\xbd\x95\x6d\xbc\xcd\x64\x30\xbe\x5d\xab\ \xb5\x0a\x4c\x21\x42\x33\xf3\xbe\x75\xd9\xb2\xf7\xd5\xdd\x3f\x55\ \xaa\xd7\x48\xb6\x75\x97\x3d\xb2\xc7\x20\xab\xbd\x30\xc6\x62\xd0\ \x1b\x3b\x74\x4f\xb7\x6e\x07\x15\xfd\x15\x6c\x31\xed\xa2\xde\xbd\ \xab\xb4\xae\x4a\x95\x67\x10\xb0\xfd\x8c\x3a\x3d\x6f\x34\x2c\x49\ \x4d\x64\x87\xc1\x60\x6c\xaa\x5d\xbb\xc1\xa6\x3a\x75\x7c\x50\x6f\ \x47\x46\xf6\x15\x86\xe5\x0e\xc5\xb0\xe5\xa4\xb1\x22\xab\x12\x46\ \x82\xfa\x30\xc2\x49\x52\x51\x2c\x86\xae\x48\x15\xfb\x48\x57\x8f\ \x45\x8b\xf6\xe1\x99\x19\x8d\x1d\x19\x35\xea\xac\xfc\x9d\x3a\x1a\ \x06\xdd\x49\x3a\x57\x72\x0b\x35\x18\x0c\x16\xa9\xfd\x23\x3a\x34\ \x74\xc5\x8e\xd6\xad\xaf\xc8\xda\xad\x9b\x90\xf4\x3e\x74\xc9\xce\ \x4e\xe9\x57\xae\xa2\xb6\xbc\x1a\x8a\xd0\x97\xa9\x09\x25\xdd\x43\ \xea\x6f\x37\x55\x2d\x43\x2a\xe0\x36\xb2\xe1\x17\x6a\x77\xe7\x54\ \x8a\x2f\x4d\x84\xe8\x2b\x14\x30\x18\x82\x0d\xcf\x90\x44\x4f\x49\ \x5b\x1f\x5d\x42\x9f\x41\xc0\xe8\xce\xd6\xc6\x8d\xcb\xca\x55\xaa\ \x89\x62\x6c\xa4\xcd\x48\xd5\xe6\x38\xf3\xdd\xa5\x56\x4f\x58\xe9\ \xa8\xc3\xeb\x5b\xd9\xbc\xb0\xa6\x7c\x7f\x18\x0c\xcd\x1d\x46\xf6\ \x4b\xd8\xa3\x47\xd5\x3c\x16\x2e\x74\x49\x5d\x3e\x80\xc1\x60\xc3\ \x6b\x0f\x9a\x4a\x2a\xb5\x5a\xaa\xe3\x13\x26\x6c\x72\x9b\x36\xed\ \x38\xc6\x8c\xa1\x2e\x56\x13\xe7\xb5\x58\xe9\x64\x49\xbe\x77\xf0\ 
\xe0\x16\x8c\x77\xd5\xa8\x4c\xe8\x85\xe7\x71\x74\xce\x33\xcb\x99\ \x73\xdf\x9b\x7b\xf7\x06\xe1\x79\x75\x91\x3a\x18\x0c\xae\xe5\x02\ \x0d\xc1\x6c\x1a\x63\x6d\xc0\xd9\xa0\x11\x0c\xa9\x86\xb2\xe2\x1d\ \x9f\x38\xd1\x0d\x63\xe3\x40\x3d\xcd\xde\x6b\x81\x6e\x6e\xeb\xe8\ \x4c\x88\xed\xe9\x73\x2f\x53\xd3\xb4\xaa\x44\x33\x18\x0c\x8d\xc6\ \x92\x45\x71\x66\xbb\xba\xa5\x41\x83\x7b\x32\xef\xb2\xa0\xd0\x02\ \x72\x2f\xb7\x93\x41\x5e\x5d\xbb\x76\x37\x9e\xd7\x17\xff\x08\x0c\ \x06\x1b\xdf\x88\x2f\xc9\xc9\xe4\xc9\xac\x2a\xab\x0d\xfa\x82\x5d\ \x05\x20\x57\xc0\xa4\x0d\x35\x6b\xde\xc7\xf3\xc9\xe0\x5f\xc6\xe5\ \xa4\x63\x86\x3a\x07\xd5\x12\xa9\x82\xc1\xe0\x6e\x40\x86\x1a\xee\ \x7f\x5f\x30\x45\x36\x25\xb9\x88\x33\x5c\xf2\x33\x4f\xcf\x75\x78\ \xa7\x52\x2a\xb1\xbf\xfa\xe0\x62\xbc\x67\xbc\x58\x88\x9a\x8a\x0c\ \x04\xee\xc3\x30\xfe\x19\x5c\x25\xfe\x1c\x0c\x06\x57\x94\x23\x20\ \xee\x4d\x86\x46\x86\x67\x55\xa6\x4c\xe8\xfb\xfb\xf7\x47\x0b\x2d\ \xc0\xa8\x96\x2b\x41\x73\x85\x18\x5b\x28\x80\x9d\xed\xda\xed\xa6\ \x7b\x78\x47\xd7\xfe\xf5\xaa\xc8\x60\xb0\x4c\xdc\x10\x5a\xd5\x14\ \xb5\x68\x18\x5f\x38\xae\x03\xb4\x9b\x59\xa0\x05\xf3\xb3\x5b\x8e\ \x8e\xdb\x2e\xae\x58\x71\x00\x5a\x8e\x0f\x65\x4a\xd8\x74\xbc\x1f\ \x81\x70\x43\x58\x42\x6c\xec\x22\xea\x1e\x2b\xfe\x12\x0c\x06\x1b\ \x5e\x25\x90\xfa\x20\xec\x81\x61\xbd\x46\x42\xf4\x25\x0d\x83\xdb\ \x01\xa6\x84\x5e\xbb\x66\x43\x67\x40\xb0\xd7\xc7\xb7\x6f\x67\x60\ \x35\x7c\x4d\xe3\x44\x9f\x35\x6b\xf6\x62\xbc\x89\x48\x13\x18\x0c\ \xd6\xce\xfc\x05\x0d\x48\x16\x24\x27\x26\x92\x63\xa5\xa8\x00\x10\ \x4c\x3f\x44\xab\x59\x78\x50\xd0\x10\xc5\xe3\x89\xab\xde\x96\x86\ \x0d\x3d\xc9\xd8\xd6\x57\xaf\xfe\x08\xf7\x53\xbf\x6f\x3b\xc9\x60\ \xb0\xe1\xe5\x06\xab\x83\x2a\x99\xb7\xa9\xce\xb9\x5c\x53\xaa\xd4\ \x69\x45\xde\x81\xfa\xa2\x21\x8f\xf3\x3e\x25\x3e\x3f\x75\x77\x5f\ \x4f\x32\x72\xb2\xb5\xd3\x60\x70\x19\x38\x99\xda\x33\x8b\xb4\x81\ \xc1\x60\xbc\xf4\xf5\x2d\x8b\xb6\x4c\xc1\x72\xfb\x18\x80\x73\xdb\ \x3e\xf0\x19\xdd\x43\xb8\xe8\x0a\x8c\xed\x37\x12\xa6\xc5\xd8\x0d\ 
\x2d\xa7\xca\x3b\x5c\xcb\x8b\xb4\x81\xc1\x60\xc4\x47\x44\x8c\x40\ \xb7\x19\x0f\x24\x47\xdf\xa1\x8a\x71\x32\x28\x14\xb8\xc6\x22\x17\ \x73\x39\xc9\x82\xe3\x7e\x2b\x8d\x6d\xfb\xe5\x97\x0b\x0f\x8f\x1f\ \xdf\x44\x72\xe0\x48\x15\x8b\xc7\xbb\x8f\x30\xbe\xc4\x58\x08\x23\ \xf1\xef\x03\x83\xb6\x35\x22\xa3\xc0\x4d\xea\xfb\x83\x26\xb7\xf7\ \xee\xdd\x06\x63\x52\x97\x05\x1d\xe8\xdb\xd7\x8a\xb6\x92\x48\x88\ \x8e\x46\x4d\x5e\x30\x3d\x07\x7b\x80\x7d\xdc\x17\x2c\xd8\xa7\xb1\ \xe2\xc5\x9b\x08\xf1\x2f\xeb\x36\xc3\xc6\x96\x8d\xb6\x3a\xa0\x17\ \x69\xf0\x8b\x8c\x02\x1b\x9e\xd1\x6b\x7f\xff\x55\xbb\x3b\x77\xbe\ \x7a\x76\xde\xbc\x31\x54\x29\x4e\x15\x08\x1b\xeb\xd4\x21\xe7\x49\ \x35\x21\xb1\xbb\x63\xc7\x81\x64\x6c\xf0\x78\xc6\x22\xa5\xec\x3e\ \xb4\x35\x67\x8a\x7f\x17\x18\xf0\xa8\xf9\x4b\x11\xd4\x67\x22\x23\ \xc1\x86\x57\x06\x1c\x4d\x14\x80\x65\x89\x12\xf7\xc9\xe8\x56\x97\ \x28\xd1\x43\xee\x34\x72\x2a\xe1\x04\xa5\x5a\x01\x9c\x2b\x33\x53\ \x86\x60\x7c\x22\x56\xbc\xe6\x3f\xb6\x8a\x36\xaf\x70\x0d\xa9\xa8\ \x92\x26\x19\x9a\x1d\xc1\xd2\xad\x9d\x09\x60\x78\x2d\x5d\x3a\x09\ \xe1\x83\x78\x18\x1d\x65\xac\x3c\x00\x03\xc1\x14\xab\x52\xa5\x5e\ \x25\x25\x24\xcc\xa3\x00\x3b\x1c\x2f\xad\x30\xf6\x41\xcb\xb1\x72\ \xcb\x58\x88\x0a\x42\xa7\xc1\x48\x2d\xde\xa3\x4a\x11\xc2\x4e\x4f\ \x4f\x2f\x49\x00\x06\x85\x0a\xc5\xd3\x7b\xd2\x10\xf3\x92\x42\x95\ \x4c\xd2\xcd\x00\xf0\xb9\xb9\x8d\xa9\xe9\xde\x41\x47\x8f\xda\x57\ \xe9\xd9\xd3\x3b\x4f\xc9\x92\x31\x2a\x95\xca\x48\x00\x6d\xcc\xcc\ \xce\xe6\xd0\xd7\xf7\x98\xa6\x52\x7d\x8e\x7a\xf1\xc2\x05\x43\x45\ \x2a\x75\xea\xe4\xd1\x77\xef\x5e\x87\xea\xfd\xfa\x9d\xcf\x96\x2d\ \x5b\xed\x1c\x2a\x15\x55\xac\xff\x48\xbd\xf3\xd8\xe0\x30\x61\x43\ \x54\x42\x34\xab\xd4\xa5\x8b\x9f\x00\x72\x15\x2a\x14\x8b\x4b\x1c\ \x8c\xac\x11\xae\xfe\x78\x36\x0b\xbf\x44\x09\xba\x76\x22\x3d\xc1\ \xc6\x46\xbb\x88\xb7\x58\xd9\xf6\x04\xec\xdb\xe7\x32\xe4\xd8\xb1\ \x03\xed\x96\x2d\xbb\x8f\xb1\xec\xa5\x20\x6e\xd4\x60\xdc\xb8\x6b\ \x78\x7e\xab\xb0\x10\xed\x71\x2d\x96\x2d\x7b\xf6\xcf\x41\x9e\x9e\ 
\xad\x6f\xac\x5f\x9f\xa7\x8b\x9d\x9d\x73\x2b\x63\x63\x57\xac\x7e\ \x25\xf1\x05\x39\x27\x95\xfc\xcd\xb9\x98\xd7\xa3\x42\xb7\xc0\x90\ \xe7\x82\x97\x10\xca\x09\x7b\x74\xec\x98\x5a\x61\x78\x7f\xef\xde\ \x94\xf9\x6e\x4c\xe5\x26\x74\x4f\xb4\x2c\x5e\xfc\xdd\xb6\xa6\x4d\ \x1d\xd2\xb7\xb7\x17\x3b\xaa\xd6\x56\xa8\xe0\x68\x8a\x15\x8c\xe4\ \xf9\x70\x7f\x1b\xd7\xc4\x95\x05\x0a\xbc\x96\x4d\xe7\x8d\xe4\x7b\ \x7d\xc0\x94\xe3\xe3\xc7\xbb\x41\x57\xe5\x3c\x0c\x34\x91\xe6\x0c\ \xd9\x2a\x0b\xe9\xec\x87\xdc\xcc\xcb\x02\xa0\x78\x1e\x85\x11\x4c\ \x84\xe8\x4d\xff\x9d\x35\x25\x4a\x5c\xd1\xa1\xe6\x87\x0c\xda\x26\ \x62\x82\x36\x83\x29\x9e\xa6\xa6\xce\xe7\xe6\xce\x9d\x28\xd5\xa9\ \x12\xe8\xba\xb2\x60\xc1\x27\x2b\xf2\xe5\x8b\x86\x4e\x63\x52\x88\ \x8f\x8f\x2d\x26\x6f\x09\x58\x50\xa4\x27\xd8\x81\xd2\xf1\xb1\x9b\ \xdb\x46\xc4\xe0\xae\x43\x7e\x2f\x18\x72\x7b\x1e\xd1\xaf\x5f\xaf\ \xd0\xd4\x43\x59\x24\x44\x29\x52\x18\xc3\x97\x5e\x10\x2a\x14\xc6\ \x05\x79\x79\xd9\x41\xa2\xef\x16\xcd\x91\x0c\xa0\x7b\x0b\x00\xde\ \xe5\xca\x30\xb4\x8f\xca\xb8\xbf\xa3\xe3\x76\xcc\x57\x59\xa1\x13\ \x60\xa8\x30\x29\x6f\xa5\xeb\x39\x06\xcd\x09\xe7\x43\x89\xb8\xaf\ \x9c\xac\xaf\xdb\x5b\xb5\x3a\x8f\x58\x90\x2b\xdd\x23\x5e\x74\x31\ \x3e\x3c\x7c\x08\x26\x9d\xca\x4c\xa6\xa6\x7b\xbc\x8e\x8d\xae\x0a\ \x38\x05\x34\x95\xec\xa4\x9d\x5b\xe9\xf2\xdb\x6f\xea\x9c\x4c\xac\ \x86\xcf\x37\x54\xaf\xbe\x38\xec\xe1\xc3\xb9\x57\xed\xec\x76\xad\ \x2e\x5e\xfc\xcd\x7d\x17\x97\x8d\x8a\xdc\xc3\xf1\x71\xe3\xf6\xd0\ \x9c\xc9\xc6\x25\x1f\x75\x68\xbe\x18\xd7\xb7\x6c\x19\x8e\xc9\x53\ \x97\x95\x60\x1b\xe3\x8d\xab\x01\x34\x1a\xdd\xbc\xd7\xac\xd9\xf7\ \x29\x32\xd2\x0c\x9e\xb3\xd7\x50\xa7\x8a\x80\x66\xa3\xd9\xd3\xb3\ \x67\x8b\x63\xb2\x43\xa4\x77\xec\x66\x86\x1c\xd4\xd9\xf0\x8a\x81\ \x45\xbe\xf1\xac\x86\xe7\x92\x25\xce\xe8\xe8\x1a\xa6\x34\x16\x81\ \x00\xed\xb9\x98\xb7\x6f\x17\x53\x89\x90\x00\xc6\x0b\x91\x03\xbb\ \x91\x40\xcc\x59\xec\x45\x0b\x8b\x03\x96\xc5\x8a\xbd\x92\xef\xfa\ \x49\x39\xf7\x2c\x06\x4f\x70\x49\x70\xea\xb5\xf5\xeb\x77\xa1\x47\ 
\x9a\x39\xee\xf5\x1f\x9d\x3c\x39\xc6\x6f\xdb\x36\xeb\x35\xa5\x4b\ \xaf\xa6\xc9\xa2\x55\x0e\xe3\x6d\x05\x00\x37\xf5\xfd\x65\xfa\xfa\ \x09\x08\x1b\xdc\x71\x1d\x3a\xb4\x94\xf8\x06\x32\xac\x91\x21\xcf\ \x57\x03\xc8\xf5\x99\xf8\x6f\xdf\xbe\x7d\x4b\xa3\x46\x97\xa0\xa1\ \xe2\x7b\x76\xf6\xec\x0a\x8a\x91\x52\x7d\x1d\xcd\xd9\xc9\xa9\x53\ \x4f\x60\x6c\x24\x42\x0a\x0b\xa8\x1d\x17\xce\x78\xde\xa7\x16\x2e\ \x2c\xa6\x79\x76\xcc\xda\x86\x24\x5c\x56\xd2\x04\x1c\x0a\xe6\xc6\ \x64\xec\x97\x5b\x92\x24\xd4\x6c\x05\x61\x82\x49\xb5\x2a\x87\x00\ \xb0\xda\x3d\x40\x3e\xe0\x1b\xb9\xed\x69\x29\x83\xb0\x6f\x71\x25\ \xe3\xcc\x2b\x27\xd3\x8a\x0e\xf4\xe0\xd0\x0c\x0b\xcc\x72\xaf\xec\ \x16\xe0\x64\x39\x0f\xcd\x95\xd5\x8d\xe4\x1a\x68\x55\x83\xa1\x91\ \xc8\x91\x01\x98\x0b\xec\x0a\x9a\x80\x0d\x35\x0c\x6e\x34\x18\xa5\ \x03\x39\x9a\x8c\xeb\x0e\x0e\xb5\x1c\x9a\x37\xf7\x25\xef\x17\xa5\ \x15\x85\xdc\xb8\xd1\x58\x48\x20\xe1\x36\x18\x0d\x2d\x5e\xc8\x82\ \x4b\x95\x99\x81\x41\x6b\x0d\xe9\x80\x33\x02\xc0\xef\xac\xa4\x7b\ \x49\xcf\x0c\x95\xfb\x66\xe3\x2b\xa8\xe9\x58\x51\x7a\x25\x24\x25\ \x25\x35\x14\x1a\xd0\xee\x7f\x07\xc7\x4c\x31\x1c\x17\xa2\x70\x66\ \xf7\xd2\x8d\x2f\x45\xae\xe7\x1a\xf9\xc0\xd5\xd5\x1e\x5d\x43\x5d\ \xe9\x5c\xa1\x21\x76\x1a\x06\x55\xaa\xc0\x9b\xfb\xf7\x97\xc6\xe4\ \x5a\x90\xeb\x99\xbc\x67\xe4\x54\xf1\x32\x33\xb3\xc2\xbb\x39\x21\ \x82\x1a\x80\x95\x30\xce\x79\xf0\x60\x0f\x6c\x41\xef\xec\xef\xd9\ \xb3\xc4\x1f\x42\xe8\x67\xf8\x16\x86\xa1\x82\x2e\xca\x39\x32\x3a\ \x9c\xb5\xcf\x90\x1a\xd8\xb7\x6a\xea\x28\xb4\x83\x94\x31\xb5\xa4\ \xdf\xd2\x1c\x39\xc6\x08\x9d\x00\x1b\x5e\x0d\x70\x0c\x68\x24\x24\ \xa0\x38\x1c\x47\xf1\x38\xc4\xe8\x82\x68\xb2\x90\x03\x78\x1d\xe1\ \x02\x6b\xbc\xb3\x80\xde\xb5\x32\x34\x6c\x4f\xe3\xfb\xba\x77\xf7\ \x26\xaf\xdb\x87\x27\x4f\x56\xe3\xda\x14\x63\xce\x60\x0c\xb8\x80\ \x8c\x4f\x64\x08\x18\xc9\xc9\xc9\x6d\xf6\xf7\xea\x75\x09\xde\x64\ \x25\x34\x40\xba\x29\x26\xdf\x98\xdf\xb2\x68\xa9\x1c\x08\x27\x0b\ \xcd\x4b\x69\xa1\x13\xe0\xb0\x41\x0d\x8d\x7d\x7f\x41\x2a\x29\xa1\ 
\x89\xc4\x24\x45\x9c\x99\x39\x73\xb7\x3c\x43\xfc\x0a\xe6\x93\xef\ \xec\xa7\x77\x9e\x7b\x7a\xda\x60\x35\xdc\x80\x9f\xe3\x61\x9c\xe3\ \x0e\xf4\xee\xdd\x72\x75\x91\x22\x4a\x11\xe6\x5d\x30\xe3\x54\x89\ \x79\x87\xd2\x2d\x31\x2e\x6e\x19\x39\xc2\x0e\xf6\xeb\x77\xf8\xea\ \xba\x75\xc3\x34\xe6\xb0\x22\x58\x0d\x2c\x4b\x55\xe7\x28\x15\x1a\ \x43\x3b\x14\x6c\x2d\x4f\x8b\xac\x05\x43\x3a\x44\x52\x70\x3d\x0d\ \x8e\xc3\xcf\xaf\xc1\xaf\x0e\x4d\x9b\xde\x92\x2b\xd7\x2c\xb0\x9a\ \xc6\x64\x96\xa4\x2d\x26\xce\x07\x41\xb6\x15\x2a\x5c\x55\xce\x71\ \x76\x95\x2a\xd9\xfb\x6c\xd8\x60\x04\x71\x9d\x77\x38\x03\xc6\x63\ \xf5\x3b\xf8\xcc\xcf\xaf\x80\x90\xc8\x98\x33\x04\xf7\x33\x07\xdb\ \x81\x13\xc1\x81\x32\xc9\xa1\x39\xcd\x87\x16\x29\xae\xf7\x55\xaa\ \x8d\x0d\x11\x59\x07\x86\x69\x8d\x1a\x39\x77\xb4\x6c\x79\x42\xd1\ \x5c\x24\x27\xca\x85\x65\xcb\x76\xca\x6c\x93\xee\xa0\xbe\x96\x81\ \x9a\xc8\xd5\x4f\xfd\x3e\xa4\xdf\x42\xe9\x7a\xd1\xdc\x7c\x17\x9e\ \x39\xd1\xe4\x52\x7c\x0f\xbf\x37\x4f\xcb\x50\xc7\x82\x9e\x8b\x85\ \xc8\xe0\xc6\x17\x9c\x46\x06\x89\xbe\x39\x88\xe5\xad\x3f\x39\x69\ \xd2\x96\x83\x03\x06\xec\x74\x6c\xd7\x6e\xbf\x63\x9b\x36\xe7\x90\ \xe4\x70\x13\xce\xb0\x23\x59\x1c\x20\x67\x50\x95\xb2\x5a\x73\xb1\ \x66\xcd\x87\x64\x3c\x10\x3d\x3d\xa5\x28\x0f\x6b\x42\x56\x2d\xbf\ \x52\x0c\xf3\xd8\xd8\xb1\xce\x10\x38\xf5\xa5\x7b\xf4\x5f\x3b\x29\ \xd3\x8f\x3c\xf1\xbb\xd3\x41\x03\xf9\x3b\xca\xd5\x8a\x7e\xc7\xd6\ \xc8\xa8\x8d\xc8\x50\x30\x64\xf8\xa7\x0f\x38\x04\xfc\x1d\x9c\x04\ \xce\x04\x17\x82\xc6\xba\x53\x92\xc5\x9a\x8b\x93\xa9\x3d\x53\xec\ \xfb\xf7\xa3\xbf\xf1\xed\x59\x8f\x12\x6a\x29\xe7\x92\x32\x1c\xa8\ \xd9\x05\xf2\x02\xef\xd2\x76\x85\x56\x48\xeb\x32\x65\x1e\x27\xc5\ \xc7\xd3\xca\x58\x5a\xae\x86\x0b\x29\x0e\x04\xce\x89\x78\xf6\xac\ \xce\x0b\x6f\x6f\x52\x1d\x1e\x26\xb2\x1a\x1c\x8f\xd5\xa1\x15\x8e\ \x27\xa3\x11\xd8\x4c\x7c\x03\x4f\xce\x9c\x31\xbe\xbb\x6f\xdf\x56\ \xfa\xc6\x0c\xbe\x79\xb3\x34\x8c\x2d\x41\x8a\xe5\xc4\xbc\xf2\xf7\ \xb7\xa6\xdf\x17\x12\x6b\xab\x55\xab\x85\x33\xdd\x53\x7a\x0e\x06\ 
\xde\x39\x78\xb0\x95\x34\xc6\xbf\x09\x06\x83\x8d\xb2\x82\xf4\x58\ \xe6\xc7\x79\xec\x67\x19\x0f\xfa\x0a\x2f\x19\x79\x33\x07\x68\xf7\ \x62\x0b\x3c\x73\x66\x05\x6d\x25\xad\x0d\x0d\x03\xbe\x77\x2b\xc3\ \x60\x30\x64\x2e\xe5\xa1\xc1\x83\x77\x51\x2b\x5f\xa9\x2a\x9c\x53\ \x68\x01\x5b\xcd\xa3\xe0\x97\xc7\x27\x4e\x6c\x94\xed\x9f\xfe\x01\ \x18\x0c\x5e\xf1\x3a\xc8\x80\x78\x09\x79\x6e\x33\xc7\xaa\xb7\x02\ \xcc\x6f\x22\x84\x3a\x40\x4e\x8e\x15\x3c\x1f\xfe\x77\xa5\x08\xc0\ \xcd\xf8\xef\x4c\x4a\x25\x39\x9a\xc1\x60\x68\x86\x0e\xe0\x50\xd9\ \x21\xe3\x7a\x6f\x70\x7d\x8e\xc0\xf8\x47\xc4\xf3\xa8\x29\x7d\x71\ \x2d\xc3\x32\xc0\x3b\x53\xc0\xd3\x44\xdc\x4f\x04\x73\xab\x4b\x4f\ \xf4\xf4\xae\xc9\x73\xdf\x71\xf1\xa7\x60\x30\xd8\xf8\x9a\x5e\x5e\ \xb5\x6a\x3f\xaa\xc8\xdf\xc9\xf6\x4d\x11\xe7\x4d\x4c\xe6\x68\x6f\ \x45\x61\x64\x27\x64\x20\x36\x5e\x43\x9d\xea\x18\x3d\xdb\xda\xa4\ \x89\x2b\xdd\xa3\xe4\xc4\xfa\xaf\x5b\x3b\x31\x18\xdc\x3d\xb4\x69\ \x42\x4c\x8c\xf1\x91\x91\x23\xcf\x52\x5d\x9d\x34\x28\x4b\xed\xec\ \x16\x68\x75\x5c\x8b\x0b\x0b\xb3\xa0\x8c\x16\x14\xc4\x7a\xca\xf8\ \xde\x21\x5c\x13\xd6\x56\xaa\xf4\x9c\xe2\x79\x7f\xdf\x85\xcd\x60\ \x70\x13\xc3\x5e\x6f\x03\x02\x2c\xb7\xb7\x6c\x79\x1d\xc5\x91\x4e\ \x1a\x8d\xeb\x77\x83\x29\xaf\x6e\xde\xb4\x92\xa9\x49\x75\xc1\x9e\ \xf6\xf5\xea\x3d\x50\x82\xea\x0f\x8e\x1c\xb1\xc7\x58\x1a\xb3\x52\ \x18\x0c\x36\xbc\x52\x32\xdb\x61\x89\xe2\x58\x81\x7c\x00\xe9\xe9\ \x7f\xbd\x77\xe0\xc0\x70\xa1\x01\x48\x7d\x9f\x22\x83\x43\x1a\xd2\ \x0d\x2a\x1d\x12\xdf\x0d\x06\x83\x0d\x2f\xaf\x90\x40\x6e\xdf\x28\ \x32\x2c\xd4\xd3\x05\x2a\xb5\x5c\xb8\xaf\x83\x2e\x32\xa1\x28\x98\ \xfc\xf4\xfe\xe1\xc3\xd5\x64\x9c\xc6\x42\x94\xc3\xf8\x3a\xd0\x13\ \xdb\xd0\x03\xb8\xb6\x13\x69\x07\x83\xc1\xf2\x01\x7b\xbb\x75\xbb\ \xac\x64\xb2\x83\x21\x60\x3c\xdd\x1f\x1d\x3d\xfa\x34\x9e\xf7\xc0\ \x7d\x45\xd9\x3b\x2d\x05\x5e\x4b\xa5\xee\x2b\x09\xec\x24\x74\x12\ \x0c\x56\x5e\xd6\x51\x40\xe2\x3b\x66\xa8\x9b\xdb\x92\x9e\x5b\xb7\ \x3a\x56\xe9\xd1\xe3\x54\x9e\x62\xc5\xa2\x30\x9c\x2b\x07\xca\x7a\ 
\x3a\x58\x5a\xfa\xe0\x67\x4f\xd0\x06\xef\x15\x68\x3a\x6b\x96\xcb\ \x92\xcf\x9f\xad\x26\xde\xba\x65\x9d\xa7\x78\xf1\x28\x55\xb6\x6c\ \x2e\x30\xc4\xb3\xe0\x40\x91\x26\x30\x18\xbc\xd2\x35\x04\xc7\x3c\ \x39\x7b\xd6\x16\x89\xcf\xea\x2a\x04\xb4\xee\x75\x90\xad\x9d\x12\ \x6d\x8c\x8c\x82\xf1\x7c\x8e\x74\xaa\xd4\xf7\xdd\xb0\x61\x95\x56\ \x9d\xd7\x12\x91\x66\x30\x18\x6c\x78\xd9\xd1\xa4\x7e\x3a\x55\x20\ \x04\x5d\xbe\x3c\x5a\x00\x38\xcf\xc5\xa3\x93\x4c\x48\x5c\x5c\x5c\ \x59\x21\x61\x53\xb1\x62\x77\x32\x34\x28\x56\xbd\x77\x19\x3a\xf4\ \x9c\x97\xa9\xe9\x54\xf1\x7d\x60\x30\x38\x5b\x05\x6c\x0f\x76\x15\ \xc0\xba\xaa\x55\x3d\xa4\xaa\xb0\x05\xae\x45\x41\x43\xd0\x9b\xc6\ \xae\xd8\xd8\xec\x91\x55\xce\x24\x1d\xd0\x00\xcb\xdc\x7a\x29\xfb\ \xb7\x00\x4c\xab\xe4\x37\x83\xc1\x78\x78\xe4\x48\x67\x65\x9b\x49\ \x54\x7a\xda\x41\x4d\xec\x91\x94\x0b\xcf\x86\xb1\xe1\x8a\x7c\x1f\ \xe9\xaa\xc8\xf7\x3e\x9a\x08\xd1\x53\x30\x18\xec\x34\xf9\xdb\x32\ \x01\xb5\x2e\x6d\xdc\xe8\x3b\xfe\xe6\x4d\xcb\xce\x36\x36\x7b\x2b\ \x77\xef\xee\xaa\x87\xa2\x57\x95\x9e\xde\x97\xae\x76\x76\xe7\xf0\ \xca\xd9\x05\x2a\x55\x3e\x5c\xb7\x82\xaa\x6a\x90\xef\x5b\x1c\x1f\ \xbf\xa6\xdb\x86\x0d\xbb\xf5\xf3\xe6\xfd\xa2\xa7\x52\x1d\x86\xf1\ \xfd\x24\xb4\x40\x79\x9a\xb2\x75\x14\x83\xc1\x90\xc6\x56\x18\xfc\ \x4c\xaa\xce\x88\xd3\x8d\x7e\x7b\xf7\x6e\x5f\xe7\x5f\x7f\x55\xab\ \x44\xef\xed\xda\xd5\x87\x64\x03\x34\x5b\x3b\x41\x06\x3c\x9c\xae\ \xe8\xfe\x73\x03\x67\xc0\xb1\x0f\x0e\x1f\x5e\x43\xf7\xd0\xe3\x77\ \x4c\xc5\x90\x77\x82\x81\x1a\x43\x0c\x06\xe3\xe8\xef\xbf\x1b\xc3\ \x80\xde\x6b\xc4\xe9\x52\xec\x2a\x57\xbe\x81\x66\x23\xcb\x14\x7d\ \x7d\x13\x21\x3a\x2a\x71\xbb\xcb\x96\x96\x7b\xf1\x7e\x28\x19\xea\ \xf2\x3c\x79\x76\x21\x21\xfa\x33\x52\xc9\xfc\xa5\x91\x95\x06\x3d\ \x40\x4b\x30\x05\xe9\x63\xe7\x29\x0e\x28\x14\x30\x18\x82\x1d\x28\ \xb5\xe2\x23\x22\x4c\x4e\xcf\x9c\x79\x04\x92\xde\x17\x5c\x87\x0d\ \xdb\xf3\xe5\xcb\x97\xa5\x18\x6f\xa1\xb1\x5a\xe5\x46\x45\x42\x24\ \x32\x53\x62\xdc\xa6\x4f\x1f\x97\x10\x1b\xbb\xe8\xf0\xf0\xe1\x27\ 
\x51\x1e\xa4\x0e\x92\x1f\x1a\x34\x88\x0c\x4b\x8f\x94\xa0\xcd\x72\ \xe4\x78\x40\x63\x44\xa8\x59\x39\x69\x95\x0a\x31\x18\x0c\x12\x9d\ \x05\x7b\x80\xf3\xc1\xb9\x60\x0d\xa1\x05\xc8\xc1\x2d\xa7\x54\x30\ \x29\x6c\xe4\x74\x6a\xda\x34\xd3\x37\x77\xee\xac\x41\x2e\xe6\xf5\ \x60\x6f\xef\x55\x4a\x2f\x35\x84\x10\x76\xc9\x52\xa0\xcf\x52\xa5\ \xf8\x10\xae\xe5\x45\xda\xc0\x60\x70\x1b\xae\xa7\x1e\x1e\xd6\xe8\ \x2a\x7a\x93\xe2\x76\x64\x54\xb8\xbe\x7e\x79\xed\xda\x34\x92\x89\ \x13\x00\x29\x0f\xc3\x83\xf9\x1e\x8a\xd0\xef\x61\x8c\x56\x8e\xad\ \x5b\x5f\xa7\x6a\x04\x18\x5d\x24\xde\xff\x8e\x95\x8e\xc1\xe0\xde\ \x77\x53\x70\xbe\x5b\x8e\x56\xca\x87\x1c\xdb\xb6\x3d\x14\x70\xfc\ \xb8\x21\xc6\x72\x28\x7a\x98\x60\x8a\xcf\x9a\x35\x7b\x31\xd6\x05\ \x1c\x49\xa5\x3f\x38\x27\xba\x84\x87\x86\x96\x13\xdf\x05\x06\x83\ \x0d\xaf\x1c\xd8\x5d\x4a\xb3\xd7\x55\xce\x79\x60\x3c\x64\xd9\x03\ \xa5\xc8\x51\x36\xf9\x6e\x75\x70\x02\x58\x4d\xa4\x0f\x18\x0c\xc6\ \x5c\x21\xf2\x61\x3b\xf9\x19\x9d\x5c\x5f\x3e\x38\x7e\xbc\xbe\xc8\ \x58\x30\x18\x8c\x13\x93\x26\x6d\x20\x99\x3e\x18\x1e\x85\x19\x1c\ \xc0\x3e\x19\xd4\xb7\x9c\xc1\x99\x26\x8c\x1e\x9b\x36\x6d\x6c\xbf\ \x72\xe5\xfe\xc2\x15\x2b\xbe\x43\xb9\x0f\x55\x98\x1f\x01\x57\x88\ \x8c\x03\x83\xc1\x71\x3d\x70\x41\x74\x68\xe8\x8a\x9b\xdb\xb7\x6f\ \x8b\xfd\xf0\x21\xdd\xbb\x81\x32\x18\x2a\x50\x82\x21\x63\x72\xe5\ \xc1\x2a\x60\x14\x56\xbb\xab\xe2\xc7\x00\x83\xc1\x3d\xd8\x32\xe1\ \x0b\x8d\xc1\x60\x50\x20\x1d\x41\xf3\x27\x30\x3a\x3f\x30\x23\x45\ \x6a\x19\x0c\x06\x79\x38\x91\x46\xf6\x1a\xfc\xb4\xad\x49\x93\x9f\ \x44\xc6\x82\xc1\x60\xbc\xbd\x7f\x7f\xe2\x2b\x3f\x3f\x92\x61\x6f\ \x2b\x24\x18\x8c\x0c\x92\x08\x67\x94\xa8\x51\xc3\x19\x97\xfa\xe0\ \x75\x91\xfe\x60\x30\x18\x24\x54\x0b\xda\x64\x42\xc2\x33\x83\x03\ \xdf\x8c\x14\x21\x16\xe2\x32\x53\x2f\x07\x54\x1a\x32\x16\x0c\x06\ \xc3\x7d\xc1\x82\xa6\xa8\x44\xb8\x06\x09\x87\x8c\xd6\xc2\x64\xf0\ \x19\x8e\xd1\x61\xe5\xca\x7b\xb8\xec\x01\xa3\xc5\x0f\x01\x06\x83\ \xcf\x81\x27\xb1\x3c\x5e\xc4\xb5\xa1\xf8\xd7\x81\x41\xba\x8d\x13\ 
\x48\x1a\x4e\xe8\x04\x18\x79\x4b\x96\xf4\x55\x09\xd1\x4a\xa8\x54\ \x9e\xd4\x09\xc8\x58\x08\x23\xf1\xef\x01\x43\x0a\xa3\x3e\xc4\xb5\ \x8b\xc8\x72\x30\xd0\xff\x6e\x23\xcd\x89\xa6\xb8\x2d\xb8\xef\x5f\ \x92\xb1\xc2\x38\x39\x75\xea\x09\x48\x85\x47\xcb\x09\x3e\xb9\x58\ \x88\xca\x22\x4b\xc0\xa0\x5e\x77\x30\xae\x38\x8b\x5c\xb9\x3e\x9e\ \x9d\x3d\xfb\x08\xfa\x22\x38\x6c\xaa\x53\xe7\xa6\x54\x8d\x3e\x29\ \xfe\x15\xe0\x0c\xf9\x4e\x90\x17\x78\x0e\x61\x9d\xcf\x10\xce\x49\ \xc6\x84\x93\xf1\x15\x9d\x29\x44\x2e\x91\xa9\x60\xc8\xde\x07\x4a\ \xff\xbb\xd9\x60\x8f\xcf\x9f\x3f\x4f\x75\x68\xda\xf4\x16\x8d\x9b\ \xe5\xca\xd5\x48\xfc\xf0\xe0\x49\xee\x07\xa6\xec\xeb\xd1\xc3\xfb\ \xa9\xbb\xfb\xfa\xd3\x33\x66\x1c\x40\xbb\xdf\x3d\xd4\xe8\x10\x1c\ \x37\x50\x08\x3d\x91\xe1\x60\x98\x08\xd1\x5c\x11\xb4\x75\x1e\x38\ \xf0\x38\x35\x2e\x11\x12\x07\xfa\xf5\xb3\xa5\xf1\x3d\x5d\xba\x18\ \xcb\x39\xdb\x8a\xb9\xb1\xc3\xb5\xb0\xf8\xe1\xc0\x06\xf7\xdc\x3c\ \x57\xae\xa8\xe8\x97\x2f\xcd\xa9\xbf\x1a\xf8\xfb\xc9\xc9\x93\x47\ \x91\xce\x87\xdc\x66\xfa\x83\xad\x44\x46\x82\xe7\x80\x9c\x57\x37\ \x20\x46\x1b\xbf\xb1\x56\xad\x47\xe8\xe6\x9a\x8c\xb1\x6e\x78\xa4\ \xc2\xd5\x00\x73\x71\x90\xe6\xe2\xfa\xe6\xcd\x73\x05\xb0\x4c\x5f\ \x7f\xaf\x9c\x9b\x47\x3f\x60\x09\x10\x3b\x4d\xce\xcc\x9a\x75\x14\ \x86\xd6\x41\x48\x58\x95\x2b\x37\x8b\xc6\x37\xd6\xac\xf9\x18\x7f\ \x04\xb1\x52\x18\xd5\x29\x7d\x3d\x66\x0c\xf2\x0e\xc3\xd0\xc6\x80\ \x0b\x69\x1e\x0e\x8f\x18\x71\x2e\xe6\xed\x5b\x63\x9b\x72\xe5\x42\ \xa5\x41\xbd\xc7\xb3\x60\xd9\x11\x88\x54\xc5\xfa\x0b\x00\x5b\xcc\ \x4d\x34\x76\x64\xd4\xa8\xfd\x18\xa3\x36\x5d\x4d\xc0\x36\x42\x03\ \xd2\x58\xe7\x80\x1d\x84\xce\x80\x91\x2d\x5f\xe9\xd2\x21\x1d\x56\ \xad\xf2\xc6\xcd\x65\xa5\x96\x2b\xe6\xe5\xcb\xf9\x06\x05\x0b\x86\ \xfd\xee\xe3\xb3\x7f\xd2\xdd\xbb\x1b\x0d\x5b\xb6\xf4\xcb\xa6\x52\ \x0d\x40\x94\xfc\xe1\x12\x7a\x25\x5d\xc0\x28\x25\x84\x91\x4a\x88\ \x8d\xe0\x0a\xbd\x9c\x39\x13\xda\x9a\x9b\x5f\xc9\x57\xa2\xc4\xfa\ \x81\xce\xce\x96\x95\x3a\x75\xf2\xd1\xcf\x9f\x3f\x29\x67\xee\xdc\ 
\x02\x3f\x9f\x1f\xe9\xe9\x79\x48\x08\x11\xb0\x50\x88\x12\x21\x57\ \xaf\x0e\x45\xf8\xe0\x4d\xf7\xcd\x9b\xef\x60\x2c\x36\x45\x08\x73\ \x5c\xbd\x60\x5c\x47\xc9\xf1\x22\x57\x4d\x3a\x83\x4f\x03\xdd\x31\ \x67\xdb\x84\x6e\x80\xf1\xcc\xd3\x73\x1d\x56\xb7\x7a\x42\x02\x93\ \x63\x4d\xab\x1b\xf4\xfa\xef\x5f\x5d\xb7\x6e\x10\xd4\x88\xd7\xe3\ \xf9\x28\xe7\x41\x83\x3c\x69\x1c\x0d\x30\xec\x44\xba\x81\xe1\xb7\ \x65\xcb\x12\x5b\x23\xa3\x17\xb4\x62\xc1\x3b\x19\x02\x83\x2a\x86\ \xf9\xc8\x05\xfe\x0e\x9a\x4a\x2e\x06\x5b\x49\x43\x5a\x07\xa6\xdc\ \xd8\xbc\xd9\x11\x63\x0d\x04\xf0\xe8\xf8\xf1\x16\xe8\x22\x14\x47\ \xca\xd2\x1b\xaa\x57\x37\x12\x12\x3b\xdb\xb7\xdf\x2d\x3b\x0a\xd9\ \xe2\xdd\x9c\x42\x27\xc0\x5e\xca\x61\xa0\x4a\x00\x08\x09\x54\x85\ \xc1\x25\x22\x4c\xf0\x49\x23\x06\x94\x88\x43\x7c\x01\xe8\xf3\x07\ \xae\x2a\x5c\x38\x82\x1a\x68\xe0\xfd\x3c\x22\xbd\xc0\x5e\xe2\x4a\ \xf0\x44\xce\xf2\x30\x36\x76\xd9\x58\xbb\xf6\x15\xf7\xf9\xf3\x0d\ \x35\x9e\x15\x96\xe2\xb5\xb9\x85\x04\xce\x73\xaf\xd1\x66\xeb\x35\ \xc6\x26\x2a\xf3\x86\x79\x19\x43\xf3\xe5\x32\x64\x88\x07\xc6\xba\ \x09\x80\x0c\x17\xef\x46\x59\x95\x2e\xfd\x26\x29\x21\x61\x9e\x74\ \xc2\xe8\x02\xb8\x8d\x2f\x75\x8f\xa1\xc0\x37\x78\x92\x9c\x25\x8f\ \xdd\xdc\x36\x3a\x0d\x18\xe0\x45\x93\xb8\xbb\x4b\x97\x4b\xa4\x52\ \xec\x3c\x64\xc8\xef\xcf\x3c\x3c\x68\x35\x6c\x29\xb7\x2c\x9d\x64\ \xc3\x0b\x6f\xd0\x01\x6c\x21\xbe\x17\x3c\x07\x39\xc0\x36\xa0\x31\ \xd8\x4c\xfc\x09\x56\xe4\xcf\xff\x80\x9a\x93\xb8\x0e\x1d\xda\x57\ \xce\x43\x5e\x30\x14\xe3\x61\xf2\xcb\x30\x9f\x1c\xdf\x0c\xa6\xec\ \xea\xd4\xc9\xd7\x75\xc4\x88\x09\x3a\xe4\x6d\x66\xc0\x70\x4c\x94\ \xcc\x86\xad\x8d\x1a\xdd\xc6\xa4\x8d\xd8\xdf\xab\xd7\x62\x7c\x3b\ \xbe\x96\xde\xcb\xc2\x60\x79\x70\x20\xa8\xa7\x6c\x6b\x10\xb7\xfb\ \x88\x6d\x50\x20\x8c\x34\x49\xae\x86\xe3\xc4\x77\x83\xf1\x77\xb6\ \x7d\x37\xec\xed\x47\x58\xe4\xc9\x13\x2b\xe7\xeb\x29\x98\x00\xa6\ \x78\x99\x99\x39\xd1\x97\xa1\xdc\xa9\xd4\xc5\x98\xe2\x65\x56\x18\ \x08\xd6\x11\x59\x0f\xc6\x23\x2f\xaf\xa2\x07\xfb\xf5\x3b\x8b\xe6\ 
\xf3\x89\xe4\x91\x5c\x9e\x37\xef\xfe\x7d\xdd\xbb\x57\xc4\x04\x2e\ \xf1\xdd\xb0\x61\x34\x0c\xe9\x77\x0d\xaf\xe6\x78\x30\x65\x75\xd1\ \xa2\xcf\x43\xaf\x5f\xb7\xa1\x33\xc6\x87\x27\x4f\x56\xaf\xad\x58\ \x31\x48\x6e\x43\xf7\x60\xc2\x6b\x8a\x8c\x02\x1b\x65\xa1\xe7\x5e\ \x5e\xcb\xb1\x03\x39\x8b\x34\xb0\x53\x34\x17\x38\xbf\xc5\x24\x44\ \x47\x53\xbf\x84\xec\xf2\x1c\xee\x45\xf3\xb8\xab\x43\x07\x5f\x74\ \x7c\xdd\x7f\xa0\x4f\x9f\xf3\x50\x96\x4e\xa6\x2f\x48\x8c\x97\x14\ \x12\x5a\x73\xba\x28\x13\x93\x1d\x58\x8f\x11\xec\x4d\x1a\x1c\xb4\ \xc2\xc9\x30\x40\xb4\xb5\xa1\xe1\x4a\x5c\xef\xd3\xb9\x41\x00\x34\ \x21\x78\x16\x81\x16\xbe\x21\x71\xe1\xe1\x16\xf8\x9d\x41\x60\x19\ \xb0\x72\x54\x68\xe8\x94\x65\x39\x73\x26\xaa\x5b\xfb\x16\x2c\x68\ \x2c\x80\x3f\x84\xc8\x98\x73\x03\xcf\x57\x71\x70\x54\xa8\x9f\x9f\ \xe9\x96\x86\x0d\x6f\x43\xac\x28\xe9\xd8\xf8\xf1\x03\x31\x37\xcd\ \x30\x5f\x43\x69\x0e\xec\xeb\xd6\x7d\x80\x77\x4c\xe4\xae\x64\x1c\ \xfa\xe2\x1d\xa1\x71\xcb\x62\xc5\xd6\x0a\x2d\xc0\x10\xb7\xcb\x55\ \xf0\x05\x7e\x7f\x90\xc8\x34\xf0\x44\x96\x05\xc7\xdd\xda\xb5\xcb\ \x01\xdb\xc9\x60\x65\x3b\x02\xed\xfd\x13\x18\xaf\x88\xfb\x3e\xb2\ \x43\xa8\x33\xc5\xed\xb4\xb7\x43\x58\x19\x3f\x2c\xcf\x97\x2f\x26\ \xfa\xd5\xab\xe9\x78\x2f\xa7\x4c\x8a\xde\x40\x67\x44\x91\x11\xe0\ \xf9\x52\x81\xa3\x3e\x45\x46\x9a\x63\x25\x9b\xaa\xb9\x85\xdc\xdd\ \xb9\xf3\x55\x3c\x33\x12\x12\x8f\x4f\x9e\xfc\x45\xc6\xf3\xae\x0a\ \x2d\x24\x25\x25\x35\x42\x67\xd8\x73\xf4\x1c\x0e\xb3\x0b\x22\xd3\ \xc1\x93\x58\x17\x9e\xb3\x39\x94\x40\x8b\x6d\x4b\x20\xbc\x5c\xf3\ \x31\x66\x80\x09\x99\x01\xa6\x3c\x39\x7b\x76\x29\xbd\x27\xb4\x80\ \x6a\xe7\xb5\x64\xac\x78\xd6\x4e\x06\x5d\x95\x8c\xf7\x70\x5c\xa7\ \x82\x19\x53\xf0\xca\xf3\x55\x03\x2c\x74\x64\xf4\xe8\x0d\xd8\x5e\ \x46\x91\x97\x19\xd9\x43\xf1\xe4\x75\xd6\x48\x1d\xeb\x49\x73\x81\ \xad\xa8\x67\x2a\x73\x9e\x0d\xe7\x75\x6f\x6a\x68\x02\xc3\xdc\x80\ \xfb\x12\x22\xf3\xc1\x07\x78\xb0\xbd\x8c\x01\x35\x14\x00\xb6\x23\ \x03\x69\xd2\xb6\x35\x6b\xb6\xfd\x1b\xbf\xd3\x08\x1c\xbf\xfe\xe7\ 
\x9f\x4b\xd3\x96\x14\xee\xeb\x90\x6b\xeb\xd7\xef\x82\xdb\xfa\x8d\ \x34\xbe\x00\x6a\x74\x2f\x32\x0a\x3c\x67\x4d\x3f\xbe\x79\xb3\x8c\ \x72\x62\x61\x78\xb1\x30\xa0\x77\x32\x21\x7a\x03\x98\x84\x94\xb0\ \xd8\x60\x1f\x9f\xd5\x64\x60\x42\x03\x34\x27\x6a\x8f\x66\xfb\xf6\ \xb4\x2a\x0e\x17\x59\x0a\x9e\xc4\x82\xca\x6a\xb6\x65\xfc\xf8\x1c\ \x96\x25\x4b\xbe\x93\x49\xb6\x23\xc4\x37\xb0\x48\x88\x32\x70\x5d\ \x87\xde\x73\x76\xde\x92\x9c\x9c\xdc\xcf\xba\x5c\xb9\x60\x72\xc8\ \x60\xc2\x63\xe4\x8a\x77\x0c\xd7\x0c\x12\x48\x65\xa7\x0a\x58\xf7\ \xd6\xce\x9d\x76\xab\x8b\x15\x7b\xad\xec\x32\xb0\xd5\x0f\x92\xbb\ \x8f\x3e\x5a\x0e\x93\xec\x98\x8f\x7b\x58\x15\x3f\x86\x3f\x7d\xba\ \x02\xcf\x8b\xfd\xa9\x7c\x7b\xe6\x82\xe1\xe7\xe0\x30\x77\x65\xc1\ \x82\x91\x14\x06\xc0\x04\x5c\x00\x17\xc8\xec\x14\x03\xad\x89\x9f\ \x00\x8e\xc5\x36\x65\xb8\x12\x94\x8d\x08\x0a\x5a\x45\x8d\x0f\xe9\ \x1e\x71\xa3\xa1\x22\xa3\xc1\x6d\x96\x17\x04\x5d\xbc\x68\xf7\xe2\ \xd2\xa5\xb5\x5f\xbe\x7c\x59\x2a\x83\xe5\x06\x5a\x21\xa1\x29\x34\ \x1f\xc7\x27\x4e\x74\xc3\xb3\xae\xe2\x4f\x20\xe7\xfa\x84\xb1\x10\ \x15\x44\xe6\x81\xdd\xd2\xef\xee\xdd\x5b\xea\xd8\xa6\xcd\x0d\x64\ \xb5\xab\x33\x52\x68\xeb\x82\x42\xc9\x5a\x5a\xef\x15\xc1\x19\xb0\ \xb8\x66\x50\xd6\xd3\xc4\xa4\x39\x3c\x6a\x89\x08\x21\xbc\xc0\x19\ \x31\xe3\x63\x76\x3c\x57\xb9\xc1\xc6\x60\x3b\xb0\x36\x98\x8d\x1c\ \x5f\xb2\x82\xfc\x27\xb0\x30\xf8\x61\x55\xd1\xa2\xaf\x92\xe2\xe3\ \x17\x50\x4a\x99\xd0\x86\xac\x58\x90\xde\x4c\x47\x30\x79\x6d\x85\ \x0a\x99\x1c\xcf\xe3\x89\x2c\x00\x8e\xf9\x14\x1d\x6d\x1e\xe2\xe3\ \x63\x4b\x57\x3a\xef\xa5\xb2\x05\xb1\x00\x53\x2e\x2c\x5b\x76\x90\ \x82\xb2\xb8\x9f\x48\xf1\xa0\x07\xae\xae\xf6\xb8\xcf\x82\x58\x1d\ \x83\x76\x1c\xb2\xf4\x2a\x01\x86\xa7\xae\x26\xa7\x73\x36\x9d\xbf\ \xbf\x91\x14\x61\x2d\xab\x16\x26\xc5\x45\x45\x35\x7a\xe1\xed\xbd\ \x16\xef\xf6\x13\x59\x06\x6e\x52\xdf\x0c\xfc\x05\x34\xd0\x38\x80\ \x0f\x03\x2b\x62\x92\x3e\xe1\xdb\xf0\x09\x9e\x4d\x07\xb3\x43\x32\ \xa0\x20\xbc\x60\x34\x61\xbf\x8b\x2c\x03\x6f\x35\x03\x4f\x9d\xda\ 
\x60\x57\xb9\xf2\x63\x32\x36\xaa\xbd\x43\x0c\x6f\x37\xe6\x26\x55\ \x0f\x32\xe2\x79\xcd\xb1\x43\x09\x91\x67\xc1\xdb\xaf\x6e\xdf\x6e\ \x41\x25\x41\x42\x67\xc0\x69\x62\x07\xe4\xe4\xc4\x62\xfb\xf1\xe5\ \xe1\xd1\xa3\x9b\x30\x41\xd5\x85\x84\xdc\xde\x14\x14\x59\x09\x36\ \xba\xca\xe0\x1f\x97\x57\xad\xda\x8f\x6a\x90\x0f\xd2\x91\x75\x59\ \x5b\xa8\x48\x49\xa0\x86\x03\x46\x9d\xca\xb7\xae\x4a\x15\x1f\x1d\ \x9b\x3b\x46\x64\x68\x68\x3d\x2a\xa4\xa4\xea\x65\xf2\x66\x52\xa0\ \x75\xae\x10\xf9\x84\x2e\x82\xb3\x8b\x9a\x25\xc4\xc4\x18\x1f\x19\ \x39\xf2\x2c\x4a\x84\x6e\xb8\x0e\x1b\x96\x5a\x4f\x04\x15\xb6\xa1\ \x97\xe9\xec\x1d\x7a\xed\x1a\xa5\xf2\x19\x0a\x9d\x03\x67\xbe\xf7\ \x7c\x7b\xf7\xae\xe5\xf6\x16\x2d\x6e\xd2\x2a\x87\x04\xe7\x81\x42\ \x57\xc1\xf3\x95\x07\xec\x05\x2e\x01\x1b\xcb\x5d\xca\x7a\xd0\x44\ \xa6\xf2\xfd\x2a\x03\xe6\x5e\xb2\xe2\x5c\x47\xc1\x13\x59\x0a\xfc\ \x3d\x32\x24\x64\x25\xae\x3a\x6e\x70\x0c\xa5\xac\x87\x80\xd5\xec\ \x18\x19\x99\x94\x77\x08\x5d\x9e\x27\x4f\x78\xec\xfb\xf7\xa6\xe4\ \x2c\x4b\xc5\x31\x56\x16\xef\xcd\x05\xcd\xa8\x64\x2b\xeb\xcb\x80\ \x78\x22\xab\x83\x65\xc5\x8f\x04\x9e\xb3\xf6\xe7\x8d\x8d\x0f\xa1\ \x0c\x48\x7d\xbe\x43\xc6\xd0\x5b\xaf\xa5\x4b\xc7\xa5\xa2\x34\xd6\ \x9f\x0a\x94\xb5\x84\x6b\x6f\xca\x8a\x84\x34\x80\xc1\xe0\xf3\x5d\ \x6b\x5a\xd5\x9c\x06\x0e\xf4\xc2\x79\x3c\x49\x26\x3a\x2c\xd0\x58\ \xd9\xca\x83\xf1\x78\x16\x85\xc4\xe9\xdd\x14\x5a\x80\x32\xc0\x79\ \x0a\x39\xe0\x18\x11\x84\xf7\x4d\xd3\xde\x27\x8f\xc1\xe0\x78\x6b\ \x7f\x72\x96\x6c\x6d\xdc\xf8\x26\x92\x1b\x36\x0b\x09\x25\x39\x5d\ \xa6\x8b\x4d\x05\xbb\x80\x23\x2e\x98\x99\x1d\xa0\x71\xa2\x4d\xa5\ \x4a\xf5\x45\x9a\xc1\x60\xb0\xe1\x19\x82\xe3\x41\x13\x30\xbf\x00\ \x10\xbf\x5b\x4a\x46\x85\x62\xd8\xb9\x9a\x7a\x29\xde\xd6\xd6\x6d\ \x69\xdc\xbe\x5e\x3d\xaa\xc7\xeb\x2e\xbe\x1b\x0c\x06\x97\x01\xe5\ \x15\x12\x0e\xcd\x9a\x91\x61\x7d\xc5\x39\xef\x89\xe6\x99\x8d\x9c\ \x26\x07\xfb\xf7\xdf\x4d\x2a\x00\x24\xcb\x21\xd2\x07\x0c\x06\x97\ \x6f\x41\x1e\xdf\x4b\x36\x1a\xa1\x4a\x71\x52\x0e\xcb\x3b\x5b\x08\ 
\x0a\x33\x74\x02\xd3\xb9\x1b\x13\x83\xc1\x46\xd7\xdc\xd3\xd4\xd4\ \x79\x65\xa1\x42\xef\x34\xbc\x94\x89\x24\x56\x2b\x32\x04\x0c\x06\ \x1b\x5d\x23\x54\x7e\x98\xdc\xde\xbb\x77\xdb\xce\x76\xed\x4e\x9c\ \x9a\x3e\x7d\xf5\xa7\xa8\xa8\x8a\xdf\x21\xb9\x9f\x1b\x1c\x0d\x5a\ \xc0\x68\x67\xe3\x5a\x51\xfc\x0b\xa1\x12\xff\x10\x0c\x86\x3c\xdb\ \xd5\x06\xeb\x80\x5f\xc1\xf3\x2a\x95\xea\x79\x1a\x3a\x07\x75\x56\ \x09\xb1\x03\x2c\xad\x31\xfc\x25\x45\x88\x71\xcb\x84\x70\x14\xe9\ \x06\x06\x83\x9b\xc9\xd4\xc3\x8a\xf6\x11\xb1\xbb\xc4\x13\x13\x27\ \x1e\xa4\xcc\x24\x2a\xdf\x42\x91\xf3\x07\xa9\x2c\xd0\x4c\xfc\x6b\ \x90\xc5\x25\xf3\x0c\x06\x56\x31\x13\x3d\x3d\x3d\xd1\x6b\xc7\x8e\ \xbd\x3d\xec\xed\xfd\x0a\x96\x2b\x77\xaa\x7a\xbf\x7e\x7b\xda\x9a\ \x99\xed\x15\x2a\x95\xc8\x5d\xb4\xa8\x89\x90\xf8\x67\x69\x63\x0c\ \x06\xaf\x6e\xd9\x51\x89\x10\x4d\x82\xb5\xd8\x96\x4e\x02\x0b\x6a\ \x2a\x06\xd8\x18\x19\x05\x5b\x96\x28\x11\x8a\x71\x15\xe9\x65\x82\ \x91\xf8\x9d\xc5\xdf\x21\x58\xcb\x60\x30\x16\x0b\xf1\x33\x79\x36\ \xd1\x00\xf4\xb8\x12\xe3\xd3\x04\x74\x4e\x9f\x93\xdc\xbe\xba\x07\ \x9e\x9e\x5e\x37\x7a\x57\xf2\x79\x1a\x45\xa8\x18\x0c\x06\x29\x73\ \x43\xad\x3b\x6e\x63\x9d\x3a\x57\x53\x71\xa4\xa8\x85\x6b\xd1\xe3\ \xe2\x12\x55\x9e\x40\xf5\x6d\x0b\xdd\xbb\xfc\xf6\x9b\x07\x8c\x30\ \xd0\xa1\x65\x4b\x6a\xdf\xa5\x5a\x44\x6d\xf6\xfe\x1e\x18\x0c\x06\ \x8c\xed\x92\x5c\xb5\x7a\x69\x18\x5b\x77\xd2\x35\x25\x59\x45\x28\ \x8e\x59\xed\xee\xdb\xb7\x38\x82\xeb\xb1\xb6\x15\x2a\xbc\xa0\x4a\ \x75\xc8\xf9\x59\xe0\xfa\x2b\xde\x99\x09\x7e\x01\xb7\x50\x7b\x2e\ \x91\x3a\x18\x0c\x06\x29\x81\x81\x63\xa1\x97\x72\x53\x76\x62\x4a\ \xc4\xf5\x0c\x8c\xc7\x1d\xd7\xcf\xa8\xc9\x8b\x86\xb7\x92\xe4\x39\ \xfa\x61\xec\x0f\x7a\x87\x7a\xe8\xd9\xff\xfc\xf3\x3c\x3c\x4f\x40\ \x2e\xe7\xa6\x3d\x9d\x3b\x97\xb2\xab\x54\xc9\x4f\x51\xf7\x96\x4a\ \xd3\x0c\x06\x23\x95\x2d\x63\x6f\xe5\x4c\x66\x5b\xb1\x62\x30\xba\ \x37\x79\xa0\x23\xd3\xb5\xe5\xf9\xf3\x5f\xd9\xdd\xa9\xd3\x4e\xd2\ \xd2\x24\x5d\xd3\xa0\xa0\x20\x03\xea\x51\x01\x25\xe9\x8f\x7b\xba\ 
\x76\x3d\x43\xa1\x02\xd9\xbd\xc9\xef\xd5\xab\x57\xb9\xd1\x91\xd7\ \x0f\xab\xdf\x57\x04\xdd\x3d\xce\x2f\x5b\x56\x97\x85\x6b\xb5\xc1\ \x60\x48\x17\x3f\x74\x4a\xb7\x6e\xaa\x5b\xf7\x21\x3c\x95\x9f\xb1\ \xd2\xbd\x7d\x72\xfa\xb4\xad\x6c\xb1\x3c\x17\xec\x23\xfb\x59\x74\ \x00\x53\xa8\x43\x10\x5d\x91\x46\xf6\x42\x9e\xed\x2e\xa3\x30\x76\ \xb1\xfa\xe7\x3e\x7d\xbc\x64\x33\xcb\x56\x1a\xc6\x56\x87\x0c\x95\ \x82\xea\xb8\x65\x30\x18\x54\xe4\x0a\xce\x7a\x70\xe4\x88\x3d\x35\ \x21\xa1\xfe\x07\xe8\x53\xfe\x02\xdb\xc5\xf6\x1a\x2a\x70\x47\x94\ \x95\x10\x35\x79\xde\x67\xe7\xcc\x51\xdf\x63\x3b\x79\x99\xde\xb7\ \x29\x57\xee\x21\x9a\xcd\x98\x90\xfc\x9f\xd2\x52\x4d\xfe\xde\x50\ \x18\xf2\x27\xcb\xe2\xc5\x8d\x05\xc0\x60\x30\x64\xe7\x1e\xb0\xda\ \x33\x77\xf7\x55\x68\x02\xea\x61\xf7\xd3\x4f\x67\x4e\xcf\x99\xd3\ \x41\x39\xe7\xc1\x68\xde\x6a\x88\x18\x2d\xc1\xf6\x73\x9f\xd4\xd3\ \xa4\x5e\x15\x11\x10\xab\x5a\x83\xf1\x06\xf2\xfd\x0e\x52\xe0\x76\ \xe3\x91\x09\x13\xca\xbf\xba\x71\x63\x4d\x42\x6c\xec\x7c\xd9\xa8\ \x92\xc1\x60\x68\x06\xba\xc1\x71\xa0\x09\xd8\x5b\x48\xbc\xb9\x7b\ \x77\xa0\x8f\xb5\xf5\x5e\xb9\x65\xac\x0a\x49\x87\xab\xd2\xc9\xf2\ \xe5\xba\xbd\xfd\x4e\x4d\x15\x68\x8c\xe7\x47\x13\x51\xc5\xeb\x19\ \xb1\xb3\x63\xc7\xbe\x7f\xa1\xf0\xcd\x60\xb0\x9e\x4a\x2a\x86\x38\ \x00\x2c\x47\x4e\x10\xac\x78\xf1\x64\x50\x87\x06\x0d\x3a\x8f\xb1\ \x29\xda\x7d\xd1\xd1\x9c\x92\x3c\x9f\x51\xd8\xa2\x46\x3f\x74\x73\ \xab\x2d\xbe\x1f\x0c\x06\xe3\xf8\xf8\xf1\x0b\xd0\xf8\xf3\xc6\xe7\ \xa4\x24\xe3\xd4\x5a\x6b\x99\x66\xcb\x36\x8f\x0c\xf2\xcc\xac\x59\ \x47\x65\x97\x5e\x9d\xc2\x0f\xb5\xb7\x65\x30\x7a\x6e\xd9\xe2\x86\ \x4b\x0a\x78\x06\x25\x40\x61\x94\x63\xa9\x12\xa2\x2b\xee\x97\x26\ \x61\x81\x4b\xf9\xfa\xd5\x38\x1f\x1a\x81\x76\x58\xb5\xca\x1b\x63\ \x97\xfe\x66\x4c\x70\x16\x2e\xc5\xc1\x15\x66\x42\xc4\x68\x3c\x62\ \x30\x18\x9a\xa2\x45\xd8\x62\x8e\xa3\x40\x39\x18\x0f\xe3\xbb\x85\ \xeb\x57\xf4\x42\x20\x25\xb1\x7a\xda\xb5\x9f\x32\xf6\x77\x14\xef\ \x9d\xc7\x75\x89\xa2\xc5\x02\xcf\xe8\x7a\x79\xee\x0b\x01\x0d\xc4\ 
\x37\xc1\x60\xb0\xf1\x95\xa5\xb0\x02\xd2\xbe\x9e\x92\xd1\x50\xa7\ \xdd\x93\x93\x26\x6d\x37\x35\x35\xcd\xa6\xd5\x64\xc6\x44\x3a\x5b\ \x3e\x49\x8d\xcd\x14\x30\x10\x2c\x8a\x98\xde\x4c\xba\xdf\xd1\xaa\ \x95\xfb\x5f\xf7\x37\x67\x30\xd8\xe8\xaa\x81\xd3\xbd\xcc\xcc\x9c\ \x56\x15\x2a\x14\x41\xc6\x23\x57\xb1\x9c\x02\xa0\x8e\xad\xb4\xf2\ \xa1\xd4\xe7\xc9\x2b\x3f\x3f\xeb\x8f\xef\xde\x2d\xa7\x9e\x85\x14\ \x70\xc7\xea\x76\x1d\xef\x3e\x31\xcf\x95\x2b\x06\xc5\xaf\x16\xe4\ \xa4\x11\x7f\x09\x06\x83\x8d\x2e\x3b\xd8\x92\x3a\xed\x3a\x0f\x1e\ \xec\x81\xd6\xd6\xbe\xe8\x0a\x54\x50\x9e\xd1\x26\x83\x29\x90\x6b\ \x77\xa2\x3e\x17\x60\x0d\xb0\xed\xe1\xe1\xc3\x4f\xd3\x38\xd1\x6d\ \xda\x34\x2a\x11\xea\x24\xd2\x06\x06\x83\x9b\x92\x80\x7d\x41\x53\ \xb0\xbe\x00\xb0\xcd\x1c\x2b\xeb\xee\xb6\x08\x0d\x6c\xfd\xe5\x97\ \xc5\x34\x4e\x35\x77\xc8\x56\x99\x27\xcf\x86\xff\x04\x0c\x06\x77\ \x03\x5a\x5b\xa3\x86\x21\x9d\xed\xc0\x8f\x8b\x85\xa8\x2b\x00\x2a\ \xe7\xc1\x56\xf2\x22\x19\x9c\xdf\xd6\xad\x3b\xc8\x38\xa9\x8f\x21\ \xee\x17\x81\xe7\xf0\xcc\x0d\xd7\x51\x22\xed\x60\x30\x18\x88\xcb\ \xed\x44\x02\x74\xb2\x3c\xdf\x85\x11\xe9\xe7\xcd\xf5\xeb\x07\xc0\ \xd8\x26\xac\x29\x51\x22\x0f\xee\x7d\x69\x0c\x67\xbb\x58\x0d\x9d\ \x4d\x63\x16\x11\x4a\x23\x18\x8c\xce\xd6\xd6\x2b\x87\x9c\x38\x61\ \x5f\x67\xd8\xb0\x93\x45\x2a\x55\x7a\xa4\x12\xa2\x88\x00\x5a\x19\ \x1b\x5f\x26\x7b\x84\x23\x65\x2a\xae\xbf\x54\xea\xd4\xe9\xc2\xc2\ \x8f\x1f\x6d\x66\x85\x86\xae\x2c\xdd\xa0\x41\x00\xde\x33\x87\xe1\ \x79\x52\x0f\x3c\xe9\x84\xf9\x9b\x60\x30\x78\x9b\x59\x01\x1c\x1e\ \x15\x1a\xba\x84\x9a\x8d\xa8\xcf\x6f\x65\xcb\xde\x16\x00\xc2\x05\ \x17\xb1\xb2\x7d\xc1\x59\x8e\x3a\xc1\x36\x03\xeb\x44\x04\x07\x8f\ \xa0\xad\xa8\xc6\x6a\x77\x5a\x7c\x17\x18\x0c\x36\xbe\x01\xfe\xdb\ \xb7\x6f\xbf\x7b\xe0\x80\x39\x39\x4b\x90\x7f\xe9\x8d\xea\x83\x84\ \x97\xbe\xbe\xed\x84\xc4\x86\xd6\xad\xf3\x52\x38\x81\x6a\xf1\x8e\ \x8d\x1d\x7b\xea\xf8\x84\x09\xeb\x28\x77\x93\x53\xbb\xd2\x0e\x06\ \xc3\xf5\xe7\xd1\xa3\x5f\xe0\x5a\x09\x14\xc5\x6a\xd5\xf2\x0a\xbd\ 
\x72\xa5\xf9\xde\x2e\x5d\x16\x1a\x23\x2e\x07\xc3\x88\x0c\xbb\x78\ \x71\x1d\x1e\xa9\xaa\xf6\xee\x7d\xa5\xd7\xb6\x6d\x6e\x74\x04\x3b\ \xf4\xeb\xaf\x3f\x51\xad\x1d\xc6\x0d\xc1\xb7\x5f\x85\xd8\x6b\x21\ \xc4\x1d\xf1\xf7\xc1\x60\x30\x5e\x05\x04\x18\xa2\xc0\xf5\x8e\x86\ \x14\xdf\x67\xba\x42\xaa\x2f\x22\x2e\x2c\x6c\x29\x35\xa7\x24\x59\ \x3f\x12\x37\x92\x1d\x83\xe2\xe5\x36\xf3\x4b\xda\x1c\x2b\x0c\x06\ \x0b\x1a\x19\x92\xdc\x5e\x72\x72\x72\x3f\x7f\x47\xc7\xed\x90\xe2\ \x73\x5e\x9e\x2f\xdf\x2b\x45\xb8\x08\xc6\xd6\x46\x00\xb2\x7f\x79\ \xca\x86\x1a\x35\x1e\xc5\x85\x87\x5b\x3c\x3c\x7e\x7c\x93\xad\x91\ \xd1\x0b\x69\xa0\x83\xbf\xf1\xdf\x2e\xfd\xbf\x5e\x4a\x06\x83\x71\ \x32\x07\xb6\x91\x16\xfa\xfa\xb5\x72\x15\x2c\x78\xb4\x58\x8d\x1a\ \x4f\xbe\x24\x24\x14\x2d\x58\xbe\x7c\x48\x1b\x33\xb3\x2b\x78\xee\ \x43\x2d\xb8\x54\x42\xd4\x37\x28\x54\x28\xea\xc3\xa3\x47\x95\xad\ \x4b\x97\x1e\x71\x6f\xcf\x9e\xa0\xd1\xde\xde\xb6\x39\xf3\xe6\x8d\ \xd5\xcf\x9f\x7f\x89\xd0\x02\x06\x26\xa4\x08\x11\xa4\x61\x74\x0c\ \x06\xc3\x6d\xfa\xf4\x3e\x56\x65\xca\x3c\xd3\xd8\x4e\x7e\x5d\x91\ \x2f\xdf\x53\xa9\x1c\x56\x4b\x00\xd4\x68\x52\xc6\xed\xee\xd1\xca\ \x66\x5d\xb6\x6c\x80\x8c\xd9\x79\x40\xd8\xe8\x0d\xb6\x9e\x31\x78\ \x97\xf4\x54\x54\x18\x3f\x08\x5a\x91\x34\x3b\x9e\x3d\x8f\x7e\xff\ \xbe\xb2\x00\x00\x06\x83\x41\x02\x44\xe8\x71\x37\xcb\x7b\xcd\x9a\ \x7d\xdb\x9a\x34\xb9\x88\x02\xd7\xc3\xb4\x65\xc4\xf8\x60\xa1\x01\ \xd2\xc0\xa4\xd0\xc1\x96\x06\x0d\xa8\xf8\x75\x1a\x12\xa0\xf7\xa3\ \xe5\xf2\x1b\xb9\xcd\x0c\xc4\x98\x91\x00\xcc\x0d\x0c\x8e\x29\xc6\ \xbb\xb7\x6b\x57\x9f\xc4\xd8\xd8\xfa\xe2\x7f\xc0\x60\xb0\xd1\xe5\ \x90\x2a\x62\x33\xc0\x85\x60\x1b\x50\x25\x34\x70\xd7\xc9\xe9\xd7\ \xd5\xc5\x8a\x85\xd1\x0a\x08\xa7\x89\x07\x8c\xc9\x22\x2c\x30\xd0\ \xdc\x75\xe8\x50\x77\xef\xd5\xab\xf7\x29\xca\x61\x97\x2d\x2d\x27\ \xd0\x3b\xa8\x44\xf8\x2c\x0d\xef\xba\x89\x10\x4d\x45\xda\xc0\x60\ \x70\x5f\xf3\xf0\xe7\xcf\xe7\xef\xed\xde\xdd\xc7\x5c\x5f\x3f\x5c\ \x7a\x2b\x63\xd1\x76\x79\x11\xe9\xab\xc8\xd7\x54\x18\xbb\x40\xb1\ 
\xbb\xa7\xee\xee\xeb\x51\x91\x70\x0e\xc2\x47\xf1\xb2\x06\xaf\x9d\ \x48\x13\x18\x0c\x36\xba\x3c\xe0\xb0\xe4\x4f\x9f\xcc\xae\xd8\xda\ \xee\x71\x6c\xd3\xc6\xed\xa2\xb9\x79\x2b\x32\x46\xa9\x28\xdd\x9f\ \x0c\xf1\x40\xdf\xbe\x17\x30\x36\x04\xec\x0d\x29\x3f\x4b\xd7\xe1\ \xc3\x4f\x3d\x3e\x71\xa2\xa5\x4a\x7c\x0f\x18\x0c\x36\xbc\xc2\xb8\ \xd4\x05\x6b\x83\x4f\xa0\xaf\x72\x5a\x7a\x26\xaf\xe6\x30\x30\xa8\ \x31\x23\x38\x78\x6d\xde\xe2\xc5\x6d\x31\x1e\x45\xdd\x7f\xf0\xa8\ \x23\x18\x2c\xd2\x0f\x0c\x06\x03\x0e\x93\xfb\xcb\xf3\xe4\xf9\x78\ \x6d\xfd\xfa\x91\x19\x5c\x2d\xc0\x60\x30\xda\x98\x9a\x5a\x93\x9a\ \xf4\xe9\x69\xd3\x36\x99\x0a\x71\x18\x1c\x85\xa0\x7a\x99\x0c\x30\ \x38\x06\x83\xd1\x62\xc1\x82\xe3\x7d\xf7\xec\xd9\x5c\xaa\x7e\xfd\ \x47\x2a\x3d\x3d\xd2\xc5\x74\x44\x50\xfd\x84\xc8\x18\x30\x18\x0c\ \x52\xfe\x02\x67\x24\xc6\xc5\x2d\x23\xc9\xbe\xb0\xc7\x8f\xe7\x2a\ \x15\xe8\x19\xe7\x34\x61\x30\xd8\xf0\x4a\xe3\x52\x4d\xda\x99\x0f\ \x1c\x28\x09\xe2\x5f\x09\x06\x83\xc1\x60\x30\xfe\x0f\x1d\x9b\x1f\ \x99\x66\xa1\x3a\xad\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ \x00\x00\x06\x53\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x40\x00\x00\x00\x40\x08\x03\x00\x00\x00\x9d\xb7\x81\xec\ \x00\x00\x02\xeb\x50\x4c\x54\x45\x00\x00\x00\x00\x00\x00\xff\xff\ \xff\xff\xff\xff\x7f\x00\x00\xff\xff\xff\x66\x00\x00\xff\xff\xff\ \x7f\x00\x00\x71\x00\x00\x7f\x00\x00\xff\xff\xff\x73\x00\x00\xff\ \xff\xff\x7f\x00\x00\xff\xff\xff\x75\x00\x00\x7f\x12\x12\xff\xff\ \xff\x77\x00\x00\x78\x00\x00\xff\xff\xff\xff\xff\xff\x79\x00\x00\ \xff\xff\xff\x7f\x00\x00\x7a\x00\x00\xff\xff\xff\x7f\x00\x00\xff\ \xff\xff\x7b\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\ \xff\x7c\x00\x00\x7c\x00\x00\xa3\x47\x47\xff\xff\xff\xff\xff\xff\ \x7f\x00\x00\x7c\x00\x00\x7f\x00\x00\xff\xff\xff\xff\xff\xff\x7f\ \x00\x00\xff\xff\xff\x7d\x00\x00\xff\xff\xff\x7f\x00\x00\xea\xd5\ \xd5\xff\xff\xff\xff\xff\xff\x99\x38\x38\x7d\x00\x00\xff\xff\xff\ 
\xff\xff\xff\xff\xff\xff\x7d\x00\x00\xff\xff\xff\xff\xff\xff\x7f\ \x00\x00\x7f\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\ \x00\x7e\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\ \x7e\x00\x00\x7e\x00\x00\xff\xff\xff\x7f\x00\x00\xb7\x70\x70\x7f\ \x00\x00\x7f\x02\x02\x7e\x00\x00\xff\xff\xff\x7e\x00\x00\xff\xff\ \xff\xc5\x8c\x8c\x7f\x00\x00\xff\xff\xff\x7f\x00\x00\xff\xff\xff\ \x7e\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\x00\xff\ \xff\xff\xba\x75\x75\x7e\x00\x00\xa8\x51\x51\x7e\x00\x00\xed\xdc\ \xdc\xff\xff\xff\x7f\x00\x00\x7f\x00\x00\xff\xff\xff\x7f\x00\x00\ \xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd8\xb2\xb2\xff\ \xff\xff\x7f\x00\x00\xff\xff\xff\x7e\x00\x00\x7f\x00\x00\x7f\x00\ \x00\x7e\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb7\x71\x71\ \x7f\x00\x00\xff\xff\xff\x93\x27\x27\x7f\x00\x00\x7e\x00\x00\xf9\ \xf4\xf4\xc3\x87\x87\xff\xff\xff\xff\xff\xff\x91\x24\x24\xff\xff\ \xff\x8f\x1f\x1f\xff\xff\xff\xec\xd9\xd9\xff\xff\xff\x8c\x1a\x1a\ \x7f\x00\x00\x7f\x00\x00\x7e\x00\x00\xff\xff\xff\xff\xff\xff\x7e\ \x00\x00\x7f\x00\x00\xad\x5c\x5c\xff\xff\xff\x8d\x1b\x1b\x84\x0a\ \x0a\x81\x03\x03\x7f\x00\x00\xff\xff\xff\xff\xff\xff\x80\x02\x02\ \xff\xff\xff\x80\x02\x02\xff\xff\xff\xff\xff\xff\xb1\x63\x63\x7f\ \x00\x00\x7f\x01\x01\xff\xff\xff\x7e\x00\x00\x83\x08\x08\x7e\x00\ \x00\xff\xff\xff\xb6\x6d\x6d\x7e\x00\x00\x87\x10\x10\xd6\xae\xae\ \x7f\x00\x00\x7f\x00\x00\xff\xff\xff\xff\xff\xff\xde\xbd\xbd\xf9\ \xf4\xf4\x7e\x00\x00\x7f\x00\x00\x90\x22\x22\xdf\xc1\xc1\xff\xff\ \xff\xac\x5a\x5a\xc4\x8b\x8b\xff\xff\xff\x7f\x00\x00\xff\xff\xff\ \x90\x22\x22\x80\x01\x01\x98\x32\x32\xa3\x48\x48\xdb\xb7\xb7\xf4\ \xea\xea\xf7\xf0\xf0\xf8\xf2\xf2\xfe\xfe\xfe\x80\x02\x02\xa5\x4c\ \x4c\x8c\x1a\x1a\x81\x04\x04\x92\x26\x26\x93\x27\x27\x82\x05\x05\ \x99\x33\x33\x9a\x35\x35\x9d\x3b\x3b\x9e\x3e\x3e\xa1\x44\x44\x82\ \x06\x06\x8c\x19\x19\xa7\x4f\x4f\xa8\x52\x52\xab\x57\x57\xab\x58\ \x58\xac\x59\x59\xb0\x61\x61\xb0\x62\x62\xb2\x66\x66\xb4\x6a\x6a\ 
\xb9\x74\x74\xba\x75\x75\xbd\x7b\x7b\xbe\x7e\x7e\xc0\x81\x81\xc7\ \x8f\x8f\xce\x9e\x9e\xcf\x9f\x9f\xd0\xa2\xa2\xd4\xaa\xaa\xd5\xab\ \xab\xd7\xb0\xb0\xd8\xb1\xb1\xd9\xb4\xb4\x84\x09\x09\xde\xbe\xbe\ \xe1\xc4\xc4\xe7\xd0\xd0\xe9\xd4\xd4\xea\xd5\xd5\xed\xdb\xdb\xee\ \xde\xde\xef\xe0\xe0\xf1\xe4\xe4\x85\x0b\x0b\xf5\xec\xec\x86\x0e\ \x0e\x8a\x15\x15\xfb\xf7\xf7\xfd\xfb\xfb\xfd\xfc\xfc\x8a\x16\x16\ \x8b\x17\x17\xd2\x67\xa5\xb8\x00\x00\x00\xb6\x74\x52\x4e\x53\x00\ \x01\x01\x03\x04\x04\x05\x08\x08\x09\x0a\x0a\x0b\x0b\x0c\x0d\x0d\ \x0e\x0f\x0f\x13\x13\x14\x15\x15\x16\x1b\x1b\x1c\x1c\x1d\x1e\x1f\ \x21\x24\x25\x27\x27\x2a\x2b\x2c\x2d\x2e\x2f\x32\x36\x36\x39\x3b\ \x3c\x3d\x40\x41\x44\x45\x48\x4b\x4c\x4d\x4e\x4f\x50\x54\x54\x55\ \x5a\x5c\x5d\x5d\x60\x61\x63\x65\x67\x67\x68\x6b\x6c\x6c\x6d\x70\ \x71\x73\x78\x7c\x7e\x80\x81\x83\x84\x8a\x8b\x8c\x8c\x8d\x91\x93\ \x95\x95\x95\x96\x98\x99\x9c\x9d\x9e\xa4\xa6\xa7\xa7\xa8\xa8\xa9\ \xaa\xac\xad\xad\xb0\xb3\xb3\xb4\xb7\xbb\xbc\xbd\xbd\xc0\xc1\xc4\ \xc6\xca\xcb\xcc\xcd\xcd\xd0\xd2\xd4\xd7\xd8\xd9\xdb\xdc\xdc\xdd\ \xde\xe0\xe1\xe4\xe5\xe6\xe7\xe8\xe9\xe9\xea\xef\xf0\xf0\xf1\xf3\ \xf3\xf5\xf6\xf6\xf7\xf7\xf7\xf8\xfa\xfa\xfb\xfb\xfb\xfb\xfc\xfc\ \xfd\xfd\xfe\xfe\xfe\xa0\xb1\xff\x8a\x00\x00\x02\x61\x49\x44\x41\ \x54\x78\x5e\xdd\xd7\x55\x70\x13\x51\x14\xc7\xe1\xd3\x52\x28\xda\ \x42\xf1\xe2\x5e\xdc\x5b\x28\x10\xdc\xdd\xdd\xdd\x0a\x45\x8a\xb4\ \xb8\x7b\x70\x29\x5e\x24\x50\xa0\xe8\xd9\xa4\x2a\xb8\xbb\xbb\xbb\ \xeb\x23\x93\x3d\x77\xee\xcb\xe6\x66\x98\x93\x17\xa6\xbf\xd7\xff\ \xe6\x9b\x7d\xc8\x9c\x99\x85\x14\x52\xfa\x52\x39\x5d\xfa\xf9\x80\ \x28\xc4\x95\x41\x26\x36\x30\x10\xa9\x19\xd9\x78\x80\xc7\x4e\x14\ \xed\xaa\xca\x02\x72\xa3\xec\x60\x25\x96\xb0\x1e\x65\x1b\x33\x70\ \x80\xfa\x36\x09\xd8\x46\x00\xa7\x5e\x17\xbe\xa0\xe8\x68\x19\x96\ \x50\x7d\xca\xee\x68\x02\xae\xb6\x03\x5e\x9e\x7d\x08\xb0\x8e\x02\ \x66\x45\x09\x38\x61\xe6\x02\x79\x05\x10\xf9\x3f\x03\x6e\x2e\x01\ \x25\x47\x2f\x39\xb0\x2a\x34\x90\x0d\x34\x8f\xa2\x7d\x32\x13\xf0\ 
\xb3\xa0\x68\x2a\x0f\xe8\x84\x22\xbc\x5c\x97\x05\x8c\x95\x80\x75\ \x3c\x0b\xe8\x2d\x81\x73\x66\x16\x60\x92\xc0\xdd\xe9\x0a\xc0\xd7\ \x29\xe0\x36\x0b\x29\x6b\x7c\x37\x05\x90\x8e\x80\xa4\xfd\x8e\xe7\ \x2c\xcb\x2e\xda\xe7\x2b\x1f\xcd\x3e\xa0\x68\x33\x09\x87\x14\x37\ \xc9\xbb\xdf\xbe\x47\xb1\x9f\xb4\x71\x85\x40\xd5\x42\x02\x62\x5a\ \xa8\xfe\xb1\x39\x2a\x37\x0a\x28\x08\xea\xc2\x50\xb4\xa2\x95\x17\ \x70\xaa\x85\xb2\x6d\xc5\x58\xc2\x3c\x94\xed\xc8\xc7\x01\xca\xa2\ \x2c\xb9\x27\x07\xe8\x81\xb2\x9b\x21\x0c\xc0\x6f\x8f\x04\x6c\xaf\ \x87\x30\x80\x60\x14\xe1\x9f\x27\xc7\xaa\x30\x80\xf9\x04\x1c\xbf\ \xf7\x2e\x71\x5d\x03\x60\xb4\x89\x80\x17\xab\xbb\x96\x70\x07\x46\ \x59\x91\x8a\xab\xe1\xe2\x55\xd6\x72\x39\x9c\xfd\xbb\x88\x9a\x32\ \x8f\x6a\x28\x8a\x26\x34\x63\x01\x5e\x16\xa4\x4e\xfd\x6c\xcc\x02\ \x02\x51\xf4\x74\x51\x6a\x16\xd0\x17\xa9\xe8\xc4\x3a\xc0\x02\x96\ \x22\x15\x3b\xd7\x9d\x05\x14\x41\xea\xbc\x16\x00\x2c\xa0\x35\x52\ \x6f\xa6\x01\x0f\x98\x48\x63\xb2\x56\x81\x07\xa4\xdd\x4e\x17\xfb\ \x6d\x08\xf0\x00\x7f\xda\xae\x1f\x2e\x0d\xea\xca\x13\xf0\x2a\x52\ \x79\x6a\x4e\x7f\x18\x0e\x4e\xea\x40\xc0\xd9\x08\x30\xb6\x40\x9f\ \x6e\xed\x2d\xac\x04\x7c\xeb\x05\x6f\x25\xe0\xf6\x4c\xe3\x9a\x9f\ \xde\xed\xf3\x20\x50\x94\x39\x08\x65\x8f\xfb\x1b\xf7\x26\xfa\x72\ \x27\x22\x8f\x0a\x18\x8c\xb2\xef\x71\x0d\x8d\xfb\x18\xfb\xf2\xed\ \x6b\x77\x50\x94\xc6\x82\xb2\x67\xe1\xc6\x73\xe0\xa1\xdf\xaa\x07\ \x5b\xb2\xff\xc3\xf7\xc2\x35\xad\xb6\x71\xaf\xa8\xbf\x5a\x42\x47\ \x50\xb6\x16\x45\x37\x12\x46\x82\xb1\xb6\xf6\xe9\x61\xb8\xb7\x1a\ \x30\x25\xe9\xc0\xef\xe7\xda\x50\x47\x4f\xb5\x44\xc4\x93\x3f\xda\ \x80\x93\xda\x1f\x39\x13\x73\xff\x65\xfc\x86\x9a\x0e\xd7\x8c\xcb\ \xf1\xd2\xfb\xc5\x9e\xe0\xac\x72\xc3\x66\x4f\xea\x5c\xcd\x47\xb1\ \x66\x9a\xf3\x6b\x4d\x71\x70\xa9\x02\xa9\x20\x25\xf7\x17\x09\xba\ \x39\x39\xea\xb1\x61\x75\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\ \x60\x82\ " qt_resource_name = "\ \x00\x06\ \x07\x03\x7d\xc3\ \x00\x69\ \x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\ \x00\x09\ \x0e\x25\xb1\xe7\ \x00\x6c\ 
\x00\x6f\x00\x67\x00\x6f\x00\x32\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x09\ \x0e\x26\xb1\xe7\ \x00\x6c\ \x00\x6f\x00\x67\x00\x6f\x00\x33\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x09\xbc\x6f\x27\ \x00\x77\ \x00\x61\x00\x74\x00\x65\x00\x72\x00\x6d\x00\x61\x00\x72\x00\x6b\x00\x32\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x09\xbd\x6f\x27\ \x00\x77\ \x00\x61\x00\x74\x00\x65\x00\x72\x00\x6d\x00\x61\x00\x72\x00\x6b\x00\x31\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0a\ \x04\xc8\x47\xe7\ \x00\x62\ \x00\x61\x00\x6e\x00\x6e\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x07\x04\x9f\x87\ \x00\x62\ \x00\x61\x00\x63\x00\x6b\x00\x67\x00\x72\x00\x6f\x00\x75\x00\x6e\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x09\ \x0e\x24\xb1\xe7\ \x00\x6c\ \x00\x6f\x00\x67\x00\x6f\x00\x31\x00\x2e\x00\x70\x00\x6e\x00\x67\ " qt_resource_struct = "\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x07\x00\x00\x00\x02\ \x00\x00\x00\x86\x00\x00\x00\x00\x00\x01\x00\x00\x7f\xaa\ \x00\x00\x00\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x8f\x19\ \x00\x00\x00\x42\x00\x00\x00\x00\x00\x01\x00\x00\x0c\xae\ \x00\x00\x00\x64\x00\x00\x00\x00\x00\x01\x00\x00\x46\xf2\ \x00\x00\x00\xc2\x00\x00\x00\x00\x00\x01\x00\x00\xe7\x4f\ \x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x00\x2a\x00\x00\x00\x00\x00\x01\x00\x00\x06\x57\ " def qInitResources(): QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) def qCleanupResources(): QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data) qInitResources()
epl-1.0
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-3.6.0/Lib/contextlib.py
2
13171
"""Utilities for with-statement contexts. See PEP 343.""" import abc import sys from collections import deque from functools import wraps __all__ = ["contextmanager", "closing", "AbstractContextManager", "ContextDecorator", "ExitStack", "redirect_stdout", "redirect_stderr", "suppress"] class AbstractContextManager(abc.ABC): """An abstract base class for context managers.""" def __enter__(self): """Return `self` upon entering the runtime context.""" return self @abc.abstractmethod def __exit__(self, exc_type, exc_value, traceback): """Raise any exception triggered within the runtime context.""" return None @classmethod def __subclasshook__(cls, C): if cls is AbstractContextManager: if (any("__enter__" in B.__dict__ for B in C.__mro__) and any("__exit__" in B.__dict__ for B in C.__mro__)): return True return NotImplemented class ContextDecorator(object): "A base class or mixin that enables context managers to work as decorators." def _recreate_cm(self): """Return a recreated instance of self. Allows an otherwise one-shot context manager like _GeneratorContextManager to support use as a decorator via implicit recreation. This is a private interface just for _GeneratorContextManager. See issue #11647 for details. 
""" return self def __call__(self, func): @wraps(func) def inner(*args, **kwds): with self._recreate_cm(): return func(*args, **kwds) return inner class _GeneratorContextManager(ContextDecorator, AbstractContextManager): """Helper for @contextmanager decorator.""" def __init__(self, func, args, kwds): self.gen = func(*args, **kwds) self.func, self.args, self.kwds = func, args, kwds # Issue 19330: ensure context manager instances have good docstrings doc = getattr(func, "__doc__", None) if doc is None: doc = type(self).__doc__ self.__doc__ = doc # Unfortunately, this still doesn't provide good help output when # inspecting the created context manager instances, since pydoc # currently bypasses the instance docstring and shows the docstring # for the class instead. # See http://bugs.python.org/issue19404 for more details. def _recreate_cm(self): # _GCM instances are one-shot context managers, so the # CM must be recreated each time a decorated function is # called return self.__class__(self.func, self.args, self.kwds) def __enter__(self): try: return next(self.gen) except StopIteration: raise RuntimeError("generator didn't yield") from None def __exit__(self, type, value, traceback): if type is None: try: next(self.gen) except StopIteration: return else: raise RuntimeError("generator didn't stop") else: if value is None: # Need to force instantiation so we can reliably # tell if we get the same exception back value = type() try: self.gen.throw(type, value, traceback) raise RuntimeError("generator didn't stop after throw()") except StopIteration as exc: # Suppress StopIteration *unless* it's the same exception that # was passed to throw(). This prevents a StopIteration # raised inside the "with" statement from being suppressed. return exc is not value except RuntimeError as exc: # Don't re-raise the passed in exception. 
(issue27112) if exc is value: return False # Likewise, avoid suppressing if a StopIteration exception # was passed to throw() and later wrapped into a RuntimeError # (see PEP 479). if exc.__cause__ is value: return False raise except: # only re-raise if it's *not* the exception that was # passed to throw(), because __exit__() must not raise # an exception unless __exit__() itself failed. But throw() # has to raise the exception to signal propagation, so this # fixes the impedance mismatch between the throw() protocol # and the __exit__() protocol. # if sys.exc_info()[1] is not value: raise def contextmanager(func): """@contextmanager decorator. Typical usage: @contextmanager def some_generator(<arguments>): <setup> try: yield <value> finally: <cleanup> This makes this: with some_generator(<arguments>) as <variable>: <body> equivalent to this: <setup> try: <variable> = <value> <body> finally: <cleanup> """ @wraps(func) def helper(*args, **kwds): return _GeneratorContextManager(func, args, kwds) return helper class closing(AbstractContextManager): """Context to automatically close something at the end of a block. Code like this: with closing(<module>.open(<arguments>)) as f: <block> is equivalent to this: f = <module>.open(<arguments>) try: <block> finally: f.close() """ def __init__(self, thing): self.thing = thing def __enter__(self): return self.thing def __exit__(self, *exc_info): self.thing.close() class _RedirectStream(AbstractContextManager): _stream = None def __init__(self, new_target): self._new_target = new_target # We use a list of old targets to make this CM re-entrant self._old_targets = [] def __enter__(self): self._old_targets.append(getattr(sys, self._stream)) setattr(sys, self._stream, self._new_target) return self._new_target def __exit__(self, exctype, excinst, exctb): setattr(sys, self._stream, self._old_targets.pop()) class redirect_stdout(_RedirectStream): """Context manager for temporarily redirecting stdout to another file. 
# How to send help() to stderr with redirect_stdout(sys.stderr): help(dir) # How to write help() to a file with open('help.txt', 'w') as f: with redirect_stdout(f): help(pow) """ _stream = "stdout" class redirect_stderr(_RedirectStream): """Context manager for temporarily redirecting stderr to another file.""" _stream = "stderr" class suppress(AbstractContextManager): """Context manager to suppress specified exceptions After the exception is suppressed, execution proceeds with the next statement following the with statement. with suppress(FileNotFoundError): os.remove(somefile) # Execution still resumes here if the file was already removed """ def __init__(self, *exceptions): self._exceptions = exceptions def __enter__(self): pass def __exit__(self, exctype, excinst, exctb): # Unlike isinstance and issubclass, CPython exception handling # currently only looks at the concrete type hierarchy (ignoring # the instance and subclass checking hooks). While Guido considers # that a bug rather than a feature, it's a fairly hard one to fix # due to various internal implementation details. suppress provides # the simpler issubclass based semantics, rather than trying to # exactly reproduce the limitations of the CPython interpreter. 
# # See http://bugs.python.org/issue12029 for more details return exctype is not None and issubclass(exctype, self._exceptions) # Inspired by discussions on http://bugs.python.org/issue13585 class ExitStack(AbstractContextManager): """Context manager for dynamic management of a stack of exit callbacks For example: with ExitStack() as stack: files = [stack.enter_context(open(fname)) for fname in filenames] # All opened files will automatically be closed at the end of # the with statement, even if attempts to open files later # in the list raise an exception """ def __init__(self): self._exit_callbacks = deque() def pop_all(self): """Preserve the context stack by transferring it to a new instance""" new_stack = type(self)() new_stack._exit_callbacks = self._exit_callbacks self._exit_callbacks = deque() return new_stack def _push_cm_exit(self, cm, cm_exit): """Helper to correctly register callbacks to __exit__ methods""" def _exit_wrapper(*exc_details): return cm_exit(cm, *exc_details) _exit_wrapper.__self__ = cm self.push(_exit_wrapper) def push(self, exit): """Registers a callback with the standard __exit__ method signature Can suppress exceptions the same way __exit__ methods can. Also accepts any object with an __exit__ method (registering a call to the method instead of the object itself) """ # We use an unbound method rather than a bound method to follow # the standard lookup behaviour for special methods _cb_type = type(exit) try: exit_method = _cb_type.__exit__ except AttributeError: # Not a context manager, so assume its a callable self._exit_callbacks.append(exit) else: self._push_cm_exit(exit, exit_method) return exit # Allow use as a decorator def callback(self, callback, *args, **kwds): """Registers an arbitrary callback and arguments. Cannot suppress exceptions. 
""" def _exit_wrapper(exc_type, exc, tb): callback(*args, **kwds) # We changed the signature, so using @wraps is not appropriate, but # setting __wrapped__ may still help with introspection _exit_wrapper.__wrapped__ = callback self.push(_exit_wrapper) return callback # Allow use as a decorator def enter_context(self, cm): """Enters the supplied context manager If successful, also pushes its __exit__ method as a callback and returns the result of the __enter__ method. """ # We look up the special methods on the type to match the with statement _cm_type = type(cm) _exit = _cm_type.__exit__ result = _cm_type.__enter__(cm) self._push_cm_exit(cm, _exit) return result def close(self): """Immediately unwind the context stack""" self.__exit__(None, None, None) def __exit__(self, *exc_details): received_exc = exc_details[0] is not None # We manipulate the exception state so it behaves as though # we were actually nesting multiple with statements frame_exc = sys.exc_info()[1] def _fix_exception_context(new_exc, old_exc): # Context may not be correct, so find the end of the chain while 1: exc_context = new_exc.__context__ if exc_context is old_exc: # Context is already set correctly (see issue 20317) return if exc_context is None or exc_context is frame_exc: break new_exc = exc_context # Change the end of the chain to point to the exception # we expect it to reference new_exc.__context__ = old_exc # Callbacks are invoked in LIFO order to match the behaviour of # nested context managers suppressed_exc = False pending_raise = False while self._exit_callbacks: cb = self._exit_callbacks.pop() try: if cb(*exc_details): suppressed_exc = True pending_raise = False exc_details = (None, None, None) except: new_exc_details = sys.exc_info() # simulate the stack of exceptions by setting the context _fix_exception_context(new_exc_details[1], exc_details[1]) pending_raise = True exc_details = new_exc_details if pending_raise: try: # bare "raise exc_details[1]" replaces our carefully # 
set-up context fixed_ctx = exc_details[1].__context__ raise exc_details[1] except BaseException: exc_details[1].__context__ = fixed_ctx raise return received_exc and suppressed_exc
mit
doodersrage/CheapLocalDeals.com
includes/libs/fckeditor/editor/filemanager/connectors/py/fckutil.py
54
4235
#!/usr/bin/env python """ FCKeditor - The text editor for Internet - http://www.fckeditor.net Copyright (C) 2003-2008 Frederico Caldeira Knabben == BEGIN LICENSE == Licensed under the terms of any of the following licenses at your choice: - GNU General Public License Version 2 or later (the "GPL") http://www.gnu.org/licenses/gpl.html - GNU Lesser General Public License Version 2.1 or later (the "LGPL") http://www.gnu.org/licenses/lgpl.html - Mozilla Public License Version 1.1 or later (the "MPL") http://www.mozilla.org/MPL/MPL-1.1.html == END LICENSE == Utility functions for the File Manager Connector for Python """ import string, re import os import config as Config # Generic manipulation functions def removeExtension(fileName): index = fileName.rindex(".") newFileName = fileName[0:index] return newFileName def getExtension(fileName): index = fileName.rindex(".") + 1 fileExtension = fileName[index:] return fileExtension def removeFromStart(string, char): return string.lstrip(char) def removeFromEnd(string, char): return string.rstrip(char) # Path functions def combinePaths( basePath, folder ): return removeFromEnd( basePath, '/' ) + '/' + removeFromStart( folder, '/' ) def getFileName(filename): " Purpose: helper function to extrapolate the filename " for splitChar in ["/", "\\"]: array = filename.split(splitChar) if (len(array) > 1): filename = array[-1] return filename def sanitizeFolderName( newFolderName ): "Do a cleanup of the folder name to avoid possible problems" # Remove . \ / | : ? * " < > and control characters return re.sub( '(?u)\\.|\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[^\u0000-\u001f\u007f-\u009f]', '_', newFolderName ) def sanitizeFileName( newFileName ): "Do a cleanup of the file name to avoid possible problems" # Replace dots in the name with underscores (only one dot can be there... security issue). 
if ( Config.ForceSingleExtension ): # remove dots newFileName = re.sub ( '/\\.(?![^.]*$)/', '_', newFileName ) ; newFileName = newFileName.replace('\\','/') # convert windows to unix path newFileName = os.path.basename (newFileName) # strip directories # Remove \ / | : ? * return re.sub ( '(?u)/\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[^\u0000-\u001f\u007f-\u009f]/', '_', newFileName ) def getCurrentFolder(currentFolder): if not currentFolder: currentFolder = '/' # Check the current folder syntax (must begin and end with a slash). if (currentFolder[-1] <> "/"): currentFolder += "/" if (currentFolder[0] <> "/"): currentFolder = "/" + currentFolder # Ensure the folder path has no double-slashes while '//' in currentFolder: currentFolder = currentFolder.replace('//','/') # Check for invalid folder paths (..) if '..' in currentFolder or '\\' in currentFolder: return None return currentFolder def mapServerPath( environ, url): " Emulate the asp Server.mapPath function. Given an url path return the physical directory that it corresponds to " # This isn't correct but for the moment there's no other solution # If this script is under a virtual directory or symlink it will detect the problem and stop return combinePaths( getRootPath(environ), url ) def mapServerFolder(resourceTypePath, folderPath): return combinePaths ( resourceTypePath , folderPath ) def getRootPath(environ): "Purpose: returns the root path on the server" # WARNING: this may not be thread safe, and doesn't work w/ VirtualServer/mod_python # Use Config.UserFilesAbsolutePath instead if environ.has_key('DOCUMENT_ROOT'): return environ['DOCUMENT_ROOT'] else: realPath = os.path.realpath( './' ) selfPath = environ['SCRIPT_FILENAME'] selfPath = selfPath [ : selfPath.rfind( '/' ) ] selfPath = selfPath.replace( '/', os.path.sep) position = realPath.find(selfPath) # This can check only that this script isn't run from a virtual dir # But it avoids the problems that arise if it isn't checked raise realPath if ( position < 0 or 
position <> len(realPath) - len(selfPath) or realPath[ : position ]==''): raise Exception('Sorry, can\'t map "UserFilesPath" to a physical path. You must set the "UserFilesAbsolutePath" value in "editor/filemanager/connectors/py/config.py".') return realPath[ : position ]
mit
tow/dnspython
tests/test_exceptions.py
6
2136
# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. import binascii import unittest from dns.exception import DNSException class FormatedError(DNSException): fmt = "Custom format: {parameter}" supp_kwargs = set(['parameter']) class ExceptionTestCase(unittest.TestCase): def test_custom_message(self): msg = "this is a custom message" try: raise DNSException(msg) except DNSException as ex: self.assertEqual(str(ex), msg) def test_implicit_message(self): try: raise DNSException() except DNSException as ex: self.assertEqual(ex.__class__.__doc__, str(ex)) def test_formatted_error(self): """Exceptions with explicit format has to respect it.""" params = {'parameter': 'value'} try: raise FormatedError(**params) except FormatedError as ex: msg = FormatedError.fmt.format(**params) self.assertEqual(msg, str(ex)) def test_kwargs_only(self): """Kwargs cannot be combined with args.""" with self.assertRaises(AssertionError): raise FormatedError(1, a=2) def test_kwargs_unsupported(self): """Only supported kwargs are accepted.""" with self.assertRaises(AssertionError): raise FormatedError(unsupported=2) if __name__ == '__main__': unittest.main()
isc
CyanogenMod/android_kernel_htc_flounder
tools/perf/scripts/python/event_analyzing_sample.py
4719
7393
# event_analyzing_sample.py: general event handler in python # # Current perf report is already very powerful with the annotation integrated, # and this script is not trying to be as powerful as perf report, but # providing end user/developer a flexible way to analyze the events other # than trace points. # # The 2 database related functions in this script just show how to gather # the basic information, and users can modify and write their own functions # according to their specific requirement. # # The first function "show_general_events" just does a basic grouping for all # generic events with the help of sqlite, and the 2nd one "show_pebs_ll" is # for a x86 HW PMU event: PEBS with load latency data. # import os import sys import math import struct import sqlite3 sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from EventClass import * # # If the perf.data has a big number of samples, then the insert operation # will be very time consuming (about 10+ minutes for 10000 samples) if the # .db database is on disk. Move the .db file to RAM based FS to speedup # the handling, which will cut the time down to several seconds. # con = sqlite3.connect("/dev/shm/perf.db") con.isolation_level = None def trace_begin(): print "In trace_begin:\n" # # Will create several tables at the start, pebs_ll is for PEBS data with # load latency info, while gen_events is for general event. # con.execute(""" create table if not exists gen_events ( name text, symbol text, comm text, dso text );""") con.execute(""" create table if not exists pebs_ll ( name text, symbol text, comm text, dso text, flags integer, ip integer, status integer, dse integer, dla integer, lat integer );""") # # Create and insert event object to a database so that user could # do more analysis with simple database commands. 
# def process_event(param_dict): event_attr = param_dict["attr"] sample = param_dict["sample"] raw_buf = param_dict["raw_buf"] comm = param_dict["comm"] name = param_dict["ev_name"] # Symbol and dso info are not always resolved if (param_dict.has_key("dso")): dso = param_dict["dso"] else: dso = "Unknown_dso" if (param_dict.has_key("symbol")): symbol = param_dict["symbol"] else: symbol = "Unknown_symbol" # Create the event object and insert it to the right table in database event = create_event(name, comm, dso, symbol, raw_buf) insert_db(event) def insert_db(event): if event.ev_type == EVTYPE_GENERIC: con.execute("insert into gen_events values(?, ?, ?, ?)", (event.name, event.symbol, event.comm, event.dso)) elif event.ev_type == EVTYPE_PEBS_LL: event.ip &= 0x7fffffffffffffff event.dla &= 0x7fffffffffffffff con.execute("insert into pebs_ll values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", (event.name, event.symbol, event.comm, event.dso, event.flags, event.ip, event.status, event.dse, event.dla, event.lat)) def trace_end(): print "In trace_end:\n" # We show the basic info for the 2 type of event classes show_general_events() show_pebs_ll() con.close() # # As the event number may be very big, so we can't use linear way # to show the histogram in real number, but use a log2 algorithm. 
# def num2sym(num): # Each number will have at least one '#' snum = '#' * (int)(math.log(num, 2) + 1) return snum def show_general_events(): # Check the total record number in the table count = con.execute("select count(*) from gen_events") for t in count: print "There is %d records in gen_events table" % t[0] if t[0] == 0: return print "Statistics about the general events grouped by thread/symbol/dso: \n" # Group by thread commq = con.execute("select comm, count(comm) from gen_events group by comm order by -count(comm)") print "\n%16s %8s %16s\n%s" % ("comm", "number", "histogram", "="*42) for row in commq: print "%16s %8d %s" % (row[0], row[1], num2sym(row[1])) # Group by symbol print "\n%32s %8s %16s\n%s" % ("symbol", "number", "histogram", "="*58) symbolq = con.execute("select symbol, count(symbol) from gen_events group by symbol order by -count(symbol)") for row in symbolq: print "%32s %8d %s" % (row[0], row[1], num2sym(row[1])) # Group by dso print "\n%40s %8s %16s\n%s" % ("dso", "number", "histogram", "="*74) dsoq = con.execute("select dso, count(dso) from gen_events group by dso order by -count(dso)") for row in dsoq: print "%40s %8d %s" % (row[0], row[1], num2sym(row[1])) # # This function just shows the basic info, and we could do more with the # data in the tables, like checking the function parameters when some # big latency events happen. 
#
def show_pebs_ll():
    """Print histograms of the PEBS Load Latency events grouped by
    thread, symbol, data source (dse) and latency.

    Prints are parenthesized single-argument form so the code parses
    identically under Python 2 and Python 3.
    """
    count = con.execute("select count(*) from pebs_ll")
    for t in count:
        print("There is %d records in pebs_ll table" % t[0])
        if t[0] == 0:
            return

    print("Statistics about the PEBS Load Latency events grouped by thread/symbol/dse/latency: \n")

    # Group by thread
    commq = con.execute("select comm, count(comm) from pebs_ll group by comm order by -count(comm)")
    print("\n%16s %8s %16s\n%s" % ("comm", "number", "histogram", "=" * 42))
    for row in commq:
        print("%16s %8d %s" % (row[0], row[1], num2sym(row[1])))

    # Group by symbol
    print("\n%32s %8s %16s\n%s" % ("symbol", "number", "histogram", "=" * 58))
    symbolq = con.execute("select symbol, count(symbol) from pebs_ll group by symbol order by -count(symbol)")
    for row in symbolq:
        print("%32s %8d %s" % (row[0], row[1], num2sym(row[1])))

    # Group by dse
    dseq = con.execute("select dse, count(dse) from pebs_ll group by dse order by -count(dse)")
    print("\n%32s %8s %16s\n%s" % ("dse", "number", "histogram", "=" * 58))
    for row in dseq:
        print("%32s %8d %s" % (row[0], row[1], num2sym(row[1])))

    # Group by latency, ascending (lowest latency first)
    latq = con.execute("select lat, count(lat) from pebs_ll group by lat order by lat")
    print("\n%32s %8s %16s\n%s" % ("latency", "number", "histogram", "=" * 58))
    for row in latq:
        print("%32s %8d %s" % (row[0], row[1], num2sym(row[1])))


def trace_unhandled(event_name, context, event_fields_dict):
    """Fallback handler: dump any event without a dedicated handler as
    space-separated key=value pairs, sorted by key."""
    print(' '.join(['%s=%s' % (k, str(v)) for k, v in sorted(event_fields_dict.items())]))
gpl-2.0
kozmikkick/kozmikkernel3.8
tools/perf/scripts/python/sched-migration.py
11215
11670
#!/usr/bin/python # # Cpu task migration overview toy # # Copyright (C) 2010 Frederic Weisbecker <[email protected]> # # perf script event handlers have been generated by perf script -g python # # This software is distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. import os import sys from collections import defaultdict from UserList import UserList sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from SchedGui import * threads = { 0 : "idle"} def thread_name(pid): return "%s:%d" % (threads[pid], pid) class RunqueueEventUnknown: @staticmethod def color(): return None def __repr__(self): return "unknown" class RunqueueEventSleep: @staticmethod def color(): return (0, 0, 0xff) def __init__(self, sleeper): self.sleeper = sleeper def __repr__(self): return "%s gone to sleep" % thread_name(self.sleeper) class RunqueueEventWakeup: @staticmethod def color(): return (0xff, 0xff, 0) def __init__(self, wakee): self.wakee = wakee def __repr__(self): return "%s woke up" % thread_name(self.wakee) class RunqueueEventFork: @staticmethod def color(): return (0, 0xff, 0) def __init__(self, child): self.child = child def __repr__(self): return "new forked task %s" % thread_name(self.child) class RunqueueMigrateIn: @staticmethod def color(): return (0, 0xf0, 0xff) def __init__(self, new): self.new = new def __repr__(self): return "task migrated in %s" % thread_name(self.new) class RunqueueMigrateOut: @staticmethod def color(): return (0xff, 0, 0xff) def __init__(self, old): self.old = old def __repr__(self): return "task migrated out %s" % thread_name(self.old) class RunqueueSnapshot: def __init__(self, tasks = [0], event = RunqueueEventUnknown()): self.tasks = tuple(tasks) self.event = event def sched_switch(self, prev, prev_state, next): 
event = RunqueueEventUnknown() if taskState(prev_state) == "R" and next in self.tasks \ and prev in self.tasks: return self if taskState(prev_state) != "R": event = RunqueueEventSleep(prev) next_tasks = list(self.tasks[:]) if prev in self.tasks: if taskState(prev_state) != "R": next_tasks.remove(prev) elif taskState(prev_state) == "R": next_tasks.append(prev) if next not in next_tasks: next_tasks.append(next) return RunqueueSnapshot(next_tasks, event) def migrate_out(self, old): if old not in self.tasks: return self next_tasks = [task for task in self.tasks if task != old] return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old)) def __migrate_in(self, new, event): if new in self.tasks: self.event = event return self next_tasks = self.tasks[:] + tuple([new]) return RunqueueSnapshot(next_tasks, event) def migrate_in(self, new): return self.__migrate_in(new, RunqueueMigrateIn(new)) def wake_up(self, new): return self.__migrate_in(new, RunqueueEventWakeup(new)) def wake_up_new(self, new): return self.__migrate_in(new, RunqueueEventFork(new)) def load(self): """ Provide the number of tasks on the runqueue. 
Don't count idle""" return len(self.tasks) - 1 def __repr__(self): ret = self.tasks.__repr__() ret += self.origin_tostring() return ret class TimeSlice: def __init__(self, start, prev): self.start = start self.prev = prev self.end = start # cpus that triggered the event self.event_cpus = [] if prev is not None: self.total_load = prev.total_load self.rqs = prev.rqs.copy() else: self.rqs = defaultdict(RunqueueSnapshot) self.total_load = 0 def __update_total_load(self, old_rq, new_rq): diff = new_rq.load() - old_rq.load() self.total_load += diff def sched_switch(self, ts_list, prev, prev_state, next, cpu): old_rq = self.prev.rqs[cpu] new_rq = old_rq.sched_switch(prev, prev_state, next) if old_rq is new_rq: return self.rqs[cpu] = new_rq self.__update_total_load(old_rq, new_rq) ts_list.append(self) self.event_cpus = [cpu] def migrate(self, ts_list, new, old_cpu, new_cpu): if old_cpu == new_cpu: return old_rq = self.prev.rqs[old_cpu] out_rq = old_rq.migrate_out(new) self.rqs[old_cpu] = out_rq self.__update_total_load(old_rq, out_rq) new_rq = self.prev.rqs[new_cpu] in_rq = new_rq.migrate_in(new) self.rqs[new_cpu] = in_rq self.__update_total_load(new_rq, in_rq) ts_list.append(self) if old_rq is not out_rq: self.event_cpus.append(old_cpu) self.event_cpus.append(new_cpu) def wake_up(self, ts_list, pid, cpu, fork): old_rq = self.prev.rqs[cpu] if fork: new_rq = old_rq.wake_up_new(pid) else: new_rq = old_rq.wake_up(pid) if new_rq is old_rq: return self.rqs[cpu] = new_rq self.__update_total_load(old_rq, new_rq) ts_list.append(self) self.event_cpus = [cpu] def next(self, t): self.end = t return TimeSlice(t, self) class TimeSliceList(UserList): def __init__(self, arg = []): self.data = arg def get_time_slice(self, ts): if len(self.data) == 0: slice = TimeSlice(ts, TimeSlice(-1, None)) else: slice = self.data[-1].next(ts) return slice def find_time_slice(self, ts): start = 0 end = len(self.data) found = -1 searching = True while searching: if start == end or start == end - 1: 
searching = False i = (end + start) / 2 if self.data[i].start <= ts and self.data[i].end >= ts: found = i end = i continue if self.data[i].end < ts: start = i elif self.data[i].start > ts: end = i return found def set_root_win(self, win): self.root_win = win def mouse_down(self, cpu, t): idx = self.find_time_slice(t) if idx == -1: return ts = self[idx] rq = ts.rqs[cpu] raw = "CPU: %d\n" % cpu raw += "Last event : %s\n" % rq.event.__repr__() raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000) raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6)) raw += "Load = %d\n" % rq.load() for t in rq.tasks: raw += "%s \n" % thread_name(t) self.root_win.update_summary(raw) def update_rectangle_cpu(self, slice, cpu): rq = slice.rqs[cpu] if slice.total_load != 0: load_rate = rq.load() / float(slice.total_load) else: load_rate = 0 red_power = int(0xff - (0xff * load_rate)) color = (0xff, red_power, red_power) top_color = None if cpu in slice.event_cpus: top_color = rq.event.color() self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end) def fill_zone(self, start, end): i = self.find_time_slice(start) if i == -1: return for i in xrange(i, len(self.data)): timeslice = self.data[i] if timeslice.start > end: return for cpu in timeslice.rqs: self.update_rectangle_cpu(timeslice, cpu) def interval(self): if len(self.data) == 0: return (0, 0) return (self.data[0].start, self.data[-1].end) def nr_rectangles(self): last_ts = self.data[-1] max_cpu = 0 for cpu in last_ts.rqs: if cpu > max_cpu: max_cpu = cpu return max_cpu class SchedEventProxy: def __init__(self): self.current_tsk = defaultdict(lambda : -1) self.timeslices = TimeSliceList() def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state, next_comm, next_pid, next_prio): """ Ensure the task we sched out this cpu is really the one we logged. 
Otherwise we may have missed traces """ on_cpu_task = self.current_tsk[headers.cpu] if on_cpu_task != -1 and on_cpu_task != prev_pid: print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \ (headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid) threads[prev_pid] = prev_comm threads[next_pid] = next_comm self.current_tsk[headers.cpu] = next_pid ts = self.timeslices.get_time_slice(headers.ts()) ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu) def migrate(self, headers, pid, prio, orig_cpu, dest_cpu): ts = self.timeslices.get_time_slice(headers.ts()) ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu) def wake_up(self, headers, comm, pid, success, target_cpu, fork): if success == 0: return ts = self.timeslices.get_time_slice(headers.ts()) ts.wake_up(self.timeslices, pid, target_cpu, fork) def trace_begin(): global parser parser = SchedEventProxy() def trace_end(): app = wx.App(False) timeslices = parser.timeslices frame = RootFrame(timeslices, "Migration") app.MainLoop() def sched__sched_stat_runtime(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, runtime, vruntime): pass def sched__sched_stat_iowait(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, delay): pass def sched__sched_stat_sleep(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, delay): pass def sched__sched_stat_wait(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, delay): pass def sched__sched_process_fork(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, parent_comm, parent_pid, child_comm, child_pid): pass def sched__sched_process_wait(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_process_exit(event_name, context, common_cpu, 
common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_process_free(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_migrate_task(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio, orig_cpu, dest_cpu): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.migrate(headers, pid, prio, orig_cpu, dest_cpu) def sched__sched_switch(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, prev_comm, prev_pid, prev_prio, prev_state, next_comm, next_pid, next_prio): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state, next_comm, next_pid, next_prio) def sched__sched_wakeup_new(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio, success, target_cpu): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.wake_up(headers, comm, pid, success, target_cpu, 1) def sched__sched_wakeup(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio, success, target_cpu): headers = EventHeaders(common_cpu, common_secs, common_nsecs, common_pid, common_comm) parser.wake_up(headers, comm, pid, success, target_cpu, 0) def sched__sched_wait_task(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid, prio): pass def sched__sched_kthread_stop_ret(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, ret): pass def sched__sched_kthread_stop(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, comm, pid): pass def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm): pass
gpl-2.0
cndavy/JFinal_Authority
jfinal-authority/src/main/webapp/static/js/xgcalendar/jsonhelp.py
2
2680
# -*- coding: utf-8 -*-
import datetime
import time
import decimal

from django.utils import simplejson


def safe_new_datetime(d):
    """Return a plain datetime.datetime copy of *d* (date or datetime)."""
    kw = [d.year, d.month, d.day]
    if isinstance(d, datetime.datetime):
        kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo])
    return datetime.datetime(*kw)


def safe_new_date(d):
    """Return a plain datetime.date copy of *d*."""
    return datetime.date(d.year, d.month, d.day)


def addtime(date, d=0, h=0, m=0, s=0):
    """Return *date* shifted by the given days/hours/minutes/seconds."""
    diff = datetime.timedelta(days=d, hours=h, minutes=m, seconds=s)
    return date + diff


# ----------------------------------------------------------------------
def get_date_stamp(d):
    """Milliseconds between *d* (local time) and 1970-01-01."""
    return int(time.mktime(d.timetuple()) * 1000)


def get_ms_json_date_format(d):
    """Render *d* in the MS Ajax JSON date format: /Date(<ticks>)/."""
    stamp = get_date_stamp(d)
    return r"/Date(" + str(stamp) + ")/"


class DatetimeJSONEncoder(simplejson.JSONEncoder):
    """JSON encoder that can also serialize datetime/date/time/Decimal."""

    # DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"

    def default(self, o):
        # datetime.datetime must be tested before datetime.date, since
        # it is a subclass of date.
        if isinstance(o, datetime.datetime):
            d = safe_new_datetime(o)
            return get_ms_json_date_format(d)
        elif isinstance(o, datetime.date):
            d = safe_new_date(o)
            return get_ms_json_date_format(d)
        elif isinstance(o, datetime.time):
            return o.strftime(self.TIME_FORMAT)
        elif isinstance(o, decimal.Decimal):
            return str(o)
        else:
            return super(DatetimeJSONEncoder, self).default(o)


# ----------------------------------------------------------------------
def convert_callist_to_strlist(callist, zonediff):
    """Convert a CalendarList into a list of JSON-ready row lists,
    shifting start/end times by *zonediff* hours."""
    strlist = []
    for a in callist:
        s = addtime(a.start_time, h=zonediff)
        e = addtime(a.end_time, h=zonediff)
        strlist.append([str(a.key()), a.subject, s, e,
                        1 if a.is_all_day_event else 0,
                        # NOTE(review): both branches of this "spans more
                        # than one day" flag yield 0, so it is always 0;
                        # the else-branch probably should be 1 -- confirm
                        # against the JS consumer before changing.
                        0 if s.strftime("%y%m%d") == e.strftime("%y%m%d") else 0,
                        1, a.category, 1, a.attendees, a.location])
    return strlist


# ----------------------------------------------------------------------
def build_json_calendar_viewdata(start, end, events, issort=True, error=None, zonediff=8):
    """Build the JSON view-data object for the calendar view."""
    jsonlist = convert_callist_to_strlist(events, zonediff)
    return {"start": start, "end": end, "error": error,
            "issort": issort, "events": jsonlist}
apache-2.0
gtostock/foursquared
util/gen_class.py
262
3173
#!/usr/bin/python
"""Generate a Java value class from an XML description.

Usage: gen_class.py <xml-file>
"""

import datetime
import sys
import textwrap

import common

from xml.dom import pulldom

# NOTE(review): template whitespace below was reconstructed from the
# generated-Java structure; verify against a previously generated file.
HEADER = """\
/**
 * Copyright 2009 Joe LaPenna
 */

package com.joelapenna.foursquare.types;

%(imports)s

/**
 * Auto-generated: %(timestamp)s
 *
 * @author Joe LaPenna ([email protected])
 */
public class %(type_name)s implements %(interfaces)s {
"""

GETTER = """\
    public %(attribute_type)s get%(camel_name)s() {
        return %(field_name)s;
    }
"""

SETTER = """\
    public void set%(camel_name)s(%(attribute_type)s %(attribute_name)s) {
        %(field_name)s = %(attribute_name)s;
    }
"""

BOOLEAN_GETTER = """\
    public %(attribute_type)s %(attribute_name)s() {
        return %(field_name)s;
    }
"""


def main():
    """Parse the XML given on the command line and emit the class."""
    type_name, top_node_name, attributes = common.WalkNodesForAttributes(
        sys.argv[1])
    GenerateClass(type_name, attributes)


def GenerateClass(type_name, attributes):
    """Print the complete Java class for *type_name* to stdout."""
    lines = []

    for attribute_name in sorted(attributes):
        typ, children = attributes[attribute_name]
        lines.extend(Field(attribute_name, typ).split('\n'))
    lines.append('')
    lines.extend(Constructor(type_name).split('\n'))
    lines.append('')

    # getters and setters
    for attribute_name in sorted(attributes):
        attribute_type, children = attributes[attribute_name]
        lines.extend(Accessors(attribute_name, attribute_type).split('\n'))

    print(Header(type_name))
    # Indent every non-blank body line by four spaces.
    for line in lines:
        if not line:
            print(line)
        else:
            print('    ' + line)
    print(Footer())


def AccessorReplacements(attribute_name, attribute_type):
    """Build the template substitution dict for one attribute."""
    # CamelCaseClassName
    camel_name = ''.join([word.capitalize()
                          for word in attribute_name.split('_')])
    # camelCaseLocalName
    attribute_name = (camel_name[0].lower() + camel_name[1:])
    # mFieldName
    field_attribute_name = 'm' + camel_name
    return {
        'attribute_name': attribute_name,
        'camel_name': camel_name,
        'field_name': field_attribute_name,
        'attribute_type': attribute_type
    }


def Header(type_name):
    """Render the file header, including imports and class declaration."""
    interfaces = common.INTERFACES.get(type_name, common.DEFAULT_INTERFACES)
    import_names = common.CLASS_IMPORTS.get(type_name,
                                            common.DEFAULT_CLASS_IMPORTS)
    if import_names:
        # BUG FIX: previously this joined the local name `imports`,
        # which was not yet bound, raising UnboundLocalError whenever a
        # type had imports.  Join the looked-up import_names instead.
        imports = ';\n'.join(import_names) + ';'
    else:
        imports = ''
    return HEADER % {'type_name': type_name,
                     'interfaces': ', '.join(interfaces),
                     'imports': imports,
                     'timestamp': datetime.datetime.now()}


def Field(attribute_name, attribute_type):
    """Print the field declarations."""
    replacements = AccessorReplacements(attribute_name, attribute_type)
    return 'private %(attribute_type)s %(field_name)s;' % replacements


def Constructor(type_name):
    """Render the empty public constructor."""
    return 'public %s() {\n}' % type_name


def Accessors(name, attribute_type):
    """Print the getter and setter definitions."""
    replacements = AccessorReplacements(name, attribute_type)
    if attribute_type == common.BOOLEAN:
        return '%s\n%s' % (BOOLEAN_GETTER % replacements,
                           SETTER % replacements)
    else:
        return '%s\n%s' % (GETTER % replacements, SETTER % replacements)


def Footer():
    """Render the closing brace of the class."""
    return '}'


if __name__ == '__main__':
    main()
apache-2.0
yuzhangcmu/Python-Study
Leetcode/String_to_Integer_atoi.py
2
2023
""" Implement atoi to convert a string to an integer. Hint: Carefully consider all possible input cases. If you want a challenge, please do not see below and ask yourself what are the possible input cases. Notes: It is intended for this problem to be specified vaguely (ie, no given input specs). You are responsible to gather all the input requirements up front. spoilers alert... click to show requirements for atoi. Requirements for atoi: The function first discards as many whitespace characters as necessary until the first non-whitespace character is found. Then, starting from this character, takes an optional initial plus or minus sign followed by as many numerical digits as possible, and interprets them as a numerical value. The string can contain additional characters after those that form the integral number, which are ignored and have no effect on the behavior of this function. If the first sequence of non-whitespace characters in str is not a valid integral number, or if no such sequence exists because either str is empty or it contains only whitespace characters, no conversion is performed. If no valid conversion could be performed, a zero value is returned. If the correct value is out of the range of representable values, INT_MAX (2147483647) or INT_MIN (-2147483648) is returned. """ class Solution: # @return an integer def atoi(self, str): str = str.strip() N = len(str) if N == 0: return 0 sign = 1 res = 0 imin, imax = -1<<31, (1<<31)-1 for i, bit in enumerate(str): if i == 0 and bit in ['-', '+']: if bit == '-': sign = -1 elif bit.isdigit(): res = res*10 + int(bit) if res * sign <= imin: return imin elif res * sign >= imax: return imax else: break return sign * res # Don't forget to check sign at the beginning
mit
15Dkatz/pants
tests/python/pants_test/backend/project_info/tasks/test_ensime_integration.py
31
1642
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from pants.util.contextutil import temporary_dir from pants_test.pants_run_integration_test import PantsRunIntegrationTest class EnsimeIntegrationTest(PantsRunIntegrationTest): def _ensime_test(self, specs, project_dir=os.path.join('.pants.d', 'tmp-ensime', 'project'), project_name='project'): """Helper method that tests ensime generation on the input spec list.""" if not os.path.exists(project_dir): os.makedirs(project_dir) with temporary_dir(root_dir=project_dir) as path: pants_run = self.run_pants(['ensime', '--project-dir={dir}'.format(dir=path)] + specs) self.assert_success(pants_run) # TODO: Actually validate the contents of the project files, rather than just # checking if they exist. expected_files = ('.ensime',) workdir = os.path.join(path, project_name) self.assertTrue(os.path.exists(workdir), 'Failed to find project_dir at {dir}.'.format(dir=workdir)) self.assertTrue(all(os.path.exists(os.path.join(workdir, name)) for name in expected_files), 'Failed to find one of the ensime project files at {dir}'.format(dir=path)) # Testing Ensime integration on a sample project def test_ensime_on_all_examples(self): self._ensime_test(['examples/src/scala/org/pantsbuild/example::'])
apache-2.0
cherusk/ansible
lib/ansible/modules/database/postgresql/postgresql_ext.py
64
6115
#!/usr/bin/python # -*- coding: utf-8 -*- # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: postgresql_ext short_description: Add or remove PostgreSQL extensions from a database. description: - Add or remove PostgreSQL extensions from a database. version_added: "1.9" options: name: description: - name of the extension to add or remove required: true default: null db: description: - name of the database to add or remove the extension to/from required: true default: null login_user: description: - The username used to authenticate with required: false default: null login_password: description: - The password used to authenticate with required: false default: null login_host: description: - Host running the database required: false default: localhost port: description: - Database port to connect to. required: false default: 5432 state: description: - The database extension state required: false default: present choices: [ "present", "absent" ] notes: - The default authentication assumes that you are either logging in as or sudo'ing to the C(postgres) account on the host. - This module uses I(psycopg2), a Python PostgreSQL database adapter. You must ensure that psycopg2 is installed on the host before using this module. 
If the remote host is the PostgreSQL server (which is the default case), then PostgreSQL must also be installed on the remote host. For Ubuntu-based systems, install the C(postgresql), C(libpq-dev), and C(python-psycopg2) packages on the remote host before using this module. requirements: [ psycopg2 ] author: "Daniel Schep (@dschep)" ''' EXAMPLES = ''' # Adds postgis to the database "acme" - postgresql_ext: name: postgis db: acme ''' try: import psycopg2 import psycopg2.extras except ImportError: postgresqldb_found = False else: postgresqldb_found = True class NotSupportedError(Exception): pass # =========================================== # PostgreSQL module specific support methods. # def ext_exists(cursor, ext): query = "SELECT * FROM pg_extension WHERE extname=%(ext)s" cursor.execute(query, {'ext': ext}) return cursor.rowcount == 1 def ext_delete(cursor, ext): if ext_exists(cursor, ext): query = "DROP EXTENSION \"%s\"" % ext cursor.execute(query) return True else: return False def ext_create(cursor, ext): if not ext_exists(cursor, ext): query = 'CREATE EXTENSION "%s"' % ext cursor.execute(query) return True else: return False # =========================================== # Module execution. 
# def main(): module = AnsibleModule( argument_spec=dict( login_user=dict(default="postgres"), login_password=dict(default="", no_log=True), login_host=dict(default=""), port=dict(default="5432"), db=dict(required=True), ext=dict(required=True, aliases=['name']), state=dict(default="present", choices=["absent", "present"]), ), supports_check_mode = True ) if not postgresqldb_found: module.fail_json(msg="the python psycopg2 module is required") db = module.params["db"] ext = module.params["ext"] port = module.params["port"] state = module.params["state"] changed = False # To use defaults values, keyword arguments must be absent, so # check which values are empty and don't include in the **kw # dictionary params_map = { "login_host":"host", "login_user":"user", "login_password":"password", "port":"port" } kw = dict( (params_map[k], v) for (k, v) in module.params.items() if k in params_map and v != '' ) try: db_connection = psycopg2.connect(database=db, **kw) # Enable autocommit so we can create databases if psycopg2.__version__ >= '2.4.2': db_connection.autocommit = True else: db_connection.set_isolation_level(psycopg2 .extensions .ISOLATION_LEVEL_AUTOCOMMIT) cursor = db_connection.cursor( cursor_factory=psycopg2.extras.DictCursor) except Exception: e = get_exception() module.fail_json(msg="unable to connect to database: %s" % e) try: if module.check_mode: if state == "present": changed = not ext_exists(cursor, ext) elif state == "absent": changed = ext_exists(cursor, ext) else: if state == "absent": changed = ext_delete(cursor, ext) elif state == "present": changed = ext_create(cursor, ext) except NotSupportedError: e = get_exception() module.fail_json(msg=str(e)) except Exception: e = get_exception() module.fail_json(msg="Database query failed: %s" % e) module.exit_json(changed=changed, db=db, ext=ext) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.pycompat24 import get_exception if __name__ == '__main__': main()
gpl-3.0
Upande/MaMaSe
apps/galleryapp/thumbs.py
1
7714
# -*- encoding: utf-8 -*- """ django-thumbs http://django.es """ from __future__ import division #Decimal division from django.db.models import ImageField from django.db.models.fields.files import ImageFieldFile from PIL import Image from django.core.files.base import ContentFile import cStringIO def generate_thumb_max_size(img, thumb_size, format): """ Fits the image into the thumbnail. Author: Antonio Melé Parameters: =========== img File object thumb_size desired thumbnail size, ie: (200,120) format format of the original image ('jpeg','gif','png',...) (this format will be used for the generated thumbnail, too) Returns: a ContentFile object with the thumbnail. """ img.seek(0) # see http://code.djangoproject.com/ticket/8222 for details image = Image.open(img) # Convert to RGB if necessary if image.mode not in ('L', 'RGB', 'RGBA'): image = image.convert('RGB') # get size thumb_w, thumb_h = thumb_size # If you want to generate a square thumbnail if thumb_w == thumb_h: # quad xsize, ysize = image.size # get minimum size minsize = min(xsize,ysize) # largest square possible in the image xnewsize = (xsize-minsize)/2 ynewsize = (ysize-minsize)/2 # crop it image2 = image.crop((xnewsize, ynewsize, xsize-xnewsize, ysize-ynewsize)) # load is necessary after crop image2.load() # thumbnail of the cropped image (with ANTIALIAS to make it look better) image2.thumbnail(thumb_size, Image.ANTIALIAS) else: # not quad image2 = image image2.thumbnail(thumb_size, Image.ANTIALIAS) io = cStringIO.StringIO() # PNG and GIF are the same, JPG is JPEG if format.upper()=='JPG': format = 'JPEG' image2.save(io, format) return ContentFile(io.getvalue()) def generate_thumb_max_rectangle(img_in, size, format): """ Generates the biggest rectangle proportional to size on the original image, (img_in) centers it on the picture, resizes it to size. 
Author: Samuel Muñoz Hidalgo <[email protected]> Parameters: =========== img_in File object, original image size (Width, height) of the generated thumbnail format format of the original image ('jpeg','gif','png',...) (this format will be used for the generated thumbnail, too) Returns: a ContentFile object with the thumbnail. """ img_in.seek(0) # see http://code.djangoproject.com/ticket/8222 for details image = Image.open(img_in) # Convert to RGB if necessary if image.mode not in ('L', 'RGB', 'RGBA'): image = image.convert('RGB') # Thumbnail size xthumb, ythumb = size #proportion between thumbnail sides relthumb = xthumb/ythumb # Original image size xsize, ysize = image.size #Biggest proportional rectangle (xtmp,ytmp) #Rectangle offset (xoffset,yoffset) if (xsize/ysize)>relthumb: #Height limits ytmp = ysize xtmp = ysize*relthumb else: #Width limits xtmp = xsize ytmp = xsize/relthumb xoffset = (xsize-xtmp)/2 yoffset = (ysize-ytmp)/2 image2 = image.transform( size, Image.EXTENT, ( int(round(xoffset)), int(round(yoffset)), int(round(xoffset+xtmp)), int(round(yoffset+ytmp)) ) ) io = cStringIO.StringIO() # PNG and GIF are the same, JPG is JPEG if format.upper()=='JPG': format = 'JPEG' image2.save(io, format) return ContentFile(io.getvalue()) class ImageWithThumbsFieldFile(ImageFieldFile): """ See ImageWithThumbsField for usage example """ def __init__(self, *args, **kwargs): super(ImageWithThumbsFieldFile, self).__init__(*args, **kwargs) if self.field.sizes: def get_size(self, size): if not self: return '' else: split = self.url.rsplit('.',1) thumb_url = '%s.%sx%s.%s' % (split[0],w,h,split[1]) return thumb_url for size in self.field.sizes: (w,h,f) = size setattr(self, 'url_%sx%s' % (w,h), get_size(self, size)) def save(self, name, content, save=True): super(ImageWithThumbsFieldFile, self).save(name, content, save) if self.field.sizes: for size in self.field.sizes: (w,h,f) = size split = self.name.rsplit('.',1) thumb_name = '%s.%sx%s.%s' % (split[0],w,h,split[1]) # you 
can use another thumbnailing function if you like thumb_content = f(content, (w,h), split[1]) thumb_name_ = self.storage.save(thumb_name, thumb_content) if not thumb_name == thumb_name_: raise ValueError('There is already a file named %s' % thumb_name) def delete(self, save=True): name=self.name super(ImageWithThumbsFieldFile, self).delete(save) if self.field.sizes: for size in self.field.sizes: (w,h) = size split = name.rsplit('.',1) thumb_name = '%s.%sx%s.%s' % (split[0],w,h,split[1]) try: self.storage.delete(thumb_name) except: pass class ImageWithThumbsField(ImageField): attr_class = ImageWithThumbsFieldFile """ Usage example: ============== photo = ImageWithThumbsField(upload_to='images', sizes=((125,125),(300,200),) To retrieve image URL, exactly the same way as with ImageField: my_object.photo.url To retrieve thumbnails URL's just add the size to it: my_object.photo.url_125x125 my_object.photo.url_300x200 Note: The 'sizes' attribute is not required. If you don't provide it, ImageWithThumbsField will act as a normal ImageField How it works: ============= For each size in the 'sizes' atribute of the field it generates a thumbnail with that size and stores it following this format: available_filename.[width]x[height].extension Where 'available_filename' is the available filename returned by the storage backend for saving the original file. Following the usage example above: For storing a file called "photo.jpg" it saves: photo.jpg (original file) photo.125x125.jpg (first thumbnail) photo.300x200.jpg (second thumbnail) With the default storage backend if photo.jpg already exists it will use these filenames: photo_.jpg photo_.125x125.jpg photo_.300x200.jpg Note: django-thumbs assumes that if filename "any_filename.jpg" is available filenames with this format "any_filename.[widht]x[height].jpg" will be available, too. 
To do: ====== Add method to regenerate thubmnails """ def __init__(self, verbose_name=None, name=None, width_field=None, height_field=None, sizes=None, **kwargs): self.verbose_name=verbose_name self.name=name self.width_field=width_field self.height_field=height_field self.sizes = sizes super(ImageField, self).__init__(**kwargs)
apache-2.0
zeickan/Infected-Engine
store/functions.py
2
1347
import random
import string
import re


def random_generator(size=6, chars=string.ascii_uppercase + string.digits):
    """Return a random string of ``size`` characters drawn from ``chars``.

    By default the alphabet is uppercase ASCII letters plus digits, which is
    suitable for human-readable codes (order numbers, vouchers, etc.).

    :param size: length of the generated string (0 returns an empty string)
    :param chars: sequence of candidate characters to choose from
    :return: str of exactly ``size`` characters

    NOTE: ``random`` is a pseudo-random generator and is NOT suitable for
    security tokens; use the ``secrets`` module for anything
    security-sensitive.
    """
    return ''.join(random.choice(chars) for _ in range(size))
apache-2.0
BeDjango/intef-openedx
cms/djangoapps/contentstore/management/commands/populate_creators.py
182
2406
""" Script for granting existing course instructors course creator privileges. This script is only intended to be run once on a given environment. """ from course_creators.views import add_user_with_status_granted, add_user_with_status_unrequested from django.core.management.base import BaseCommand from django.contrib.auth.models import User from django.db.utils import IntegrityError from student.roles import CourseInstructorRole, CourseStaffRole #------------ to run: ./manage.py cms populate_creators --settings=dev class Command(BaseCommand): """ Script for granting existing course instructors course creator privileges. """ help = 'Grants all users with INSTRUCTOR role permission to create courses' def handle(self, *args, **options): """ The logic of the command. """ username = 'populate_creators_command' email = '[email protected]' try: admin = User.objects.create_user(username, email, 'foo') admin.is_staff = True admin.save() except IntegrityError: # If the script did not complete the last time it was run, # the admin user will already exist. admin = User.objects.get(username=username, email=email) for user in get_users_with_role(CourseInstructorRole.ROLE): add_user_with_status_granted(admin, user) # Some users will be both staff and instructors. Those folks have been # added with status granted above, and add_user_with_status_unrequested # will not try to add them again if they already exist in the course creator database. for user in get_users_with_role(CourseStaffRole.ROLE): add_user_with_status_unrequested(user) # There could be users who are not in either staff or instructor (they've # never actually done anything in Studio). I plan to add those as unrequested # when they first go to their dashboard. 
admin.delete() #============================================================================================================= # Because these are expensive and far-reaching, I moved them here def get_users_with_role(role_prefix): """ An expensive operation which finds all users in the db with the given role prefix """ return User.objects.filter(groups__name__startswith=role_prefix)
agpl-3.0
DinoTools/dionaea
modules/python/dionaea/sip/rfc3261.py
1
22112
# This file is part of the dionaea honeypot # # SPDX-FileCopyrightText: 2011-2020 PhiBo (DinoTools) # # SPDX-License-Identifier: GPL-2.0-or-later import re import logging import time try: from dionaea.sip import rfc2396, rfc4566 from dionaea.sip.extras import int2bytes, ErrorWithResponse except: import rfc2396, rfc4566 from extras import int2bytes, ErrorWithResponse logger = logging.getLogger('sip') logger.setLevel(logging.DEBUG) # For more information see RFC3261 Section: 21 Response Codes # http://tools.ietf.org/html/rfc3261#section-21 # Provisional 1xx TRYING = 100 RINGING = 180 CALL_IS_BEING_FORWARDED = 181 QUEUED = 182 SESSION_PROGRESS = 183 # Successful 2xx OK = 200 # Redirection 3xx MULTIPLE_CHOICES = 300 MOVED_PERMANENTLY = 301 MOVED_TEMPORARILY = 302 USE_PROXY = 305 ALTERNATIVE_SERVICE = 380 # Request Failure 4xx BAD_REQUEST = 400 UNAUTHORIZED = 401 PAYMENT_REQUIRED = 402 FORBIDDEN = 403 NOT_FOUND = 404 METHOD_NOT_ALLOWED = 405 NOT_ACCEPTABLE = 406 PROXY_AUTHENTICATION_REQUIRED = 407 REQUEST_TIMEOUT = 408 GONE = 410 REQUEST_ENTITY_TOO_LARGE = 413 REQUEST_URI_TOO_LARGE = 414 UNSUPPORTED_MEDIA_TYPE = 415 UNSUPPORTED_URI_SCHEME = 416 BAD_EXTENSION = 420 EXTENSION_REQUIRED = 421 INTERVAL_TOO_BRIEF = 423 TEMPORARILY_UNAVAILABLE = 480 CALL_TRANSACTION_DOSE_NOT_EXIST = 481 LOOP_DETECTED = 482 TOO_MANY_HOPS = 483 ADDRESS_INCOMPLETE = 484 AMBIGUOUS = 485 BUSY_HERE = 486 REQUEST_TERMINATED = 487 NOT_ACCEPTABLE_HERE = 488 REQUEST_PENDING = 491 UNDECIPHERABLE = 493 # Server Failure 5xx INTERNAL_SERVER_ERROR = 500 NOT_IMPLEMENTED = 501 BAD_GATEWAY = 502 SERVICE_UNAVAILABLE = 503 SERVER_TIME_OUT = 504 VERSION_NOT_SUPPORTED = 505 MESSAGE_TOO_LARGE = 513 # Global Failures 6xx BUSY_EVERYWHERE = 600 DECLINE = 603 DOES_NOT_EXIST_ANYWHERE = 604 NOT_ACCEPTABLE = 606 status_messages = { # Provisional 1xx 100: b"Trying", 180: b"Ringing", 181: b"Call Is Being Forwarded", 182: b"Queued", 183: b"Session Progress", # Successful 2xx 200: b"OK", # Redirection 3xx 300: b"Multiple 
Choices", 301: b"Moved Permanently", 302: b"Moved Temporarily", 305: b"Use Proxy", 380: b"Alternative Service", # Request Failure 4xx 400: b"Bad Request", 401: b"Unauthorized", 402: b"Payment Required", 403: b"Forbidden", 404: b"Not Found", 405: b"Method Not Allowed", 406: b"Not Acceptable", 407: b"Proxy Authentication Required", 408: b"Request Timeout", 410: b"Gone", 413: b"Request Entity Too Large", 414: b"Request-URI Too Large", 415: b"Unsupported Media Type", 416: b"Unsupported URI Scheme", 420: b"Bad Extension", 421: b"Extension Required", 423: b"Interval Too Brief", 480: b"Temporarily Unavailable", 481: b"Call/Transaction Does Not Exist", 482: b"Loop Detected", 483: b"Too Many Hops", 484: b"Address Incomplete", 485: b"Ambiguous", 486: b"Busy Here", 487: b"Request Terminated", 488: b"Not Acceptable Here", 491: b"Request Pending", 493: b"Undecipherable", # Server Failure 5xx 500: b"Internal Server Error", 501: b"Not Implemented", 502: b"Bad Gateway", 503: b"Service Unavailable", 504: b"Server Time-out", 505: b"Version Not Supported", 513: b"Message Too Large", # Global Failures 6xx 600: b"Busy Everywhere", 603: b"Decline", 604: b"Does Not Exist Anywhere", 606: b"Not Acceptable", } class SipParsingError(Exception): """ Exception class for errors occurring during SIP message parsing """ class CSeq(object): """ Hold the value of an CSeq attribute >>> cseq1 = CSeq.froms(b"100 INVITE") >>> cseq2 = CSeq(seq = 100, method = b"INVITE") >>> print(cseq1.dumps(), cseq2.dumps(), cseq1.seq, cseq1.method) b'100 INVITE' b'100 INVITE' 100 b'INVITE' """ def __init__(self, data = None, seq = None, method = None): # do we need to convert the data? 
if seq is not None and type(seq) == str: seq = int(seq) if type(method) == str: method = bytes(method, "utf-8") self.seq = seq self.method = method def dumps(self): return int2bytes(self.seq) + b" " + self.method @classmethod def froms(cls,data): return cls(**cls.loads(data)[1]) @classmethod def loads(cls, data): if type(data) == str: data = bytes(data, "utf-8") d = data.partition(b" ") seq = int(d[0].decode("utf-8")) method = d[2].strip() return (len(data), {'seq':seq,'method':method}) class Header(object): """ >>> print(Header.froms('"John Doe" <sip:[email protected]>', 'to').dumps()) b'To: "John Doe" <sip:[email protected]>' >>> print(Header.froms(b'"John Doe" <sip:[email protected]>', b'to').dumps()) b'To: "John Doe" <sip:[email protected]>' >>> print(Header.froms(b'"John Doe" <sip:[email protected]>;tag=abc123', b'to').dumps()) b'To: "John Doe" <sip:[email protected]>;tag=abc123' >>> print(Header.froms(b'To: "John Doe" <sip:[email protected]>;tag=abc123').dumps()) b'To: "John Doe" <sip:[email protected]>;tag=abc123' """ _address = [ b"contact", b"from", b"record-route", b"refer-to", b"referred-by", b"route", b"to" ] _exception = { b"call-id": b"Call-ID", b"cseq": b"CSeq", b"www-authenticate": b"WWW-Authenticate" } _header_compact2long = { b"c": b"content-type", b"e": b"content-encoding", b"f": b"from", b"i": b"call-id", b"k": b"supported", b"l": b"content-length", b"m": b"contact", # m = moved b"s": b"subject", b"t": b"to", b"v": b"via" } def __init__(self, name, value = None): if type(name) == str: name = bytes(name, "utf-8") self.name = name.lower() if type(value) == str: value = bytes(value, "utf-8") self._value = value def dumps(self): """ Dump the value with header name. 
""" return self.format_name(self.name) + b": " + self.get_value() @classmethod def froms(cls, data, name = None): return cls(**cls.loads(data, name)[1]) @classmethod def loads(cls, data, name): if type(data) == str: data = bytes(data, "utf-8") if type(name) == str: name = bytes(name, "utf-8") if name is None: data = data.strip() d = re.split(b": *", data, 1) name = d[0].strip() data = d[1].strip() name = name.lower() name = cls._header_compact2long.get(name, name) if type(data) != bytes: value = data elif name in cls._address: # FIXME may cause problems? addr = rfc2396.Address.froms(data) # addr.must_quote = True # l = addr.loads(data) # ToDo: use l to parse the rest value = addr elif name == b"cseq": value = CSeq.froms(data) elif name == b"via": value = Via.froms(data) else: value = data return (len(data), {'name':name,'value':value}) def format_name(self, name): name = name.lower() if name in self._exception: return self._exception[name] names = name.split(b"-") names = [n.capitalize() for n in names] return b"-".join(names) def get_raw(self): return self._value def get_value(self): """ Prepare the value and return it as bytes. 
""" if type(self._value) == bytes: return self._value if type(self._value) == int: return int2bytes(self._value) return self._value.dumps() value = property(get_value) class Headers(object): _single = [ b"call-id", b"content-disposition", b"content-length", b"content-type", b"cseq", b"date", b"expires", b"event", b"max-forwards", b"organization", b"refer-to", b"referred-by", b"server", b"session-expires", b"subject", b"timestamp", b"to", b"user-agent" ] def __init__(self): self._headers = {} def __getattr__(self, name): return self.get(name) def __iter__(self): return iter(self._headers) def append(self, headers, copy = False, name_new = None): if headers is None: return if type(headers) != list: headers = [headers] for header in headers: if copy: header = Header.froms(header.dumps()) if name_new is not None: header.name = name_new if header.name in self._single: self._headers[header.name] = header elif header.name in self._headers: self._headers[header.name].append(header) else: self._headers[header.name] = [header] def dump_list(self): ret = [] for name, header in self._headers.items(): if not type(header) == list: header = [header] for h in header: ret.append(h.dumps()) return ret def get(self, name, default = None): if type(name) == str: name = bytes(name, "utf-8") name = name.lower() if name not in self._headers: return default return self._headers[name] def items(self): return self._headers.items() class Message(object): """ >>> s = b'ACK sip:[email protected] SIP/2.0\\r\\n' >>> s = s + b'CSeq: 1 ACK\\r\\n' >>> s = s + b'Via: SIP/2.0/UDP example.org:5060;branch=foo-bar;rport\\r\\n' >>> s = s + b'From: "Bob" <sip:[email protected]>;tag=123\\r\\n' >>> s = s + b'Call-ID: cWhfKU3v\\r\\n' >>> s = s + b'To: "Alice" <sip:[email protected]>\\r\\n' >>> s = s + b'Content-Length: 0\\r\\n' >>> s = s + b'Max-Forwards: 70\\r\\n' >>> s = s + b'\\r\\n' >>> m = Message.froms(s) >>> print(m.method) b'ACK' >>> print(m.protocol) b'SIP/2.0' >>> m.uri b'sip:[email protected]' >>> 
print(m.headers.get(b"to").dumps()) b'To: "Alice" <sip:[email protected]>' >>> print(m.headers.get(b"call-id").dumps()) b'Call-ID: cWhfKU3v' >>> s = m.dumps() >>> # parse the generated message again >>> m = Message.froms(s) >>> s2 = m.dumps() >>> # check if the content is the same >>> t1 = s.split(b"\\r\\n") >>> t2 = s2.split(b"\\r\\n") >>> t1.sort() >>> t2.sort() >>> print(b"\\r\\n".join(t1) == b"\\r\\n".join(t2)) True >>> s1 = b"INVITE sip:[email protected] SIP/2.0\\r\\n" >>> s1 = s1 + b"Via: SIP/2.0/UDP example.org;branch=foo-bar\\r\\n" >>> s1 = s1 + b"To: Alice <sip:[email protected]>\\r\\n" >>> s1 = s1 + b"From: Bob <sip:[email protected]>;tag=123\\r\\n" >>> s1 = s1 + b"Call-ID: cWhfKU3v\\r\\n" >>> s1 = s1 + b"CSeq: 123 INVITE\\r\\n" >>> s1 = s1 + b"Max-Forwards: 70\\r\\n" >>> s1 = s1 + b"Contact: <sip:[email protected]>\\r\\n" >>> s1 = s1 + b"Content-Type: application/sdp\\r\\n" >>> s1 = s1 + b"Content-Length: 141\\r\\n\\r\\n" >>> s2 = b"v=0\\r\\n" >>> s2 = s2 + b"o=bob 12345 23456 IN IP4 192.168.1.1\\r\\n" >>> s2 = s2 + b"s=A dionaea test\\r\\n" >>> s2 = s2 + b"c=IN IP4 192.168.1.2\\r\\n" >>> s2 = s2 + b"t=0 0\\r\\n" >>> s2 = s2 + b"m=audio 8080 RTP/AVP 0 8\\r\\n" >>> s2 = s2 + b"m=video 8081 RTP/AVP 31\\r\\n" >>> s = s1 + s2 >>> m = Message.froms(s) >>> m.sdp[b"v"] 0 >>> m.sdp[b"o"].dumps() b'bob 12345 23456 IN IP4 192.168.1.1' >>> m.sdp[b"s"] b'A dionaea test' >>> m.sdp[b"c"].dumps() b'IN IP4 192.168.1.2' """ def __init__(self, session=None, method = None, uri = None, response_code = None, status_message = None, protocol = None, body = None, headers = None, sdp = None, personality = "default"): self.method = method self.uri = uri self.response_code = response_code self.status_message = status_message self.protocol = protocol self._body = body self._personality = personality self._session = session if headers is None: headers = Headers() self.headers = headers self.sdp = sdp #: time of package creation self.time = time.time() def create_response(self, code, 
message = None, personality = None): logger.info("Creating Response: code=%s, message=%s", code, message) if personality is not None: self._personality = personality res = Message(session=self._session) res.protocol = b"SIP/2.0" res.response_code = code res.status_message = message if res.status_message is None: if code in status_messages: res.status_message = status_messages[code] else: res.status_message = b"" if type(res.status_message) == str: res.status_message = bytes(res.status_message, "utf-8") for name in [b"cseq", b"call-id", b"via"]: res.headers.append(self.headers.get(name, None), True) # copy headers res.headers.append(self.headers.get(b"from", None), True, b"from") res.headers.append(self.headers.get(b"to", None), True, b"to") # create contact header addr = self.headers.get(b"to", None)._value uri = rfc2396.URI( scheme = addr.uri.scheme, user = addr.uri.user, host = addr.uri.host, port = addr.uri.port ) cont_addr = rfc2396.Address(uri = uri) contact = Header(name=b"contact", value = cont_addr) res.headers.append(contact) handler = self._session.config.get_handlers_by_personality(self._personality) res.headers.append(Header(name = b"allow", value = ", ".join(handler))) res.headers.append(Header(name = b"content-length", value = 0)) return res def dumps(self): # h = Header h = [] if self.method is not None: h.append(self.method + b" " + self.uri.dumps() + b" " + self.protocol) elif self.response_code is not None: h.append(self.protocol + b" " + int2bytes(self.response_code) + b" " + self.status_message) else: return None sdp = b"" if self.sdp is not None: sdp = self.sdp.dumps() self.headers.append(Header(name = b"content-type", value = b"application/sdp")) self.headers.append(Header(name = b"content-length", value = len(sdp))) h = h + self.headers.dump_list() return b"\r\n".join(h) + b"\r\n\r\n" + sdp @classmethod def froms(cls, data, session=None): return cls(**cls.loads(data, session=session)[1]) def header_exist(self, header_name): """ Check if a 
header with the given name exists """ if type(header_name) == str: header_name = bytes(header_name, "utf-8") return self.headers_exist([header_name], True) def headers_exist(self, headers, overwrite = False): if not overwrite: headers = headers + [b"to", b"from", b"call-id", b"cseq", b"contact"] for header in headers: if self.headers.get(header) is None: logger.warn("Header missing: %r", header) return False return True @classmethod def loads(cls, data, session=None): """ Parse a SIP-Message and return the used bytes :return: bytes used """ # End Of Head if type(data) == bytes: pos = re.search(b"\r?\n\r?\n", data) else: pos = re.search("\r?\n\r?\n", data) if pos is None: return (0, {}) # length of used data l = pos.end() # header without empty line header = data[:pos.start()] headers_data = re.split(b"\r?\n", header) # body without empty line body = data[pos.end():] # remove first line and parse it try: h1, h2, h3 = headers_data[0].split(b" ", 2) except: logger.warning("Can't parse first line of sip message: %s", repr(headers_data[0])[:128]) raise SipParsingError del headers_data[0] response_code = None status_message = None try: response_code, protocol, status_message = int(h2), h1, h3 except: method, uri, protocol = h1, rfc2396.Address.froms(h2), h3 # ToDo: check protocol headers = Headers() for h in headers_data: header = Header.froms(h) headers.append(header) sdp = None try: content_length = int(headers.get(b"content-length", None).value) except: content_length = None if content_length is not None: if content_length <= len(body): content = body[:content_length] content_type = headers.get(b"content-type", None) if content_type is not None and content_type.value.lower().strip() == b"application/sdp": try: sdp = rfc4566.SDP.froms(content) except rfc4566.SdpParsingError: msg = Message(**{ "session": session, "method": method, "uri": uri, "response_code": response_code, "status_message": status_message, "protocol": protocol, "body": body, "headers": headers }) raise 
ErrorWithResponse(msg, BAD_REQUEST, "Invalid SIP body") l += content_length else: logger.info("Body is to short than the given content-length: Content-Length %d, Body %d", content_length, len(body)) return ( l, { "session": session, "method": method, "uri": uri, "response_code": response_code, "status_message": status_message, "protocol": protocol, "body": body, "headers": headers, "sdp": sdp } ) def set_personality(self, personality): self._personality = personality class Via(object): """ Parse and generate the content of a Via: Header. :See: http://tools.ietf.org/html/rfc3261#page-179 Test strings are taken from RFC3261 >>> s = b"SIP/2.0/UDP erlang.bell-telephone.com:5060;branch=z9hG4bK87asdks7" >>> v = Via.froms(s) >>> print(v.port, v.address, v.protocol) 5060 b'erlang.bell-telephone.com' b'UDP' >>> print(v.get_param(b"branch")) b'z9hG4bK87asdks7' >>> print(s == v.dumps()) True >>> v = Via.froms(b"SIP/2.0/UDP 192.0.2.1:5060 ;received=192.0.2.207;branch=z9hG4bK77asjd") >>> print(v.port, v.address, v.protocol) 5060 b'192.0.2.1' b'UDP' >>> print(v.get_param(b"branch"), v.get_param(b"received")) b'z9hG4bK77asjd' b'192.0.2.207' """ _syntax = re.compile(b"SIP */ *2\.0 */ *(?P<protocol>[a-zA-Z]+) *(?P<address>[^ :;]*) *(:(?P<port>[0-9]+))?( *; *(?P<params>.*))?") def __init__(self, protocol = None, address = None, port = None, params = None): if params is None: params = [] self.protocol = protocol self.address = address self.port = port self._params = params def dumps(self): ret = b"SIP/2.0/" + self.protocol.upper() + b" " + self.address if self.port is not None: ret = ret + b":" + int2bytes(self.port) if self._params is not None and len(self._params) > 0: params = [] for x in self._params: if x[1] != b"" and x[1] is not None: params.append(b"=".join(x)) else: params.append(x[0]) ret = ret + b";" + b";".join(params) return ret def get_param(self, name, default = None): for x in self._params: if x[0] == name: return x[1] return default def set_param(self, name, value): 
for x in self._params: if x[0] == name: x[1] = value return @classmethod def froms(cls, data): return cls(**cls.loads(data)[1]) @classmethod def loads(cls, data): m = cls._syntax.match(data) if not m: raise Exception("Error parsing the data") protocol = m.group("protocol") address = m.group("address") port = m.group("port") if port is not None: try: port = int(port) except: # error parsing port, set default value self.port = 5060 param_data = m.group("params") if not param_data: raise Exception("Error no parameter given") params = [] # prevent crashes by limiting split count # ToDo: needs testing for param in re.split(b" *; *", param_data, 64): t = re.split(b" *= *", param, 1) v = b"" if len(t) > 1: v = t[1] params.append((t[0], v)) return ( m.end(), { "protocol": protocol, "address": address, "port": port, "params": params } ) if __name__ == '__main__': import doctest doctest.testmod()
gpl-2.0
Xaltotun/conan
conans/server/rest/controllers/conan_controller.py
4
9988
from conans.server.rest.controllers.controller import Controller
from bottle import request
from conans.model.ref import ConanFileReference, PackageReference
from conans.server.service.service import ConanService, SearchService
from conans.errors import NotFoundException
import json
from conans.paths import CONAN_MANIFEST
import os
import codecs


class ConanController(Controller):
    """
        Serve requests related with Conan
    """
    def attach_to(self, app):
        """Register all conan-related routes on the given bottle app.

        Each nested handler builds a ConanService/SearchService from the
        app's authorizer and managers.  The trailing ``auth_user`` parameter
        on the handlers is not part of the URL -- presumably it is injected
        by an authentication plugin on ``app``; verify against the server
        setup code.
        """
        # Base route: /<name>/<version>/<user>/<channel> under self.route.
        conan_route = '%s/:conanname/:version/:username/:channel' % self.route

        @app.route("/ping", method=["GET"])
        def ping():
            """
            Response OK. Useful to get server capabilities (version_checker bottle plugin)
            """
            return

        @app.route("%s/digest" % conan_route, method=["GET"])
        def get_conan_digest_url(conanname, version, username, channel, auth_user):
            """
            Get a dict with all files and the download url
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            # Only the manifest file is requested for the digest endpoint.
            urls = conan_service.get_conanfile_download_urls(reference, [CONAN_MANIFEST])
            if not urls:
                raise NotFoundException("No digest found")
            return urls

        @app.route("%s/packages/:package_id/digest" % conan_route, method=["GET"])
        def get_package_digest_url(conanname, version, username, channel, package_id, auth_user):
            """
            Get a dict with all files and the download url
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            package_reference = PackageReference(reference, package_id)
            urls = conan_service.get_package_download_urls(package_reference, [CONAN_MANIFEST])
            if not urls:
                raise NotFoundException("No digest found")
            # Normalize Windows-style path separators to URL-style slashes.
            urls_norm = {filename.replace("\\", "/"): url for filename, url in urls.items()}
            return urls_norm

        @app.route(conan_route, method=["GET"])
        def get_conanfile_snapshot(conanname, version, username, channel, auth_user):
            """
            Get a dictionary with all files and their each md5s
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            snapshot = conan_service.get_conanfile_snapshot(reference)
            # Normalize Windows-style path separators to URL-style slashes.
            snapshot_norm = {filename.replace("\\", "/"): the_md5 for filename, the_md5 in snapshot.items()}
            return snapshot_norm

        @app.route('%s/packages/:package_id' % conan_route, method=["GET"])
        def get_package_snapshot(conanname, version, username, channel, package_id, auth_user):
            """
            Get a dictionary with all files and their each md5s
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            package_reference = PackageReference(reference, package_id)
            snapshot = conan_service.get_package_snapshot(package_reference)
            snapshot_norm = {filename.replace("\\", "/"): the_md5 for filename, the_md5 in snapshot.items()}
            return snapshot_norm

        @app.route("%s/download_urls" % conan_route, method=["GET"])
        def get_conanfile_download_urls(conanname, version, username, channel, auth_user):
            """
            Get a dict with all files and the download url
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            urls = conan_service.get_conanfile_download_urls(reference)
            urls_norm = {filename.replace("\\", "/"): url for filename, url in urls.items()}
            return urls_norm

        @app.route('%s/packages/:package_id/download_urls' % conan_route, method=["GET"])
        def get_package_download_urls(conanname, version, username, channel, package_id, auth_user):
            """
            Get a dict with all packages files and the download url for each one
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            package_reference = PackageReference(reference, package_id)
            urls = conan_service.get_package_download_urls(package_reference)
            urls_norm = {filename.replace("\\", "/"): url for filename, url in urls.items()}
            return urls_norm

        @app.route("%s/upload_urls" % conan_route, method=["POST"])
        def get_conanfile_upload_urls(conanname, version, username, channel, auth_user):
            """
            Get a dict with all files and the upload url
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            # Request body is a JSON mapping of filename -> size in bytes.
            reader = codecs.getreader("utf-8")
            filesizes = json.load(reader(request.body))
            urls = conan_service.get_conanfile_upload_urls(reference, filesizes)
            urls_norm = {filename.replace("\\", "/"): url for filename, url in urls.items()}
            return urls_norm

        @app.route('%s/packages/:package_id/upload_urls' % conan_route, method=["POST"])
        def get_package_upload_urls(conanname, version, username, channel, package_id, auth_user):
            """
            Get a dict with all files and the upload url
            """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            package_reference = PackageReference(reference, package_id)
            reader = codecs.getreader("utf-8")
            filesizes = json.load(reader(request.body))
            urls = conan_service.get_package_upload_urls(package_reference, filesizes)
            urls_norm = {filename.replace("\\", "/"): url for filename, url in urls.items()}
            return urls_norm

        @app.route('%s/search' % self.route, method=["GET"])
        def search(auth_user):
            # Query parameters: q = search pattern, ignorecase = true/false.
            pattern = request.params.get("q", None)
            ignorecase = request.params.get("ignorecase", True)
            if isinstance(ignorecase, str):
                # Any string other than 'false' (case-insensitive) counts as True.
                ignorecase = False if 'false' == ignorecase.lower() else True
            search_service = SearchService(app.authorizer, app.search_manager, auth_user)
            references = [str(ref) for ref in search_service.search(pattern, ignorecase)]
            return {"results": references}

        @app.route('%s/search' % conan_route, method=["GET"])
        def search_packages(conanname, version, username, channel, auth_user):
            # Search packages of one recipe; q holds the settings/options query.
            query = request.params.get("q", None)
            search_service = SearchService(app.authorizer, app.search_manager, auth_user)
            conan_reference = ConanFileReference(conanname, version, username, channel)
            info = search_service.search_packages(conan_reference, query)
            return info

        @app.route(conan_route, method="DELETE")
        def remove_conanfile(conanname, version, username, channel, auth_user):
            """ Remove any existing conanfiles or its packages created """
            conan_reference = ConanFileReference(conanname, version, username, channel)
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            conan_service.remove_conanfile(conan_reference)

        @app.route('%s/packages/delete' % conan_route, method="POST")
        def remove_packages(conanname, version, username, channel, auth_user):
            """ Remove any existing conanfiles or its packages created """
            conan_reference = ConanFileReference(conanname, version, username, channel)
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            # Body: JSON object with a "package_ids" list to delete.
            reader = codecs.getreader("utf-8")
            payload = json.load(reader(request.body))
            conan_service.remove_packages(conan_reference, payload["package_ids"])

        @app.route('%s/remove_files' % conan_route, method="POST")
        def remove_conanfile_files(conanname, version, username, channel, auth_user):
            """ Remove any existing conanfiles or its packages created """
            conan_reference = ConanFileReference(conanname, version, username, channel)
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            # Body: JSON object with a "files" list; paths are normalized to
            # the server OS convention before deletion.
            reader = codecs.getreader("utf-8")
            payload = json.load(reader(request.body))
            files = [os.path.normpath(filename) for filename in payload["files"]]
            conan_service.remove_conanfile_files(conan_reference, files)

        @app.route('%s/packages/:package_id/remove_files' % conan_route, method=["POST"])
        def remove_packages_files(conanname, version, username, channel, package_id, auth_user):
            """ Remove any existing conanfiles or its packages created """
            conan_service = ConanService(app.authorizer, app.file_manager, auth_user)
            reference = ConanFileReference(conanname, version, username, channel)
            package_reference = PackageReference(reference, package_id)
            reader = codecs.getreader("utf-8")
            payload = json.load(reader(request.body))
            files = [os.path.normpath(filename) for filename in payload["files"]]
            conan_service.remove_package_files(package_reference, files)
mit
trondeau/gnuradio-old
grc/base/Param.py
6
7177
""" Copyright 2008-2011 Free Software Foundation, Inc. This file is part of GNU Radio GNU Radio Companion is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. GNU Radio Companion is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA """ from . import odict from Element import Element def _get_keys(lst): return [elem.get_key() for elem in lst] def _get_elem(lst, key): try: return lst[_get_keys(lst).index(key)] except ValueError: raise ValueError, 'Key "%s" not found in %s.'%(key, _get_keys(lst)) class Option(Element): def __init__(self, param, n): Element.__init__(self, param) self._name = n.find('name') self._key = n.find('key') self._opts = dict() opts = n.findall('opt') #test against opts when non enum if not self.get_parent().is_enum() and opts: raise Exception, 'Options for non-enum types cannot have sub-options' #extract opts for opt in opts: #separate the key:value try: key, value = opt.split(':') except: raise Exception, 'Error separating "%s" into key:value'%opt #test against repeated keys if self._opts.has_key(key): raise Exception, 'Key "%s" already exists in option'%key #store the option self._opts[key] = value def __str__(self): return 'Option %s(%s)'%(self.get_name(), self.get_key()) def get_name(self): return self._name def get_key(self): return self._key ############################################## # Access Opts ############################################## def get_opt_keys(self): return self._opts.keys() def 
get_opt(self, key): return self._opts[key] def get_opts(self): return self._opts.values() class Param(Element): def __init__(self, block, n): """ Make a new param from nested data. Args: block: the parent element n: the nested odict """ #grab the data self._name = n.find('name') self._key = n.find('key') value = n.find('value') or '' self._type = n.find('type') self._hide = n.find('hide') or '' #build the param Element.__init__(self, block) #create the Option objects from the n data self._options = list() for option in map(lambda o: Option(param=self, n=o), n.findall('option')): key = option.get_key() #test against repeated keys if key in self.get_option_keys(): raise Exception, 'Key "%s" already exists in options'%key #store the option self.get_options().append(option) #test the enum options if self.is_enum(): #test against options with identical keys if len(set(self.get_option_keys())) != len(self.get_options()): raise Exception, 'Options keys "%s" are not unique.'%self.get_option_keys() #test against inconsistent keys in options opt_keys = self.get_options()[0].get_opt_keys() for option in self.get_options(): if set(opt_keys) != set(option.get_opt_keys()): raise Exception, 'Opt keys "%s" are not identical across all options.'%opt_keys #if a value is specified, it must be in the options keys self._value = value if value or value in self.get_option_keys() else self.get_option_keys()[0] if self.get_value() not in self.get_option_keys(): raise Exception, 'The value "%s" is not in the possible values of "%s".'%(self.get_value(), self.get_option_keys()) else: self._value = value or '' def validate(self): """ Validate the param. The value must be evaluated and type must a possible type. """ Element.validate(self) if self.get_type() not in self.get_types(): self.add_error_message('Type "%s" is not a possible type.'%self.get_type()) def get_evaluated(self): raise NotImplementedError def to_code(self): """ Convert the value to code. 
@throw NotImplementedError """ raise NotImplementedError def get_types(self): """ Get a list of all possible param types. @throw NotImplementedError """ raise NotImplementedError def get_color(self): return '#FFFFFF' def __str__(self): return 'Param - %s(%s)'%(self.get_name(), self.get_key()) def is_param(self): return True def get_name(self): return self._name def get_key(self): return self._key def get_hide(self): return self.get_parent().resolve_dependencies(self._hide).strip() def get_value(self): value = self._value if self.is_enum() and value not in self.get_option_keys(): value = self.get_option_keys()[0] self.set_value(value) return value def set_value(self, value): self._value = str(value) #must be a string def get_type(self): return self.get_parent().resolve_dependencies(self._type) def is_enum(self): return self._type == 'enum' def __repr__(self): """ Get the repr (nice string format) for this param. Just return the value (special case enum). Derived classes can handle complex formatting. Returns: the string representation """ if self.is_enum(): return self.get_option(self.get_value()).get_name() return self.get_value() ############################################## # Access Options ############################################## def get_option_keys(self): return _get_keys(self.get_options()) def get_option(self, key): return _get_elem(self.get_options(), key) def get_options(self): return self._options ############################################## # Access Opts ############################################## def get_opt_keys(self): return self.get_option(self.get_value()).get_opt_keys() def get_opt(self, key): return self.get_option(self.get_value()).get_opt(key) def get_opts(self): return self.get_option(self.get_value()).get_opts() ############################################## ## Import/Export Methods ############################################## def export_data(self): """ Export this param's key/value. 
Returns: a nested data odict """ n = odict() n['key'] = self.get_key() n['value'] = self.get_value() return n
gpl-3.0
scivision/dmcutils
tests/TestHDF5writeRace.py
1
1154
#!/usr/bin/env python
"""
testing possible pytables vs h5py race on write/append
"""
from tempfile import mkstemp
from pathlib import Path

import h5py
from pandas import Series
import numpy as np


def test_h5race(outfn: Path, N: int):
    """Write a pandas (pytables) Series and an h5py dataset into the same
    HDF5 file, then read it back -- exercises a possible pytables vs h5py
    race on write/append.

    Args:
        outfn: path of the HDF5 file to (over)write.
        N: number of random file ticks to generate.
    """
    assert isinstance(N, int)

    ticks = np.random.randint(0, N, N)
    flist = [Path(f'{n:010d}spool.dat') for n in np.random.randint(0, N, N)]

    F = Series(index=ticks, data=[f.name for f in flist])
    F.sort_index(inplace=True)
    print(f'sorted {len(flist)} files vs. time ticks')
# %% writing HDF5 index
    print(f'writing {outfn}')
    F.to_hdf(outfn, 'filetick', mode='w')  # pytables writer
    with h5py.File(outfn, 'a', libver='latest') as f:
        f['path'] = str(flist[0].parent)  # h5py append to the same file
# %% test read
    with h5py.File(outfn, 'r', libver='latest') as f:
        # Dataset.value was deprecated and removed in h5py 3.0;
        # read a scalar dataset by indexing with an empty tuple instead.
        print(f['path'][()])


if __name__ == '__main__':
    from argparse import ArgumentParser
    p = ArgumentParser()
    p.add_argument('outfn', nargs='?', default=mkstemp(suffix='.h5')[1])
    # default given as an int (was the float 1e6) to match type=int
    p.add_argument('-N', help='number of elements to write', type=int,
                   default=1000000)
    p = p.parse_args()

    test_h5race(p.outfn, int(p.N))
gpl-3.0
xantin/qualitybots
src/appengine/models/site.py
26
3464
#!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Site model and method for get or insert from URL string.

Site model stores root URL for a site. i.e. scheme://netloc. Remember, it
doesn't store path or any other parameters from a URL.
e.g. http://www.google.com:80/%7investors/q4r.html -- For this URL we will
only store http://www.google.com:80.
"""



import urlparse

from google.appengine.api import users
from google.appengine.ext import db

from models import bots_user
from models import url_config


# Upper bound on how many candidate Site entities one datastore fetch returns.
MAX_FETCH_COUNT = 1000


class SiteError(Exception):
  """Raised when a Site cannot be resolved (e.g. bad url_config_key)."""
  pass


class Site(db.Model):
  """Stores site root URL."""
  # Full root URL, e.g. 'http://www.google.com:80'.
  url = db.StringProperty()
  # netloc portion of the URL, e.g. 'www.google.com:80'.
  domain = db.StringProperty()
  # This UrlConfig key reference is used to glue the results and input url.
  config = db.ReferenceProperty(url_config.UrlConfig, default=None,
                                collection_name='associated_sites')


def GetOrInsertSiteFromUrl(url_string, url_config_key):
  """Parses an input URL to get only the root and gets or inserts an entity.

  It tries to glue site and url_config together while accessing or creating
  site entity.

  Args:
    url_string: Site URL.
    url_config_key: UrlConfig Key Reference (db.Key).

  Raises:
    TypeError: If required parameter is missing.
    SiteError: If no matching url_config found for given url_config_key.

  Returns:
    Newly created/existing Site Entity.
  """
  if not url_string:
    raise TypeError(
        'Missing parameter - url_string is a required parameter.')
  if not url_config_key:
    raise TypeError(
        'Missing parameter - url_config_key is a required parameter.')
  my_url_config = db.get(url_config_key)
  if not my_url_config:
    raise SiteError('Invalid url_config_key:%s , no matching UrlConfig found.'
                    % (str(url_config_key)))
  url = url_string
  # If url does not start with http, let's prepend it.
  # NOTE(review): this also accepts any hostname that merely begins with the
  # letters 'http' (e.g. 'httpd.example.com') as already having a scheme --
  # TODO confirm that is the intended behavior.
  if not url.startswith('http'):
    url = 'http://' + url
  parsed = urlparse.urlsplit(url.strip())
  domain = parsed.netloc
  # Let's find the exact site which has same config.
  site = my_url_config.associated_sites.get()
  if not site:
    # If exact matching site not found then let's find sites with matching
    # urls.
    matching_sites = Site.all().filter('url =', url).fetch(MAX_FETCH_COUNT)
    for matching_site in matching_sites:
      # If site doesn't have config associated and supplied config is without
      # auth then we can safely associate config with this site.
      if not matching_site.config and not my_url_config.auth_enabled:
        matching_site.config = my_url_config.key()
        matching_site.domain = domain
        matching_site.put()
        return matching_site
    # If no matching site found or auth is enabled for config then let's
    # create new site entity.
    site = Site(url=url, domain=domain, config=my_url_config.key())
    site.put()
  return site
apache-2.0
adamchainz/ansible
lib/ansible/plugins/callback/junit.py
92
8951
# (c) 2016 Matt Clay <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import time import re from ansible.module_utils._text import to_bytes, to_text from ansible.plugins.callback import CallbackBase try: from junit_xml import TestSuite, TestCase HAS_JUNIT_XML = True except ImportError: HAS_JUNIT_XML = False try: from collections import OrderedDict HAS_ORDERED_DICT = True except ImportError: try: from ordereddict import OrderedDict HAS_ORDERED_DICT = True except ImportError: HAS_ORDERED_DICT = False class CallbackModule(CallbackBase): """ This callback writes playbook output to a JUnit formatted XML file. Tasks show up in the report as follows: 'ok': pass 'failed' with 'EXPECTED FAILURE' in the task name: pass 'failed' due to an exception: error 'failed' for other reasons: failure 'skipped': skipped This plugin makes use of the following environment variables: JUNIT_OUTPUT_DIR (optional): Directory to write XML files to. 
Default: ~/.ansible.log JUNIT_TASK_CLASS (optional): Configure the output to be one class per yaml file Default: False Requires: junit_xml """ CALLBACK_VERSION = 2.0 CALLBACK_TYPE = 'aggregate' CALLBACK_NAME = 'junit' CALLBACK_NEEDS_WHITELIST = True def __init__(self): super(CallbackModule, self).__init__() self._output_dir = os.getenv('JUNIT_OUTPUT_DIR', os.path.expanduser('~/.ansible.log')) self._task_class = os.getenv('JUNIT_TASK_CLASS', 'False').lower() self._playbook_path = None self._playbook_name = None self._play_name = None self._task_data = None self.disabled = False if not HAS_JUNIT_XML: self.disabled = True self._display.warning('The `junit_xml` python module is not installed. ' 'Disabling the `junit` callback plugin.') if HAS_ORDERED_DICT: self._task_data = OrderedDict() else: self.disabled = True self._display.warning('The `ordereddict` python module is not installed. ' 'Disabling the `junit` callback plugin.') if not os.path.exists(self._output_dir): os.mkdir(self._output_dir) def _start_task(self, task): """ record the start of a task for one or more hosts """ uuid = task._uuid if uuid in self._task_data: return play = self._play_name name = task.get_name().strip() path = task.get_path() if not task.no_log: args = ', '.join(('%s=%s' % a for a in task.args.items())) if args: name += ' ' + args self._task_data[uuid] = TaskData(uuid, name, path, play) def _finish_task(self, status, result): """ record the results of a task for a single host """ task_uuid = result._task._uuid if hasattr(result, '_host'): host_uuid = result._host._uuid host_name = result._host.name else: host_uuid = 'include' host_name = 'include' task_data = self._task_data[task_uuid] if status == 'failed' and 'EXPECTED FAILURE' in task_data.name: status = 'ok' task_data.add_host(HostData(host_uuid, host_name, status, result)) def _build_test_case(self, task_data, host_data): """ build a TestCase from the given TaskData and HostData """ name = '[%s] %s: %s' % (host_data.name, 
task_data.play, task_data.name) duration = host_data.finish - task_data.start if self._task_class == 'true': junit_classname = re.sub('\.yml:[0-9]+$', '', task_data.path) else: junit_classname = task_data.path if host_data.status == 'included': return TestCase(name, junit_classname, duration, host_data.result) res = host_data.result._result rc = res.get('rc', 0) dump = self._dump_results(res, indent=0) dump = self._cleanse_string(dump) if host_data.status == 'ok': return TestCase(name, junit_classname, duration, dump) test_case = TestCase(name, junit_classname, duration) if host_data.status == 'failed': if 'exception' in res: message = res['exception'].strip().split('\n')[-1] output = res['exception'] test_case.add_error_info(message, output) elif 'msg' in res: message = res['msg'] test_case.add_failure_info(message, dump) else: test_case.add_failure_info('rc=%s' % rc, dump) elif host_data.status == 'skipped': if 'skip_reason' in res: message = res['skip_reason'] else: message = 'skipped' test_case.add_skipped_info(message) return test_case def _cleanse_string(self, value): """ convert surrogate escapes to the unicode replacement character to avoid XML encoding errors """ return to_text(to_bytes(value, errors='surrogateescape'), errors='replace') def _generate_report(self): """ generate a TestSuite report from the collected TaskData and HostData """ test_cases = [] for task_uuid, task_data in self._task_data.items(): for host_uuid, host_data in task_data.host_data.items(): test_cases.append(self._build_test_case(task_data, host_data)) test_suite = TestSuite(self._playbook_name, test_cases) report = TestSuite.to_xml_string([test_suite]) output_file = os.path.join(self._output_dir, '%s-%s.xml' % (self._playbook_name, time.time())) with open(output_file, 'wb') as xml: xml.write(to_bytes(report, errors='surrogate_or_strict')) def v2_playbook_on_start(self, playbook): self._playbook_path = playbook._file_name self._playbook_name = 
os.path.splitext(os.path.basename(self._playbook_path))[0] def v2_playbook_on_play_start(self, play): self._play_name = play.get_name() def v2_runner_on_no_hosts(self, task): self._start_task(task) def v2_playbook_on_task_start(self, task, is_conditional): self._start_task(task) def v2_playbook_on_cleanup_task_start(self, task): self._start_task(task) def v2_playbook_on_handler_task_start(self, task): self._start_task(task) def v2_runner_on_failed(self, result, ignore_errors=False): if ignore_errors: self._finish_task('ok', result) else: self._finish_task('failed', result) def v2_runner_on_ok(self, result): self._finish_task('ok', result) def v2_runner_on_skipped(self, result): self._finish_task('skipped', result) def v2_playbook_on_include(self, included_file): self._finish_task('included', included_file) def v2_playbook_on_stats(self, stats): self._generate_report() class TaskData: """ Data about an individual task. """ def __init__(self, uuid, name, path, play): self.uuid = uuid self.name = name self.path = path self.play = play self.start = None self.host_data = OrderedDict() self.start = time.time() def add_host(self, host): if host.uuid in self.host_data: if host.status == 'included': # concatenate task include output from multiple items host.result = '%s\n%s' % (self.host_data[host.uuid].result, host.result) else: raise Exception('%s: %s: %s: duplicate host callback: %s' % (self.path, self.play, self.name, host.name)) self.host_data[host.uuid] = host class HostData: """ Data about an individual host. """ def __init__(self, uuid, name, status, result): self.uuid = uuid self.name = name self.status = status self.result = result self.finish = time.time()
gpl-3.0
erjohnso/ansible
lib/ansible/modules/monitoring/sensu_silence.py
60
8521
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2017, Steven Bambling <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: sensu_silence version_added: "2.4" author: Steven Bambling(@smbambling) short_description: Manage Sensu silence entries description: - Create and clear (delete) a silence entries via the Sensu API for subscriptions and checks. options: check: description: - Specifies the check which the silence entry applies to. creator: description: - Specifies the entity responsible for this entry. expire: description: - If specified, the silence entry will be automatically cleared after this number of seconds. expire_on_resolve: description: - If specified as true, the silence entry will be automatically cleared once the condition it is silencing is resolved. type: bool reason: description: - If specified, this free-form string is used to provide context or rationale for the reason this silence entry was created. state: description: - Specifies to create or clear (delete) a silence entry via the Sensu API required: true default: present choices: ['present', 'absent'] subscription: description: - Specifies the subscription which the silence entry applies to. - To create a silence entry for a client append C(client:) to client name. Example - C(client:server1.example.dev) required: true default: [] url: description: - Specifies the URL of the Sensu monitoring host server. 
required: false default: http://127.0.01:4567 ''' EXAMPLES = ''' # Silence ALL checks for a given client - name: Silence server1.example.dev sensu_silence: subscription: client:server1.example.dev creator: "{{ ansible_user_id }}" reason: Performing maintenance # Silence specific check for a client - name: Silence CPU_Usage check for server1.example.dev sensu_silence: subscription: client:server1.example.dev check: CPU_Usage creator: "{{ ansible_user_id }}" reason: Investigation alert issue # Silence multiple clients from a dict silence: server1.example.dev: reason: 'Deployment in progress' server2.example.dev: reason: 'Deployment in progress' - name: Silence several clients from a dict sensu_silence: subscription: "client:{{ item.key }}" reason: "{{ item.value.reason }}" creator: "{{ ansible_user_id }}" with_dict: "{{ silence }}" ''' RETURN = ''' ''' try: import json except ImportError: import simplejson as json from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.urls import fetch_url def query(module, url, check, subscription): headers = { 'Content-Type': 'application/json', } url = url + '/silenced' request_data = { 'check': check, 'subscription': subscription, } # Remove keys with None value for k, v in dict(request_data).items(): if v is None: del request_data[k] response, info = fetch_url( module, url, method='GET', headers=headers, data=json.dumps(request_data) ) if info['status'] == 500: module.fail_json( msg="Failed to query silence %s. 
Reason: %s" % (subscription, info) ) try: json_out = json.loads(response.read()) except: json_out = "" return False, json_out, False def clear(module, url, check, subscription): # Test if silence exists before clearing (rc, out, changed) = query(module, url, check, subscription) d = dict((i['subscription'], i['check']) for i in out) subscription_exists = subscription in d if check and subscription_exists: exists = (check == d[subscription]) else: exists = subscription_exists # If check/subscription doesn't exist # exit with changed state of False if not exists: return False, out, changed # module.check_mode is inherited from the AnsibleMOdule class if not module.check_mode: headers = { 'Content-Type': 'application/json', } url = url + '/silenced/clear' request_data = { 'check': check, 'subscription': subscription, } # Remove keys with None value for k, v in dict(request_data).items(): if v is None: del request_data[k] response, info = fetch_url( module, url, method='POST', headers=headers, data=json.dumps(request_data) ) if info['status'] != 204: module.fail_json( msg="Failed to silence %s. 
Reason: %s" % (subscription, info) ) try: json_out = json.loads(response.read()) except: json_out = "" return False, json_out, True return False, out, True def create( module, url, check, creator, expire, expire_on_resolve, reason, subscription): (rc, out, changed) = query(module, url, check, subscription) for i in out: if (i['subscription'] == subscription): if ( (check is None or check == i['check']) and ( creator == '' or creator == i['creator'])and ( reason == '' or reason == i['reason']) and ( expire is None or expire == i['expire']) and ( expire_on_resolve is None or expire_on_resolve == i['expire_on_resolve'] ) ): return False, out, False # module.check_mode is inherited from the AnsibleMOdule class if not module.check_mode: headers = { 'Content-Type': 'application/json', } url = url + '/silenced' request_data = { 'check': check, 'creator': creator, 'expire': expire, 'expire_on_resolve': expire_on_resolve, 'reason': reason, 'subscription': subscription, } # Remove keys with None value for k, v in dict(request_data).items(): if v is None: del request_data[k] response, info = fetch_url( module, url, method='POST', headers=headers, data=json.dumps(request_data) ) if info['status'] != 201: module.fail_json( msg="Failed to silence %s. 
Reason: %s" % (subscription, info['msg']) ) try: json_out = json.loads(response.read()) except: json_out = "" return False, json_out, True return False, out, True def main(): module = AnsibleModule( argument_spec=dict( check=dict(required=False), creator=dict(required=False), expire=dict(required=False), expire_on_resolve=dict(type='bool', required=False), reason=dict(required=False), state=dict(default='present', choices=['present', 'absent']), subscription=dict(required=True), url=dict(required=False, default='http://127.0.01:4567'), ), supports_check_mode=True ) url = module.params['url'] check = module.params['check'] creator = module.params['creator'] expire = module.params['expire'] expire_on_resolve = module.params['expire_on_resolve'] reason = module.params['reason'] subscription = module.params['subscription'] state = module.params['state'] if state == 'present': (rc, out, changed) = create( module, url, check, creator, expire, expire_on_resolve, reason, subscription ) if state == 'absent': (rc, out, changed) = clear(module, url, check, subscription) if rc != 0: module.fail_json(msg="failed", result=out) module.exit_json(msg="success", result=out, changed=changed) if __name__ == '__main__': main()
gpl-3.0
bastik/youtube-dl
youtube_dl/extractor/jove.py
177
3068
from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( ExtractorError, unified_strdate ) class JoveIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?jove\.com/video/(?P<id>[0-9]+)' _CHAPTERS_URL = 'http://www.jove.com/video-chapters?videoid={video_id:}' _TESTS = [ { 'url': 'http://www.jove.com/video/2744/electrode-positioning-montage-transcranial-direct-current', 'md5': '93723888d82dbd6ba8b3d7d0cd65dd2b', 'info_dict': { 'id': '2744', 'ext': 'mp4', 'title': 'Electrode Positioning and Montage in Transcranial Direct Current Stimulation', 'description': 'md5:015dd4509649c0908bc27f049e0262c6', 'thumbnail': 're:^https?://.*\.png$', 'upload_date': '20110523', } }, { 'url': 'http://www.jove.com/video/51796/culturing-caenorhabditis-elegans-axenic-liquid-media-creation', 'md5': '914aeb356f416811d911996434811beb', 'info_dict': { 'id': '51796', 'ext': 'mp4', 'title': 'Culturing Caenorhabditis elegans in Axenic Liquid Media and Creation of Transgenic Worms by Microparticle Bombardment', 'description': 'md5:35ff029261900583970c4023b70f1dc9', 'thumbnail': 're:^https?://.*\.png$', 'upload_date': '20140802', } }, ] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') webpage = self._download_webpage(url, video_id) chapters_id = self._html_search_regex( r'/video-chapters\?videoid=([0-9]+)', webpage, 'chapters id') chapters_xml = self._download_xml( self._CHAPTERS_URL.format(video_id=chapters_id), video_id, note='Downloading chapters XML', errnote='Failed to download chapters XML') video_url = chapters_xml.attrib.get('video') if not video_url: raise ExtractorError('Failed to get the video URL') title = self._html_search_meta('citation_title', webpage, 'title') thumbnail = self._og_search_thumbnail(webpage) description = self._html_search_regex( r'<div id="section_body_summary"><p class="jove_content">(.+?)</p>', webpage, 'description', fatal=False) publish_date = 
unified_strdate(self._html_search_meta( 'citation_publication_date', webpage, 'publish date', fatal=False)) comment_count = self._html_search_regex( r'<meta name="num_comments" content="(\d+) Comments?"', webpage, 'comment count', fatal=False) return { 'id': video_id, 'title': title, 'url': video_url, 'thumbnail': thumbnail, 'description': description, 'upload_date': publish_date, 'comment_count': comment_count, }
unlicense
shadowbq/dot.atom
home/.atom/packages/wakatime/lib/wakatime-master/wakatime/packages/pygments_py2/pygments/lexers/robotframework.py
77
18633
# -*- coding: utf-8 -*-
"""
    pygments.lexers.robotframework
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Lexer for Robot Framework.

    :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

#  Copyright 2012 Nokia Siemens Networks Oyj
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

import re

from pygments.lexer import Lexer
from pygments.token import Token
from pygments.util import text_type

__all__ = ['RobotFrameworkLexer']


# Aliases mapping Robot Framework concepts onto generic Pygments tokens.
HEADING = Token.Generic.Heading
SETTING = Token.Keyword.Namespace
IMPORT = Token.Name.Namespace
TC_KW_NAME = Token.Generic.Subheading
KEYWORD = Token.Name.Function
ARGUMENT = Token.String
VARIABLE = Token.Name.Variable
COMMENT = Token.Comment
SEPARATOR = Token.Punctuation
SYNTAX = Token.Punctuation
GHERKIN = Token.Generic.Emph
ERROR = Token.Error


def normalize(string, remove=''):
    """Lowercase *string* and strip spaces plus the chars in *remove*."""
    string = string.lower()
    for char in remove + ' ':
        if char in string:
            string = string.replace(char, '')
    return string


class RobotFrameworkLexer(Lexer):
    """
    For `Robot Framework <http://robotframework.org>`_ test data.

    Supports both space and pipe separated plain text formats.

    .. versionadded:: 1.6
    """
    name = 'RobotFramework'
    aliases = ['robotframework']
    filenames = ['*.txt', '*.robot']
    mimetypes = ['text/x-robotframework']

    def __init__(self, **options):
        # Robot Framework data is tab-size-2, UTF-8 by convention.
        options['tabsize'] = 2
        options['encoding'] = 'UTF-8'
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # Tokenize row-by-row, then re-tokenize each value for ${variables}.
        row_tokenizer = RowTokenizer()
        var_tokenizer = VariableTokenizer()
        index = 0
        for row in text.splitlines():
            for value, token in row_tokenizer.tokenize(row):
                for value, token in var_tokenizer.tokenize(value, token):
                    if value:
                        yield index, token, text_type(value)
                        index += len(value)


class VariableTokenizer(object):
    """Splits ${...}/@{...}/%{...} variable markup out of a value."""

    def tokenize(self, string, token):
        # NOTE(review): VariableSplitter is defined later in this module
        # (outside this chunk).
        var = VariableSplitter(string, identifiers='$@%')
        if var.start < 0 or token in (COMMENT, ERROR):
            yield string, token
            return
        for value, token in self._tokenize(var, string, token):
            if value:
                yield value, token

    def _tokenize(self, var, string, orig_token):
        # Emit: text-before, '${', base (recursively), '}', optional [index],
        # then recurse on the remainder of the string.
        before = string[:var.start]
        yield before, orig_token
        yield var.identifier + '{', SYNTAX
        for value, token in self.tokenize(var.base, VARIABLE):
            yield value, token
        yield '}', SYNTAX
        if var.index:
            yield '[', SYNTAX
            for value, token in self.tokenize(var.index, VARIABLE):
                yield value, token
            yield ']', SYNTAX
        for value, token in self.tokenize(string[var.end:], orig_token):
            yield value, token


class RowTokenizer(object):
    """Tokenizes one row, dispatching to the table the row belongs to."""

    def __init__(self):
        self._table = UnknownTable()
        self._splitter = RowSplitter()
        testcases = TestCaseTable()
        settings = SettingTable(testcases.set_default_template)
        variables = VariableTable()
        keywords = KeywordTable()
        # Maps normalized '*** Header ***' names to table handlers.
        self._tables = {'settings': settings, 'setting': settings,
                        'metadata': settings,
                        'variables': variables, 'variable': variables,
                        'testcases': testcases, 'testcase': testcases,
                        'keywords': keywords, 'keyword': keywords,
                        'userkeywords': keywords, 'userkeyword': keywords}

    def tokenize(self, row):
        commented = False
        heading = False
        for index, value in enumerate(self._splitter.split(row)):
            # First value, and every second after that, is a separator.
            index, separator = divmod(index-1, 2)
            if value.startswith('#'):
                commented = True
            elif index == 0 and value.startswith('*'):
                self._table = self._start_table(value)
                heading = True
            for value, token in self._tokenize(value, index, commented,
                                               separator, heading):
                yield value, token
        self._table.end_row()

    def _start_table(self, header):
        name = normalize(header, remove='*')
        return self._tables.get(name, UnknownTable())

    def _tokenize(self, value, index, commented, separator, heading):
        if commented:
            yield value, COMMENT
        elif separator:
            yield value, SEPARATOR
        elif heading:
            yield value, HEADING
        else:
            for value, token in self._table.tokenize(value, index):
                yield value, token


class RowSplitter(object):
    """Splits a row into cells, handling both space and pipe separation."""
    _space_splitter = re.compile('( {2,})')
    _pipe_splitter = re.compile('((?:^| +)\|(?: +|$))')

    def split(self, row):
        splitter = (row.startswith('| ') and self._split_from_pipes
                    or self._split_from_spaces)
        for value in splitter(row):
            yield value
        yield '\n'

    def _split_from_spaces(self, row):
        yield ''  # Start with (pseudo)separator similarly as with pipes
        for value in self._space_splitter.split(row):
            yield value

    def _split_from_pipes(self, row):
        _, separator, rest = self._pipe_splitter.split(row, 1)
        yield separator
        while self._pipe_splitter.search(rest):
            cell, separator, rest = self._pipe_splitter.split(rest, 1)
            yield cell
            yield separator
        yield rest


class Tokenizer(object):
    """Base tokenizer: assigns tokens by positional index within a row."""
    _tokens = None

    def __init__(self):
        self._index = 0

    def tokenize(self, value):
        values_and_tokens = self._tokenize(value, self._index)
        self._index += 1
        if isinstance(values_and_tokens, type(Token)):
            # A single token type means the whole value gets that token.
            values_and_tokens = [(value, values_and_tokens)]
        return values_and_tokens

    def _tokenize(self, value, index):
        # Indices past the configured tokens reuse the last token type.
        index = min(index, len(self._tokens) - 1)
        return self._tokens[index]

    def _is_assign(self, value):
        if value.endswith('='):
            value = value[:-1].strip()
        var = VariableSplitter(value, identifiers='$@')
        return var.start == 0 and var.end == len(value)


class Comment(Tokenizer):
    _tokens = (COMMENT,)


class Setting(Tokenizer):
    """Tokenizer for a row in the *** Settings *** table."""
    _tokens = (SETTING, ARGUMENT)
    _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
                         'suitepostcondition', 'testsetup', 'testprecondition',
                         'testteardown', 'testpostcondition', 'testtemplate')
    _import_settings = ('library', 'resource', 'variables')
    _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
                       'testtimeout')
    _custom_tokenizer = None

    def __init__(self, template_setter=None):
        Tokenizer.__init__(self)
        self._template_setter = template_setter

    def _tokenize(self, value, index):
        if index == 1 and self._template_setter:
            self._template_setter(value)
        if index == 0:
            normalized = normalize(value)
            if normalized in self._keyword_settings:
                self._custom_tokenizer = KeywordCall(support_assign=False)
            elif normalized in self._import_settings:
                self._custom_tokenizer = ImportSetting()
            elif normalized not in self._other_settings:
                return ERROR
        elif self._custom_tokenizer:
            return self._custom_tokenizer.tokenize(value)
        return Tokenizer._tokenize(self, value, index)


class ImportSetting(Tokenizer):
    _tokens = (IMPORT, ARGUMENT)


class TestCaseSetting(Setting):
    """Setting written inside a test case, e.g. [Setup] / [Template]."""
    _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
                         'template')
    _import_settings = ()
    _other_settings = ('documentation', 'tags', 'timeout')

    def _tokenize(self, value, index):
        if index == 0:
            # Brackets around the setting name are plain syntax tokens.
            type = Setting._tokenize(self, value[1:-1], index)
            return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
        return Setting._tokenize(self, value, index)


class KeywordSetting(TestCaseSetting):
    _keyword_settings = ('teardown',)
    _other_settings = ('documentation', 'arguments', 'return', 'timeout')


class Variable(Tokenizer):
    _tokens = (SYNTAX, ARGUMENT)

    def _tokenize(self, value, index):
        if index == 0 and not self._is_assign(value):
            return ERROR
        return Tokenizer._tokenize(self, value, index)


class KeywordCall(Tokenizer):
    """Tokenizer for a keyword call, optionally preceded by assignments."""
    _tokens = (KEYWORD, ARGUMENT)

    def __init__(self, support_assign=True):
        Tokenizer.__init__(self)
        self._keyword_found = not support_assign
        self._assigns = 0

    def _tokenize(self, value, index):
        if not self._keyword_found and self._is_assign(value):
            self._assigns += 1
            return SYNTAX  # VariableTokenizer tokenizes this later.
        if self._keyword_found:
            return Tokenizer._tokenize(self, value, index - self._assigns)
        self._keyword_found = True
        return GherkinTokenizer().tokenize(value, KEYWORD)


class GherkinTokenizer(object):
    """Highlights a leading Given/When/Then/And prefix separately."""
    _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)

    def tokenize(self, value, token):
        match = self._gherkin_prefix.match(value)
        if not match:
            return [(value, token)]
        end = match.end()
        return [(value[:end], GHERKIN), (value[end:], token)]


class TemplatedKeywordCall(Tokenizer):
    _tokens = (ARGUMENT,)


class ForLoop(Tokenizer):
    """Tokenizer for ':FOR' loop rows; values after IN/IN RANGE are args."""

    def __init__(self):
        Tokenizer.__init__(self)
        self._in_arguments = False

    def _tokenize(self, value, index):
        token = self._in_arguments and ARGUMENT or SYNTAX
        if value.upper() in ('IN', 'IN RANGE'):
            self._in_arguments = True
        return token


class _Table(object):
    """Base class for one table section; tracks '...' row continuations."""
    _tokenizer_class = None

    def __init__(self, prev_tokenizer=None):
        self._tokenizer = self._tokenizer_class()
        self._prev_tokenizer = prev_tokenizer
        self._prev_values_on_row = []

    def tokenize(self, value, index):
        if self._continues(value, index):
            # '...' continues the previous row, so reuse its tokenizer.
            self._tokenizer = self._prev_tokenizer
            yield value, SYNTAX
        else:
            for value_and_token in self._tokenize(value, index):
                yield value_and_token
        self._prev_values_on_row.append(value)

    def _continues(self, value, index):
        return value == '...' and all(self._is_empty(t)
                                      for t in self._prev_values_on_row)

    def _is_empty(self, value):
        return value in ('', '\\')

    def _tokenize(self, value, index):
        return self._tokenizer.tokenize(value)

    def end_row(self):
        # Re-init, remembering this row's tokenizer for '...' continuation.
        self.__init__(prev_tokenizer=self._tokenizer)


class UnknownTable(_Table):
    _tokenizer_class = Comment

    def _continues(self, value, index):
        return False


class VariableTable(_Table):
    _tokenizer_class = Variable


class SettingTable(_Table):
    _tokenizer_class = Setting

    def __init__(self, template_setter, prev_tokenizer=None):
        _Table.__init__(self, prev_tokenizer)
        self._template_setter = template_setter

    def _tokenize(self, value, index):
        if index == 0 and normalize(value) == 'testtemplate':
            self._tokenizer = Setting(self._template_setter)
        return _Table._tokenize(self, value, index)

    def end_row(self):
        self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)


class TestCaseTable(_Table):
    """Table for test cases; switches tokenizer per-row based on content."""
    _setting_class = TestCaseSetting
    _test_template = None
    _default_template = None

    @property
    def _tokenizer_class(self):
        # Templated tests take plain arguments instead of keyword calls.
        if self._test_template or (self._default_template and
                                   self._test_template is not False):
            return TemplatedKeywordCall
        return KeywordCall

    def _continues(self, value, index):
        return index > 0 and _Table._continues(self, value, index)

    def _tokenize(self, value, index):
        if index == 0:
            if value:
                self._test_template = None
            return GherkinTokenizer().tokenize(value, TC_KW_NAME)
        if index == 1 and self._is_setting(value):
            if self._is_template(value):
                self._test_template = False
                self._tokenizer = self._setting_class(self.set_test_template)
            else:
                self._tokenizer = self._setting_class()
        if index == 1 and self._is_for_loop(value):
            self._tokenizer = ForLoop()
        if index == 1 and self._is_empty(value):
            return [(value, SYNTAX)]
        return _Table._tokenize(self, value, index)

    def _is_setting(self, value):
        return value.startswith('[') and value.endswith(']')

    def _is_template(self, value):
        return normalize(value) == '[template]'

    def _is_for_loop(self, value):
        return value.startswith(':')
and normalize(value, remove=':') == 'for' def set_test_template(self, template): self._test_template = self._is_template_set(template) def set_default_template(self, template): self._default_template = self._is_template_set(template) def _is_template_set(self, template): return normalize(template) not in ('', '\\', 'none', '${empty}') class KeywordTable(TestCaseTable): _tokenizer_class = KeywordCall _setting_class = KeywordSetting def _is_template(self, value): return False # Following code copied directly from Robot Framework 2.7.5. class VariableSplitter: def __init__(self, string, identifiers): self.identifier = None self.base = None self.index = None self.start = -1 self.end = -1 self._identifiers = identifiers self._may_have_internal_variables = False try: self._split(string) except ValueError: pass else: self._finalize() def get_replaced_base(self, variables): if self._may_have_internal_variables: return variables.replace_string(self.base) return self.base def _finalize(self): self.identifier = self._variable_chars[0] self.base = ''.join(self._variable_chars[2:-1]) self.end = self.start + len(self._variable_chars) if self._has_list_variable_index(): self.index = ''.join(self._list_variable_index_chars[1:-1]) self.end += len(self._list_variable_index_chars) def _has_list_variable_index(self): return self._list_variable_index_chars\ and self._list_variable_index_chars[-1] == ']' def _split(self, string): start_index, max_index = self._find_variable(string) self.start = start_index self._open_curly = 1 self._state = self._variable_state self._variable_chars = [string[start_index], '{'] self._list_variable_index_chars = [] self._string = string start_index += 2 for index, char in enumerate(string[start_index:]): index += start_index # Giving start to enumerate only in Py 2.6+ try: self._state(char, index) except StopIteration: return if index == max_index and not self._scanning_list_variable_index(): return def _scanning_list_variable_index(self): return 
self._state in [self._waiting_list_variable_index_state, self._list_variable_index_state] def _find_variable(self, string): max_end_index = string.rfind('}') if max_end_index == -1: raise ValueError('No variable end found') if self._is_escaped(string, max_end_index): return self._find_variable(string[:max_end_index]) start_index = self._find_start_index(string, 1, max_end_index) if start_index == -1: raise ValueError('No variable start found') return start_index, max_end_index def _find_start_index(self, string, start, end): index = string.find('{', start, end) - 1 if index < 0: return -1 if self._start_index_is_ok(string, index): return index return self._find_start_index(string, index+2, end) def _start_index_is_ok(self, string, index): return string[index] in self._identifiers\ and not self._is_escaped(string, index) def _is_escaped(self, string, index): escaped = False while index > 0 and string[index-1] == '\\': index -= 1 escaped = not escaped return escaped def _variable_state(self, char, index): self._variable_chars.append(char) if char == '}' and not self._is_escaped(self._string, index): self._open_curly -= 1 if self._open_curly == 0: if not self._is_list_variable(): raise StopIteration self._state = self._waiting_list_variable_index_state elif char in self._identifiers: self._state = self._internal_variable_start_state def _is_list_variable(self): return self._variable_chars[0] == '@' def _internal_variable_start_state(self, char, index): self._state = self._variable_state if char == '{': self._variable_chars.append(char) self._open_curly += 1 self._may_have_internal_variables = True else: self._variable_state(char, index) def _waiting_list_variable_index_state(self, char, index): if char != '[': raise StopIteration self._list_variable_index_chars.append(char) self._state = self._list_variable_index_state def _list_variable_index_state(self, char, index): self._list_variable_index_chars.append(char) if char == ']': raise StopIteration
mit
shupelneker/gae_new_structure
bp_content/themes/default/handlers/_lib/twitter.py
22
5437
from boilerplate.lib.oauth2 import Consumer as OAuthConsumer, Token, Request as OAuthRequest, \ SignatureMethod_HMAC_SHA1 import urllib2 import json import webapp2 from urlparse import parse_qs # Twitter configuration TWITTER_SERVER = 'api.twitter.com' TWITTER_REQUEST_TOKEN_URL = 'https://%s/oauth/request_token' % TWITTER_SERVER TWITTER_ACCESS_TOKEN_URL = 'https://%s/oauth/access_token' % TWITTER_SERVER # Note: oauth/authorize forces the user to authorize every time. # oauth/authenticate uses their previous selection, barring revocation. TWITTER_AUTHORIZATION_URL = 'http://%s/oauth/authenticate' % TWITTER_SERVER TWITTER_CHECK_AUTH = 'https://%s/1.1/account/verify_credentials.json' % TWITTER_SERVER class TwitterAuth(object): """Twitter OAuth authentication mechanism""" AUTHORIZATION_URL = TWITTER_AUTHORIZATION_URL REQUEST_TOKEN_URL = TWITTER_REQUEST_TOKEN_URL ACCESS_TOKEN_URL = TWITTER_ACCESS_TOKEN_URL SERVER_URL = TWITTER_SERVER AUTH_BACKEND_NAME = 'twitter' SETTINGS_KEY_NAME = 'TWITTER_CONSUMER_KEY' SETTINGS_SECRET_NAME = 'TWITTER_CONSUMER_SECRET' def __init__(self, request, redirect_uri=None): """Init method""" self.request = request self.redirect_uri = redirect_uri def auth_url(self): """Return redirect url""" token = self.unauthorized_token() name = self.AUTH_BACKEND_NAME + 'unauthorized_token_name' self.request.session[name] = token.to_string() return str(self.oauth_request(token, self.AUTHORIZATION_URL).to_url()) def auth_complete(self, oauth_token, oauth_verifier): """Return user, might be logged in""" name = self.AUTH_BACKEND_NAME + 'unauthorized_token_name' unauthed_token = self.request.session[name] del self.request.session[name] if not unauthed_token: raise ValueError('Missing unauthorized token') token = Token.from_string(unauthed_token) if token.key != oauth_token: raise ValueError('Incorrect tokens') access_token, user_data = self.access_token(token, oauth_verifier) return user_data # Uncomment this line if your application needs more user data 
#return self.user_data(access_token) def save_association_data(self, user_data): name = self.AUTH_BACKEND_NAME + 'association_data' self.request.session[name] = json.dumps(user_data) def get_association_data(self): name = self.AUTH_BACKEND_NAME + 'association_data' if name in self.request.session: association_data = json.loads(self.request.session[name]) del self.request.session[name] else: association_data = None return association_data def unauthorized_token(self): """Return request for unauthorized token (first stage)""" request = self.oauth_request(token=None, url=self.REQUEST_TOKEN_URL) response = self.fetch_response(request) return Token.from_string(response) def oauth_request(self, token, url, oauth_verifier=None, extra_params=None): """Generate OAuth request, setups callback url""" params = {} if self.redirect_uri: params['oauth_callback'] = self.redirect_uri if extra_params: params.update(extra_params) if oauth_verifier: params['oauth_verifier'] = oauth_verifier request = OAuthRequest.from_consumer_and_token(self.consumer, token=token, http_url=url, parameters=params) request.sign_request(SignatureMethod_HMAC_SHA1(), self.consumer, token) return request def fetch_response(self, request): """Executes request and fetchs service response""" response = urllib2.urlopen(request.to_url()) return '\n'.join(response.readlines()) def access_token(self, token, oauth_verifier): """Return request for access token value""" request = self.oauth_request(token, self.ACCESS_TOKEN_URL, oauth_verifier) response = self.fetch_response(request) params = parse_qs(response, keep_blank_values=False) user_data = dict() for key in 'user_id', 'screen_name': try: user_data[key] = params[key][0] except Exception: raise ValueError("'%s' not found in OAuth response." 
% key) return Token.from_string(response), user_data def user_data(self, access_token): """Return user data provided""" request = self.oauth_request(access_token, TWITTER_CHECK_AUTH) data = self.fetch_response(request) try: return json.loads(data) except ValueError: return None @property def consumer(self): """Setups consumer""" return OAuthConsumer(*self.get_key_and_secret()) def get_key_and_secret(self): """Return tuple with Consumer Key and Consumer Secret for current service provider. Must return (key, secret), order *must* be respected. """ app = webapp2.get_app() return app.config.get('twitter_consumer_key'), app.config.get('twitter_consumer_secret')
lgpl-3.0
ArthurGarnier/SickRage
lib/github/tests/Issue.py
7
9092
# -*- coding: utf-8 -*- # ########################## Copyrights and license ############################ # # # Copyright 2012 Vincent Jacques <[email protected]> # # Copyright 2012 Zearin <[email protected]> # # Copyright 2013 Stuart Glaser <[email protected]> # # Copyright 2013 Vincent Jacques <[email protected]> # # # # This file is part of PyGithub. # # http://pygithub.github.io/PyGithub/v1/index.html # # # # PyGithub is free software: you can redistribute it and/or modify it under # # the terms of the GNU Lesser General Public License as published by the Free # # Software Foundation, either version 3 of the License, or (at your option) # # any later version. # # # # PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY # # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # # details. # # # # You should have received a copy of the GNU Lesser General Public License # # along with PyGithub. If not, see <http://www.gnu.org/licenses/>. 
# # # # ############################################################################## import Framework import datetime class Issue(Framework.TestCase): def setUp(self): Framework.TestCase.setUp(self) self.repo = self.g.get_user().get_repo("PyGithub") self.issue = self.repo.get_issue(28) def testAttributes(self): self.assertEqual(self.issue.assignee.login, "jacquev6") self.assertListKeyEqual(self.issue.assignees, lambda a: a.login, ["jacquev6", "stuglaser"]) self.assertEqual(self.issue.body, "Body edited by PyGithub") self.assertEqual(self.issue.closed_at, datetime.datetime(2012, 5, 26, 14, 59, 33)) self.assertEqual(self.issue.closed_by.login, "jacquev6") self.assertEqual(self.issue.comments, 0) self.assertEqual(self.issue.created_at, datetime.datetime(2012, 5, 19, 10, 38, 23)) self.assertEqual(self.issue.html_url, "https://github.com/jacquev6/PyGithub/issues/28") self.assertEqual(self.issue.id, 4653757) self.assertListKeyEqual(self.issue.labels, lambda l: l.name, ["Bug", "Project management", "Question"]) self.assertEqual(self.issue.milestone.title, "Version 0.4") self.assertEqual(self.issue.number, 28) self.assertEqual(self.issue.pull_request.diff_url, None) self.assertEqual(self.issue.pull_request.patch_url, None) self.assertEqual(self.issue.pull_request.html_url, None) self.assertEqual(self.issue.state, "closed") self.assertEqual(self.issue.title, "Issue created by PyGithub") self.assertEqual(self.issue.updated_at, datetime.datetime(2012, 5, 26, 14, 59, 33)) self.assertEqual(self.issue.url, "https://api.github.com/repos/jacquev6/PyGithub/issues/28") self.assertEqual(self.issue.user.login, "jacquev6") self.assertEqual(self.issue.repository.name, "PyGithub") # test __repr__() based on this attributes self.assertEqual(self.issue.__repr__(), 'Issue(title="Issue created by PyGithub", number=28)') def testEditWithoutParameters(self): self.issue.edit() def testEditWithAllParameters(self): user = self.g.get_user("jacquev6") milestone = self.repo.get_milestone(2) 
self.issue.edit("Title edited by PyGithub", "Body edited by PyGithub", user, "open", milestone, ["Bug"], ["jacquev6", "stuglaser"]) self.assertEqual(self.issue.assignee.login, "jacquev6") self.assertListKeyEqual(self.issue.assignees, lambda a: a.login, ["jacquev6", "stuglaser"]) self.assertEqual(self.issue.body, "Body edited by PyGithub") self.assertEqual(self.issue.state, "open") self.assertEqual(self.issue.title, "Title edited by PyGithub") self.assertListKeyEqual(self.issue.labels, lambda l: l.name, ["Bug"]) def testEditResetMilestone(self): self.assertEqual(self.issue.milestone.title, "Version 0.4") self.issue.edit(milestone=None) self.assertEqual(self.issue.milestone, None) def testEditResetAssignee(self): self.assertEqual(self.issue.assignee.login, "jacquev6") self.issue.edit(assignee=None) self.assertEqual(self.issue.assignee, None) def testCreateComment(self): comment = self.issue.create_comment("Comment created by PyGithub") self.assertEqual(comment.id, 5808311) def testGetComments(self): self.assertListKeyEqual(self.issue.get_comments(), lambda c: c.user.login, ["jacquev6", "roskakori"]) def testGetCommentsSince(self): self.assertListKeyEqual(self.issue.get_comments(datetime.datetime(2012, 5, 26, 13, 59, 33)), lambda c: c.user.login, ["jacquev6", "roskakori"]) def testGetEvents(self): self.assertListKeyEqual(self.issue.get_events(), lambda e: e.id, [15819975, 15820048]) def testGetLabels(self): self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Project management", "Question"]) def testAddAndRemoveAssignees(self): user1 = "jayfk" user2 = self.g.get_user("jzelinskie") self.assertListKeyEqual(self.issue.assignees, lambda a: a.login, ["jacquev6", "stuglaser"]) self.issue.add_to_assignees(user1, user2) self.assertListKeyEqual(self.issue.assignees, lambda a: a.login, ["jacquev6", "stuglaser", "jayfk", "jzelinskie"]) self.issue.remove_from_assignees(user1, user2) self.assertListKeyEqual(self.issue.assignees, lambda a: a.login, 
["jacquev6", "stuglaser"]) def testAddAndRemoveLabels(self): bug = self.repo.get_label("Bug") question = self.repo.get_label("Question") self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Project management", "Question"]) self.issue.remove_from_labels(bug) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Project management", "Question"]) self.issue.remove_from_labels(question) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Project management"]) self.issue.add_to_labels(bug, question) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Project management", "Question"]) def testAddAndRemoveLabelsWithStringArguments(self): bug = "Bug" question = "Question" self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Project management", "Question"]) self.issue.remove_from_labels(bug) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Project management", "Question"]) self.issue.remove_from_labels(question) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Project management"]) self.issue.add_to_labels(bug, question) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Project management", "Question"]) def testDeleteAndSetLabels(self): bug = self.repo.get_label("Bug") question = self.repo.get_label("Question") self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Project management", "Question"]) self.issue.delete_labels() self.assertListKeyEqual(self.issue.get_labels(), None, []) self.issue.set_labels(bug, question) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Question"]) def testDeleteAndSetLabelsWithStringArguments(self): bug = "Bug" question = "Question" self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Project management", "Question"]) self.issue.delete_labels() self.assertListKeyEqual(self.issue.get_labels(), None, []) 
self.issue.set_labels(bug, question) self.assertListKeyEqual(self.issue.get_labels(), lambda l: l.name, ["Bug", "Question"]) def testGetReactions(self): reactions = self.issue.get_reactions() self.assertEqual(reactions[0].content, "+1") def testCreateReaction(self): reaction = self.issue.create_reaction("hooray") self.assertEqual(reaction.id, 16917472) self.assertEqual(reaction.content, "hooray")
gpl-3.0