repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated

log2timeline/dfvfs | dfvfs/resolver_helpers/tsk_partition_resolver_helper.py | apache-2.0

# -*- coding: utf-8 -*-
"""The TSK partition path specification resolver helper implementation."""
from dfvfs.file_io import tsk_partition_file_io
from dfvfs.lib import definitions
from dfvfs.resolver_helpers import manager
from dfvfs.resolver_helpers import resolver_helper
from dfvfs.vfs import tsk_partition_file_system
class TSKPartitionResolverHelper(resolver_helper.ResolverHelper):
"""SleuthKit (TSK) partition presolver helper."""
TYPE_INDICATOR = definitions.TYPE_INDICATOR_TSK_PARTITION
def NewFileObject(self, resolver_context, path_spec):
"""Creates a new file input/output (IO) object.
Args:
resolver_context (Context): resolver context.
path_spec (PathSpec): a path specification.
Returns:
FileIO: file input/output (IO) object.
"""
return tsk_partition_file_io.TSKPartitionFile(resolver_context, path_spec)
def NewFileSystem(self, resolver_context, path_spec):
"""Creates a new file system object.
Args:
resolver_context (Context): resolver context.
path_spec (PathSpec): a path specification.
Returns:
FileSystem: file system.
"""
return tsk_partition_file_system.TSKPartitionFileSystem(
resolver_context, path_spec)
manager.ResolverHelperManager.RegisterHelper(TSKPartitionResolverHelper())
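
# Illustrative usage (assumed, not from the original module): how a registered
# helper is typically consumed. This assumes the manager exposes a GetHelper
# lookup keyed by type indicator; resolver_context and path_spec are
# placeholders supplied by the caller.
#
#   helper = manager.ResolverHelperManager.GetHelper(
#       definitions.TYPE_INDICATOR_TSK_PARTITION)
#   file_object = helper.NewFileObject(resolver_context, path_spec)
#   file_system = helper.NewFileSystem(resolver_context, path_spec)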

seanegoodwin/PandaViewer | PandaViewer/utils.py | gpl-3.0

import os
import re
import sys
import hashlib
from sqlalchemy.engine import ResultProxy
from typing import List, Dict, Any, Tuple, Iterable, Optional
from PandaViewer.logger import Logger
class Utils(Logger):
ensure_trailing_sep = lambda x: x if x[-1] == os.path.sep else x + os.path.sep
@staticmethod
def convert_result(result: ResultProxy) -> List:
return list(map(dict, result))
@classmethod
def convert_from_relative_path(cls, path: str = "") -> str:
folder = os.path.dirname(__file__)
# TODO: Ugly hack please fix. Used to tell if program is running from source or not
if not os.path.exists(Utils.normalize_path(os.path.join(folder, __file__))):
folder = os.path.dirname(folder)
return cls.normalize_path(os.path.join(folder, path))
@classmethod
def convert_from_relative_lsv_path(cls, path: str = "") -> str:
portable_path = cls.convert_from_relative_path(os.path.join(".lsv", path))
if os.path.exists(portable_path):
return portable_path
else:
return cls.normalize_path(os.path.join("~/.lsv", path))
@classmethod
def path_exists_under_directory(cls, main_directory: str, sub_directory: str) -> bool:
main_directory = cls.ensure_trailing_sep(cls.normalize_path(main_directory))
sub_directory = cls.ensure_trailing_sep(cls.normalize_path(sub_directory))
return sub_directory.startswith(main_directory)
@classmethod
def get_parent_folder(cls, candidates: List[str], folder: str) -> str:
candidates = map(cls.normalize_path, candidates)
candidates = [c for c in candidates
if cls.path_exists_under_directory(c, folder)]
return max(candidates, key=len)
@staticmethod
def file_has_allowed_extension(check_file: str, allowed_extensions: List[str]) -> bool:
allowed_extensions = [ext.lower() for ext in allowed_extensions]
ext = os.path.splitext(check_file)[-1].lower()
return ext in allowed_extensions
@staticmethod
def normalize_path(path: str) -> str:
return os.path.normpath(os.path.realpath(os.path.expanduser(path)))
@staticmethod
def convert_to_qml_path(path: str) -> str:
base_string = "file://"
if os.name == "nt":
base_string += "/"
return base_string + path
@classmethod
def reduce_gallery_duplicates(cls, duplicate_map):
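        # NOTE: the next line rebinds ``cls`` to an instance so the Logger
        # mixin's instance-level ``logger`` used below is available.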
cls = cls()
for galleries in duplicate_map.values():
paths = [cls.normalize_path(gallery.location) for gallery in galleries]
assert len(paths) == len(set(paths))
method_names = ["has_ex_metadata", "has_custom_metadata", "is_archive_gallery"]
for method_name in method_names:
if any(getattr(gallery, method_name)() for gallery in galleries):
cls.logger.info("Applying method: %s" % method_name)
cls.logger.debug("Before galleries: %s" % galleries)
filtered_galleries = []
for gallery in galleries:
if not getattr(gallery, method_name)():
gallery.mark_for_deletion()
else:
filtered_galleries.append(gallery)
galleries = filtered_galleries
cls.logger.debug("After galleries: %s" % galleries)
for gallery in galleries[1:]:
gallery.mark_for_deletion()
@classmethod
def generate_hash_from_source(cls, source) -> str:
BUFF_SIZE = 65536
hash_algo = hashlib.sha1()
buff = source.read(BUFF_SIZE)
while len(buff) > 0:
hash_algo.update(buff)
buff = source.read(BUFF_SIZE)
return hash_algo.hexdigest()
@classmethod
def debug_trace(cls):
from PyQt5.QtCore import pyqtRemoveInputHook, pyqtRestoreInputHook
from pdb import set_trace
pyqtRemoveInputHook()
set_trace()
pyqtRestoreInputHook()
@classmethod
def convert_ui_tags(cls, ui_tags: str) -> List[str]:
return list(map(lambda x: x.replace(" ", "_"), cls.convert_csv_to_list(ui_tags)))
@classmethod
    def process_ex_url(cls, url: str) -> Tuple[int, str]:
split_url = url.split("/")
if split_url[-1]:
return int(split_url[-2]), split_url[-1]
else:
return int(split_url[-3]), split_url[-2]
@staticmethod
def convert_list_to_csv(input_list: List) -> str:
return ", ".join(input_list)
@staticmethod
def convert_csv_to_list(csv: str) -> List[str]:
        return list(filter(None, map(lambda x: re.sub(r"^\s+", "", x), csv.split(","))))
@staticmethod
def human_sort_paths(paths: List[str]) -> List[str]:
key = None
if os.name == "nt":
import ctypes
import functools
key = functools.cmp_to_key(ctypes.windll.shlwapi.StrCmpLogicalW)
return sorted(paths, key=key)
@staticmethod
    def separate_tag(tag: str) -> Tuple[str, Optional[str]]:
namespace_regex = re.compile("^(.*):(.*)$")
match = re.search(namespace_regex, tag)
if match:
return match.group(2), match.group(1)
return tag, None
@classmethod
def clean_title(cls, title: str, remove_enclosed: bool = True) -> str:
banned_chars = ("=", "-", ":", "|", "~", "+", "]", "[", ",", ")", "(")
if remove_enclosed:
title = cls.removed_enclosed(title)
title = title.lstrip().lower()
for char in banned_chars:
title = title.replace(char, " ")
return " ".join(title.split())
@staticmethod
def removed_enclosed(input_str: str) -> str:
"""
Removes any values between/including containers (braces, parens, etc)
:param input_str: str to operate on
:return: str with enclosed data removed
"""
pairs = (("{", "}"), ("(", ")"), ("[", "]"))
regex = r"\s*\%s[^%s]*\%s"
for pair in pairs:
input_str = re.sub(regex % (pair[0], pair[0], pair[1]), " ", input_str)
return " ".join(filter(None, input_str.split()))

hb/gnome-bulk-rename | gnome-bulk-rename/preferences.py | lgpl-2.1

# GNOME bulk rename utility
# Copyright (C) 2010-2012 Holger Berndt <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import Gtk
import constants
class Window:
def __init__(self, previews_model, sorting_model, markups_model, markup_changed_cb):
self._window = None
self._previews_model = previews_model
self._sorting_model = sorting_model
self._markups_model = markups_model
self._markup_changed_cb = markup_changed_cb
def show(self):
if self._window is None:
self._setup()
self._window.show_all()
def _setup(self):
self._window = Gtk.Window(type=Gtk.WindowType.TOPLEVEL)
self._window.set_position(Gtk.WindowPosition.MOUSE)
self._window.set_title(_("Bulk Rename Preferences"))
self._window.set_border_width(4)
self._window.set_default_size(450, 400)
vbox = Gtk.VBox.new(False, 0)
self._window.add(vbox)
notebook = Gtk.Notebook()
vbox.pack_start(notebook, True, True, 0)
notebook.append_page(self._setup_extensible_model_tab(self._previews_model), Gtk.Label(label=_("Previewers")))
notebook.append_page(self._setup_extensible_model_tab(self._sorting_model, frozen_entries=["0"]), Gtk.Label(label=_("Sorting")))
notebook.append_page(self._setup_extensible_model_tab(self._markups_model, markup=True), Gtk.Label(label=_("Markup")))
# button box
buttonbox = Gtk.HButtonBox()
buttonbox.set_layout(Gtk.ButtonBoxStyle.END)
buttonbox.set_spacing(12)
vbox.pack_start(buttonbox, False, False, 4)
close_button = Gtk.Button(stock=Gtk.STOCK_CLOSE)
close_button.connect("clicked", lambda button, window : window.hide(), self._window)
buttonbox.add(close_button)
def _setup_extensible_model_tab(self, model, frozen_entries=None, markup=False):
"""If given, frozen_entries is a list of non-modifyable entry paths."""
def toggled_callback(cell, path, model=None, frozen_entries=None):
# ignore if entry is frozen
if frozen_entries and path in frozen_entries:
return
iter = model.get_iter(path)
is_active = not cell.get_active()
if markup:
if not is_active:
return
for row in model:
row[constants.EXTENSIBLE_MODEL_COLUMN_VISIBLE] = False
else:
short_desc = model.get_value(iter, constants.EXTENSIBLE_MODEL_COLUMN_SHORT_DESCRIPTION)
if is_active:
model.set_value(iter, constants.EXTENSIBLE_MODEL_COLUMN_SHORT_DESCRIPTION_MARKUP, short_desc)
else:
model.set_value(iter, constants.EXTENSIBLE_MODEL_COLUMN_SHORT_DESCRIPTION_MARKUP, "".join(['<span color="gray">', short_desc, '</span>']))
model.set_value(iter, constants.EXTENSIBLE_MODEL_COLUMN_VISIBLE, is_active)
if markup:
self._markup_changed_cb(model.get_path(iter))
def on_selection_changed(selection, infobutton):
(model, iter) = selection.get_selected()
if iter:
previewclass = model.get_value(iter, constants.EXTENSIBLE_MODEL_COLUMN_OBJECT)
infobutton.set_sensitive(hasattr(previewclass, "description"))
else:
infobutton.set_sensitive(False)
def on_info_button_clicked(button, treeview):
(model, iter) = treeview.get_selection().get_selected()
previewclass = model.get_value(iter, constants.EXTENSIBLE_MODEL_COLUMN_OBJECT)
dlg = Gtk.MessageDialog(parent=self._window, flags=Gtk.DialogFlags.DESTROY_WITH_PARENT, message_type=Gtk.MessageType.INFO, buttons=Gtk.ButtonsType.CLOSE, message_format=model.get_value(iter, constants.EXTENSIBLE_MODEL_COLUMN_SHORT_DESCRIPTION))
dlg.format_secondary_markup(previewclass.description)
dlg.connect("response", lambda dlg, response_id : dlg.destroy())
dlg.show_all()
tab_vbox = Gtk.VBox.new(False, 0)
tab_vbox.set_border_width(12)
scrolledwin = Gtk.ScrolledWindow()
scrolledwin.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
scrolledwin.set_shadow_type(Gtk.ShadowType.ETCHED_IN)
tab_vbox.pack_start(scrolledwin, True, True, 0)
treeview = Gtk.TreeView(model=model)
treeview.set_headers_visible(False)
scrolledwin.add(treeview)
textrenderer = Gtk.CellRendererText()
togglerenderer = Gtk.CellRendererToggle()
togglerenderer.set_radio(markup)
togglerenderer.set_property("activatable", True)
togglerenderer.connect('toggled', toggled_callback, model, frozen_entries)
# column "active"
column = Gtk.TreeViewColumn(None, togglerenderer, active=constants.EXTENSIBLE_MODEL_COLUMN_VISIBLE)
treeview.append_column(column)
# column "original"
column = Gtk.TreeViewColumn(None, textrenderer, markup=constants.EXTENSIBLE_MODEL_COLUMN_SHORT_DESCRIPTION_MARKUP)
column.set_expand(True)
treeview.append_column(column)
# information button
buttonbox = Gtk.HButtonBox()
buttonbox.set_layout(Gtk.ButtonBoxStyle.END)
buttonbox.set_spacing(12)
tab_vbox.pack_start(buttonbox, False, False, 8)
button = Gtk.Button(stock=Gtk.STOCK_INFO)
button.set_sensitive(False)
button.connect("clicked", on_info_button_clicked, treeview)
buttonbox.add(button)
selection = treeview.get_selection()
selection.connect("changed", on_selection_changed, button)
return tab_vbox
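
# Illustrative usage (assumed: the three list models and the markup-changed
# callback come from the application code, not from this module):
#
#   prefs = Window(previews_model, sorting_model, markups_model,
#                  on_markup_changed)
#   prefs.show()  # lazily builds the Gtk.Window on the first call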

Phonemetra/TurboCoin | test/functional/test_framework/authproxy.py | mit

# Copyright (c) 2011 Jeff Garzik
#
# Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
#
# Copyright (c) 2007 Jan-Klaas Kollhof
#
# This file is part of jsonrpc.
#
# jsonrpc is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this software; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""HTTP proxy for opening RPC connection to turbocoind.
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
"""
import base64
import decimal
from http import HTTPStatus
import http.client
import json
import logging
import os
import socket
import time
import urllib.parse
HTTP_TIMEOUT = 30
USER_AGENT = "AuthServiceProxy/0.1"
log = logging.getLogger("TurbocoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error, http_status=None):
try:
errmsg = '%(message)s (%(code)i)' % rpc_error
except (KeyError, TypeError):
errmsg = ''
super().__init__(errmsg)
self.error = rpc_error
self.http_status = http_status
def EncodeDecimal(o):
if isinstance(o, decimal.Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy():
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urllib.parse.urlparse(service_url)
user = None if self.__url.username is None else self.__url.username.encode('utf8')
passwd = None if self.__url.password is None else self.__url.password.encode('utf8')
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
self.timeout = timeout
self._set_conn(connection)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
if os.name == 'nt':
# Windows somehow does not like to re-use connections
# TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows
self._set_conn()
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except http.client.BadStatusLine as e:
if e.line == "''": # if connection was closed, try again
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
except (BrokenPipeError, ConnectionResetError):
# Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
# ConnectionResetError happens on FreeBSD with Python 3.4
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
def get_request(self, *args, **argsn):
AuthServiceProxy.__id_count += 1
log.debug("-{}-> {} {}".format(
AuthServiceProxy.__id_count,
self._service_name,
json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii),
))
if args and argsn:
raise ValueError('Cannot handle both named and positional arguments')
return {'version': '1.1',
'method': self._service_name,
'params': args or argsn,
'id': AuthServiceProxy.__id_count}
def __call__(self, *args, **argsn):
postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'], status)
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'}, status)
elif status != HTTPStatus.OK:
raise JSONRPCException({
'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
else:
return response['result']
def batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> " + postdata)
response, status = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if status != HTTPStatus.OK:
raise JSONRPCException({
'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status)
return response
def _get_response(self):
req_start_time = time.time()
try:
http_response = self.__conn.getresponse()
except socket.timeout:
raise JSONRPCException({
'code': -344,
'message': '%r RPC took longer than %f seconds. Consider '
'using larger timeout for calls that take '
'longer to return.' % (self._service_name,
self.__conn.timeout)})
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException(
{'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)},
http_response.status)
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
elapsed = time.time() - req_start_time
if "error" in response and response["error"] is None:
log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
log.debug("<-- [%.6f] %s" % (elapsed, responsedata))
return response, http_response.status
def __truediv__(self, relative_uri):
return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn)
def _set_conn(self, connection=None):
port = 80 if self.__url.port is None else self.__url.port
if connection:
self.__conn = connection
self.timeout = connection.timeout
elif self.__url.scheme == 'https':
self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=self.timeout)
else:
self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=self.timeout)
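
# Illustrative usage (hypothetical URL and credentials; a node must be
# listening on that endpoint):
#
#   proxy = AuthServiceProxy("http://user:pass@127.0.0.1:8332")
#   info = proxy.getblockchaininfo()   # attribute access becomes an RPC call
#   responses = proxy.batch([proxy.getblockcount.get_request(),
#                            proxy.getbestblockhash.get_request()])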

googleinterns/intern2020_cocal | uncertainty/learning/conf.py | apache-2.0

import os, sys
from functools import partial
import tensorflow as tf
from learning import BaseLearner
from learning import precision
from data import plot_data
class LearnerConfPred(BaseLearner):
"""Confidence predictor learning class. The legacy code fill be REMOVED after testing."""
def __init__(self, params, model, model_pred, model_cond_iw=None, model_name_postfix='_confpred'):
super().__init__(params, model, model_name_postfix=model_name_postfix)
self.model_pred = model_pred
self.model_cond_iw = model_cond_iw
self.loss_fn_train = precision
self.loss_fn_val = precision
self.loss_fn_test = precision
def train(self, ld_tr, ld_val, ld_te=None):
"""This funciton will be REMOVED after testing."""
## load
if not self.params.find_best and self._check_final():
if self.params.load_final:
self._load_final()
else:
self._load_best()
return
## line search
T_opt, prec_opt = 1.0, 1.0
for T in tf.range(self.params.T_max, self.params.T_min, -self.params.T_step):
self.model.T = tf.Variable(T)
self.model_cond_iw.update_p_q_conf() ## update when update T
# compute precision
prec, n_conf = self.test(ld_tr, iw_fn=lambda x: self.model_cond_iw(x, training=False))
msg = 'eps = %f, T = %f, prec = %f, n_conf = %d'%(self.params.eps, self.model.T, prec, n_conf)
# check a condition
if prec >= 1.0 - self.params.eps:
T_opt, prec_opt = T, prec
## save the best
self.model.T = tf.Variable(T_opt)
self.model_cond_iw.update_p_q_conf() ## update when update T
model_fn = os.path.join(self.params.save_root, 'model_params%s_best'%(self.model_name_postfix))
self.model.save_weights(model_fn)
msg += ', saved'
print(msg)
## save the final
model_fn = os.path.join(self.params.save_root, 'model_params%s_final'%(self.model_name_postfix))
self.model.save_weights(model_fn)
## load the best
self._load_best()
def test(self, ld_te, loss_fn=None, iw_fn=None, ld_name='', verbose=False):
"""This function compute precision and coverage of the psuedo-labeling function."""
# compute precision
prec_vec = []
n_conf = 0
n = 0
for x, y in ld_te:
prec_i, n_conf_i = self.loss_fn_test(
x, y,
lambda x: self.model_pred(x, training=False)['logits'],
self.model,
model_iw=iw_fn, reduce='none')
prec_vec.append(prec_i)
n_conf += n_conf_i
n += y.shape[0]
prec = tf.math.reduce_mean(tf.cast(tf.concat(prec_vec, 0), tf.float32))
if verbose:
## print
print('[test%s] T = %f, precision = %.2f%%, size = %d/%d = %.2f%%'%(
                ld_name if ld_name == '' else ' on %s'%(ld_name),
self.model.T if hasattr(self.model, 'T') else -1, prec*100.0,
n_conf, n, float(n_conf)/float(n)*100.0))
## visualize for 2d data
x_list = []
y_list = []
show = True
for x, y in ld_te:
            if x.shape[-1] != 2 or any(y > 1):
show = False
break
conf = self.model(x)
i_conf = conf==1
x_list.append(x)
y_list.append(y+2*tf.cast(i_conf, tf.int64))
if show:
x_list = tf.concat(x_list, 0).numpy()
y_list = tf.concat(y_list, 0).numpy()
plot_data(
x_list,
y_list,
markers=['s', 's', 's', 's'],
colors=['r', 'g', 'k', 'k'],
facecolors=['r', 'g', 'r', 'g'],
alphas=[0.5, 0.5, 1.0, 1.0],
labels=[r'$-$', r'$+$', r'$-$'+' (conf)', r'$+$'+' (conf)'],
markersize=4,
linewidth=2,
classifier=lambda x: tf.nn.softmax(self.model_pred(tf.constant(x, dtype=tf.float32), training=False)['logits'], -1).numpy(),
fn=os.path.join(self.params.save_root, "conf_examples_%s"%(ld_name)),
)
return prec, n_conf, n

lm-tools/situational | situational/settings/base.py | bsd-3-clause

import sys
import os
from os import environ
from os.path import join, abspath, dirname
from django.core.exceptions import ImproperlyConfigured
# PATH vars
def here(*x):
return join(abspath(dirname(__file__)), *x)
PROJECT_ROOT = here("..")
def root(*x):
return join(abspath(PROJECT_ROOT), *x)
sys.path.insert(0, root('apps'))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django_postgrespool',
'NAME': 'situational',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
INTERNAL_IPS = ('127.0.0.1',)
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = root('uploads')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = root('static')
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
root('assets'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'static_precompiler.finders.StaticPrecompilerFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = environ.get('DJANGO_SECRET_KEY')
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'log_request_id.middleware.RequestIDMiddleware',
'basicauth.basic_auth_middleware.BasicAuthMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.debug",
'django.template.context_processors.request',
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"home_page.context_processors.govuk_frontend_settings",
"home_page.context_processors.get_current_path",
"home_page.context_processors.get_current_namespace",
"home_page.context_processors.google_analytics"
]
ROOT_URLCONF = 'situational.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'situational.wsgi.application'
TEMPLATE_DIRS = (
root('templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.humanize',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
'storages',
'static_precompiler',
)
PROJECT_APPS = (
'basicauth',
'detailed_history.apps.DetailedHistoryConfig',
'home_page.apps.HomePageConfig',
'job_discovery.apps.JobDiscoveryConfig',
'quick_history.apps.QuickHistoryConfig',
'sectors.apps.SectorsConfig',
'templated_email',
'template_to_pdf',
'travel_report.apps.TravelReportConfig',
'travel_times',
)
SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
SESSION_COOKIE_HTTPONLY = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
INSTALLED_APPS += PROJECT_APPS
# Log on standard out. Doing something with the logs is left up to the parent
# process
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'request_id': {
'()': 'log_request_id.filters.RequestIDFilter',
}
},
'formatters': {
'standard': {
'format': '[%(levelname)s] [%(request_id)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
'console_with_request_id': {
'class': 'logging.StreamHandler',
'filters': ['request_id'],
'formatter': 'standard',
},
},
'root': {
'handlers': ['console'],
'level': environ.get('ROOT_LOG_LEVEL', 'INFO'),
},
'loggers': {
'django': {
'handlers': ['console_with_request_id'],
'level': environ.get('DJANGO_LOG_LEVEL', 'INFO'),
'propagate': False,
},
},
}
ADZUNA_APP_ID = os.environ.get('ADZUNA_APP_ID')
ADZUNA_APP_KEY = os.environ.get('ADZUNA_APP_KEY')
# Bacic auth
BASICAUTH_DISABLED = os.environ.get('BASICAUTH_DISABLED', False)
if not BASICAUTH_DISABLED:
BASICAUTH_USERNAME = os.environ.get('HTTP_USERNAME')
BASICAUTH_PASSWORD = os.environ.get('HTTP_PASSWORD')
BASICAUTH_EXEMPT = [
r"/manifest.json$",
]
# Manifest.json
DEFAULT_APP_NAMESPACE = environ.get(
'DEFAULT_APP_NAMESPACE', 'home_page')
# EMAILS
DEFAULT_FROM_EMAIL = environ.get('DEFAULT_FROM_EMAIL', 'webmaster@localhost')
# Jobs API
JOBS_API_BASE_URL = environ.get('JOBS_API_BASE_URL',
'https://lmt-jobs-api.herokuapp.com')
# MAPUMENTAL
MAPUMENTAL_API_KEY = environ.get('MAPUMENTAL_API_KEY')
# GOOGLE ANALYTICS
GOOGLE_ANALYTICS_ID = environ.get('GOOGLE_ANALYTICS_ID')
BROKER_URL = environ.get('REDISTOGO_URL',
'redis://localhost:6379/0')
from travel_times import mapumental
if environ.get('ENABLE_MAPUMENTAL'):
MAPUMENTAL_CLIENT = mapumental.Client
else:
MAPUMENTAL_CLIENT = mapumental.FakeClient
# REDIS
REDIS_URL = environ.get('REDIS_URL', 'redis://')
# REPORT POPULATION
REPORT_POPULATION_TIMEOUT = int(
environ.get('REPORT_POPULATION_TIMEOUT', 300000)
)
# GOVUK Frontend toolkit settings
GOVUK_HOMEPAGE_URL = environ.get('GOVUK_HOMEPAGE_URL', '/')
GOVUK_LOGO_LINK_TITLE = environ.get(
'GOVUK_LOGO_LINK_TITLE', 'Go to the homepage')
LMI_FOR_ALL_API_URL = environ.get(
'LMI_FOR_ALL_API_URL',
'http://api.lmiforall.org.uk/api/v1/'
)
STATIC_PRECOMPILER_COMPILERS = (
('static_precompiler.compilers.SCSS',
{"executable": root('..', 'bin', 'sass')}),
)
# .local.py overrides all the common settings.
try:
from .local import *
except ImportError:
pass
# importing test settings file if necessary (TODO should be done better)
if len(sys.argv) > 1 and 'test' in sys.argv[1]:
from .testing import *
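
# Illustrative override (assumed contents; the import above looks for a
# sibling `local.py`). Any setting defined here can be overridden there, e.g.:
#
#   DEBUG = False
#   ALLOWED_HOSTS = ["example.com"]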

LukeB42/Emissary | emissary/resources/feedgroups.py | mit

# _*_ coding: utf-8 _*_
# This file provides the HTTP endpoints for operating on groups of feeds.
from emissary import app, db
from flask import request
from flask.ext import restful
from sqlalchemy import and_, desc
from emissary.resources.api_key import auth
from emissary.models import FeedGroup, Feed, Article
from emissary.controllers.cron import CronError, parse_timings
from emissary.controllers.utils import cors, gzipped, make_response
class FeedGroupCollection(restful.Resource):
@cors
@gzipped
def get(self):
"""
Paginate an array of feed groups
associated with the requesting key.
"""
key = auth()
parser = restful.reqparse.RequestParser()
parser.add_argument("page", type=int, default=1)
parser.add_argument("per_page", type=int, default=10)
parser.add_argument("content", type=bool, default=None)
args = parser.parse_args()
query = FeedGroup.query.filter(FeedGroup.key == key)\
.order_by(desc(FeedGroup.created)).paginate(args.page, args.per_page)
return make_response(request.url, query)
@cors
@gzipped
def put(self):
"""
Create a new feed group, providing the name isn't already in use.
"""
key = auth(forbid_reader_keys=True)
parser = restful.reqparse.RequestParser()
parser.add_argument("name", type=str, required=True)
parser.add_argument("active", type=bool, default=True, help="Feed is active", required=False)
args = parser.parse_args()
# Check for this name already existing in the groups on this key
if [fg for fg in key.feedgroups if fg.name == args.name]:
return {"message":"Feed group %s already exists." % args.name}, 304
fg = FeedGroup(name=args.name, active=args.active)
key.feedgroups.append(fg)
db.session.add(fg)
db.session.add(key)
db.session.commit()
return fg.jsonify(), 201
class FeedGroupResource(restful.Resource):
@cors
@gzipped
def get(self, groupname):
"""
Review a specific feed group.
"""
key = auth()
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg:
restful.abort(404)
return fg.jsonify()
@cors
@gzipped
def put(self, groupname):
"""
Create a new feed providing the name and url are unique.
Feeds must be associated with a group.
"""
key = auth(forbid_reader_keys=True)
parser = restful.reqparse.RequestParser()
parser.add_argument("name", type=str, required=True)
parser.add_argument("url", type=str, required=True)
parser.add_argument("schedule", type=str, required=True)
parser.add_argument("active", type=bool, default=True, help="Feed is active", required=False)
args = parser.parse_args()
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg:
return {"message":"Unknown Feed Group %s" % groupname}, 304
# Verify the schedule
try:
parse_timings(args.schedule)
        except CronError as err:
return {"message": err.message}, 500
# Check the URL isn't already scheduled on this key
if [feed for feed in key.feeds if feed.url == args.url]:
return {"message": "A feed on this key already exists with this url."}, 500
# Check the name is unique to this feedgroup
if [feed for feed in fg.feeds if feed.name == args.name]:
return {"message": "A feed in this group already exists with this name."}, 500
feed = Feed(name=args.name, url=args.url, schedule=args.schedule, active=args.active)
# We generally don't want to have objects in this system that don't belong to API keys.
fg.feeds.append(feed)
key.feeds.append(feed)
db.session.add(feed)
db.session.add(fg)
db.session.add(key)
db.session.commit()
feed = Feed.query.filter(and_(Feed.key == key, Feed.name == args.name)).first()
if not feed:
return {"message":"Error saving feed."}, 304
# Schedule this feed. 0 here is a response
# queue ID (we're not waiting for a reply)
app.inbox.put([0, "start", [key,feed.name]])
return feed.jsonify(), 201
@cors
@gzipped
def post(self, groupname):
"Rename a feedgroup or toggle active status"
key = auth(forbid_reader_keys=True)
parser = restful.reqparse.RequestParser()
parser.add_argument("name", type=str, help="Rename a feed group",)
parser.add_argument("active", type=bool, default=None)
args = parser.parse_args()
fg = FeedGroup.query.filter(
and_(FeedGroup.key == key, FeedGroup.name == groupname)
).first()
if not fg:
restful.abort(404)
if args.name:
if FeedGroup.query.filter(
and_(FeedGroup.key == key, FeedGroup.name == args.name)
).first():
return {"message":"A feed already exists with this name."}, 304
fg.name = args.name
        if args.active is not None:
fg.active = args.active
db.session.add(fg)
db.session.commit()
return fg.jsonify()
@cors
@gzipped
def delete(self, groupname):
key = auth(forbid_reader_keys=True)
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg:
restful.abort(404)
count=0
for feed in fg.feeds:
for article in feed.articles:
count += 1
db.session.delete(article)
db.session.delete(feed)
db.session.delete(fg)
db.session.commit()
count = "{:,}".format(count)
app.log('%s: Deleted feed group "%s". (%s articles)' % (key.name, fg.name, count))
return {}
class FeedGroupArticles(restful.Resource):
@cors
def get(self, groupname):
"""
Retrieve articles by feedgroup.
"""
key = auth()
# Summon the group or 404.
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg: restful.abort(404)
parser = restful.reqparse.RequestParser()
parser.add_argument("page", type=int, default=1)
parser.add_argument("per_page", type=int, default=10)
parser.add_argument("content", type=bool, default=None)
args = parser.parse_args()
        if args.content == True:
            query = Article.query.filter(
                and_(Article.feed.has(group=fg), Article.content != None))\
                .order_by(desc(Article.created)).paginate(args.page, args.per_page)
            return make_response(request.url, query)
if args.content == False:
query = Article.query.filter(
and_(Article.feed.has(group=fg), Article.content == None))\
.order_by(desc(Article.created)).paginate(args.page, args.per_page)
return make_response(request.url, query)
query = Article.query.filter(
Article.feed.has(group=fg))\
.order_by(desc(Article.created)).paginate(args.page, args.per_page)
return make_response(request.url, query)
class FeedGroupStart(restful.Resource):
@cors
def post(self, groupname):
"""
Start all feeds within a group.
"""
key = auth(forbid_reader_keys=True)
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg:
restful.abort(404)
for feed in fg.feeds:
app.inbox.put([0, "start", [key,feed.name]])
return {}
class FeedGroupStop(restful.Resource):
def post(self, groupname):
key = auth(forbid_reader_keys=True)
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg:
restful.abort(404)
for feed in fg.feeds:
app.inbox.put([0, "stop", [key,feed.name]])
return {}
class FeedGroupSearch(restful.Resource):
def get(self, groupname, terms):
"""
Return articles on feeds in this group with our search terms in the title.
"""
key = auth()
parser = restful.reqparse.RequestParser()
parser.add_argument("page", type=int, default=1)
parser.add_argument("per_page", type=int, default=10)
# parser.add_argument("content", type=bool, default=None)
args = parser.parse_args()
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg:
restful.abort(404)
query = Article.query.filter(
and_(Article.feed.has(group=fg), Article.title.like("%" + terms + "%")))\
.order_by(desc(Article.created)).paginate(args.page, args.per_page)
return make_response(request.url, query)
class FeedGroupCount(restful.Resource):
def get(self, groupname):
key = auth()
fg = FeedGroup.query.filter(and_(FeedGroup.key == key, FeedGroup.name == groupname)).first()
if not fg:
restful.abort(404)
return sum(len(f.articles) for f in fg.feeds)
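
# Illustrative route registration (hypothetical URL paths; the actual wiring
# to a flask-restful Api instance lives elsewhere in Emissary):
#
#   api.add_resource(FeedGroupCollection, "/v1/feedgroups")
#   api.add_resource(FeedGroupResource, "/v1/feedgroups/<groupname>")
#   api.add_resource(FeedGroupArticles, "/v1/feedgroups/<groupname>/articles")
#   api.add_resource(FeedGroupSearch, "/v1/feedgroups/<groupname>/search/<terms>")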

gautamMalu/ThingSpeak_tweet | main.py | gpl-2.0

import os
import time
import urllib2
import json
from twython import Twython
#proxy settings
os.environ['http_proxy'] = 'proxy.rolling_friction.in:8080'
os.environ['https_proxy'] = 'proxy.rolling_friction.in:8080'
# Consumer key aka API key for twitter app
APP_KEY = 'API Key for twitter app'
#Consumer Secret aka API secret obtained from above given url
APP_SECRET = 'Consumer Secret'
#Getting auth tokens
twitter = Twython(APP_KEY, APP_SECRET)
auth = twitter.get_authentication_tokens()
OAUTH_TOKEN = auth['oauth_token']
OAUTH_TOKEN_SECRET = auth['oauth_token_secret']
url=auth['auth_url']
print 'open this in browser and authorize Kaalu app '+url
oauth_verifier = raw_input("Provide PIN Number: ")
twitter = Twython(APP_KEY, APP_SECRET,OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
final_step = twitter.get_authorized_tokens(oauth_verifier)
OAUTH_TOKEN = final_step['oauth_token']
OAUTH_TOKEN_SECRET = final_step['oauth_token_secret']
twitter = Twython(APP_KEY, APP_SECRET,OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
# Getting channel number
chn = raw_input("Give Channel Number: ")
def doit():
# Data coming from given public channel in json format
# for private channel use your API key with ?key="API KEY"
url='http://api.thingspeak.com/channels/'+str(chn)+'/feed.json'
response = urllib2.urlopen(url)
html = response.read()
json_data = json.loads(html)
    # Get the size of the array so that we can select the latest value
n_f=len(json_data["feeds"])
sensor_value = json_data["feeds"][n_f-1]["field1"] # getting data from field1 only
tweet = 'the current sensor value from channel '+str(chn)+' on thingspeak is '+str(sensor_value)
print tweet
twitter.update_status(status=tweet)
# Time intervals between consecutive tweets because of twitter API limit
# things speak API time limit is 15 seconds
time_interval = 15
if __name__ == "__main__":
while True:
doit()
time.sleep(time_interval*60)
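
# Illustrative shape of the ThingSpeak feed.json payload consumed above
# (assumed field values, shown for orientation only):
#
#   {"channel": {"id": 9, "field1": "temperature", ...},
#    "feeds": [{"created_at": "...", "entry_id": 1, "field1": "23.5"}, ...]}
#
# so json_data["feeds"][n_f - 1]["field1"] picks the newest field1 sample.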

ergo/ziggurat_foundations | ziggurat_foundations/models/services/group.py | bsd-3-clause

# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from paginate_sqlalchemy import SqlalchemyOrmPage
from ziggurat_foundations.models.base import get_db_session
from ziggurat_foundations.models.services import BaseService
from ziggurat_foundations.permissions import (
ANY_PERMISSION,
ALL_PERMISSIONS,
PermissionTuple,
)
__all__ = ["GroupService"]
class GroupService(BaseService):
@classmethod
def get(cls, group_id, db_session=None):
"""
Fetch row using primary key -
will use existing object in session if already present
:param group_id:
:param db_session:
:return:
"""
db_session = get_db_session(db_session)
return db_session.query(cls.model).get(group_id)
@classmethod
def by_group_name(cls, group_name, db_session=None):
"""
fetch group by name
:param group_name:
:param db_session:
:return:
"""
db_session = get_db_session(db_session)
query = db_session.query(cls.model).filter(cls.model.group_name == group_name)
return query.first()
@classmethod
def get_user_paginator(
cls,
instance,
page=1,
item_count=None,
items_per_page=50,
user_ids=None,
GET_params=None,
):
"""
returns paginator over users belonging to the group
:param instance:
:param page:
:param item_count:
:param items_per_page:
:param user_ids:
:param GET_params:
:return:
"""
if not GET_params:
GET_params = {}
GET_params.pop("page", None)
query = instance.users_dynamic
if user_ids:
query = query.filter(cls.models_proxy.UserGroup.user_id.in_(user_ids))
return SqlalchemyOrmPage(
query,
page=page,
item_count=item_count,
items_per_page=items_per_page,
**GET_params
)
@classmethod
def resources_with_possible_perms(
cls,
instance,
perm_names=None,
resource_ids=None,
resource_types=None,
db_session=None,
):
"""
returns list of permissions and resources for this group,
resource_ids restricts the search to specific resources
:param instance:
:param perm_names:
:param resource_ids:
:param resource_types:
:param db_session:
:return:
"""
db_session = get_db_session(db_session, instance)
query = db_session.query(
cls.models_proxy.GroupResourcePermission.perm_name,
cls.models_proxy.Group,
cls.models_proxy.Resource,
)
query = query.filter(
cls.models_proxy.Resource.resource_id
== cls.models_proxy.GroupResourcePermission.resource_id
)
query = query.filter(
cls.models_proxy.Group.id
== cls.models_proxy.GroupResourcePermission.group_id
)
if resource_ids:
query = query.filter(
cls.models_proxy.GroupResourcePermission.resource_id.in_(resource_ids)
)
if resource_types:
query = query.filter(
cls.models_proxy.Resource.resource_type.in_(resource_types)
)
if perm_names not in ([ANY_PERMISSION], ANY_PERMISSION) and perm_names:
query = query.filter(
cls.models_proxy.GroupResourcePermission.perm_name.in_(perm_names)
)
query = query.filter(
cls.models_proxy.GroupResourcePermission.group_id == instance.id
)
perms = [
PermissionTuple(
None, row.perm_name, "group", instance, row.Resource, False, True
)
for row in query
]
for resource in instance.resources:
perms.append(
PermissionTuple(
None, ALL_PERMISSIONS, "group", instance, resource, True, True
)
)
return perms
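
# Illustrative usage (assumed: a configured SQLAlchemy session and an existing
# group row; not part of the original module):
#
#   group = GroupService.by_group_name("editors", db_session=session)
#   for perm in GroupService.resources_with_possible_perms(group):
#       print(perm.perm_name, perm.resource.resource_id)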

DataDog/integrations-core | datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/jmx_metrics.py | bsd-3-clause

# (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from collections import defaultdict
import click
import yaml
from datadog_checks.dev.utils import file_exists, read_file
from ...testing import process_checks_option
from ...utils import complete_valid_checks, get_default_config_spec, get_jmx_metrics_file, is_jmx_integration
from ..console import CONTEXT_SETTINGS, abort, echo_failure, echo_info, echo_success
@click.command('jmx-metrics', context_settings=CONTEXT_SETTINGS, short_help='Validate JMX metrics files')
@click.argument('check', autocompletion=complete_valid_checks, required=False)
@click.option('--verbose', '-v', is_flag=True, help='Verbose mode')
def jmx_metrics(check, verbose):
"""Validate all default JMX metrics definitions.
    If `check` is specified, only that check is validated; a `check` value of 'changed' only
    applies to changed checks, and an 'all' or empty `check` value validates all JMX metrics files.
"""
checks = process_checks_option(check, source='integrations')
integrations = sorted(check for check in checks if is_jmx_integration(check))
echo_info(f"Validating JMX metrics files for {len(integrations)} checks ...")
saved_errors = defaultdict(list)
for check_name in integrations:
validate_jmx_metrics(check_name, saved_errors, verbose)
validate_config_spec(check_name, saved_errors)
for check_name, errors in saved_errors.items():
if not errors:
continue
echo_info(f"{check_name}:")
for err in errors:
echo_failure(f" - {err}")
echo_info(f"{len(integrations)} total JMX integrations")
echo_success(f"{len(integrations) - len(saved_errors)} valid metrics files")
if saved_errors:
echo_failure(f"{len(saved_errors)} invalid metrics files")
abort()
def validate_jmx_metrics(check_name, saved_errors, verbose):
jmx_metrics_file, metrics_file_exists = get_jmx_metrics_file(check_name)
if not metrics_file_exists:
saved_errors[check_name].append(f'{jmx_metrics_file} does not exist')
return
jmx_metrics_data = yaml.safe_load(read_file(jmx_metrics_file)).get('jmx_metrics')
if jmx_metrics_data is None:
saved_errors[check_name].append(f'{jmx_metrics_file} does not have jmx_metrics definition')
return
for rule in jmx_metrics_data:
include = rule.get('include')
include_str = truncate_message(str(include), verbose)
rule_str = truncate_message(str(rule), verbose)
if not include:
saved_errors[check_name].append(f"missing include: {rule_str}")
return
domain = include.get('domain')
beans = include.get('bean')
if (not domain) and (not beans):
# Require `domain` or `bean` to be present,
# that helps JMXFetch to better scope the beans to retrieve
saved_errors[check_name].append(f"domain or bean attribute is missing for rule: {include_str}")
def validate_config_spec(check_name, saved_errors):
spec_file = get_default_config_spec(check_name)
if not file_exists(spec_file):
saved_errors[check_name].append(f"config spec does not exist: {spec_file}")
return
spec_files = yaml.safe_load(read_file(spec_file)).get('files')
init_config_jmx = False
instances_jmx = False
for spec_file in spec_files:
for base_option in spec_file.get('options', []):
base_template = base_option.get('template')
for option in base_option.get("options", []):
template = option.get('template')
if template == 'init_config/jmx' and base_template == 'init_config':
init_config_jmx = True
elif template == 'instances/jmx' and base_template == 'instances':
instances_jmx = True
if not init_config_jmx:
saved_errors[check_name].append("config spec: does not use `init_config/jmx` template")
if not instances_jmx:
saved_errors[check_name].append("config spec: does not use `instances/jmx` template")
def truncate_message(s, verbose):
if not verbose:
s = (s[:100] + '...') if len(s) > 100 else s
return s
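
# Illustrative CLI invocations (inferred from the click decorators above;
# check names are placeholders):
#
#   ddev validate jmx-metrics              # validate every JMX integration
#   ddev validate jmx-metrics kafka -v     # a single check, verbose output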

stczhc/neupy | neupy/helpers/table.py | mit

from __future__ import print_function
import time
import textwrap
from operator import attrgetter
from abc import abstractmethod
import numpy as np
from six import with_metaclass
from neupy.core.docs import SharedDocs, SharedDocsABCMeta
__all__ = ("TableBuilder", "Column", "TimeColumn", "NumberColumn",
"TableDrawingError")
class TableDrawingError(AttributeError):
""" Exception specific for ``TableBuilder`` class functionality.
"""
class Column(SharedDocs):
""" Simple column class that helps discribe structure for
``TableBuilder`` class instance.
Parameters
----------
name : str
        Column name. The value is displayed in the header. When the
        ``width`` parameter is ``None``, the length of the name
        defines the column width.
dtype : object
Column data format. Defaults to ``str``.
width : int or None
Column width. Defaults to ``None``.
"""
def __init__(self, name, dtype=str, width=None):
if width is None:
width = len(name)
self.name = name
self.dtype = dtype
self.width = width
def format_value(self, value):
""" Convert input value to specified type
Parameters
----------
value : object
Returns
-------
object
Function return converted input value to specified
data type.
"""
return self.dtype(value)
def format_time(value):
""" Convert seconds to the value format that easy
to understand.
Parameters
----------
value : float
Time interval in seconds.
Returns
-------
str
Examples
--------
>>> col = TimeColumn("Time")
>>> col.format_value(0.001)
'1 ms'
>>> col.format_value(0.5)
'0.5 sec'
>>> col.format_value(1.5)
'1.5 sec'
>>> col.format_value(15)
'00:00:15'
>>> col.format_value(15045)
'04:10:45'
"""
if value < 0.05:
return "{} ms".format(round(value * 10 ** 3))
elif value < 10:
return "{} sec".format(round(value, 1))
return time.strftime("%H:%M:%S", time.gmtime(value))
class TimeColumn(Column):
""" Columns useful for time formating from seconds to more
informative and readable format.
Parameters
----------
{Column.name}
{Column.dtype}
{Column.width}
"""
def format_value(self, value):
return format_time(value)
class NumberColumn(Column):
""" Class describe float column type.
Parameters
----------
places : int
Float number rounding precision. Defaults to ``6``.
{Column.name}
{Column.dtype}
{Column.width}
"""
def __init__(self, places=6, *args, **kwargs):
super(NumberColumn, self).__init__(*args, **kwargs)
self.places = places
def format_value(self, value):
""" Round a number to a given precision in decimal digits
Parameters
----------
value : float
Returns
-------
float
Rounded input value.
"""
if not isinstance(value, (int, float, np.floating, np.integer)):
return value
if value > 100:
return "~{:.0f}".format(value)
return "{value:.{places}f}".format(value=value,
places=self.places)
class BaseState(with_metaclass(SharedDocsABCMeta)):
""" Base abstract class that identify all important methods for
``TableBuilder`` class states.
Parameters
----------
table : TableBuilder instance
        Accepts a summary table instance. The state is able to control
        properties of the main ``TableBuilder`` class instance.
"""
def __init__(self, table):
self.table = table
def line(self):
""" Draw ASCII line. Line width depence on the table
column sizes.
"""
self.table.stdout('\r' + '-' * self.table.total_width)
def message(self, text):
""" Write additional message in table. All seperators
between columns will be ignored.
"""
self.line()
# Excluding from the total width 2 symbols related to
# the separators near the table edges and 2 symbols
# related to the spaces near these edges
max_line_width = self.table.total_width - 4
for text_row in textwrap.wrap(text, max_line_width):
formated_text = text_row.ljust(max_line_width)
self.table.stdout("\r| " + formated_text + " |")
self.line()
@abstractmethod
def start(self):
pass
@abstractmethod
def finish(self):
pass
@abstractmethod
def header(self):
pass
@abstractmethod
def row(self, data):
pass
class DrawingState(BaseState):
""" Identify active state for ``TableBuilder`` class instance.
In this state summary table instance is able to show information
in terminal.
Parameters
----------
{BaseState.table}
"""
def start(self):
raise TableDrawingError("Table drawing already started")
def finish(self):
self.line()
self.table.state = IdleState(self.table)
def header(self):
raise TableDrawingError("Header already drawn")
def row(self, data):
formated_data = []
for val, column in zip(data, self.table.columns):
val = column.format_value(val)
cell_value = str(val).ljust(column.width)
formated_data.append(cell_value)
self.table.stdout("\r| " + " | ".join(formated_data) + " |")
class IdleState(BaseState):
""" Identify idle state for ``TableBuilder`` class instance.
In this state summary table instance isn't able to show information
in terminal.
Parameters
----------
{BaseState.table}
"""
def start(self):
self.header()
self.table.state = DrawingState(self.table)
def finish(self):
raise TableDrawingError("Table drawing already finished or "
"didn't started")
def header(self):
self.line()
headers = []
for column in self.table.columns:
header_name = str(column.name).ljust(column.width)
headers.append(header_name)
self.table.stdout("\r| " + " | ".join(headers) + " |")
self.line()
def row(self, data):
raise TableDrawingError("Table drawing already finished or "
"didn't started")
class TableBuilder(SharedDocs):
""" Build ASCII tables using simple structure.
Parameters
----------
*columns
        Table structure. Accepts ``Column`` instances.
stdout : func
Function through which the message will be transmitted.
"""
def __init__(self, *columns, **kwargs):
valid_kwargs = ['stdout']
# In Python 2 doesn't work syntax like
# def __init__(self, *columns, stdout=print):
# Code below implements the same.
stdout = kwargs.get('stdout', print)
if any(kwarg not in valid_kwargs for kwarg in kwargs):
raise ValueError("Invalid keyword arguments. Available "
"only: {}".format(valid_kwargs))
for column in columns:
if not isinstance(column, Column):
raise TypeError("Column should be ``Column`` class "
"instance.")
self.columns = columns
self.stdout = stdout
self.state = IdleState(self)
text_width = sum(map(attrgetter('width'), columns))
n_columns = len(columns)
n_separators = n_columns + 1
n_margins = 2 * n_columns
self.total_width = text_width + n_separators + n_margins
def __getattr__(self, attr):
if attr not in self.__dict__:
return getattr(self.state, attr)
return super(TableBuilder, self).__getattr__(attr)
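
# Illustrative usage (assumed column layout, not part of the original module):
#
#   table = TableBuilder(
#       Column(name="Epoch", dtype=int, width=5),
#       NumberColumn(name="Error", places=4, width=8),
#       TimeColumn(name="Time", width=10),
#   )
#   table.start()                 # draws the header
#   table.row([1, 0.25361, 0.1])  # one formatted data row
#   table.finish()                # draws the closing line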

myshkov/bnn-analysis | experiments/experiment.py

import logging
from collections import OrderedDict
import numpy as np
import tensorflow as tf
import utils
from sampler import Sampler
from models.mcmc_sampler import MCMC_sampler
from models.dropout_sampler import DropoutSampler
from models.bbb_sampler import BBBSampler
from models.pbp_sampler import PBPSampler
import evaluation.visualisation as vis
class Experiment:
"""
Configures, tests and evaluates models (Sampler) for a particular environment (Env).
Contains default setups for common types of samplers for easier configuration.
"""
def __init__(self):
pass
def _setup_sampler_defaults(self, sampler_params):
pass
def setup_env_defaults(self, env):
env.create_training_test_sets()
def configure_env_mcmc(self, env, sampler_class=None, sampler_params=None, loss='mse'):
env.model_parameters_size = Sampler.get_model_parameters_size(env.layers_description)
        loss = MCMC_sampler.get_mse_loss if loss == 'mse' else MCMC_sampler.get_ce_loss
def sampler_factory():
params = env.get_default_sampler_params()
self._setup_sampler_defaults(params)
params['loss_fn'] = loss(env.chains_num, env.layers_description)
params['initial_position'] = MCMC_sampler.create_random_position(env.chains_num,
env.layers_description)
params['burn_in'] = int(.45 * env.n_chunks * env.samples_per_chunk())
if sampler_params is not None:
params.update(sampler_params)
sampler = sampler_class(**params)
pos_size = env.model_parameters_size
model_parameters = tf.placeholder(dtype=tf.float32, shape=[1, pos_size])
model = MCMC_sampler.model_from_position(env.layers_description, model_parameters, sampler.test_x)
sampler.test_model = [model, model_parameters]
sampler.construct()
sampler.fit()
return sampler
env.sampler_factory = sampler_factory
def configure_env_dropout(self, env, sampler_params=None, dropout=0.01, tau=0.15, length_scale=1e-2):
def sampler_factory():
params = env.get_default_sampler_params()
params['n_epochs'] = 50
wreg = length_scale ** 2 * (1 - dropout) / (2. * env.get_train_x().shape[0] * tau)
model = DropoutSampler.model_from_description(env.layers_description, wreg, dropout)
logging.info(f'Reg: {wreg}')
if sampler_params is not None:
params.update(sampler_params)
sampler = DropoutSampler(model=model, **params)
sampler.construct()
return sampler
env.sampler_factory = sampler_factory
    def configure_env_bbb(self, env, sampler_params=None, noise_std=0.01, weights_std=1., n_epochs=5):
def sampler_factory():
params = env.get_default_sampler_params()
params['step_size'] = .1
if sampler_params is not None:
params.update(sampler_params)
params['n_epochs'] = n_epochs
            model = BBBSampler.model_from_description(env.layers_description, noise_std, weights_std, env.batch_size,
                                                      env.get_train_x().shape[0])
sampler = BBBSampler(model=model, **params)
sampler.construct()
return sampler
env.sampler_factory = sampler_factory
def configure_env_pbp(self, env, sampler_params=None, n_epochs=50):
def sampler_factory():
params = env.get_default_sampler_params()
params['model_desc'] = env.layers_description
params['n_epochs'] = n_epochs
if sampler_params is not None:
params.update(sampler_params)
sampler = PBPSampler(**params)
sampler.construct()
return sampler
env.sampler_factory = sampler_factory
def run_queue(self, queue, skip_completed=True, cpu=False):
if cpu:
with tf.device('/cpu:0'):
self._run_queue(queue, skip_completed=skip_completed)
else:
self._run_queue(queue, skip_completed=skip_completed)
def is_complete(self, name):
return utils.get_latest_data_subdir(self.__to_pattern(name)) is not None
def plot_predictive_baseline(self, name=None, split=0, discard=.5):
env, samples = self.__load_env_baseline(name, split, discard_left=discard)
vis.plot_predictive_baseline(env, samples, title_name=name)
def plot_predictive_comparison(self, baseline, target, split=0, discard_left=0., discard_right=0.,
target_metrics=None):
# baseline
env, baseline_samples = self.__load_env_baseline(baseline, split=split, discard_left=0.5)
# target
target_samples, target_times = self.__load_target(env, target, split, discard_left=discard_left,
discard_right=discard_right)
vis.plot_predictive_comparison(env, baseline_samples, target_samples, target_metrics=target_metrics,
title_name=target)
def plot_predictive_point(self, baseline, target, split=0, discard_left=0., discard_right=0., point_index=0):
# baseline
env, baseline_samples = self.__load_env_baseline(baseline, split=split, discard_left=0.5)
# target
target_samples, target_times = self.__load_target(env, target, split, discard_left=discard_left,
discard_right=discard_right)
true_x = env.get_test_x()[point_index][0]
true_y = env.get_test_y()[point_index][0]
vis.plot_hist(baseline_samples[:, point_index], target_samples[:, point_index], true_x, true_y)
def compute_metrics(self, baseline, target, split=0, discard_left=0., discard_right=0., metric_names=None):
# baseline
env, baseline_samples = self.__load_env_baseline(baseline, split=split, discard_left=0.5)
# target
target_samples, target_times = self.__load_target(env, target, split, discard_left=discard_left,
discard_right=discard_right)
return env.compute_metrics(baseline_samples, target_samples, metric_names=metric_names)
def plot_metrics(self, baseline, target, metric_names, split=0):
# baseline
env, baseline_samples = self.__load_env_baseline(baseline, split=split, discard_left=.5)
# target
target_samples, target_times = self.__load_target(env, target, split)
samples_dict = OrderedDict()
samples_dict[target] = target_samples
times_dict = OrderedDict()
times_dict[target] = target_times
vis.plot_metrics(baseline_samples, samples_dict, times_dict, metric_names)
def plot_multiple_metrics(self, baseline, targets, metric_names, split=0, max_time=60, title_name=None):
# baseline
env, baseline_samples = self.__load_env_baseline(baseline, split=split, discard_left=.5)
# targets
samples_dict = OrderedDict()
times_dict = OrderedDict()
for t in targets:
samples_dict[t], times_dict[t] = self.__load_target(env, name=t, split=split)
vis.plot_metrics(baseline_samples, samples_dict, times_dict, metric_names, max_time=max_time,
title_name=title_name)
def report_metrics_table(self, queue, discard_left=.75):
for target in queue.keys():
metrics = []
for split in range(4):
                target_metrics = self.compute_metrics('HMC', target, split=split, discard_left=discard_left,
                                                      discard_right=.0,
                                                      metric_names=['RMSE', 'KS', 'KL', 'Precision', 'Recall', 'F1'])
metrics.append([v for v in target_metrics.values()])
print(self.__report_avg_metrics(target, metrics))
    def __report_metrics(self, target, scores):
        line = target
        for name, score in scores.items():
            line += f' & {score:.2f}'
        line += ' \\\\'
        return line
    def __report_avg_metrics(self, target, scores):
        scores = np.asarray(scores)
        mean = scores.mean(axis=0)
        std = scores.std(axis=0)
        line = target
        for m, s in zip(mean, std):
            line += f' & {m:.2f} $\\pm$ {s:.3f}'
        line += ' \\\\'
        return line
def _run_queue(self, queue, skip_completed):
for name, run_fn in queue.items():
if not skip_completed or not self.is_complete(name):
run_fn()
def __to_pattern(self, name):
return '-' + name.lower() + '-'
def __load_env_baseline(self, name=None, split=0, discard_left=.5, discard_right=0.):
utils.set_latest_data_subdir(pattern=self.__to_pattern(name))
env = utils.deserialize('env')
env.current_split = split
samples = env.load_samples(split=split, discard_left=discard_left, discard_right=discard_right)
return env, samples
def __load_target(self, env, name=None, split=0, discard_left=0., discard_right=0.):
utils.set_latest_data_subdir(pattern=self.__to_pattern(name))
samples = env.load_samples(split=split, discard_left=discard_left, discard_right=discard_right)
times = env.load_times(split=split, discard_left=discard_left, discard_right=discard_right)
return samples, times
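# Illustrative usage sketch (hedged): how an Experiment is typically driven.
# The ``env`` object (an environment with layers_description, chains_num, a
# run() entry point, etc.) is a hypothetical stand-in built elsewhere.
#
#   experiment = Experiment()
#   experiment.setup_env_defaults(env)
#   experiment.configure_env_dropout(env, dropout=0.05)
#   queue = OrderedDict()
#   queue['Dropout'] = lambda: env.run()
#   experiment.run_queue(queue, skip_completed=True)
#   experiment.plot_multiple_metrics('HMC', ['Dropout'], ['RMSE', 'KS'])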
| mit | 2,032,147,816,376,529,400 | 38.012146 | 117 | 0.603051 | false |
rjw57/rbc | rbc/compiler.py | 1 | 7098 | """
High-level interface to the B compiler.
"""
import os
import subprocess
import llvmlite.binding as llvm
import pkg_resources
import whichcraft
import rbc.codegen as codegen
from rbc.parser import BParser
from rbc.semantics import BSemantics
from rbc._backport import TemporaryDirectory
# pylint: disable=assignment-from-no-return
_LIBB_C_SOURCE_FILE = pkg_resources.resource_filename(__name__, 'libb.c')
_LIBB_B_SOURCE_FILE = pkg_resources.resource_filename(__name__, 'libb.b')
def _ensure_llvm():
"""Ensure that LLVM has been initialised."""
if _ensure_llvm.was_initialized:
return
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
_ensure_llvm.was_initialized = True
_ensure_llvm.was_initialized = False
class CompilerOptions(object):
"""There are many options which affect the behaviour of the compiler. They
are collected into this class for easy transport.
The target and machine attributes are given default values based on the host
machine running the compiler. The default optimisation level is 1.
IMPORTANT: Make sure that LLVM and in particular the native target has been
initialised via the llvmlite.binding.initialize...() functions before
constructing an instance of this object.
Attributes:
target: The llvm.Target which is the target of compilation.
machine: The llvm.TargetMachine which is the target of compilation.
opt_level: The optimisation level from 0 (no optimisation) to 3 (full
optimisation.)
"""
def __init__(self):
_ensure_llvm()
self.target = llvm.Target.from_default_triple()
self.machine = self.target.create_target_machine(codemodel='default')
self.opt_level = 1
def compile_b_source(source, options):
"""The B front end converts B source code into a LLVM module. No significant
optimisation is performed.
Args:
source (str): B source code as a string
options (CompilerOptions): compiler options
Returns:
A string with the LLVM assembly code for an unoptimised module
corresponding to the input source.
"""
# Set parser semantics and go forth and parse.
program = BParser().parse(source, 'program',
semantics=BSemantics(codegen.make_node))
# Emit LLVM assembly for the correct target.
module_str = program.emit(options.target, options.machine)
# Return the string representation of the module.
return module_str
def optimize_module(module_assembly, options):
"""Verify and optimise the passed LLVM module assembly.
Args:
module_assembly (str): LLVM module assembly
options (CompilerOptions): options for the compiler
Returns:
A llvmlite.binding.ModuleRef for the verified and optimised module.
"""
_ensure_llvm()
# Parse LLVM module assembly
module = llvm.parse_assembly(module_assembly)
module.verify()
# Create optimiser pass manager
pass_manager = llvm.ModulePassManager()
# Populate with target passes
options.machine.target_data.add_pass(pass_manager)
# Populate with optimisation passes
pass_manager_builder = llvm.PassManagerBuilder()
pass_manager_builder.opt_level = options.opt_level
pass_manager_builder.populate(pass_manager)
# Run optimiser
pass_manager.run(module)
return module
class CompilationEnvironment(object):
"""
Detect compiler tools available in the environment.
Some parts of ``rbc`` call out to external compiler tools. This class
centralises the automatic discovery of these tools. Custom environments may
be created by creating an instance of this class and setting attributes
manually.
Attributes:
gcc: path to the GCC compiler binary or None if no GCC present
cppflags: list of C pre-processor flags
cflags: list of C compiler flags
ldflags: list of linker flags
"""
def __init__(self):
self.gcc = whichcraft.which('gcc')
self.cflags = ['-std=gnu99']
self.cppflags = []
self.ldflags = []
def compile_c_source(self, obj_filename, c_filename):
subprocess.check_call(
[self.gcc] + self.cppflags + self.cflags +
['-c', '-o', obj_filename, c_filename])
def link_objects(self, output_filename, obj_filenames):
subprocess.check_call(
[self.gcc] + self.ldflags +
['-o', output_filename] + obj_filenames)
_DEFAULT_ENVIRONMENT = CompilationEnvironment()
def compile_b_to_native_object(obj_filename, b_filename, options):
"""Convenience function to compile an on-disk B file to a native object.
Args:
obj_filename (str): file to write object code to
b_filename (str): file containing B source
options (CompilerOptions): compiler options to use
"""
with open(b_filename) as fobj:
source = fobj.read()
module_asm = compile_b_source(source, options)
module = optimize_module(module_asm, options)
module.name = os.path.basename(b_filename)
with open(obj_filename, 'wb') as fobj:
fobj.write(options.machine.emit_object(module))
def compile_and_link(output, source_files, options=None,
env=_DEFAULT_ENVIRONMENT):
"""Compile and link source files into an output file. Uses GCC for the heavy
lifting. This will implicitly link in the B standard library.
Input files may be anything GCC accepts along with B source files.
If no compiler options are used, a new CompilerOptions object is
constructed.
Note: the passed compiler options *only* affect the B compiler. Use the
'cflags', 'ldflags' and 'cppflags' attributes in the compilation
environment.
Args:
output (str): path to output file
source_files (sequence): paths of input files
options (CompilerOptions): compiler options
env (CompilationEnvironment): specify custom compiler environment
"""
options = options if options is not None else CompilerOptions()
with TemporaryDirectory() as tmp_dir:
libb1_obj = os.path.join(tmp_dir, 'libb1.o')
env.compile_c_source(libb1_obj, _LIBB_C_SOURCE_FILE)
libb2_obj = os.path.join(tmp_dir, 'libb2.o')
compile_b_to_native_object(libb2_obj, _LIBB_B_SOURCE_FILE, options)
compiled_source_files = [libb1_obj, libb2_obj]
for file_idx, source_file in enumerate(source_files):
out_file = os.path.join(tmp_dir, 'tmp{}.o'.format(file_idx))
_, ext = os.path.splitext(source_file)
if ext == '.b':
compile_b_to_native_object(out_file, source_file, options)
compiled_source_files.append(out_file)
elif ext == '.c':
env.compile_c_source(out_file, source_file)
compiled_source_files.append(out_file)
else:
compiled_source_files.append(source_file)
env.link_objects(output, compiled_source_files)
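# Illustrative usage sketch (hedged); file names are placeholders:
#
#   options = CompilerOptions()
#   options.opt_level = 2
#   compile_and_link('hello', ['hello.b'], options=options)
#
# or, to stop at a native object file:
#
#   compile_b_to_native_object('hello.o', 'hello.b', CompilerOptions())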
| mit | -6,122,891,864,101,704,000 | 33.125 | 80 | 0.672866 | false |
BD2KGenomics/dcc-storage-schemas | metadata_indexer.py | 1 | 53653 | # Authors: Jean Rodriguez & Chris Wong
# Date: July 2016
#
# Description: This script merges metadata json files into one jsonl file. Each json object is grouped by donor and then each individual
# donor object is merged into one jsonl file.
#
# Usage: python metadata_indexer.py --only-program TEST --only-project TEST --storage-access-token `cat ucsc-storage-client/accessToken` --client-path ucsc-storage-client/ --metadata-schema metadata_schema.json
import semver
import logging
import os
import os.path
import platform
import argparse
import json
import jsonschema
import datetime
import re
import dateutil
import ssl
import dateutil.parser
import ast
from urllib import urlopen
from subprocess import Popen, PIPE
first_write = dict()
index_index = 0
#Dictionary to hold the File UUIDs to later get the right file size
bundle_uuid_filename_to_file_uuid = {}
#Call the storage endpoint and get the list of file sizes
def get_size_list(token, redwood_host):
"""
    Downloads the storage listing and returns a list of all file ids and
    sizes, used later to fill in any missing file_size entries.
"""
print "Downloading the listing"
    file_uuid_and_size = []
    #Attempt to download
    try:
command = ["curl"]
command.append("-k")
command.append("-H")
command.append("Authorization: Bearer "+token)
command.append("https://aws:"+token+"@"+redwood_host+":5431/listing")
c_data=Popen(command, stdout=PIPE, stderr=PIPE)
size_list, stderr = c_data.communicate()
file_uuid_and_size = ast.literal_eval(size_list)
print "Done downloading the file size listing"
except Exception:
logging.error('Error while getting the list of file sizes')
        print 'Error while getting the list of file sizes'
#Return the list of file sizes.
return file_uuid_and_size
#Fills in the contents of bundle_uuid_filename_to_file_uuid
def requires(redwood_host):
"""
Fills the dictionary for the files and their UUIDs.
"""
print "** COORDINATOR **"
print "**ACQUIRING FILE UUIDS**"
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
# now query the metadata service so I have the mapping of bundle_uuid & file names -> file_uuid
print str("https://"+redwood_host+":8444/entities?page=0")
json_str = urlopen(str("https://"+redwood_host+":8444/entities?page=0"), context=ctx).read()
metadata_struct = json.loads(json_str)
print "** METADATA TOTAL PAGES: "+str(metadata_struct["totalPages"])
for i in range(0, metadata_struct["totalPages"]):
print "** CURRENT METADATA TOTAL PAGES: "+str(i)
json_str = urlopen(str("https://"+redwood_host+":8444/entities?page="+str(i)), context=ctx).read()
metadata_struct = json.loads(json_str)
for file_hash in metadata_struct["content"]:
bundle_uuid_filename_to_file_uuid[file_hash["gnosId"]+"_"+file_hash["fileName"]] = file_hash["id"]
# HACK!!! Please remove once the behavior has been fixed in the workflow!!
if file_hash["fileName"].endswith(".sortedByCoord.md.bam"):
bundle_uuid_filename_to_file_uuid[file_hash["gnosId"] + "_sortedByCoord.md.bam"] = file_hash["id"]
if file_hash["fileName"].endswith(".tar.gz"):
bundle_uuid_filename_to_file_uuid[file_hash["gnosId"] + "_tar.gz"] = file_hash["id"]
if file_hash["fileName"].endswith(".wiggle.bg"):
bundle_uuid_filename_to_file_uuid[file_hash["gnosId"] + "_wiggle.bg"] = file_hash["id"]
def insert_size(file_name, file_uuid_and_size):
"""
Opens the file and inserts any missing file_size
"""
#Open the file and do the size insertion
with open(file_name, 'r') as f:
data = json.load(f)
#Special flat-ish kind of format.
if 'workflow_outputs' in data:
bundle_uuid = data['bundle_uuid']
for file_ in data['workflow_outputs']:
file_name_uploaded = file_['file_path']
if 'file_size' not in file_:
try:
file_uuid = bundle_uuid_filename_to_file_uuid[bundle_uuid+'_'+file_name_uploaded]
file_entry = filter(lambda x:x['id'] == file_uuid, file_uuid_and_size)
file_['file_size'] = file_entry[0]['size']
                    except Exception as e:
                        # file_uuid may be unset if the lookup itself failed, so report the file name instead
                        logging.error('Error while assigning missing size. Associated file may not exist. File: %s' % file_name_uploaded)
                        print 'Error while assigning missing size. Associated file may not exist. File: %s' % file_name_uploaded
#The more generic format
else:
for specimen in data['specimen']:
for sample in specimen['samples']:
for analysis in sample['analysis']:
bundle_uuid = analysis['bundle_uuid']
for file_ in analysis['workflow_outputs']:
file_name_uploaded = file_['file_path']
if 'file_size' not in file_:
try:
#Get the size for the file uuid
file_uuid = bundle_uuid_filename_to_file_uuid[bundle_uuid+'_'+file_name_uploaded]
file_entry = filter(lambda x: x['id'] == file_uuid, file_uuid_and_size)
file_['file_size'] = file_entry[0]['size']
                            except Exception as e:
                                # file_uuid may be unset if the lookup itself failed, so report the file name instead
                                logging.error('Error while assigning missing size. Associated file may not exist. File: %s' % file_name_uploaded)
                                print 'Error while assigning missing size. Associated file may not exist. File: %s' % file_name_uploaded
#Remove and replace the old file with the new one.
os.remove(file_name)
with open(file_name, 'w') as f:
json.dump(data, f, indent=4)
def input_Options():
"""
Creates the parse options
"""
parser = argparse.ArgumentParser(description='Directory that contains Json files.')
parser.add_argument('-d', '--test-directory', help='Directory that contains the json metadata files')
parser.add_argument('-u', '--skip-uuid-directory', help='Directory that contains files with file uuids (bundle uuids, one per line, file ending with .redacted) that represent databundles that should be skipped, useful for redacting content (but not deleting it)')
parser.add_argument('-m', '--metadata-schema', help='File that contains the metadata schema')
parser.add_argument('-s', '--skip-program', help='Lets user skip certain json files that contain a specific program test')
parser.add_argument('-o', '--only-program', help='Lets user include certain json files that contain a specific program test')
parser.add_argument('-r', '--skip-project', help='Lets user skip certain json files that contain a specific program test')
parser.add_argument('-t', '--only-project', help='Lets user include certain json files that contain a specific program test')
parser.add_argument('-a', '--storage-access-token', default="NA", help='Storage access token to download the metadata.json files')
parser.add_argument('-c', '--client-path', default="ucsc-storage-client/", help='Path to access the ucsc-storage-client tool')
parser.add_argument('-n', '--server-host', default="storage.ucsc-cgl.org", help='hostname for the storage service')
parser.add_argument('-p', '--max-pages', default=None, type=int, help='Specify maximum number of pages to download')
parser.add_argument('-preserve-version',action='store_true', default=False, help='Keep all copies of analysis events')
args = parser.parse_args()
return args
def make_output_dir():
"""
Creates directory named "endpoint_metadata" to store all the metadata that is downloaded
"""
directory= "endpoint_metadata"
mkdir_Command=["mkdir"]
mkdir_Command.append(directory)
c_data=Popen(mkdir_Command, stdout=PIPE, stderr=PIPE)
stdout, stderr = c_data.communicate()
logging.info("created directory: %s/" % (directory))
print "created directory: %s/" % (directory)
return directory
def endpoint_mapping(data_array):
    """
    data_array: array of json objects
    Creates a mapping: gnos-id -> id
    """
numberOfElements=0
page=0
my_dictionary= dict()
for j_obj in data_array:
numberOfElements += j_obj["numberOfElements"]
page= j_obj["number"]
for content in j_obj["content"]:
content_id= content["id"]
my_dictionary[content_id]={"content": content, "page": page}
page += 1
logging.info("Total pages downloaded: %s" % page)
logging.info("Total number of elements: %s" % numberOfElements)
print "Total pages downloaded: ",page
print "Total number of elements: ", numberOfElements
return my_dictionary
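# Illustrative shape of the returned mapping (values abbreviated, hedged):
#
#   {
#       "<content-id>": {"content": {...entity fields...}, "page": 0},
#       ...
#   }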
def create_merge_input_folder(id_to_content,directory,accessToken,client_Path, size_list):
"""
id_to_content: dictionary that maps content id to content object.
directory: name of directory where the json files will be stored.
Uses the ucsc-download.sh script to download the json files
and store them in the "directory".
"""
"""
java
-Djavax.net.ssl.trustStore=/ucsc-storage-client/ssl/cacerts
-Djavax.net.ssl.trustStorePassword=changeit
-Dmetadata.url=https://storage.ucsc-cgl.org:8444
-Dmetadata.ssl.enabled=true -Dclient.ssl.custom=false
-Dstorage.url=https://storage.ucsc-cgl.org:5431
-DaccessToken=${accessToken}
-jar
/ucsc-storage-client/icgc-storage-client-1.0.14-SNAPSHOT/lib/icgc-storage-client.jar
download
--output-dir ${download}
--object-id ${object}
--output-layout bundle
"""
args = input_Options()
metadataClientJar = os.path.join(client_Path,"icgc-storage-client-1.0.14-SNAPSHOT/lib/icgc-storage-client.jar")
metadataUrl= "https://"+args.server_host+":8444"
storageUrl= "https://"+args.server_host+":5431"
trustStore = os.path.join(client_Path,"ssl/cacerts")
trustStorePw = "changeit"
# If the path is not correct then the download and merge will not be performed.
if not os.path.isfile(metadataClientJar):
logging.critical("File not found: %s. Path may not be correct: %s" % (metadataClientJar,client_Path))
print "File not found: %s" % metadataClientJar
print "Path may not be correct: %s" % client_Path
print "Exiting program."
exit(1)
logging.info('Begin Download.')
print "downloading metadata..."
for content_id in id_to_content:
file_create_time_server = id_to_content[content_id]["content"]["createdTime"]
if os.path.isfile(directory+"/"+id_to_content[content_id]["content"]["gnosId"]+"/metadata.json") and \
creation_date(directory+"/"+id_to_content[content_id]["content"]["gnosId"]+"/metadata.json") == file_create_time_server/1000:
#Assign any missing file size
insert_size(directory+"/"+id_to_content[content_id]["content"]["gnosId"]+"/metadata.json", size_list)
#Set the time created to be the one supplied by redwood (since insert_size() modifies the file)
os.utime(directory + "/" + id_to_content[content_id]["content"]["gnosId"] + "/metadata.json",
(file_create_time_server/1000, file_create_time_server/1000))
#Open the file and add the file size if missing.
print " + using cached file "+directory+"/"+id_to_content[content_id]["content"]["gnosId"]+"/metadata.json created on "+str(file_create_time_server)
#os.utime(directory + "/" + id_to_content[content_id]["content"]["gnosId"] + "/metadata.json", (file_create_time_server/1000, file_create_time_server/1000))
else:
print " + downloading "+content_id
# build command string
command = ["java"]
command.append("-Djavax.net.ssl.trustStore=" + trustStore)
command.append("-Djavax.net.ssl.trustStorePassword=" + trustStorePw)
command.append("-Dmetadata.url=" + str(metadataUrl))
command.append("-Dmetadata.ssl.enabled=true")
command.append("-Dclient.ssl.custom=false")
command.append("-Dstorage.url=" + str(storageUrl))
command.append("-DaccessToken=" + str(accessToken))
command.append("-jar")
command.append(metadataClientJar)
command.append("download")
command.append("--output-dir")
command.append(str(directory))
command.append("--object-id")
command.append(str(content_id))
command.append("--output-layout")
command.append("bundle")
#print " ".join(command)
try:
c_data=Popen(command, stdout=PIPE, stderr=PIPE)
stdout, stderr = c_data.communicate()
# now set the create timestamp
insert_size(directory+"/"+id_to_content[content_id]["content"]["gnosId"]+"/metadata.json", size_list)
os.utime(directory + "/" + id_to_content[content_id]["content"]["gnosId"] + "/metadata.json",
(file_create_time_server/1000, file_create_time_server/1000))
except Exception:
logging.error('Error while downloading file with content ID: %s' % content_id)
print 'Error while downloading file with content ID: %s' % content_id
logging.info('End Download.')
def creation_date(path_to_file):
"""
Try to get the date that a file was created, falling back to when it was
last modified if that isn't possible.
See http://stackoverflow.com/a/39501288/1709587 for explanation.
"""
if platform.system() == 'Windows':
return os.path.getctime(path_to_file)
else:
stat = os.stat(path_to_file)
try:
return stat.st_birthtime
except AttributeError:
# We're probably on Linux. No easy way to get creation dates here,
# so we'll settle for when its content was last modified.
return stat.st_mtime
def load_json_obj(json_path):
"""
:param json_path: Name or path of the json metadata file.
:return: A json object.
"""
json_file = open(json_path, 'r')
print "JSON FILE: "+json_path
json_obj = json.load(json_file)
json_file.close()
return json_obj
def load_json_arr(input_dir, data_arr, redacted):
"""
:param input_dir: Directory that contains the json files.
:param data_arr: Empty array.
Gets all of the json files, converts them into objects and stores
them in an array.
"""
for folder in os.listdir(input_dir):
current_folder = os.path.join(input_dir, folder)
if os.path.isdir(current_folder):
for file in os.listdir(current_folder):
if file.endswith(".json") and folder not in redacted:
current_file = os.path.join(current_folder, file)
try:
json_obj = load_json_obj(current_file)
data_arr.append(json_obj)
except ValueError:
print "ERROR PARSING JSON: will skip this record."
def skip_option(donorLevelObjs, option_skip, key):
    # Iterate over a copy: removing from a list while iterating it skips elements.
    for json_obj in list(donorLevelObjs):
        if json_obj[key] == option_skip:
            donorLevelObjs.remove(json_obj)
def only_option(donorLevelObjs, option_only, key):
    # Iterate over a copy: removing from a list while iterating it skips elements.
    for json_obj in list(donorLevelObjs):
        if json_obj[key] != option_only:
            donorLevelObjs.remove(json_obj)
def validate_json(json_obj,schema):
"""
:return: Returns true if the json is in the correct schema.
"""
try:
jsonschema.validate(json_obj, schema)
except Exception as exc:
logging.error("jsonschema.validate FAILED in validate_json: %s" % (str(exc)))
return False
return True
def insert_detached_metadata(detachedObjs, uuid_mapping, preserve_version=False):
"""
    Inserts an Analysis object that contains a parent ID into its respective donor object.
"""
de_timestamp = dateutil.parser.parse(detachedObjs["timestamp"])
for parent_uuid in detachedObjs["parent_uuids"]:
for key in uuid_mapping:
donor_obj= uuid_mapping[key]
donor_timestamp= dateutil.parser.parse(donor_obj["timestamp"])
donor_uuid = donor_obj["donor_uuid"]
# Check if it needs to be inserted in the donor section
if parent_uuid== donor_uuid:
if "analysis" in donor_obj:
donor_obj["analysis"].append(detachedObjs)
else:
donor_obj["analysis"]= [detachedObjs]
# Check if it needs to be inserted in the specimen section
for specimen in donor_obj["specimen"]:
specimen_uuid =specimen["specimen_uuid"]
if specimen_uuid == parent_uuid:
if "analysis" in specimen:
specimen["analysis"].append(detachedObjs)
else:
specimen["analysis"]= [detachedObjs]
# Check if it needs to be inserted in the sample section
for sample in specimen["samples"]:
sample_uuid= sample["sample_uuid"]
if sample_uuid == parent_uuid:
analysis_type = detachedObjs["analysis_type"]
savedAnalysisTypes = set()
for donor_analysis in sample["analysis"]:
savedAnalysisType = donor_analysis["analysis_type"]
savedAnalysisTypes.add(savedAnalysisType)
if analysis_type == savedAnalysisType:
analysisObj = donor_analysis
if not analysis_type in savedAnalysisTypes:
sample["analysis"].append(detachedObjs)
continue
else:
# compare 2 analysis to keep only most relevant one
# saved is analysisObj
# currently being considered is new_analysis
if preserve_version:
sample["analysis"].append(detachedObjs)
else:
new_workflow_version = detachedObjs["workflow_version"]
saved_version = analysisObj["workflow_version"]
# current is older than new
if saved_version == new_workflow_version:
# use the timestamp
if "timestamp" in detachedObjs and "timestamp" in analysisObj:
saved_timestamp = dateutil.parser.parse(analysisObj["timestamp"])
new_timestamp = dateutil.parser.parse(detachedObjs["timestamp"])
timestamp_diff = saved_timestamp - new_timestamp
if timestamp_diff.total_seconds() < 0:
sample["analysis"].remove(analysisObj)
sample["analysis"].append(detachedObjs)
elif semver.compare(saved_version, new_workflow_version) == -1:
sample["analysis"].remove(analysisObj)
sample["analysis"].append(detachedObjs)
#if semver.compare(saved_version, new_workflow_version) == 0:
timestamp_diff = donor_timestamp - de_timestamp
if timestamp_diff.total_seconds() < 0:
donor_obj["timestamp"] = detachedObjs["timestamp"]
def mergeDonors(metadataObjs, preserve_version):
'''
Merge data bundle metadata.json objects into correct donor objects.
'''
donorMapping = {}
uuid_to_timestamp={}
for metaObj in metadataObjs:
# check if donor exists
donor_uuid = metaObj["donor_uuid"]
if not donor_uuid in donorMapping:
donorMapping[donor_uuid] = metaObj
uuid_to_timestamp[donor_uuid]= [metaObj["timestamp"]]
continue
# check if specimen exists
donorObj = donorMapping[donor_uuid]
for specimen in metaObj["specimen"]:
specimen_uuid = specimen["specimen_uuid"]
savedSpecUuids = set()
for savedSpecObj in donorObj["specimen"]:
savedSpecUuid = savedSpecObj["specimen_uuid"]
savedSpecUuids.add(savedSpecUuid)
if specimen_uuid == savedSpecUuid:
specObj = savedSpecObj
if not specimen_uuid in savedSpecUuids:
donorObj["specimen"].append(specimen)
continue
# check if sample exists
for sample in specimen["samples"]:
sample_uuid = sample["sample_uuid"]
savedSampleUuids = set()
for savedSampleObj in specObj["samples"]:
savedSampleUuid = savedSampleObj["sample_uuid"]
savedSampleUuids.add(savedSampleUuid)
if sample_uuid == savedSampleUuid:
sampleObj = savedSampleObj
if not sample_uuid in savedSampleUuids:
specObj["samples"].append(sample)
continue
# check if analysis exists
# need to compare analysis for uniqueness by looking at analysis_type... bundle_uuid is not the right one here.
for bundle in sample["analysis"]:
bundle_uuid = bundle["bundle_uuid"]
analysis_type = bundle["analysis_type"]
savedAnalysisTypes = set()
for savedBundle in sampleObj["analysis"]:
savedAnalysisType = savedBundle["analysis_type"]
savedAnalysisTypes.add(savedAnalysisType)
if analysis_type == savedAnalysisType:
analysisObj = savedBundle
if not analysis_type in savedAnalysisTypes or preserve_version:
sampleObj["analysis"].append(bundle)
# timestamp mapping
if "timestamp" in bundle:
uuid_to_timestamp[donor_uuid].append(bundle["timestamp"])
continue
else:
# compare 2 analysis to keep only most relevant one
# saved is analysisObj
# currently being considered is bundle
new_workflow_version= bundle["workflow_version"]
saved_version= analysisObj["workflow_version"]
# current is older than new
if semver.compare(saved_version, new_workflow_version) == -1:
sampleObj["analysis"].remove(analysisObj)
sampleObj["analysis"].append(bundle)
# timestamp mapping
if "timestamp" in bundle:
uuid_to_timestamp[donor_uuid].append(bundle["timestamp"])
if semver.compare(saved_version, new_workflow_version) == 0:
# use the timestamp to determine which analysis to choose
if "timestamp" in bundle and "timestamp" in analysisObj :
saved_timestamp = dateutil.parser.parse(analysisObj["timestamp"])
new_timestamp= dateutil.parser.parse(bundle["timestamp"])
timestamp_diff = saved_timestamp - new_timestamp
if timestamp_diff.total_seconds() < 0:
sampleObj["analysis"].remove(analysisObj)
sampleObj["analysis"].append(bundle)
# timestamp mapping
if "timestamp" in bundle:
uuid_to_timestamp[donor_uuid].append(bundle["timestamp"])
# Get the most recent timstamp from uuid_to_timestamp(for each donor) and use donorMapping to substitute it
for uuid in uuid_to_timestamp:
timestamp_list= uuid_to_timestamp[uuid]
donorMapping[uuid]["timestamp"] = max(timestamp_list)
return donorMapping
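# Illustrative semantics (hypothetical inputs, hedged): given two bundles for
# the same donor/specimen/sample with the same analysis_type, the bundle with
# the higher workflow_version wins; on a version tie the newer timestamp
# wins; with preserve_version=True both copies are kept.
#
#   merged = mergeDonors([bundle_a, bundle_b], preserve_version=False)
#   for donor_uuid in merged:
#       print donor_uuid, merged[donor_uuid]["timestamp"]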
def validate_Donor(uuid_mapping, schema):
"""
Validates each donor object with the schema provided.
"""
valid = []
invalid = []
for uuid in uuid_mapping:
donor_Obj = uuid_mapping[uuid]
if validate_json(donor_Obj, schema):
valid.append(donor_Obj)
else:
invalid.append(donor_Obj)
return valid, invalid
def allHaveItems(length):
    """
    Returns the value of each flag, based on the length of the array in 'missing_items'.
    """
    #print ("ALLHAVEITEMS: %s" % length)
    result = False
    if length == 0:
        result = True
    #print "RESULT: %s" % result
    return result
def arrayMissingItems(itemsName, regex, items,submitter_specimen_types):
"""
Returns a list of 'sample_uuid' for the analysis that were missing.
"""
return arrayItems(itemsName, regex, items,submitter_specimen_types, True)
def arrayContainingItems(itemsName, regex, items,submitter_specimen_types):
"""
Returns a list of 'sample_uuid' for the analysis that were present.
"""
return arrayItems(itemsName, regex, items,submitter_specimen_types, False)
def arrayItems(itemsName, regex, items,submitter_specimen_types, missing):
"""
    Returns a list of 'sample_uuid' for the analyses that were missing (missing=True) or present (missing=False).
"""
analysis_type = False
results = []
for specimen in items['specimen']:
if re.search(regex, specimen['submitter_specimen_type']):
submitter_specimen_types.append(specimen['submitter_specimen_type'])
for sample in specimen['samples']:
for analysis in sample['analysis']:
if analysis["analysis_type"] == itemsName:
analysis_type = True
break
if (missing and not analysis_type) or (not missing and analysis_type):
results.append(sample['sample_uuid'])
analysis_type = False
return results
def arrayMissingItemsWorkflow(workflow_name, workflow_version_regex, regex, items,submitter_specimen_types):
"""
Returns a list of 'sample_uuid' for the analysis that were missing.
"""
return arrayItemsWorkflow(workflow_name, workflow_version_regex, regex, items,submitter_specimen_types, True)
def arrayContainingItemsWorkflow(workflow_name, workflow_version_regex, regex, items,submitter_specimen_types):
"""
Returns a list of 'sample_uuid' for the analysis that were present.
"""
return arrayItemsWorkflow(workflow_name, workflow_version_regex, regex, items,submitter_specimen_types, False)
def arrayItemsWorkflow(workflow_name, workflow_version_regex, regex, items,submitter_specimen_types, missing):
"""
    Returns a list of 'sample_uuid' for the analyses that were missing (missing=True) or present (missing=False).
"""
analysis_type = False
results = []
for specimen in items['specimen']:
if re.search(regex, specimen['submitter_specimen_type']):
submitter_specimen_types.append(specimen['submitter_specimen_type'])
for sample in specimen['samples']:
for analysis in sample['analysis']:
if analysis["workflow_name"] == workflow_name and re.search(workflow_version_regex, analysis["workflow_version"]):
analysis_type = True
break
if (missing and not analysis_type) or (not missing and analysis_type):
results.append(sample['sample_uuid'])
analysis_type = False
return results
def createFlags(uuid_to_donor):
"""
uuid_to_donor: dictionary that maps uuid with its json object.
Creates and adds "flags" and "missing_items" to each donor object.
"""
for uuid in uuid_to_donor:
json_object = uuid_to_donor[uuid]
submitter_specimen_types=[]
flagsWithArrs = {'normal_sequence': arrayMissingItems('sequence_upload', "^Normal - ", json_object,submitter_specimen_types),
'tumor_sequence': arrayMissingItems('sequence_upload',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line - ",
json_object,submitter_specimen_types),
'normal_sequence_qc_report': arrayMissingItems('sequence_upload_qc_report', "^Normal - ", json_object,submitter_specimen_types),
'tumor_sequence_qc_report': arrayMissingItems('sequence_upload_qc_report',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_alignment': arrayMissingItems('alignment', "^Normal - ", json_object,submitter_specimen_types),
'tumor_alignment': arrayMissingItems('alignment',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_alignment_qc_report': arrayMissingItems('alignment_qc_report', "^Normal - ", json_object,submitter_specimen_types),
'tumor_alignment_qc_report': arrayMissingItems('alignment_qc_report',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_quantification': arrayMissingItems('rna_seq_quantification', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_quantification': arrayMissingItems('rna_seq_quantification',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_cgl_workflow_3_0_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.0\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_cgl_workflow_3_0_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.0\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_cgl_workflow_3_1_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.1\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_cgl_workflow_3_1_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.1\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_cgl_workflow_3_2_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.2\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_cgl_workflow_3_2_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.2\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_protect_cgl_workflow_2_3_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/protect', '2\.3\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_protect_cgl_workflow_2_3_x': arrayMissingItemsWorkflow('quay.io/ucsc_cgl/protect', '2\.3\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_cnv_workflow': arrayContainingItemsWorkflow('https://github.com/BD2KGenomics/dockstore_workflow_cnv', '1\.0\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_cnv_workflow': arrayContainingItemsWorkflow('https://github.com/BD2KGenomics/dockstore_workflow_cnv', '1\.0\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_germline_variants': arrayMissingItems('germline_variant_calling', "^Normal - ", json_object,submitter_specimen_types),
'tumor_somatic_variants': arrayMissingItems('somatic_variant_calling',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types)}
flagsPresentWithArrs = {'normal_sequence': arrayContainingItems('sequence_upload', "^Normal - ", json_object,submitter_specimen_types),
'tumor_sequence': arrayContainingItems('sequence_upload',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_sequence_qc_report': arrayContainingItems('sequence_upload_qc_report', "^Normal - ", json_object,submitter_specimen_types),
'tumor_sequence_qc_report': arrayContainingItems('sequence_upload_qc_report',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_alignment': arrayContainingItems('alignment', "^Normal - ", json_object,submitter_specimen_types),
'tumor_alignment': arrayContainingItems('alignment',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_alignment_qc_report': arrayContainingItems('alignment_qc_report', "^Normal - ", json_object,submitter_specimen_types),
'tumor_alignment_qc_report': arrayContainingItems('alignment_qc_report',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_quantification': arrayContainingItems('rna_seq_quantification', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_quantification': arrayContainingItems('rna_seq_quantification',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_cgl_workflow_3_0_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.0\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_cgl_workflow_3_0_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.0\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_cgl_workflow_3_1_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.1\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_cgl_workflow_3_1_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.1\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_rna_seq_cgl_workflow_3_2_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.2\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_rna_seq_cgl_workflow_3_2_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/rnaseq-cgl-pipeline', '3\.2\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_protect_cgl_workflow_2_3_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/protect', '2\.3\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_protect_cgl_workflow_2_3_x': arrayContainingItemsWorkflow('quay.io/ucsc_cgl/protect', '2\.3\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_cnv_workflow': arrayContainingItemsWorkflow('https://github.com/BD2KGenomics/dockstore_workflow_cnv', '1\.0\.', "^Normal - ", json_object,submitter_specimen_types),
'tumor_cnv_workflow': arrayContainingItemsWorkflow('https://github.com/BD2KGenomics/dockstore_workflow_cnv', '1\.0\.',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types),
'normal_germline_variants': arrayContainingItems('germline_variant_calling', "^Normal - ", json_object,submitter_specimen_types),
'tumor_somatic_variants': arrayContainingItems('somatic_variant_calling',
"^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line -",
json_object,submitter_specimen_types)}
flagsWithStr = {'normal_sequence' : len(flagsWithArrs["normal_sequence"]) == 0 and len(flagsPresentWithArrs["normal_sequence"]) > 0,
'normal_sequence_qc_report' : len(flagsWithArrs["normal_sequence_qc_report"]) == 0 and len(flagsPresentWithArrs["normal_sequence_qc_report"]) > 0,
'tumor_sequence': len(flagsWithArrs["tumor_sequence"]) == 0 and len(flagsPresentWithArrs["tumor_sequence"]) > 0,
'tumor_sequence_qc_report' :len(flagsWithArrs["tumor_sequence_qc_report"]) == 0 and len(flagsPresentWithArrs["tumor_sequence_qc_report"]) > 0,
'normal_alignment': len(flagsWithArrs["normal_alignment"]) == 0 and len(flagsPresentWithArrs["normal_alignment"]) > 0,
'normal_alignment_qc_report': len(flagsWithArrs["normal_alignment_qc_report"]) == 0 and len(flagsPresentWithArrs["normal_alignment_qc_report"]) > 0,
'tumor_alignment': len(flagsWithArrs["tumor_alignment"]) == 0 and len(flagsPresentWithArrs["tumor_alignment"]) > 0,
'tumor_alignment_qc_report': len(flagsWithArrs["tumor_alignment_qc_report"]) == 0 and len(flagsPresentWithArrs["tumor_alignment_qc_report"]) > 0,
'normal_rna_seq_quantification': len(flagsWithArrs["normal_rna_seq_quantification"]) == 0 and len(flagsPresentWithArrs["normal_rna_seq_quantification"]) > 0,
'tumor_rna_seq_quantification': len(flagsWithArrs["tumor_rna_seq_quantification"]) == 0 and len(flagsPresentWithArrs["tumor_rna_seq_quantification"]) > 0,
'normal_rna_seq_cgl_workflow_3_0_x': len(flagsWithArrs["normal_rna_seq_cgl_workflow_3_0_x"]) == 0 and len(flagsPresentWithArrs["normal_rna_seq_cgl_workflow_3_0_x"]) > 0,
'tumor_rna_seq_cgl_workflow_3_0_x': len(flagsWithArrs["tumor_rna_seq_cgl_workflow_3_0_x"]) == 0 and len(flagsPresentWithArrs["tumor_rna_seq_cgl_workflow_3_0_x"]) > 0,
'normal_rna_seq_cgl_workflow_3_1_x': len(flagsWithArrs["normal_rna_seq_cgl_workflow_3_1_x"]) == 0 and len(flagsPresentWithArrs["normal_rna_seq_cgl_workflow_3_1_x"]) > 0,
'tumor_rna_seq_cgl_workflow_3_1_x': len(flagsWithArrs["tumor_rna_seq_cgl_workflow_3_1_x"]) == 0 and len(flagsPresentWithArrs["tumor_rna_seq_cgl_workflow_3_1_x"]) > 0,
'normal_rna_seq_cgl_workflow_3_2_x': len(flagsWithArrs["normal_rna_seq_cgl_workflow_3_2_x"]) == 0 and len(flagsPresentWithArrs["normal_rna_seq_cgl_workflow_3_2_x"]) > 0,
'tumor_rna_seq_cgl_workflow_3_2_x': len(flagsWithArrs["tumor_rna_seq_cgl_workflow_3_2_x"]) == 0 and len(flagsPresentWithArrs["tumor_rna_seq_cgl_workflow_3_2_x"]) > 0,
'normal_protect_cgl_workflow_2_3_x': len(flagsWithArrs["normal_protect_cgl_workflow_2_3_x"]) == 0 and len(flagsPresentWithArrs["normal_protect_cgl_workflow_2_3_x"]) > 0,
'tumor_protect_cgl_workflow_2_3_x': len(flagsWithArrs["tumor_protect_cgl_workflow_2_3_x"]) == 0 and len(flagsPresentWithArrs["tumor_protect_cgl_workflow_2_3_x"]) > 0,
'normal_cnv_workflow': len(flagsWithArrs["normal_cnv_workflow"]) == 0 and len(flagsPresentWithArrs["normal_cnv_workflow"]) > 0,
'tumor_cnv_workflow': len(flagsWithArrs["tumor_cnv_workflow"]) == 0 and len(flagsPresentWithArrs["tumor_cnv_workflow"]) > 0,
'normal_germline_variants': len(flagsWithArrs["normal_germline_variants"]) == 0 and len(flagsPresentWithArrs["normal_germline_variants"]) > 0,
'tumor_somatic_variants': len(flagsWithArrs["tumor_somatic_variants"]) == 0 and len(flagsPresentWithArrs["tumor_somatic_variants"]) > 0}
json_object['flags'] = flagsWithStr
json_object['missing_items'] = flagsWithArrs
json_object['present_items'] = flagsPresentWithArrs
def dumpResult(result, filename, ES_file_name="elasticsearch.jsonl"):
"""
Creates the .jsonl files.
"""
global index_index
for donor in result:
if filename not in first_write:
with open(filename, 'w') as outfile:
if filename == ES_file_name:
outfile.write('{"index":{"_id":"' + str(index_index) + '","_type":"meta"}}\n')
index_index += 1
json.dump(donor, outfile)
outfile.write('\n')
first_write[filename] = "true"
else:
with open(filename, 'a') as outfile:
if filename == ES_file_name:
outfile.write('{"index":{"_id":"' + str(index_index) + '","_type":"meta"}}\n')
index_index += 1
json.dump(donor, outfile)
outfile.write('\n')
def findRedactedUuids(skip_uuid_directory):
"""
Creates a dict of file UUIDs that need to be skipped
"""
result = {}
if skip_uuid_directory is not None:
for file in os.listdir(skip_uuid_directory):
if file.endswith(".redacted"):
current_file = os.path.join(skip_uuid_directory, file)
f = open(current_file, "r")
for line in f.readlines():
result[line.rstrip()] = True
f.close()
print result
return result
def main():
args = input_Options()
directory_meta = args.test_directory
# redacted metadata.json file UUIDs
skip_uuid_directory = args.skip_uuid_directory
skip_uuids = findRedactedUuids(skip_uuid_directory)
preserve_version = args.preserve_version
logfileName = os.path.basename(__file__).replace(".py", ".log")
logging_format= '%(asctime)s - %(levelname)s: %(message)s'
logging.basicConfig(filename=logfileName, level=logging.DEBUG, format=logging_format, datefmt='%m/%d/%Y %I:%M:%S %p')
if not directory_meta:
#Getting the File UUIDs
requires(args.server_host)
#Get the size listing
file_uuid_and_size = get_size_list(args.storage_access_token, args.server_host)
#Trying to download the data.
last= False
page=0
obj_arr=[]
# figure out the pages
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
json_str = urlopen(str("https://"+args.server_host+":8444/entities?fileName=metadata.json&page=0"), context=ctx).read()
metadata_struct = json.loads(json_str)
# Download all of the data that is stored.
if args.max_pages is not None:
metadata_struct["totalPages"] = int(args.max_pages)
for page in range(0, metadata_struct["totalPages"]):
print "DOWNLOADING PAGE "+str(page)
meta_cmd= ["curl", "-k"]
url= 'https://'+args.server_host+':8444/entities?fileName=metadata.json&page='
new_url= url + str(page)
meta_cmd.append(new_url)
c_data=Popen(meta_cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = c_data.communicate()
json_obj= json.loads(stdout)
last = json_obj["last"]
obj_arr.append(json_obj)
# Create a mapping of all the data provided from the endpoint.
        id_to_content = endpoint_mapping(obj_arr)
# Download the metadata.json files using the id stored in id_to_content dictionary
directory_meta= make_output_dir()
access_Token=args.storage_access_token
client_Path= args.client_path
create_merge_input_folder(id_to_content, directory_meta,access_Token,client_Path, file_uuid_and_size)
# END DOWNLOAD
# BEGIN json Merge
logging.info("Begin Merging.")
print "Begin Merging."
schema = load_json_obj(args.metadata_schema)
#if there is no schema the program cannot continue.
if schema == None:
logging.critical("No metadata schema was recognized. Exiting program.")
exit(1)
schema_version= schema["definitions"]["schema_version"]["pattern"]
#sche_version= schema_version.replace("^","")
#schema_version= sche_version.replace("$","")
logging.info("Schema Version: %s" % schema_version)
print "Schema Version: ",schema_version
data_arr = []
# Loads the json files and stores them into an array.
load_json_arr(directory_meta, data_arr, skip_uuids)
donorLevelObjs = []
detachedObjs = []
    # Separates the detached analysis objs from the donor objs.
for metaobj in data_arr:
if "donor_uuid" in metaobj:
donorLevelObjs.append(metaobj)
elif "parent_uuids" in metaobj:
detachedObjs.append(metaobj)
# Skip Program Test Option.
skip_prog_option= args.skip_program
if skip_prog_option:
logging.info("Skip Programs with values: %s" % (skip_prog_option))
print "Skip Programs with values: %s" % (skip_prog_option)
skip_option(donorLevelObjs, skip_prog_option,'program')
# Use Only Program Test Option.
only_program_option= args.only_program
if only_program_option:
logging.info("Only use Programs with values: %s" % (only_program_option))
print "Only use Programs with values: %s" % (only_program_option)
only_option(donorLevelObjs,only_program_option,'program')
    # Skip Project Test Option.
skip_project_option= args.skip_project
if skip_project_option:
logging.info("Skip Projects with values: %s" % (skip_project_option))
print "Skip Projects with values: %s" % (skip_project_option)
skip_option(donorLevelObjs, skip_project_option,"project")
    # Use Only Project Test Option.
only_project_option= args.only_project
if only_project_option:
logging.info("Only use Projects with values: %s" % (only_project_option))
print "Only use Projects with values: %s" % (only_project_option)
only_option(donorLevelObjs,only_project_option,"project")
# Merge only those that are of the same schema_version as the Schema.
invalid_version_arr= []
valid_version_arr= []
for donor_object in donorLevelObjs:
obj_schema_version= donor_object["schema_version"]
p = re.compile(schema_version)
if not p.match(obj_schema_version):
invalid_version_arr.append(donor_object)
else:
valid_version_arr.append(donor_object)
logging.info("%s valid donor objects with correct schema version." % str(len(valid_version_arr)))
print len(valid_version_arr), " valid donor objects with correct schema version."
# Inserts the detached analysis to the merged donor obj.
uuid_mapping = mergeDonors(valid_version_arr, preserve_version)
for de_obj in detachedObjs:
insert_detached_metadata(de_obj, uuid_mapping, preserve_version)
# Creates and adds the flags and missingItems to each donor obj.
createFlags(uuid_mapping)
# Validates each donor obj
(validated, invalid) = validate_Donor(uuid_mapping,schema)
# Check if there are invalid json objects.
invalid_num= len(invalid)
if invalid_num:
logging.info("%s merged donor objects invalid." % (invalid_num))
print "%s merged donor objects invalid." % (invalid_num)
dumpResult(invalid, "invalid.jsonl")
logging.info("Invalid merged objects in invalid.jsonl.")
print "Invalid merged objects in invalid.jsonl. "
# Creates the jsonl files .
validated_num= len(validated)
if validated_num:
logging.info("%s merged json objects were valid." % (validated_num))
print "%s merged json objects were valid." % (validated_num)
if preserve_version:
dumpResult(validated, "duped_validated.jsonl")
dumpResult(validated, 'duped_elasticsearch.jsonl', ES_file_name="duped_elasticsearch.jsonl")
logging.info("All done, find index in duped_elasticsearch.jsonl")
print "All done, find index in duped_elasticsearch.jsonl"
else:
dumpResult(validated, "validated.jsonl")
dumpResult(validated, 'elasticsearch.jsonl')
logging.info("All done, find index in elasticsearch.jsonl")
print "All done, find index in elasticsearch.jsonl"
if not validated:
logging.info("No objects were merged.")
print "No objects were merged."
if __name__ == "__main__":
main()
| apache-2.0 | 6,270,652,349,861,014,000 | 53.304656 | 267 | 0.575774 | false |
marmarek/qubes-core-mgmt-client | qubesadmin/tools/qvm_features.py | 1 | 3066 | # coding=utf-8
#
# The Qubes OS Project, https://www.qubes-os.org/
#
# Copyright (C) 2010-2016 Joanna Rutkowska <[email protected]>
# Copyright (C) 2016 Wojtek Porczyk <[email protected]>
# Copyright (C) 2017 Marek Marczykowski-Górecki
# <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
'''qvm-features - Manage domain's features'''
from __future__ import print_function
import sys
import qubesadmin
import qubesadmin.tools
parser = qubesadmin.tools.QubesArgumentParser(
vmname_nargs=1,
description='manage domain\'s features')
parser.add_argument('feature', metavar='FEATURE',
action='store', nargs='?',
help='name of the feature')
parser.add_argument('value', metavar='VALUE',
action='store', nargs='?',
help='new value of the feature')
parser.add_argument('--unset', '--default', '--delete', '-D',
dest='delete',
action='store_true',
help='unset the feature')
def main(args=None, app=None):
'''Main routine of :program:`qvm-features`.
:param list args: Optional arguments to override those delivered from \
command line.
'''
args = parser.parse_args(args, app=app)
vm = args.domains[0]
if args.feature is None:
if args.delete:
parser.error('--unset requires a feature')
try:
features = [(feat, vm.features[feat]) for feat in vm.features]
qubesadmin.tools.print_table(features)
except qubesadmin.exc.QubesException as e:
parser.error_runtime(e)
elif args.delete:
if args.value is not None:
parser.error('cannot both set and unset a value')
try:
del vm.features[args.feature]
except KeyError:
pass
except qubesadmin.exc.QubesException as e:
parser.error_runtime(e)
elif args.value is None:
try:
print(vm.features[args.feature])
return 0
except KeyError:
return 1
except qubesadmin.exc.QubesException as e:
parser.error_runtime(e)
else:
try:
vm.features[args.feature] = args.value
except qubesadmin.exc.QubesException as e:
parser.error_runtime(e)
return 0
if __name__ == '__main__':
sys.exit(main())
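# Illustrative command lines for this tool (hedged, derived from the parser
# definition above):
#   qvm-features <vmname>                      list all features of the domain
#   qvm-features <vmname> <feature>            print one feature's value
#   qvm-features <vmname> <feature> <value>    set a feature
#   qvm-features --unset <vmname> <feature>    unset (delete) a feature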
| lgpl-2.1 | 2,822,929,185,765,776,400 | 29.65 | 80 | 0.649918 | false |
matt-hayden/dedup | futil.py | 1 | 4783 | #! /usr/bin/env python3
"""
"""
from datetime import datetime
import os, os.path
import tarfile
import zipfile
#from . import *
from __init__ import *
import characterize
def comm(lhs, rhs):
"""Returns (left-only, common, right-only)
"""
com = lhs & rhs
return (lhs-com), com, (rhs-com)
def cmp_stat(lhs, rhs):
if lhs.st_size == rhs.st_size:
if lhs.st_dev == rhs.st_dev:
if lhs.st_ino == rhs.st_ino:
assert lhs.st_mtime == rhs.st_mtime
return 0
if lhs.st_mtime < rhs.st_mtime:
return 1
if lhs.st_size < rhs.st_size:
return 1
return -1
def get_match_code(lhs, rhs):
_, com, _ = comm(lhs, rhs)
return pack_match_code(com)
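# Illustrative behaviour of the helpers above; pack_match_code() is assumed
# to come from the package __init__ star-import:
#
#   >>> comm({1, 2, 3}, {2, 3, 4})
#   ({1}, {2, 3}, {4})
#   >>> get_match_code(lhs_sums, rhs_sums)   # packed score of the overlap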
class Comparable:
"""stat, sums
"""
def __eq__(self, other):
if hasattr(self, 'stat') and hasattr(other, 'stat'):
if (cmp_stat(self.stat, other.stat) == 0):
return True
if self.matches(other):
return True
return False
def matches(self, other):
return 1 <= self.get_match_value(other)
def get_match_value(self, other, divisor=float(THRESHOLD_FOR_EQUALITY)):
if isinstance(other, Comparable):
mc = get_match_code(self.sums, other.sums)
else:
mc = get_match_code(self.sums, other)
return mc/divisor
def __and__(self, other):
if isinstance(other, Comparable):
return self.matches(other)
else:
return self.sums & set(other)
def __ior__(self, other):
# TODO: conservative
assert self.stat == other.stat
self.sums |= other.sums
return self
class FileObj(Comparable):
def __init__(self, my_stat):
self.members = []
self.is_dup = None
if my_stat:
self.datetime = datetime.fromtimestamp(my_stat.st_mtime)
self.size = my_stat.st_size
self.stat = my_stat
else:
self.datetime = ()
self.size = None
self.stat = ()
def get_flags(self):
if hasattr(self, 'size'):
if self.size in (0, None):
yield '0'
if hasattr(self, 'sums'):
for tup in self.sums:
label = tup[0]
if 'TOTAL' in label:
try:
s = len(tup[-1])
if 10 < s:
yield 'H{}'.format(s)
except TypeError:
pass
continue
yield ' '
for tup in self.sums:
label = tup[0]
if 'FINGERPRINT' in label:
yield 'f'
elif 'BW' in label:
yield 't'
elif 'COLOR' in label:
yield 't'
if hasattr(self, 'members'):
if self.members:
yield 'a'
if hasattr(self, 'is_dup'):
if self.is_dup:
yield 'D'
def describe(self):
return [ self.datetime or '',
self.size,
''.join(self.get_flags()) ]
def __repr__(self):
return "<File {1:,} b modified {0:%c} flags '{2}'>".format(*self.describe())
def __str__(self):
blank = ' '
parts = zip(('{:%c}', '{:12d}', '{:>10}'),
self.describe(),
(24, 12, 10))
return blank.join( (fs.format(s) if s else blank*fl) for fs, s, fl in parts)
def get_file_info(arg, sums=None, method=characterize.fast, method_for_archives=characterize.exhaustive):
row = FileObj(STAT(arg))
row.filename = arg
if sums:
row.sums = sums
else:
c = method(arg, size_hint=row.size)
row.sums = set(c)
if tarfile.is_tarfile(arg):
row.members = dict(expand_tarfile(arg, method=method_for_archives))
elif zipfile.is_zipfile(arg):
row.members = dict(expand_zipinfo(arg, method=method_for_archives))
return row
class ZipFileObj(FileObj):
def __init__(self, zi):
self.members = None
# zi is a ZipInfo object
dt = datetime(*zi.date_time)
self.datetime = dt if (datetime(1980, 1, 1) < dt) else None
self.filename = zi.filename
self.size = zi.file_size
self.volume = zi.volume
def expand_zipinfo(arg, method=characterize.fast):
with zipfile.ZipFile(arg) as zf:
for internal_f in zf.infolist():
if internal_f.filename.endswith('/'): # dirs end in / across platforms?
continue
row = ZipFileObj(internal_f)
if row.size == 0:
continue
row.sums = set( method(zf.open(internal_f), size_hint=row.size) )
row.sums.update( [ (('TOTAL', 'CRC'), hex(internal_f.CRC)) ] )
yield os.path.join(arg, row.filename), row
class TarFileObj(FileObj):
def __init__(self, ti):
self.members = None
self.datetime = datetime.fromtimestamp(ti.mtime)
self.filename = ti.name
self.size = ti.size
def expand_tarfile(arg, method=characterize.fast, ignore_symlinks=True):
"""
st_mode, st_ino, st_dev, st_nlink, st_uid, st_gid, st_size, st_atime, st_mtime, st_ctime
"""
with tarfile.open(arg) as tf:
for internal_f in tf.getmembers():
if not internal_f.isfile():
continue
# internal_f also has islnk() and issym()
if ignore_symlinks and internal_f.issym():
continue
row = TarFileObj(internal_f)
if not row.size:
continue
            # extractfile() returns a file-like object with the member's data;
            # tobuf() would only return the raw tar header bytes.
            row.sums = set( method(tf.extractfile(internal_f), size_hint=row.size) )
yield os.path.join(arg, row.filename), row
# vim: tabstop=4 shiftwidth=4 softtabstop=4 number :
| unlicense | -1,474,127,793,389,779,500 | 24.441489 | 105 | 0.639557 | false |
kburts/drf-music | Backend/api/tasks.py | 1 | 2798 | import re
import requests
from music.celery import app
from .models import Playlist, Song, User
@app.task
def task():
print 'Hello, world!'
return 'Returned hello!'
@app.task
def create_playlist_from_yt(url, user):
"""
Generate a playlist and populate it from a url to a youtube playlist
args:
url: youtube playlist url, examples:
https://www.youtube.com/watch?v=PpJVIhidBXM&index=15&list=PLXNnxXrfrLitw1tTuUFigZhY4C2FZhvLe
https://www.youtube.com/watch?v=k7Z7USWo2Lk&list=PLXNnxXrfrLitw1tTuUFigZhY4C2FZhvLe&index=18
user: username (required)
    title: not a parameter; the playlist title is taken from YouTube
    description: not a parameter; an auto-generated description is used
"""
    playlist_id = re.search(r'list=[\w_-]+', url) # \w, _ or -
playlist_id = playlist_id.group()[5:]
if playlist_id is None:
print 'No youtube playlist ID found in URL (should contain list=\\w+)'
return 1
# Make youtube api request
api_key = "AIzaSyBvdmvgZzy3N59lM4pp_0L2h8u5cPD17ro"
data = get_videos_from_playlist(playlist_id, api_key)
songs_to_add = []
playlist_title = requests.get((
"https://www.googleapis.com/youtube/v3/playlists?part=snippet"
"&id={0}"
"&key={1}"
).format(playlist_id, api_key)).json()['items'][0]['snippet']['title']
user = User.objects.get(username=user)
playlist = Playlist(
title = playlist_title,
description = "auto-generated playlist from a youtube playlist url.",
user = user)
playlist.save()
for item in data:
s = Song(
name = item[0],
url = "https://www.youtube.com/watch?v=%s" %item[1],
added_by = user
)
s.save()
songs_to_add.append(s)
playlist.songs.add(*songs_to_add)
return playlist.id
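# Usage sketch (the URL and username below are hypothetical):
#   create_playlist_from_yt(
#       "https://www.youtube.com/watch?v=abc&list=PLsomeplaylistid", "alice")
# creates the Playlist and its Songs, then returns the new playlist's id.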
def get_videos_from_playlist(playlist_id, api_key):
"""
Returns a list of tuples: [(title: video_id)...] of youtube videos and their youtube id's
args:
playlist_id -- Id of youtube playlist (eg. PLXNnxXrfrLitw1tTuUFigZhY4C2FZhvLe)
api_key -- youtube api key
"""
page_token = ""
videos = []
while True:
# Request
url = (
"https://www.googleapis.com/youtube/v3/playlistItems?"
"part=snippet"
"&playlistId={0}"
"&pageToken={1}"
"&key={2}"
).format(playlist_id, page_token, api_key)
data = requests.get(url).json()
for item in data['items']:
videos.append((item['snippet']['title'], item['snippet']['resourceId']['videoId']))
if not 'nextPageToken' in data.keys():
break
else:
page_token = data['nextPageToken']
    return videos
| mit | -825,444,804,723,014,100 | 31.172414 | 103 | 0.613295 | false
anchore/anchore | anchore/anchore-modules/examples/gates/gate-template.py | 1 | 1045 | #!/usr/bin/env python
import sys
import os
import json
import re
import anchore
from anchore import anchore_utils
gate_name = "GATENAMEHERE"
triggers = {
'TRIGGER1':
{
'description':'triggers if this happens',
'params':'TRIGGER1_PARAMS'
},
'TRIGGER2':
{
'description':'triggers if that happens',
'params':'None'
},
}
try:
config = anchore.anchore_utils.init_gate_cmdline(sys.argv, gate_name, gate_help=triggers)
except Exception as err:
print str(err)
sys.exit(1)
if not config:
print "ERROR: could not set up environment for gate"
sys.exit(1)
imgid = config['imgid']
try:
params = config['params']
except:
params = None
outlist = list()
# do something
try:
image = anchore.anchore_image.AnchoreImage(imgid, allimages={})
#outlist.append("TRIGGER1 Some text")
except Exception as err:
#print "ERROR: could not do something" + str(err)
exit(1)
# write output
anchore.anchore_utils.save_gate_output(imgid, gate_name, outlist)
sys.exit(0)
| apache-2.0 | -2,019,843,133,459,232,000 | 18.716981 | 93 | 0.661244 | false |
bilke/OpenSG-1.8 | SConsLocal/scons-local-0.96.1/SCons/Environment.py | 2 | 48546 | """SCons.Environment
Base class for construction Environments. These are
the primary objects used to communicate dependency and
construction information to the build engine.
Keyword arguments supplied when the construction Environment
is created are construction variables used to initialize the
Environment
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "/home/scons/scons/branch.0/baseline/src/engine/SCons/Environment.py 0.96.1.D001 2004/08/23 09:55:29 knight"
import copy
import os
import os.path
import string
from UserDict import UserDict
import SCons.Action
import SCons.Builder
from SCons.Debug import logInstanceCreation
import SCons.Defaults
import SCons.Errors
import SCons.Node
import SCons.Node.Alias
import SCons.Node.FS
import SCons.Node.Python
import SCons.Platform
import SCons.SConsign
import SCons.Sig
import SCons.Sig.MD5
import SCons.Sig.TimeStamp
import SCons.Tool
import SCons.Util
import SCons.Warnings
class _Null:
pass
_null = _Null
CleanTargets = {}
CalculatorArgs = {}
# Pull UserError into the global name space for the benefit of
# Environment().SourceSignatures(), which has some import statements
# which seem to mess up its ability to reference SCons directly.
UserError = SCons.Errors.UserError
def installFunc(target, source, env):
"""Install a source file into a target using the function specified
as the INSTALL construction variable."""
try:
install = env['INSTALL']
except KeyError:
raise SCons.Errors.UserError('Missing INSTALL construction variable.')
return install(target[0].path, source[0].path, env)
def installString(target, source, env):
return 'Install file: "%s" as "%s"' % (source[0], target[0])
installAction = SCons.Action.Action(installFunc, installString)
InstallBuilder = SCons.Builder.Builder(action=installAction)
def alias_builder(env, target, source):
pass
AliasBuilder = SCons.Builder.Builder(action = alias_builder,
target_factory = SCons.Node.Alias.default_ans.Alias,
source_factory = SCons.Node.FS.default_fs.Entry,
multi = 1)
def our_deepcopy(x):
"""deepcopy lists and dictionaries, and just copy the reference
for everything else."""
if SCons.Util.is_Dict(x):
copy = {}
for key in x.keys():
copy[key] = our_deepcopy(x[key])
elif SCons.Util.is_List(x):
copy = map(our_deepcopy, x)
try:
copy = x.__class__(copy)
except AttributeError:
pass
else:
copy = x
return copy
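# Copy semantics, illustratively: containers are duplicated while other
# objects keep their identity (names below are hypothetical):
#   d = our_deepcopy({'flags': ['-g'], 'obj': some_object})
#   d['flags'] is a fresh list, but d['obj'] is still some_object.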
def apply_tools(env, tools, toolpath):
if tools:
# Filter out null tools from the list.
tools = filter(None, tools)
for tool in tools:
if SCons.Util.is_String(tool):
env.Tool(tool, toolpath)
else:
tool(env)
# These names are controlled by SCons; users should never set or override
# them. This warning can optionally be turned off, but scons will still
# ignore the illegal variable names even if it's off.
reserved_construction_var_names = \
['TARGET', 'TARGETS', 'SOURCE', 'SOURCES']
def copy_non_reserved_keywords(dict):
result = our_deepcopy(dict)
for k in result.keys():
if k in reserved_construction_var_names:
SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning,
"Ignoring attempt to set reserved variable `%s'" % k)
del result[k]
return result
class BuilderWrapper:
"""Wrapper class that associates an environment with a Builder at
instantiation."""
def __init__(self, env, builder):
self.env = env
self.builder = builder
def __call__(self, *args, **kw):
return apply(self.builder, (self.env,) + args, kw)
# This allows a Builder to be executed directly
# through the Environment to which it's attached.
# In practice, we shouldn't need this, because
# builders actually get executed through a Node.
# But we do have a unit test for this, and can't
# yet rule out that it would be useful in the
# future, so leave it for now.
def execute(self, **kw):
kw['env'] = self.env
apply(self.builder.execute, (), kw)
class BuilderDict(UserDict):
"""This is a dictionary-like class used by an Environment to hold
the Builders. We need to do this because every time someone changes
the Builders in the Environment's BUILDERS dictionary, we must
update the Environment's attributes."""
def __init__(self, dict, env):
# Set self.env before calling the superclass initialization,
# because it will end up calling our other methods, which will
# need to point the values in this dictionary to self.env.
self.env = env
UserDict.__init__(self, dict)
def __setitem__(self, item, val):
UserDict.__setitem__(self, item, val)
try:
self.setenvattr(item, val)
except AttributeError:
# Have to catch this because sometimes __setitem__ gets
# called out of __init__, when we don't have an env
# attribute yet, nor do we want one!
pass
def setenvattr(self, item, val):
"""Set the corresponding environment attribute for this Builder.
If the value is already a BuilderWrapper, we pull the builder
out of it and make another one, so that making a copy of an
existing BuilderDict is guaranteed separate wrappers for each
Builder + Environment pair."""
try:
builder = val.builder
except AttributeError:
builder = val
setattr(self.env, item, BuilderWrapper(self.env, builder))
def __delitem__(self, item):
UserDict.__delitem__(self, item)
delattr(self.env, item)
def update(self, dict):
for i, v in dict.items():
self.__setitem__(i, v)
class Base:
"""Base class for construction Environments. These are
the primary objects used to communicate dependency and
construction information to the build engine.
Keyword arguments supplied when the construction Environment
is created are construction variables used to initialize the
Environment.
"""
#######################################################################
# This is THE class for interacting with the SCons build engine,
# and it contains a lot of stuff, so we're going to try to keep this
# a little organized by grouping the methods.
#######################################################################
#######################################################################
# Methods that make an Environment act like a dictionary. These have
# the expected standard names for Python mapping objects. Note that
# we don't actually make an Environment a subclass of UserDict for
# performance reasons. Note also that we only supply methods for
# dictionary functionality that we actually need and use.
#######################################################################
def __init__(self,
platform=None,
tools=None,
toolpath=[],
options=None,
**kw):
if __debug__: logInstanceCreation(self)
self.fs = SCons.Node.FS.default_fs
self.ans = SCons.Node.Alias.default_ans
self.lookup_list = SCons.Node.arg2nodes_lookups
self._dict = our_deepcopy(SCons.Defaults.ConstructionEnvironment)
self._dict['__env__'] = self
self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self)
if platform is None:
platform = self._dict.get('PLATFORM', None)
if platform is None:
platform = SCons.Platform.Platform()
if SCons.Util.is_String(platform):
platform = SCons.Platform.Platform(platform)
self._dict['PLATFORM'] = str(platform)
platform(self)
# Apply the passed-in variables before calling the tools,
# because they may use some of them:
apply(self.Replace, (), kw)
# Update the environment with the customizable options
# before calling the tools, since they may use some of the options:
if options:
options.Update(self)
if tools is None:
tools = self._dict.get('TOOLS', None)
if tools is None:
tools = ['default']
apply_tools(self, tools, toolpath)
# Reapply the passed in variables after calling the tools,
        # since they should override anything set by the tools:
apply(self.Replace, (), kw)
# Update the environment with the customizable options
# after calling the tools, since they should override anything
# set by the tools:
if options:
options.Update(self)
def __cmp__(self, other):
# Since an Environment now has an '__env__' construction variable
# that refers to itself, delete that variable to avoid infinite
# loops when comparing the underlying dictionaries in some Python
# versions (*cough* 1.5.2 *cough*)...
sdict = self._dict.copy()
del sdict['__env__']
odict = other._dict.copy()
del odict['__env__']
return cmp(sdict, odict)
def __getitem__(self, key):
return self._dict[key]
def __setitem__(self, key, value):
if key in reserved_construction_var_names:
SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning,
"Ignoring attempt to set reserved variable `%s'" % key)
elif key == 'BUILDERS':
try:
bd = self._dict[key]
for k in bd.keys():
del bd[k]
except KeyError:
                self._dict[key] = BuilderDict({}, self)
self._dict[key].update(value)
elif key == 'SCANNERS':
self._dict[key] = value
self.scanner_map_delete()
else:
if not SCons.Util.is_valid_construction_var(key):
raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key
self._dict[key] = value
def __delitem__(self, key):
del self._dict[key]
def items(self):
"Emulates the items() method of dictionaries."""
return self._dict.items()
def has_key(self, key):
return self._dict.has_key(key)
def get(self, key, default=None):
"Emulates the get() method of dictionaries."""
return self._dict.get(key, default)
#######################################################################
# Utility methods that are primarily for internal use by SCons.
# These begin with lower-case letters. Note that the subst() method
# is actually already out of the closet and used by people.
#######################################################################
def arg2nodes(self, args, node_factory=_null, lookup_list=_null):
if node_factory is _null:
node_factory = self.fs.File
if lookup_list is _null:
lookup_list = self.lookup_list
if not args:
return []
if SCons.Util.is_List(args):
args = SCons.Util.flatten(args)
else:
args = [args]
nodes = []
for v in args:
if SCons.Util.is_String(v):
n = None
for l in lookup_list:
n = l(v)
if not n is None:
break
if not n is None:
if SCons.Util.is_String(n):
n = self.subst(n, raw=1)
if node_factory:
n = node_factory(n)
if SCons.Util.is_List(n):
nodes.extend(n)
else:
nodes.append(n)
elif node_factory:
v = node_factory(self.subst(v, raw=1))
if SCons.Util.is_List(v):
nodes.extend(v)
else:
nodes.append(v)
else:
nodes.append(v)
return nodes
def get_calculator(self):
try:
return self._calculator
except AttributeError:
try:
module = self._calc_module
c = apply(SCons.Sig.Calculator, (module,), CalculatorArgs)
except AttributeError:
# Note that we're calling get_calculator() here, so the
# DefaultEnvironment() must have a _calc_module attribute
# to avoid infinite recursion.
c = SCons.Defaults.DefaultEnvironment().get_calculator()
self._calculator = c
return c
def get_builder(self, name):
"""Fetch the builder with the specified name from the environment.
"""
try:
return self._dict['BUILDERS'][name]
except KeyError:
return None
def get_scanner(self, skey):
"""Find the appropriate scanner given a key (usually a file suffix).
"""
try:
sm = self.scanner_map
except AttributeError:
try:
scanners = self._dict['SCANNERS']
except KeyError:
self.scanner_map = {}
return None
else:
self.scanner_map = sm = {}
# Reverse the scanner list so that, if multiple scanners
# claim they can scan the same suffix, earlier scanners
# in the list will overwrite later scanners, so that
# the result looks like a "first match" to the user.
if not SCons.Util.is_List(scanners):
scanners = [scanners]
scanners.reverse()
for scanner in scanners:
for k in scanner.get_skeys(self):
sm[k] = scanner
try:
return sm[skey]
except KeyError:
return None
def scanner_map_delete(self, kw=None):
"""Delete the cached scanner map (if we need to).
"""
if not kw is None and not kw.has_key('SCANNERS'):
return
try:
del self.scanner_map
except AttributeError:
pass
def subst(self, string, raw=0, target=None, source=None, dict=None, conv=None):
"""Recursively interpolates construction variables from the
Environment into the specified string, returning the expanded
result. Construction variables are specified by a $ prefix
in the string and begin with an initial underscore or
alphabetic character followed by any number of underscores
or alphanumeric characters. The construction variable names
may be surrounded by curly braces to separate the name from
trailing characters.
"""
return SCons.Util.scons_subst(string, self, raw, target, source, dict, conv)
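    # Illustrative use (sketch): with env['CC'] set to 'gcc',
    #   env.subst('$CC -o ${TARGET}') expands $CC to 'gcc'; the braces
    # separate a variable name from any trailing characters.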
def subst_kw(self, kw, raw=0, target=None, source=None, dict=None):
nkw = {}
for k, v in kw.items():
k = self.subst(k, raw, target, source, dict)
if SCons.Util.is_String(v):
v = self.subst(v, raw, target, source, dict)
nkw[k] = v
return nkw
def subst_list(self, string, raw=0, target=None, source=None, dict=None, conv=None):
"""Calls through to SCons.Util.scons_subst_list(). See
the documentation for that function."""
return SCons.Util.scons_subst_list(string, self, raw, target, source, dict, conv)
def subst_path(self, path):
"""Substitute a path list, turning EntryProxies into Nodes
and leaving Nodes (and other objects) as-is."""
if not SCons.Util.is_List(path):
path = [path]
def s(obj):
"""This is the "string conversion" routine that we have our
substitutions use to return Nodes, not strings. This relies
on the fact that an EntryProxy object has a get() method that
returns the underlying Node that it wraps, which is a bit of
architectural dependence that we might need to break or modify
in the future in response to additional requirements."""
try:
get = obj.get
except AttributeError:
pass
else:
obj = get()
return obj
r = []
for p in path:
if SCons.Util.is_String(p):
p = self.subst(p, conv=s)
if SCons.Util.is_List(p):
if len(p) == 1:
p = p[0]
else:
# We have an object plus a string, or multiple
# objects that we need to smush together. No choice
# but to make them into a string.
p = string.join(map(SCons.Util.to_String, p), '')
else:
p = s(p)
r.append(p)
return r
subst_target_source = subst
def _update(self, dict):
"""Update an environment's values directly, bypassing the normal
checks that occur when users try to set items.
"""
self._dict.update(dict)
def use_build_signature(self):
try:
return self._build_signature
except AttributeError:
b = SCons.Defaults.DefaultEnvironment()._build_signature
self._build_signature = b
return b
#######################################################################
# Public methods for manipulating an Environment. These begin with
# upper-case letters. The essential characteristic of methods in
# this section is that they do *not* have corresponding same-named
# global functions. For example, a stand-alone Append() function
# makes no sense, because Append() is all about appending values to
# an Environment's construction variables.
#######################################################################
def Append(self, **kw):
"""Append values to existing construction variables
in an Environment.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
# It would be easier on the eyes to write this using
# "continue" statements whenever we finish processing an item,
# but Python 1.5.2 apparently doesn't let you use "continue"
# within try:-except: blocks, so we have to nest our code.
try:
orig = self._dict[key]
except KeyError:
# No existing variable in the environment, so just set
# it to the new value.
self._dict[key] = val
else:
try:
# Most straightforward: just try to add them
# together. This will work in most cases, when the
# original and new values are of compatible types.
self._dict[key] = orig + val
except TypeError:
try:
# Try to update a dictionary value with another.
# If orig isn't a dictionary, it won't have an
# update() method; if val isn't a dictionary,
# it won't have a keys() method. Either way,
# it's an AttributeError.
orig.update(val)
except AttributeError:
try:
# Check if the original is a list.
add_to_orig = orig.append
except AttributeError:
# The original isn't a list, but the new
# value is (by process of elimination),
# so insert the original in the new value
# (if there's one to insert) and replace
# the variable with it.
if orig:
val.insert(0, orig)
self._dict[key] = val
else:
# The original is a list, so append the new
# value to it (if there's a value to append).
if val:
add_to_orig(val)
self.scanner_map_delete(kw)
def AppendENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep):
"""Append path elements to the path 'name' in the 'ENV'
dictionary for this environment. Will only add any particular
path once, and will normpath and normcase all paths to help
assure this. This can also handle the case where the env
variable is a list instead of a string.
"""
orig = ''
if self._dict.has_key(envname) and self._dict[envname].has_key(name):
orig = self._dict[envname][name]
nv = SCons.Util.AppendPath(orig, newpath, sep)
if not self._dict.has_key(envname):
self._dict[envname] = {}
self._dict[envname][name] = nv
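    # Example (illustrative): env.AppendENVPath('PATH', '/usr/local/bin')
    # extends env['ENV']['PATH'], skipping entries that are already present.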
def AppendUnique(self, **kw):
"""Append values to existing construction variables
in an Environment, if they're not already there.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
if not self._dict.has_key(key):
self._dict[key] = val
elif SCons.Util.is_Dict(self._dict[key]) and \
SCons.Util.is_Dict(val):
self._dict[key].update(val)
elif SCons.Util.is_List(val):
dk = self._dict[key]
if not SCons.Util.is_List(dk):
dk = [dk]
val = filter(lambda x, dk=dk: x not in dk, val)
self._dict[key] = dk + val
else:
dk = self._dict[key]
if SCons.Util.is_List(dk):
if not val in dk:
self._dict[key] = dk + val
else:
self._dict[key] = self._dict[key] + val
self.scanner_map_delete(kw)
def Copy(self, tools=None, toolpath=[], **kw):
"""Return a copy of a construction Environment. The
copy is like a Python "deep copy"--that is, independent
        copies are made recursively of each object--except that
a reference is copied when an object is not deep-copyable
(like a function). There are no references to any mutable
objects in the original Environment.
"""
clone = copy.copy(self)
clone._dict = our_deepcopy(self._dict)
clone['__env__'] = clone
try:
cbd = clone._dict['BUILDERS']
clone._dict['BUILDERS'] = BuilderDict(cbd, clone)
except KeyError:
pass
apply_tools(clone, tools, toolpath)
# Apply passed-in variables after the new tools.
kw = copy_non_reserved_keywords(kw)
new = {}
for key, value in kw.items():
new[key] = SCons.Util.scons_subst_once(value, self, key)
apply(clone.Replace, (), new)
return clone
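    # Typical use (sketch): clone = env.Copy(CCFLAGS='-g'); changing the
    # clone's construction variables leaves the original environment intact.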
def Detect(self, progs):
"""Return the first available program in progs.
"""
if not SCons.Util.is_List(progs):
progs = [ progs ]
for prog in progs:
path = self.WhereIs(prog)
if path: return prog
return None
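    # Example (illustrative): env.Detect(['gcc', 'cc']) returns whichever of
    # the two programs is found first on the path, or None if neither is.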
def Dictionary(self, *args):
if not args:
return self._dict
dlist = map(lambda x, s=self: s._dict[x], args)
if len(dlist) == 1:
dlist = dlist[0]
return dlist
def FindIxes(self, paths, prefix, suffix):
"""
Search a list of paths for something that matches the prefix and suffix.
paths - the list of paths or nodes.
prefix - construction variable for the prefix.
suffix - construction variable for the suffix.
"""
suffix = self.subst('$'+suffix)
prefix = self.subst('$'+prefix)
for path in paths:
dir,name = os.path.split(str(path))
if name[:len(prefix)] == prefix and name[-len(suffix):] == suffix:
return path
def Override(self, overrides):
"""
Produce a modified environment whose variables
        are overridden by the overrides dictionary.
overrides - a dictionary that will override
the variables of this environment.
This function is much more efficient than Copy()
or creating a new Environment because it doesn't do
a deep copy of the dictionary, and doesn't do a copy
at all if there are no overrides.
"""
if overrides:
env = copy.copy(self)
env._dict = copy.copy(self._dict)
env['__env__'] = env
overrides = copy_non_reserved_keywords(overrides)
new = {}
for key, value in overrides.items():
new[key] = SCons.Util.scons_subst_once(value, self, key)
env._dict.update(new)
return env
else:
return self
def ParseConfig(self, command, function=None):
"""
Use the specified function to parse the output of the command
in order to modify the current environment. The 'command' can
be a string or a list of strings representing a command and
        its arguments. 'Function' is an optional argument that takes
the environment and the output of the command. If no function is
specified, the output will be treated as the output of a typical
'X-config' command (i.e. gtk-config) and used to append to the
ASFLAGS, CCFLAGS, CPPFLAGS, CPPPATH, LIBPATH, LIBS, LINKFLAGS
and CCFLAGS variables.
"""
# the default parse function
def parse_conf(env, output):
dict = {
'ASFLAGS' : [],
'CCFLAGS' : [],
'CPPFLAGS' : [],
'CPPPATH' : [],
'LIBPATH' : [],
'LIBS' : [],
'LINKFLAGS' : [],
}
static_libs = []
params = string.split(output)
for arg in params:
if arg[0] != '-':
static_libs.append(arg)
elif arg[:2] == '-L':
dict['LIBPATH'].append(arg[2:])
elif arg[:2] == '-l':
dict['LIBS'].append(arg[2:])
elif arg[:2] == '-I':
dict['CPPPATH'].append(arg[2:])
elif arg[:4] == '-Wa,':
dict['ASFLAGS'].append(arg)
elif arg[:4] == '-Wl,':
dict['LINKFLAGS'].append(arg)
elif arg[:4] == '-Wp,':
dict['CPPFLAGS'].append(arg)
elif arg == '-pthread':
dict['CCFLAGS'].append(arg)
dict['LINKFLAGS'].append(arg)
else:
dict['CCFLAGS'].append(arg)
apply(env.Append, (), dict)
return static_libs
if function is None:
function = parse_conf
if type(command) is type([]):
command = string.join(command)
command = self.subst(command)
return function(self, os.popen(command).read())
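    # Usage sketch (assumes a gtk-config/pkg-config style tool on the path):
    #   env.ParseConfig('pkg-config --cflags --libs gtk+-2.0')
    # appends the parsed -I/-L/-l/-W... flags to CPPPATH, LIBPATH, LIBS, etc.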
def Platform(self, platform):
platform = self.subst(platform)
return SCons.Platform.Platform(platform)(self)
def Prepend(self, **kw):
"""Prepend values to existing construction variables
in an Environment.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
# It would be easier on the eyes to write this using
# "continue" statements whenever we finish processing an item,
# but Python 1.5.2 apparently doesn't let you use "continue"
# within try:-except: blocks, so we have to nest our code.
try:
orig = self._dict[key]
except KeyError:
# No existing variable in the environment, so just set
# it to the new value.
self._dict[key] = val
else:
try:
# Most straightforward: just try to add them
# together. This will work in most cases, when the
# original and new values are of compatible types.
self._dict[key] = val + orig
except TypeError:
try:
# Try to update a dictionary value with another.
# If orig isn't a dictionary, it won't have an
# update() method; if val isn't a dictionary,
# it won't have a keys() method. Either way,
# it's an AttributeError.
orig.update(val)
except AttributeError:
try:
# Check if the added value is a list.
add_to_val = val.append
except AttributeError:
# The added value isn't a list, but the
# original is (by process of elimination),
                        # so insert the new value in the original
# (if there's one to insert).
if val:
orig.insert(0, val)
else:
# The added value is a list, so append
# the original to it (if there's a value
# to append).
if orig:
add_to_val(orig)
self._dict[key] = val
self.scanner_map_delete(kw)
def PrependENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep):
"""Prepend path elements to the path 'name' in the 'ENV'
dictionary for this environment. Will only add any particular
path once, and will normpath and normcase all paths to help
assure this. This can also handle the case where the env
variable is a list instead of a string.
"""
orig = ''
if self._dict.has_key(envname) and self._dict[envname].has_key(name):
orig = self._dict[envname][name]
nv = SCons.Util.PrependPath(orig, newpath, sep)
if not self._dict.has_key(envname):
self._dict[envname] = {}
self._dict[envname][name] = nv
def PrependUnique(self, **kw):
"""Append values to existing construction variables
in an Environment, if they're not already there.
"""
kw = copy_non_reserved_keywords(kw)
for key, val in kw.items():
if not self._dict.has_key(key):
self._dict[key] = val
elif SCons.Util.is_Dict(self._dict[key]) and \
SCons.Util.is_Dict(val):
self._dict[key].update(val)
elif SCons.Util.is_List(val):
dk = self._dict[key]
if not SCons.Util.is_List(dk):
dk = [dk]
val = filter(lambda x, dk=dk: x not in dk, val)
self._dict[key] = val + dk
else:
dk = self._dict[key]
if SCons.Util.is_List(dk):
if not val in dk:
self._dict[key] = val + dk
else:
self._dict[key] = val + dk
self.scanner_map_delete(kw)
def Replace(self, **kw):
"""Replace existing construction variables in an Environment
with new construction variables and/or values.
"""
try:
kwbd = our_deepcopy(kw['BUILDERS'])
del kw['BUILDERS']
self.__setitem__('BUILDERS', kwbd)
except KeyError:
pass
kw = copy_non_reserved_keywords(kw)
self._dict.update(our_deepcopy(kw))
self.scanner_map_delete(kw)
def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix):
"""
Replace old_prefix with new_prefix and old_suffix with new_suffix.
env - Environment used to interpolate variables.
path - the path that will be modified.
old_prefix - construction variable for the old prefix.
old_suffix - construction variable for the old suffix.
new_prefix - construction variable for the new prefix.
new_suffix - construction variable for the new suffix.
"""
old_prefix = self.subst('$'+old_prefix)
old_suffix = self.subst('$'+old_suffix)
new_prefix = self.subst('$'+new_prefix)
new_suffix = self.subst('$'+new_suffix)
dir,name = os.path.split(str(path))
if name[:len(old_prefix)] == old_prefix:
name = name[len(old_prefix):]
if name[-len(old_suffix):] == old_suffix:
name = name[:-len(old_suffix)]
return os.path.join(dir, new_prefix+name+new_suffix)
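    # Example (illustrative): with the usual POSIX values of these variables,
    #   env.ReplaceIxes('libfoo.so', 'SHLIBPREFIX', 'SHLIBSUFFIX',
    #                   'LIBPREFIX', 'LIBSUFFIX')
    # returns 'libfoo.a'.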
def Tool(self, tool, toolpath=[]):
tool = self.subst(tool)
return SCons.Tool.Tool(tool, map(self.subst, toolpath))(self)
def WhereIs(self, prog, path=None, pathext=None, reject=[]):
"""Find prog in the path.
"""
if path is None:
try:
path = self['ENV']['PATH']
except KeyError:
pass
elif SCons.Util.is_String(path):
path = self.subst(path)
if pathext is None:
try:
pathext = self['ENV']['PATHEXT']
except KeyError:
pass
elif SCons.Util.is_String(pathext):
pathext = self.subst(pathext)
path = SCons.Util.WhereIs(prog, path, pathext, reject)
if path: return path
return None
#######################################################################
# Public methods for doing real "SCons stuff" (manipulating
# dependencies, setting attributes on targets, etc.). These begin
# with upper-case letters. The essential characteristic of methods
# in this section is that they all *should* have corresponding
# same-named global functions.
#######################################################################
def Action(self, *args, **kw):
nargs = self.subst(args)
nkw = self.subst_kw(kw)
return apply(SCons.Action.Action, nargs, nkw)
def AddPreAction(self, files, action):
nodes = self.arg2nodes(files, self.fs.Entry)
action = SCons.Action.Action(action)
for n in nodes:
n.add_pre_action(action)
return nodes
def AddPostAction(self, files, action):
nodes = self.arg2nodes(files, self.fs.Entry)
action = SCons.Action.Action(action)
for n in nodes:
n.add_post_action(action)
return nodes
def Alias(self, target, *source, **kw):
if not SCons.Util.is_List(target):
target = [target]
tlist = []
for t in target:
if not isinstance(t, SCons.Node.Alias.Alias):
t = self.arg2nodes(self.subst(t), self.ans.Alias)[0]
tlist.append(t)
try:
s = kw['source']
except KeyError:
try:
s = source[0]
except IndexError:
s = None
if s:
if not SCons.Util.is_List(s):
s = [s]
s = filter(None, s)
s = self.arg2nodes(s, self.fs.Entry)
for t in tlist:
AliasBuilder(self, t, s)
return tlist
def AlwaysBuild(self, *targets):
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.File))
for t in tlist:
t.set_always_build()
return tlist
def BuildDir(self, build_dir, src_dir, duplicate=1):
build_dir = self.arg2nodes(build_dir, self.fs.Dir)[0]
src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0]
self.fs.BuildDir(build_dir, src_dir, duplicate)
def Builder(self, **kw):
nkw = self.subst_kw(kw)
return apply(SCons.Builder.Builder, [], nkw)
def CacheDir(self, path):
self.fs.CacheDir(self.subst(path))
def Clean(self, targets, files):
global CleanTargets
tlist = self.arg2nodes(targets, self.fs.Entry)
flist = self.arg2nodes(files, self.fs.Entry)
for t in tlist:
try:
CleanTargets[t].extend(flist)
except KeyError:
CleanTargets[t] = flist
def Configure(self, *args, **kw):
nargs = [self]
if args:
nargs = nargs + self.subst_list(args)[0]
nkw = self.subst_kw(kw)
try:
nkw['custom_tests'] = self.subst_kw(nkw['custom_tests'])
except KeyError:
pass
return apply(SCons.SConf.SConf, nargs, nkw)
def Command(self, target, source, action, **kw):
"""Builds the supplied target files from the supplied
source files using the supplied action. Action may
be any type that the Builder constructor will accept
for an action."""
nkw = self.subst_kw(kw)
nkw['action'] = action
nkw['source_factory'] = self.fs.Entry
bld = apply(SCons.Builder.Builder, (), nkw)
return bld(self, target, source)
def Depends(self, target, dependency):
"""Explicity specify that 'target's depend on 'dependency'."""
tlist = self.arg2nodes(target, self.fs.Entry)
dlist = self.arg2nodes(dependency, self.fs.Entry)
for t in tlist:
t.add_dependency(dlist)
return tlist
def Dir(self, name, *args, **kw):
"""
"""
return apply(self.fs.Dir, (self.subst(name),) + args, kw)
def Environment(self, **kw):
return apply(SCons.Environment.Environment, [], self.subst_kw(kw))
def Execute(self, action, *args, **kw):
"""Directly execute an action through an Environment
"""
action = apply(self.Action, (action,) + args, kw)
return action([], [], self)
def File(self, name, *args, **kw):
"""
"""
return apply(self.fs.File, (self.subst(name),) + args, kw)
def FindFile(self, file, dirs):
file = self.subst(file)
nodes = self.arg2nodes(dirs, self.fs.Dir)
return SCons.Node.FS.find_file(file, nodes, self.fs.File)
def Flatten(self, sequence):
return SCons.Util.flatten(sequence)
def GetBuildPath(self, files):
result = map(str, self.arg2nodes(files, self.fs.Entry))
if SCons.Util.is_List(files):
return result
else:
return result[0]
def Ignore(self, target, dependency):
"""Ignore a dependency."""
tlist = self.arg2nodes(target, self.fs.Entry)
dlist = self.arg2nodes(dependency, self.fs.Entry)
for t in tlist:
t.add_ignore(dlist)
return tlist
def Install(self, dir, source):
"""Install specified files in the given directory."""
try:
dnodes = self.arg2nodes(dir, self.fs.Dir)
except TypeError:
raise SCons.Errors.UserError, "Target `%s' of Install() is a file, but should be a directory. Perhaps you have the Install() arguments backwards?" % str(dir)
try:
sources = self.arg2nodes(source, self.fs.File)
except TypeError:
if SCons.Util.is_List(source):
raise SCons.Errors.UserError, "Source `%s' of Install() contains one or more non-files. Install() source must be one or more files." % repr(map(str, source))
else:
raise SCons.Errors.UserError, "Source `%s' of Install() is not a file. Install() source must be one or more files." % str(source)
tgt = []
for dnode in dnodes:
for src in sources:
target = self.fs.File(src.name, dnode)
tgt.extend(InstallBuilder(self, target, src))
return tgt
def InstallAs(self, target, source):
"""Install sources as targets."""
sources = self.arg2nodes(source, self.fs.File)
targets = self.arg2nodes(target, self.fs.File)
result = []
for src, tgt in map(lambda x, y: (x, y), sources, targets):
result.extend(InstallBuilder(self, tgt, src))
return result
def Literal(self, string):
return SCons.Util.Literal(string)
def Local(self, *targets):
ret = []
for targ in targets:
if isinstance(targ, SCons.Node.Node):
targ.set_local()
ret.append(targ)
else:
for t in self.arg2nodes(targ, self.fs.Entry):
t.set_local()
ret.append(t)
return ret
def Precious(self, *targets):
tlist = []
for t in targets:
tlist.extend(self.arg2nodes(t, self.fs.Entry))
for t in tlist:
t.set_precious()
return tlist
def Repository(self, *dirs, **kw):
dirs = self.arg2nodes(list(dirs), self.fs.Dir)
apply(self.fs.Repository, dirs, kw)
def Scanner(self, *args, **kw):
nargs = []
for arg in args:
if SCons.Util.is_String(arg):
arg = self.subst(arg)
nargs.append(arg)
nkw = self.subst_kw(kw)
return apply(SCons.Scanner.Scanner, nargs, nkw)
def SConsignFile(self, name=".sconsign", dbm_module=None):
name = self.subst(name)
if not os.path.isabs(name):
name = os.path.join(str(self.fs.SConstruct_dir), name)
SCons.SConsign.File(name, dbm_module)
def SideEffect(self, side_effect, target):
"""Tell scons that side_effects are built as side
effects of building targets."""
side_effects = self.arg2nodes(side_effect, self.fs.Entry)
targets = self.arg2nodes(target, self.fs.Entry)
for side_effect in side_effects:
if side_effect.multiple_side_effect_has_builder():
raise SCons.Errors.UserError, "Multiple ways to build the same target were specified for: %s" % str(side_effect)
side_effect.add_source(targets)
side_effect.side_effect = 1
self.Precious(side_effect)
for target in targets:
target.side_effects.append(side_effect)
return side_effects
def SourceCode(self, entry, builder):
"""Arrange for a source code builder for (part of) a tree."""
entries = self.arg2nodes(entry, self.fs.Entry)
for entry in entries:
entry.set_src_builder(builder)
return entries
def SourceSignatures(self, type):
type = self.subst(type)
if type == 'MD5':
import SCons.Sig.MD5
self._calc_module = SCons.Sig.MD5
elif type == 'timestamp':
import SCons.Sig.TimeStamp
self._calc_module = SCons.Sig.TimeStamp
else:
raise UserError, "Unknown source signature type '%s'"%type
def Split(self, arg):
"""This function converts a string or list into a list of strings
or Nodes. This makes things easier for users by allowing files to
be specified as a white-space separated list to be split.
The input rules are:
- A single string containing names separated by spaces. These will be
split apart at the spaces.
- A single Node instance
- A list containing either strings or Node instances. Any strings
in the list are not split at spaces.
In all cases, the function returns a list of Nodes and strings."""
if SCons.Util.is_List(arg):
return map(self.subst, arg)
elif SCons.Util.is_String(arg):
return string.split(self.subst(arg))
else:
return [self.subst(arg)]
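    # Examples (illustrative):
    #   env.Split('f1.c f2.c')       -> ['f1.c', 'f2.c']
    #   env.Split(['f1.c', 'f2.c'])  -> the same list; strings in a list
    #                                   are not split at spaces.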
def TargetSignatures(self, type):
type = self.subst(type)
if type == 'build':
self._build_signature = 1
elif type == 'content':
self._build_signature = 0
else:
raise SCons.Errors.UserError, "Unknown target signature type '%s'"%type
def Value(self, value):
"""
"""
return SCons.Node.Python.Value(value)
# The entry point that will be used by the external world
# to refer to a construction environment. This allows the wrapper
# interface to extend a construction environment for its own purposes
# by subclassing SCons.Environment.Base and then assigning the
# class to SCons.Environment.Environment.
Environment = Base
# An entry point for returning a proxy subclass instance that overrides
# the subst*() methods so they don't actually perform construction
# variable substitution. This is specifically intended to be the shim
# layer in between global function calls (which don't want construction
# variable substitution) and the DefaultEnvironment() (which would
# substitute variables if left to its own devices).
#
# We have to wrap this in a function that allows us to delay definition of
# the class until it's necessary, so that when it subclasses Environment
# it will pick up whatever Environment subclass the wrapper interface
# might have assigned to SCons.Environment.Environment.
def NoSubstitutionProxy(subject):
class _NoSubstitutionProxy(Environment):
def __init__(self, subject):
self.__dict__['__subject'] = subject
def __getattr__(self, name):
return getattr(self.__dict__['__subject'], name)
def __setattr__(self, name, value):
return setattr(self.__dict__['__subject'], name, value)
def raw_to_mode(self, dict):
try:
raw = dict['raw']
except KeyError:
pass
else:
del dict['raw']
dict['mode'] = raw
def subst(self, string, *args, **kwargs):
return string
def subst_kw(self, kw, *args, **kwargs):
return kw
def subst_list(self, string, *args, **kwargs):
nargs = (string, self,) + args
nkw = kwargs.copy()
nkw['gvars'] = {}
self.raw_to_mode(nkw)
return apply(SCons.Util.scons_subst_list, nargs, nkw)
def subst_target_source(self, string, *args, **kwargs):
nargs = (string, self,) + args
nkw = kwargs.copy()
nkw['gvars'] = {}
self.raw_to_mode(nkw)
return apply(SCons.Util.scons_subst, nargs, nkw)
return _NoSubstitutionProxy(subject)
| lgpl-2.1 | 1,026,291,506,311,848,700 | 37.195122 | 174 | 0.554649 | false |
angus-ai/angus-doc | restful/building-blocks/services/sceneanalysis/sceneanalysis_fromwebcam.py | 1 | 2774 | # -*- coding: utf-8 -*-
import StringIO
import angus.client
import cv2
import numpy as np
import datetime
import pytz
def main(stream_index):
camera = cv2.VideoCapture(stream_index)
camera.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640)
camera.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 480)
camera.set(cv2.cv.CV_CAP_PROP_FPS, 10)
if not camera.isOpened():
print("Cannot open stream of index {}".format(stream_index))
exit(1)
print("Input stream is of resolution: {} x {}".format(camera.get(3), camera.get(4)))
conn = angus.client.connect()
service = conn.services.get_service("scene_analysis", version=1)
service.enable_session()
while camera.isOpened():
ret, frame = camera.read()
if not ret:
break
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
ret, buff = cv2.imencode(".jpg", gray, [cv2.IMWRITE_JPEG_QUALITY, 80])
buff = StringIO.StringIO(np.array(buff).tostring())
t = datetime.datetime.now(pytz.utc)
job = service.process({"image": buff,
"timestamp" : t.isoformat(),
"camera_position": "facing",
"sensitivity": {
"appearance": 0.7,
"disappearance": 0.7,
"age_estimated": 0.4,
"gender_estimated": 0.5,
"focus_locked": 0.9,
"emotion_detected": 0.4,
"direction_estimated": 0.8
},
})
res = job.result
if "error" in res:
print(res["error"])
else:
# This parses the events
if "events" in res:
for event in res["events"]:
value = res["entities"][event["entity_id"]][event["key"]]
print("{}| {}, {}".format(event["type"],
event["key"],
value))
# This parses the entities data
for key, val in res["entities"].iteritems():
x, y, dx, dy = map(int, val["face_roi"])
cv2.rectangle(frame, (x, y), (x+dx, y+dy), (0, 255, 0), 2)
cv2.imshow("original", frame)
if cv2.waitKey(1) & 0xFF == 27:
break
service.disable_session()
camera.release()
cv2.destroyAllWindows()
if __name__ == "__main__":
### Web cam index might be different from 0 on your setup.
### To grab a given video file instead of the host computer cam, try:
### main("/path/to/myvideo.avi")
main(0)
| apache-2.0 | 6,750,106,073,965,175,000 | 33.675 | 88 | 0.483778 | false |
Honkl/general-ai | Controller/utils/miscellaneous.py | 1 | 2411 | """
Some miscellaneous and useful functions that not belongs to some specific class.
"""
import json
import constants
import tensorflow as tf
import time
from games.alhambra import Alhambra
from games.torcs import Torcs
from games.mario import Mario
from games.game2048 import Game2048
def get_game_config(game_name):
game_config_file = None
if game_name == "alhambra":
game_config_file = constants.ALHAMBRA_CONFIG_FILE
if game_name == "2048":
game_config_file = constants.GAME2048_CONFIG_FILE
if game_name == "mario":
game_config_file = constants.MARIO_CONFIG_FILE
if game_name == "torcs":
game_config_file = constants.TORCS_CONFIG_FILE
with open(game_config_file, "r") as f:
game_config = json.load(f)
return game_config
def get_game_instance(game_name, params, test=False):
game_instance = None
if game_name == "alhambra":
game_instance = Alhambra(*params)
if game_name == "2048":
game_instance = Game2048(*params)
if game_name == "torcs":
game_instance = Torcs(*params, test=test)
if game_name == "mario":
game_instance = Mario(*params)
return game_instance
def get_game_class(game_name):
game_class = None
if game_name == "alhambra":
game_class = Alhambra
if game_name == "2048":
game_class = Game2048
if game_name == "torcs":
game_class = Torcs
if game_name == "mario":
game_class = Mario
return game_class
def get_rnn_cell(cell_type):
if cell_type == "lstm":
return tf.nn.rnn_cell.BasicLSTMCell
if cell_type == "gru":
return tf.nn.rnn_cell.GRUCell
def get_elapsed_time(start):
now = time.time()
t = now - start
h = t // 3600
m = (t % 3600) // 60
s = t - (h * 3600) - (m * 60)
elapsed_time = "{}h {}m {}s".format(int(h), int(m), s)
return elapsed_time
def get_pretty_time():
current = time.localtime()
t_string = "{}-{}-{}_{}-{}-{}".format(str(current.tm_year).zfill(2),
str(current.tm_mon).zfill(2),
str(current.tm_mday).zfill(2),
str(current.tm_hour).zfill(2),
str(current.tm_min).zfill(2),
str(current.tm_sec).zfill(2))
return t_string
| mit | 1,660,097,160,885,952,300 | 29.1375 | 80 | 0.572791 | false |
shanot/imp | modules/isd/test/test_MarginalNOERestraint.py | 2 | 12748 | #!/usr/bin/env python
# general imports
from numpy import *
from random import uniform
# imp general
import IMP
# our project
from IMP.isd import Scale, JeffreysRestraint, MarginalNOERestraint
# unit testing framework
import IMP.test
class Tests(IMP.test.TestCase):
"simple test cases to check if MarginalNOERestraint works"
def setUp(self):
IMP.test.TestCase.setUp(self)
# IMP.set_log_level(IMP.MEMORY)
IMP.set_log_level(0)
self.m = IMP.Model()
self.p0 = IMP.core.XYZ.setup_particle(IMP.Particle(self.m),
IMP.algebra.Vector3D(0, 0, 0))
self.p1 = IMP.core.XYZ.setup_particle(IMP.Particle(self.m),
IMP.algebra.Vector3D(1, 1, 1))
self.p2 = IMP.core.XYZ.setup_particle(IMP.Particle(self.m),
IMP.algebra.Vector3D(1, 0, 0))
self.DA = IMP.DerivativeAccumulator()
self.V_obs = 3.0
self.ls = \
IMP.container.ListPairContainer(self.m,
[(self.p0, self.p1), (self.p0, self.p2)])
self.noe = IMP.isd.MarginalNOERestraint(self.m)
def testValuePDist1(self):
"""Test MarginalNOERestraint probability on three particles"""
self.skipTest("probability currently broken")
v1, v2 = 1.0, 2.0
p0, p1, p2 = self.p0, self.p1, self.p2
self.noe.add_contribution(self.p0, self.p1, 1.0)
        self.noe.add_contribution(
            IMP.container.ListPairContainer(self.m, [(self.p0, self.p2)]), 2.0)
self.m.add_restraint(self.noe)
for i in range(100):
for p in [self.p0, self.p1, self.p2]:
p.set_coordinates(IMP.algebra.Vector3D(*[uniform(0.1, 100)
for i in range(3)]))
dist1 = IMP.core.get_distance(p0, p1) ** -6
dist2 = IMP.core.get_distance(p0, p2) ** -6
v = sqrt(v1 / dist1 * v2 / dist2)
b = log(v1 / (dist1 * v)) ** 2 + log(v2 / (dist2 * v)) ** 2
expected = b ** (-1. / 2)
self.assertAlmostEqual(self.noe.get_probability(),
expected, delta=0.001)
def testValueEDist1(self):
"""Test MarginalNOERestraint energy on three particles"""
self.skipTest("energy currently broken")
v1, v2 = 1.0, 2.0
p0, p1, p2 = self.p0, self.p1, self.p2
self.noe.add_contribution(self.p0, self.p1, v1)
self.noe.add_contribution(
IMP.container.ListPairContainer([(self.p0, self.p2)]), v2)
self.m.add_restraint(self.noe)
for i in range(100):
for p in [self.p0, self.p1, self.p2]:
p.set_coordinates(IMP.algebra.Vector3D(*[uniform(0.1, 100)
for i in range(3)]))
dist1 = IMP.core.get_distance(p0, p1) ** -6
dist2 = IMP.core.get_distance(p0, p2) ** -6
v = sqrt(v1 / dist1 * v2 / dist2)
b = log(v1 / (dist1 * v)) ** 2 + log(v2 / (dist2 * v)) ** 2
expected = 0.5 * log(b)
self.noe.evaluate(False)
self.assertAlmostEqual(self.noe.evaluate(False),
expected, delta=0.002)
def testValuePDist2(self):
"""Test MarginalNOERestraint probability on n particles"""
self.skipTest("probability currently broken")
pairs = []
volumes = []
distances = []
self.m.add_restraint(self.noe)
for i in range(2, 100):
while len(pairs) <= i:
pair = [IMP.core.XYZ.setup_particle(
IMP.Particle(self.m),
IMP.algebra.Vector3D(*[uniform(-10, 10) for r in range(3)]))
for p in range(2)]
pairs.append(pair)
distances.append(IMP.core.get_distance(pair[0], pair[1]))
volumes.append(uniform(0.1, 10))
self.noe.add_contribution(
IMP.container.ListPairContainer([pair]),
volumes[-1])
v = 1.0
for j in range(len(pairs)):
v *= volumes[j] * distances[j] ** 6
v = v ** (1.0 / len(pairs))
b = 0
for j in range(len(pairs)):
b += log(volumes[j] * distances[j] ** 6 / v) ** 2
expected = b ** (-(len(pairs) - 1) / 2.0)
self.assertAlmostEqual(self.noe.get_probability(),
expected, delta=0.001)
def testValueEDist2(self):
"""Test MarginalNOERestraint energy on n particles"""
self.skipTest("energy currently broken")
pairs = []
volumes = []
distances = []
self.m.add_restraint(self.noe)
for i in range(2, 100):
while len(pairs) <= i:
pair = [IMP.core.XYZ.setup_particle(
IMP.Particle(self.m),
IMP.algebra.Vector3D(*[uniform(-10, 10) for r in range(3)]))
for p in range(2)]
pairs.append(pair)
distances.append(IMP.core.get_distance(pair[0], pair[1]))
volumes.append(uniform(0.1, 10))
self.noe.add_contribution(
IMP.container.ListPairContainer([pair]),
volumes[-1])
v = 1.0
for j in range(len(pairs)):
v *= volumes[j] * distances[j] ** 6
v = v ** (1.0 / len(pairs))
b = 0
for j in range(len(pairs)):
b += log(volumes[j] * distances[j] ** 6 / v) ** 2
expected = (len(pairs) - 1) / 2.0 * log(b)
self.assertAlmostEqual(self.noe.evaluate(False),
expected, delta=0.002)
@IMP.test.expectedFailure
def testValueGammaHat(self):
"""Test MarginalNOERestraint gamma hat on n particles"""
pairs = []
volumes = []
distances = []
self.m.add_restraint(self.noe)
for i in range(2, 100):
while len(pairs) <= i:
pair = [IMP.core.XYZ.setup_particle(
IMP.Particle(self.m),
IMP.algebra.Vector3D(*[uniform(-10, 10) for r in range(3)]))
for p in range(2)]
pairs.append(pair)
distances.append(IMP.core.get_distance(pair[0], pair[1]))
volumes.append(uniform(0.1, 10))
self.noe.add_contribution(
IMP.container.ListPairContainer([pair]),
volumes[-1])
v = 1.0
for j in range(len(pairs)):
v *= volumes[j] * distances[j] ** 6
v = v ** (1.0 / len(pairs))
expected = v
self.noe.evaluate(False)
self.assertAlmostEqual(self.noe.get_gammahat(),
expected, delta=0.001)
@IMP.test.expectedFailure
def testValueSS(self):
"""Test MarginalNOERestraint sum of squares on n particles"""
pairs = []
volumes = []
distances = []
self.m.add_restraint(self.noe)
for i in range(2, 100):
while len(pairs) <= i:
pair = [IMP.core.XYZ.setup_particle(
IMP.Particle(self.m),
IMP.algebra.Vector3D(*[uniform(-10, 10) for r in range(3)]))
for p in range(2)]
pairs.append(pair)
distances.append(IMP.core.get_distance(pair[0], pair[1]))
volumes.append(uniform(0.1, 10))
self.noe.add_contribution(
IMP.container.ListPairContainer([pair]),
volumes[-1])
v = 1.0
for j in range(len(pairs)):
v *= volumes[j] * distances[j] ** 6
v = v ** (1.0 / len(pairs))
b = 0
for j in range(len(pairs)):
b += log(volumes[j] * distances[j] ** 6 / v) ** 2
expected = b
self.noe.evaluate(False)
self.assertAlmostEqual(self.noe.get_SS(),
expected, delta=0.001)
def testValueN(self):
"""Test MarginalNOERestraint n on n particles"""
pairs = []
volumes = []
distances = []
for i in range(2, 100):
while len(pairs) <= i:
pair = [IMP.core.XYZ.setup_particle(
IMP.Particle(self.m),
IMP.algebra.Vector3D(*[uniform(-10, 10) for r in range(3)]))
for p in range(2)]
pairs.append(pair)
distances.append(IMP.core.get_distance(pair[0], pair[1]))
volumes.append(uniform(0.1, 10))
self.noe.add_contribution(
IMP.container.ListPairContainer(self.m,
[pair]), volumes[-1])
expected = len(volumes)
self.assertAlmostEqual(self.noe.get_number_of_contributions(),
expected, delta=0.001)
def testDerivative(self):
"Test MarginalNOERestraint x deriv for 3 particles & 2 contributions"
self.skipTest("derivatives currently broken")
v1, v2 = 1.0, 2.0
p0, p1, p2 = self.p0, self.p1, self.p2
self.noe.add_contribution(self.p0, self.p1, v1)
self.noe.add_contribution(
IMP.container.ListPairContainer(self.m, [(self.p0, self.p2)]), v2)
self.m.add_restraint(self.noe)
p0.set_coordinates(IMP.algebra.Vector3D(0, 0, 0))
p1.set_coordinates(IMP.algebra.Vector3D(1, 1, 1))
p2.set_coordinates(IMP.algebra.Vector3D(1, 0, 0))
self.noe.evaluate(True)
# p0
self.assertAlmostEqual(self.p0.get_derivative(0),
1.53687, delta=0.001)
self.assertAlmostEqual(self.p0.get_derivative(1),
-0.76843, delta=0.001)
self.assertAlmostEqual(self.p0.get_derivative(2),
-0.76843, delta=0.001)
# p1
self.assertAlmostEqual(self.p1.get_derivative(0),
0.76843, delta=0.001)
self.assertAlmostEqual(self.p1.get_derivative(1),
0.76843, delta=0.001)
self.assertAlmostEqual(self.p1.get_derivative(2),
0.76843, delta=0.001)
# p2
self.assertAlmostEqual(self.p2.get_derivative(0),
-2.30530, delta=0.001)
self.assertAlmostEqual(self.p2.get_derivative(1),
0., delta=0.001)
self.assertAlmostEqual(self.p2.get_derivative(2),
0., delta=0.001)
def test_get_inputs(self):
"Test MarginalNOERestraint::get_input_particles"
v1, v2 = 1.0, 2.0
c0 = IMP.container.ListPairContainer(self.m, [(self.p0, self.p2)])
self.noe.add_contribution(c0, v2)
self.assertEqual([x.get_name() for x in self.noe.get_inputs()],
[y.get_name() for y in
[self.p0, self.p2, c0]])
def testSanityEP(self):
"Test if MarginalNOE score is -log(prob)"
v1, v2 = 1.0, 2.0
p0, p1, p2 = self.p0, self.p1, self.p2
c1 = IMP.container.ListPairContainer(self.m, [(self.p0, self.p1)])
c2 = IMP.container.ListPairContainer(self.m, [(self.p0, self.p2)])
self.noe.add_contribution(c1, v1)
self.noe.add_contribution(c2, v2)
for i in range(100):
p0.set_coordinates(IMP.algebra.Vector3D(*[uniform(-10, 10) for i in
range(3)]))
self.assertAlmostEqual(self.noe.evaluate(False),
-log(self.noe.get_probability()), delta=0.001)
def testSanityPE(self):
"Test if MarginalNOERestraint prob is exp(-score)"
v1, v2 = 1.0, 2.0
p0, p1, p2 = self.p0, self.p1, self.p2
c1 = IMP.container.ListPairContainer(self.m, [(self.p0, self.p1)])
c2 = IMP.container.ListPairContainer(self.m, [(self.p0, self.p2)])
self.noe.add_contribution(c1, v1)
self.noe.add_contribution(c2, v2)
for i in range(100):
p0.set_coordinates(IMP.algebra.Vector3D(*[uniform(-10, 10) for i in
range(3)]))
self.assertAlmostEqual(self.noe.get_probability(),
exp(-self.noe.evaluate(False)), delta=0.001)
if __name__ == '__main__':
IMP.test.main()
| gpl-3.0 | -6,068,620,226,501,303,000 | 42.067568 | 81 | 0.50455 | false |
lsaffre/atelier | atelier/sphinxconf/interproject.py | 1 | 4949 | # -*- coding: utf-8 -*-
# Copyright 2011-2020 Rumma & Ko Ltd
# License: BSD, see LICENSE for more details.
"""
Defines the :func:`atelier.sphinxconf.interproject.configure` function.
"""
import os
from pathlib import Path
# from importlib import import_module
from sphinx.util import logging; logger = logging.getLogger(__name__)
# from invoke import Context
# import atelier
from atelier.projects import load_projects, get_project_info_from_mod
from atelier.projects import get_project_from_nickname
USE_LOCAL_BUILDS = os.environ.get("ATELIER_IGNORE_LOCAL_BUILDS", "") != "yes"
# Whether to use objects.inv files from other local doctrees if they exist.
# E.g. on Travis no other projects are installed from source, so there we
# cannot use it.
def configure(globals_dict, prjspec=None, **nicknames):
"""
Install doctrees of all (or some) atelier projects into the
:envvar:`intersphinx_mapping` of your :xfile:`conf.py`.
See :doc:`/sphinxext/interproject`.
"""
intersphinx_mapping = dict()
# extlinks = dict()
# this = atelier.current_project
# if this is None:
# raise Exception("current_project in {} is None!".format(globals_dict['__file__']))
this_conf_file = Path(globals_dict['__file__']).resolve()
if prjspec:
if isinstance(prjspec, str):
prjspec = prjspec.split()
prjlist = [get_project_info_from_mod(n) for n in prjspec]
else:
prjlist = []
# for p in load_projects():
for p in reversed(list(load_projects())):
if str(this_conf_file).startswith(str(p.root_dir)):
# print("20190122 {} startswith {}".format(this_conf_file, p.root_dir))
continue
prjlist.append(p)
for k, v in nicknames.items():
p = get_project_from_nickname(k)
if p:
prjlist.append(p)
else:
intersphinx_mapping[k] = v
# logger.info("20180907 prjlist {}".format(prjlist))
for prj in prjlist:
# This will load the `tasks.py` of other
# projects. Possible side effects.
# print("20180428 {} {}".format(prj.name, prj.config['doc_trees']))
# config = prj.inv_namespace.configuration()
# print("20180428 {} {}".format(prj.name, config['doc_trees']))
# ctx = Context(config)
# for doc_tree in prj.config['doc_trees']:
count = 0
for doc_tree in prj.get_doc_trees():
if not doc_tree.has_intersphinx:
logger.info("%s has no intersphinx", p)
continue
count += 1
urls = prj.get_xconfig('intersphinx_urls') or {}
url = urls.get(doc_tree.rel_path)
if not url:
if prjspec:
logger.warning(
"No intersphinx mapping for {} of {} ({})".format(
doc_tree.rel_path, prj.nickname, urls))
continue
# if prj.nickname == "getlino":
# raise Exception("20191003 {}".format(doc_tree.src_path))
p = None
src_path = doc_tree.src_path
if src_path is not None:
if this_conf_file == src_path / 'conf.py':
# don't add myself to intersphinx.
continue
if USE_LOCAL_BUILDS:
# print("20190306a", doc_tree, src_path)
# p = prj.root_dir / (doc_tree + '/.build/objects.inv')
p = src_path / '.build/objects.inv'
if p.exists():
logger.info("Found local {}".format(p))
else:
logger.info("File %s does not exist", p)
p = None
# The unique identifier can be used to prefix cross-reference targets
# http://www.sphinx-doc.org/en/master/ext/intersphinx.html#confval-intersphinx_mapping
k = prj.nickname + doc_tree.rel_path
k = k.replace('_', '')
k = str(k)
if k in intersphinx_mapping:
raise Exception("Duplicate intersphinx key {} used for {} "
"(you ask to redefine it to {})".format(
k, intersphinx_mapping[k], p))
if p is not None:
p = str(p)
intersphinx_mapping[k] = (url, p)
if count == 0 and prjspec:
logger.warning("No doctree for {}".format(prj))
# if prj.srcref_url:
# k = '%s_srcref' % prj.nickname
# extlinks[str(k)] = (prj.srcref_url, '')
# atelier.current_project = this
globals_dict.update(intersphinx_mapping=intersphinx_mapping)
# logger.info("20190306 prjlist is {}, intersphinx_mapping is {}".format(
# prjlist, intersphinx_mapping))
# if False: # no longer used
# globals_dict.update(extlinks=extlinks)
| bsd-2-clause | -5,703,355,728,723,782,000 | 34.099291 | 98 | 0.553849 | false |
thomasyu888/synapsePythonClient | synapseclient/client.py | 1 | 186978 | """
**************
Synapse Client
**************
The `Synapse` object encapsulates a connection to the Synapse service and is used for building projects, uploading and
retrieving data, and recording provenance of data analysis.
~~~~~
Login
~~~~~
.. automethod:: synapseclient.client.login
~~~~~~~
Synapse
~~~~~~~
.. autoclass:: synapseclient.Synapse
:members:
~~~~~~~~~~~~~~~~
More information
~~~~~~~~~~~~~~~~
See also the `Synapse API documentation <https://docs.synapse.org/rest/>`_.
"""
import collections
import collections.abc
import configparser
import deprecated
import errno
import functools
import getpass
import hashlib
import json
import logging
import mimetypes
import os
import requests
import shutil
import sys
import tempfile
import time
import typing
import urllib.parse as urllib_urlparse
import urllib.request as urllib_request
import warnings
import webbrowser
import zipfile
import synapseclient
from .annotations import (
from_synapse_annotations,
to_synapse_annotations,
Annotations,
convert_old_annotation_json,
check_annotations_changed,
)
from .activity import Activity
import synapseclient.core.multithread_download as multithread_download
from .entity import Entity, File, Folder, Versionable,\
split_entity_namespaces, is_versionable, is_container, is_synapse_entity
from synapseclient.core.models.dict_object import DictObject
from .evaluation import Evaluation, Submission, SubmissionStatus
from .table import Schema, SchemaBase, Column, TableQueryResult, CsvFileTable, EntityViewSchema, SubmissionViewSchema
from .team import UserProfile, Team, TeamMember, UserGroupHeader
from .wiki import Wiki, WikiAttachment
from synapseclient.core import cache, exceptions, utils
from synapseclient.core.constants import config_file_constants
from synapseclient.core.constants import concrete_types
from synapseclient.core import cumulative_transfer_progress
from synapseclient.core.credentials import (
cached_sessions,
delete_stored_credentials,
get_default_credential_chain,
UserLoginArgs,
)
from synapseclient.core.exceptions import (
SynapseAuthenticationError,
SynapseError,
SynapseFileNotFoundError,
SynapseHTTPError,
SynapseMd5MismatchError,
SynapseNoCredentialsError,
SynapseProvenanceError,
SynapseTimeoutError,
SynapseUnmetAccessRestrictions,
)
from synapseclient.core.logging_setup import DEFAULT_LOGGER_NAME, DEBUG_LOGGER_NAME, SILENT_LOGGER_NAME
from synapseclient.core.version_check import version_check
from synapseclient.core.pool_provider import DEFAULT_NUM_THREADS
from synapseclient.core.utils import id_of, get_properties, MB, memoize, is_json, extract_synapse_id_from_query, \
find_data_file_handle, extract_zip_file_to_directory, is_integer, require_param
from synapseclient.core.retry import (
with_retry,
DEFAULT_RETRY_STATUS_CODES,
RETRYABLE_CONNECTION_ERRORS,
RETRYABLE_CONNECTION_EXCEPTIONS,
)
from synapseclient.core import sts_transfer
from synapseclient.core.upload.multipart_upload import multipart_upload_file, multipart_upload_string
from synapseclient.core.remote_file_storage_wrappers import S3ClientWrapper, SFTPWrapper
from synapseclient.core.upload.upload_functions import upload_file_handle, upload_synapse_s3
from synapseclient.core.dozer import doze
PRODUCTION_ENDPOINTS = {'repoEndpoint': 'https://repo-prod.prod.sagebase.org/repo/v1',
'authEndpoint': 'https://auth-prod.prod.sagebase.org/auth/v1',
'fileHandleEndpoint': 'https://file-prod.prod.sagebase.org/file/v1',
'portalEndpoint': 'https://www.synapse.org/'}
STAGING_ENDPOINTS = {'repoEndpoint': 'https://repo-staging.prod.sagebase.org/repo/v1',
'authEndpoint': 'https://auth-staging.prod.sagebase.org/auth/v1',
'fileHandleEndpoint': 'https://file-staging.prod.sagebase.org/file/v1',
'portalEndpoint': 'https://staging.synapse.org/'}
CONFIG_FILE = os.path.join(os.path.expanduser('~'), '.synapseConfig')
SESSION_FILENAME = '.session'
FILE_BUFFER_SIZE = 2*MB
CHUNK_SIZE = 5*MB
QUERY_LIMIT = 1000
CHUNK_UPLOAD_POLL_INTERVAL = 1 # second
ROOT_ENTITY = 'syn4489'
PUBLIC = 273949 # PrincipalId of public "user"
AUTHENTICATED_USERS = 273948
DEBUG_DEFAULT = False
REDIRECT_LIMIT = 5
MAX_THREADS_CAP = 128
# Defines the standard retry policy applied to the rest methods
# The retry period needs to span a minute because sending messages is limited to 10 per 60 seconds.
STANDARD_RETRY_PARAMS = {"retry_status_codes": DEFAULT_RETRY_STATUS_CODES,
"retry_errors": RETRYABLE_CONNECTION_ERRORS,
"retry_exceptions": RETRYABLE_CONNECTION_EXCEPTIONS,
"retries": 60, # Retries for up to about 30 minutes
"wait": 1,
"max_wait": 30,
"back_off": 2}
# Add additional mimetypes
mimetypes.add_type('text/x-r', '.R', strict=False)
mimetypes.add_type('text/x-r', '.r', strict=False)
mimetypes.add_type('text/tab-separated-values', '.maf', strict=False)
mimetypes.add_type('text/tab-separated-values', '.bed5', strict=False)
mimetypes.add_type('text/tab-separated-values', '.bed', strict=False)
mimetypes.add_type('text/tab-separated-values', '.vcf', strict=False)
mimetypes.add_type('text/tab-separated-values', '.sam', strict=False)
mimetypes.add_type('text/yaml', '.yaml', strict=False)
mimetypes.add_type('text/x-markdown', '.md', strict=False)
mimetypes.add_type('text/x-markdown', '.markdown', strict=False)
DEFAULT_STORAGE_LOCATION_ID = 1
def login(*args, **kwargs):
"""
Convenience method to create a Synapse object and login.
See :py:func:`synapseclient.Synapse.login` for arguments and usage.
Example::
import synapseclient
syn = synapseclient.login()
"""
syn = Synapse()
syn.login(*args, **kwargs)
return syn
class Synapse(object):
"""
Constructs a Python client object for the Synapse repository service
:param repoEndpoint: Location of Synapse repository
:param authEndpoint: Location of authentication service
:param fileHandleEndpoint: Location of file service
:param portalEndpoint: Location of the website
:param serviceTimeoutSeconds: Wait time before timeout (currently unused)
:param debug: Print debugging messages if True
:param skip_checks: Skip version and endpoint checks
:param configPath: Path to config File with setting for Synapse
defaults to ~/.synapseConfig
    :param requests_session: a custom requests.Session object that this Synapse instance will use
when making http requests
Typically, no parameters are needed::
import synapseclient
syn = synapseclient.Synapse()
See:
- :py:func:`synapseclient.Synapse.login`
- :py:func:`synapseclient.Synapse.setEndpoints`
"""
# TODO: add additional boolean for write to disk?
def __init__(self, repoEndpoint=None, authEndpoint=None, fileHandleEndpoint=None, portalEndpoint=None,
debug=None, skip_checks=False, configPath=CONFIG_FILE, requests_session=None,
cache_root_dir=None, silent=None):
self._requests_session = requests_session or requests.Session()
cache_root_dir = cache.CACHE_ROOT_DIR if cache_root_dir is None else cache_root_dir
config_debug = None
# Check for a config file
self.configPath = configPath
if os.path.isfile(configPath):
config = self.getConfigFile(configPath)
if config.has_option('cache', 'location'):
cache_root_dir = config.get('cache', 'location')
if config.has_section('debug'):
config_debug = True
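        # Example ~/.synapseConfig entries consumed above (a sketch; both
        # sections are optional):
        #
        #   [cache]
        #   location = /path/to/synapseCache
        #
        #   [debug]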
if debug is None:
debug = config_debug if config_debug is not None else DEBUG_DEFAULT
self.cache = cache.Cache(cache_root_dir)
self._sts_token_store = sts_transfer.StsTokenStore()
self.setEndpoints(repoEndpoint, authEndpoint, fileHandleEndpoint, portalEndpoint, skip_checks)
self.default_headers = {'content-type': 'application/json; charset=UTF-8',
'Accept': 'application/json; charset=UTF-8'}
self.credentials = None
if not isinstance(debug, bool):
raise ValueError("debug must be set to a bool (either True or False)")
self.debug = debug
self.silent = silent
self._init_logger() # initializes self.logger
self.skip_checks = skip_checks
self.table_query_sleep = 2
self.table_query_backoff = 1.1
self.table_query_max_sleep = 20
self.table_query_timeout = 600 # in seconds
self.multi_threaded = True # if set to True, multi threaded download will be used for http and https URLs
transfer_config = self._get_transfer_config()
self.max_threads = transfer_config['max_threads']
self.use_boto_sts_transfers = transfer_config['use_boto_sts']
# initialize logging
def _init_logger(self):
logger_name = SILENT_LOGGER_NAME if self.silent else DEBUG_LOGGER_NAME if self.debug else DEFAULT_LOGGER_NAME
self.logger = logging.getLogger(logger_name)
logging.getLogger('py.warnings').handlers = self.logger.handlers
@property
def max_threads(self):
return self._max_threads
@max_threads.setter
def max_threads(self, value: int):
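        # Clamp the requested thread count into the supported range
        # [1, MAX_THREADS_CAP] instead of raising on out-of-range values.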
self._max_threads = min(max(value, 1), MAX_THREADS_CAP)
@property
def username(self):
        # for backwards compatibility when username was a part of the Synapse object and not in credentials
return self.credentials.username if self.credentials is not None else None
@functools.lru_cache()
def getConfigFile(self, configPath):
"""
Retrieves the client configuration information.
:param configPath: Path to configuration file on local file system
:return: a RawConfigParser populated with properties from the user's configuration file.
"""
try:
config = configparser.RawConfigParser()
config.read(configPath) # Does not fail if the file does not exist
return config
except configparser.Error as ex:
raise ValueError("Error parsing Synapse config file: {}".format(configPath)) from ex
def setEndpoints(self, repoEndpoint=None, authEndpoint=None, fileHandleEndpoint=None, portalEndpoint=None,
skip_checks=False):
"""
Sets the locations for each of the Synapse services (mostly useful for testing).
:param repoEndpoint: Location of synapse repository
:param authEndpoint: Location of authentication service
:param fileHandleEndpoint: Location of file service
:param portalEndpoint: Location of the website
:param skip_checks: Skip version and endpoint checks
To switch between staging and production endpoints::
syn.setEndpoints(**synapseclient.client.STAGING_ENDPOINTS)
syn.setEndpoints(**synapseclient.client.PRODUCTION_ENDPOINTS)
"""
endpoints = {'repoEndpoint': repoEndpoint,
'authEndpoint': authEndpoint,
'fileHandleEndpoint': fileHandleEndpoint,
'portalEndpoint': portalEndpoint}
# For unspecified endpoints, first look in the config file
config = self.getConfigFile(self.configPath)
for point in endpoints.keys():
if endpoints[point] is None and config.has_option('endpoints', point):
endpoints[point] = config.get('endpoints', point)
# Endpoints default to production
for point in endpoints.keys():
if endpoints[point] is None:
endpoints[point] = PRODUCTION_ENDPOINTS[point]
# Update endpoints if we get redirected
if not skip_checks:
response = self._requests_session.get(endpoints[point], allow_redirects=False,
headers=synapseclient.USER_AGENT)
if response.status_code == 301:
endpoints[point] = response.headers['location']
self.repoEndpoint = endpoints['repoEndpoint']
self.authEndpoint = endpoints['authEndpoint']
self.fileHandleEndpoint = endpoints['fileHandleEndpoint']
self.portalEndpoint = endpoints['portalEndpoint']
def login(self, email=None, password=None, apiKey=None, sessionToken=None, rememberMe=False, silent=False,
forced=False, authToken=None):
"""
Valid combinations of login() arguments:
- email/username and password
- email/username and apiKey (Base64 encoded string)
- authToken
- sessionToken (**DEPRECATED**)
If no login arguments are provided or only username is provided, login() will attempt to log in using
information from these sources (in order of preference):
        #. User's personal access token from the environment variable: SYNAPSE_AUTH_TOKEN
#. .synapseConfig file (in user home folder unless configured otherwise)
#. cached credentials from previous `login()` where `rememberMe=True` was passed as a parameter
:param email: Synapse user name (or an email address associated with a Synapse account)
:param password: password
:param apiKey: Base64 encoded Synapse API key
:param sessionToken: **!!DEPRECATED FIELD!!** User's current session token. Using this field will ignore the
following fields: email, password, apiKey
:param rememberMe: Whether the authentication information should be cached in your operating system's
credential storage.
:param authToken: A bearer authorization token, e.g. a personal access token, can be used in lieu of a
password or apiKey
        **GNOME Keyring** (recommended) or **KWallet** should be installed for credential storage on
        **Linux** systems.
        If neither is installed/set up, credentials will be stored in a PLAIN-TEXT file with read and write
        permissions for the current user only (chmod 600).
On Windows and Mac OS, a default credentials storage exists so it will be preferred over the plain-text file.
To install GNOME Keyring on Ubuntu::
sudo apt-get install gnome-keyring
sudo apt-get install python-dbus #(for Python 2 installed via apt-get)
OR
sudo apt-get install python3-dbus #(for Python 3 installed via apt-get)
OR
            sudo apt-get install libdbus-glib-1-dev #(for custom installation of Python or virtualenv)
sudo pip install dbus-python #(may take a while to compile C code)
If you are on a headless Linux session (e.g. connecting via SSH), please run the following commands before
running your Python session::
dbus-run-session -- bash #(replace 'bash' with 'sh' if bash is unavailable)
echo -n "REPLACE_WITH_YOUR_KEYRING_PASSWORD"|gnome-keyring-daemon -- unlock
:param silent: Defaults to False. Suppresses the "Welcome ...!" message.
:param forced: Defaults to False. Bypass the credential cache if set.
Example::
syn.login('my-username', 'secret-password', rememberMe=True)
#> Welcome, Me!
After logging in with the *rememberMe* flag set, an API key will be cached and
used to authenticate for future logins::
syn.login()
#> Welcome, Me!
"""
# Note: the order of the logic below reflects the ordering in the docstring above.
# Check version before logging in
if not self.skip_checks:
version_check()
# Make sure to invalidate the existing session
self.logout()
credential_provider_chain = get_default_credential_chain()
# TODO: remove deprecated sessionToken when we move to a different solution
self.credentials = credential_provider_chain.get_credentials(
self,
UserLoginArgs(
email,
password,
apiKey,
forced,
sessionToken,
authToken,
)
)
# Final check on login success
if not self.credentials:
raise SynapseNoCredentialsError("No credentials provided.")
# Save the API key in the cache
if rememberMe:
delete_stored_credentials(self.credentials.username)
self.credentials.store_to_keyring()
cached_sessions.set_most_recent_user(self.credentials.username)
if not silent:
profile = self.getUserProfile(refresh=True)
# TODO-PY3: in Python2, do we need to ensure that this is encoded in utf-8
self.logger.info("Welcome, %s!\n" % (profile['displayName'] if 'displayName' in profile
else self.credentials.username))
def _get_config_section_dict(self, section_name):
config = self.getConfigFile(self.configPath)
try:
return dict(config.items(section_name))
except configparser.NoSectionError:
# section not present
return {}
def _get_config_authentication(self):
return self._get_config_section_dict(config_file_constants.AUTHENTICATION_SECTION_NAME)
def _get_client_authenticated_s3_profile(self, endpoint, bucket):
config_section = endpoint + "/" + bucket
return self._get_config_section_dict(config_section).get("profile_name", "default")
def _get_transfer_config(self):
# defaults
transfer_config = {
'max_threads': DEFAULT_NUM_THREADS,
'use_boto_sts': False
}
for k, v in self._get_config_section_dict('transfer').items():
if v:
if k == 'max_threads' and v:
try:
transfer_config['max_threads'] = int(v)
except ValueError as cause:
raise ValueError(f"Invalid transfer.max_threads config setting {v}") from cause
elif k == 'use_boto_sts':
lower_v = v.lower()
if lower_v not in ('true', 'false'):
raise ValueError(f"Invalid transfer.use_boto_sts config setting {v}")
transfer_config['use_boto_sts'] = 'true' == lower_v
return transfer_config
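    # Example ~/.synapseConfig section parsed by _get_transfer_config
    # (a sketch; both keys are optional):
    #
    #   [transfer]
    #   max_threads = 16
    #   use_boto_sts = true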
def _getSessionToken(self, email, password):
"""Returns a validated session token."""
try:
req = {'email': email, 'password': password}
session = self.restPOST('/session', body=json.dumps(req), endpoint=self.authEndpoint,
headers=self.default_headers)
return session['sessionToken']
except SynapseHTTPError as err:
if err.response.status_code == 403 or err.response.status_code == 404 or err.response.status_code == 401:
raise SynapseAuthenticationError("Invalid username or password.")
raise
def _getAPIKey(self, sessionToken):
"""Uses a session token to fetch an API key."""
headers = {'sessionToken': sessionToken, 'Accept': 'application/json'}
secret = self.restGET('/secretKey', endpoint=self.authEndpoint, headers=headers)
return secret['secretKey']
def _loggedIn(self):
"""Test whether the user is logged in to Synapse."""
if self.credentials is None:
return False
try:
user = self.restGET('/userProfile')
if 'displayName' in user:
if user['displayName'] == 'Anonymous':
return False
return user['displayName']
except SynapseHTTPError as err:
if err.response.status_code == 401:
return False
raise
def logout(self, forgetMe=False):
"""
Removes authentication information from the Synapse client.
:param forgetMe: Set as True to clear any local storage of authentication information.
See the flag "rememberMe" in :py:func:`synapseclient.Synapse.login`.
"""
# Delete the user's API key from the cache
if forgetMe and self.credentials:
self.credentials.delete_from_keyring()
self.credentials = None
def invalidateAPIKey(self):
"""Invalidates authentication across all clients."""
# Logout globally
if self._loggedIn():
self.restDELETE('/secretKey', endpoint=self.authEndpoint)
@memoize
def getUserProfile(self, id=None, sessionToken=None, refresh=False):
"""
Get the details about a Synapse user.
Retrieves information on the current user if 'id' is omitted.
:param id: The 'userId' (aka 'ownerId') of a user or the userName
:param sessionToken: The session token to use to find the user profile
        :param refresh: If set to True will always fetch the data from Synapse, otherwise will use cached information
:returns: The user profile for the user of interest.
Example::
my_profile = syn.getUserProfile()
freds_profile = syn.getUserProfile('fredcommo')
"""
try:
# if id is unset or a userID, this will succeed
id = '' if id is None else int(id)
except (TypeError, ValueError):
if isinstance(id, collections.abc.Mapping) and 'ownerId' in id:
id = id.ownerId
elif isinstance(id, TeamMember):
id = id.member.ownerId
else:
principals = self._findPrincipals(id)
if len(principals) == 1:
id = principals[0]['ownerId']
else:
for principal in principals:
                    if principal.get('userName', '').lower() == id.lower():
id = principal['ownerId']
break
else: # no break
raise ValueError('Can\'t find user "%s": ' % id)
uri = '/userProfile/%s' % id
return UserProfile(**self.restGET(uri, headers={'sessionToken': sessionToken} if sessionToken else None))
def _findPrincipals(self, query_string):
"""
Find users or groups by name or email.
:returns: A list of userGroupHeader objects with fields displayName, email, firstName, lastName, isIndividual,
ownerId
Example::
syn._findPrincipals('test')
[{u'displayName': u'Synapse Test',
u'email': u'[email protected]',
u'firstName': u'Synapse',
u'isIndividual': True,
u'lastName': u'Test',
u'ownerId': u'1560002'},
{u'displayName': ... }]
"""
uri = '/userGroupHeaders?prefix=%s' % urllib_urlparse.quote(query_string)
return [UserGroupHeader(**result) for result in self._GET_paginated(uri)]
def _get_certified_passing_record(self, userid: int) -> dict:
"""Retrieve the Passing Record on the User Certification test for the given user.
:params userid: Synapse user Id
:returns: Synapse Passing Record
https://rest-docs.synapse.org/rest/org/sagebionetworks/repo/model/quiz/PassingRecord.html
"""
response = self.restGET(f"/user/{userid}/certifiedUserPassingRecord")
return response
def is_certified(self, user: typing.Union[str, int]) -> bool:
"""Determines whether a Synapse user is a certified user.
:params user: Synapse username or Id
:returns: True if the Synapse user is certified
"""
# Check if userid or username exists
syn_user = self.getUserProfile(user)
# Get passing record
try:
certification_status = self._get_certified_passing_record(syn_user['ownerId'])
return certification_status['passed']
except SynapseHTTPError as ex:
if ex.response.status_code == 404:
# user hasn't taken the quiz
return False
raise
def onweb(self, entity, subpageId=None):
"""Opens up a browser window to the entity page or wiki-subpage.
:param entity: Either an Entity or a Synapse ID
:param subpageId: (Optional) ID of one of the wiki's sub-pages
"""
if isinstance(entity, str) and os.path.isfile(entity):
entity = self.get(entity, downloadFile=False)
synId = id_of(entity)
if subpageId is None:
webbrowser.open("%s#!Synapse:%s" % (self.portalEndpoint, synId))
else:
webbrowser.open("%s#!Wiki:%s/ENTITY/%s" % (self.portalEndpoint, synId, subpageId))
def printEntity(self, entity, ensure_ascii=True):
"""
Pretty prints an Entity.
:param entity: The entity to be printed.
:param ensure_ascii: If True, escapes all non-ASCII characters
"""
if utils.is_synapse_id(entity):
entity = self._getEntity(entity)
try:
self.logger.info(json.dumps(entity, sort_keys=True, indent=2, ensure_ascii=ensure_ascii))
except TypeError:
self.logger.info(str(entity))
def _print_transfer_progress(self, *args, **kwargs):
# Checking synapse if the mode is silent mode.
# If self.silent is True, no need to print out transfer progress.
if self.silent is not True:
cumulative_transfer_progress.printTransferProgress(*args, **kwargs)
############################################################
# Get / Store methods #
############################################################
def get(self, entity, **kwargs):
"""
Gets a Synapse entity from the repository service.
:param entity: A Synapse ID, a Synapse Entity object, a plain dictionary in which 'id' maps to a
Synapse ID or a local file that is stored in Synapse (found by the file MD5)
:param version: The specific version to get.
Defaults to the most recent version.
:param downloadFile: Whether associated files(s) should be downloaded.
Defaults to True
:param downloadLocation: Directory where to download the Synapse File Entity.
Defaults to the local cache.
:param followLink: Whether the link returns the target Entity.
Defaults to False
:param ifcollision: Determines how to handle file collisions.
May be "overwrite.local", "keep.local", or "keep.both".
Defaults to "keep.both".
        :param limitSearch: a Synapse ID used to limit the search in Synapse if entity is specified as a local
file. That is, if the file is stored in multiple locations in Synapse only the ones
in the specified folder/project will be returned.
:returns: A new Synapse Entity object of the appropriate type
Example::
# download file into cache
entity = syn.get('syn1906479')
print(entity.name)
print(entity.path)
# download file into current working directory
entity = syn.get('syn1906479', downloadLocation='.')
print(entity.name)
print(entity.path)
# Determine the provenance of a locally stored file as indicated in Synapse
entity = syn.get('/path/to/file.txt', limitSearch='syn12312')
print(syn.getProvenance(entity))
"""
# If entity is a local file determine the corresponding synapse entity
if isinstance(entity, str) and os.path.isfile(entity):
bundle = self._getFromFile(entity, kwargs.pop('limitSearch', None))
kwargs['downloadFile'] = False
kwargs['path'] = entity
elif isinstance(entity, str) and not utils.is_synapse_id(entity):
raise SynapseFileNotFoundError(
                ('The parameter %s is neither a local file path'
                 ' nor a valid entity id' % entity)
)
        # entities that have not yet been saved
elif isinstance(entity, Entity) and not entity.get('id'):
raise ValueError(
"Cannot retrieve entity that has not been saved."
" Please use syn.store() to save your entity and try again."
)
else:
version = kwargs.get('version', None)
bundle = self._getEntityBundle(entity, version)
# Check and warn for unmet access requirements
self._check_entity_restrictions(bundle, entity, kwargs.get('downloadFile', True))
return self._getWithEntityBundle(entityBundle=bundle, entity=entity, **kwargs)
def _check_entity_restrictions(self, bundle, entity, downloadFile):
restrictionInformation = bundle['restrictionInformation']
if restrictionInformation['hasUnmetAccessRequirement']:
warning_message = ("\nThis entity has access restrictions. Please visit the web page for this entity "
"(syn.onweb(\"%s\")). Click the downward pointing arrow next to the file's name to "
"review and fulfill its download requirement(s).\n" % id_of(entity))
if downloadFile and bundle.get('entityType') not in ('project', 'folder'):
raise SynapseUnmetAccessRestrictions(warning_message)
warnings.warn(warning_message)
def _getFromFile(self, filepath, limitSearch=None):
"""
Gets a Synapse entityBundle based on the md5 of a local file
See :py:func:`synapseclient.Synapse.get`.
:param filepath: path to local file
:param limitSearch: Limits the places in Synapse where the file is searched for.
"""
results = self.restGET('/entity/md5/%s' % utils.md5_for_file(filepath).hexdigest())['results']
if limitSearch is not None:
# Go through and find the path of every entity found
paths = [self.restGET('/entity/%s/path' % ent['id']) for ent in results]
# Filter out all entities whose path does not contain limitSearch
results = [ent for ent, path in zip(results, paths) if
utils.is_in_path(limitSearch, path)]
if len(results) == 0: # None found
raise SynapseFileNotFoundError('File %s not found in Synapse' % (filepath,))
elif len(results) > 1:
id_txts = '\n'.join(['%s.%i' % (r['id'], r['versionNumber']) for r in results])
self.logger.warning('\nThe file %s is associated with many files in Synapse:\n%s\n'
'You can limit to files in specific project or folder by setting the limitSearch to the'
' synapse Id of the project or folder.\n'
'Will use the first one returned: \n'
'%s version %i\n' % (filepath, id_txts, results[0]['id'], results[0]['versionNumber']))
entity = results[0]
bundle = self._getEntityBundle(entity, version=entity['versionNumber'])
self.cache.add(bundle['entity']['dataFileHandleId'], filepath)
return bundle
def move(self, entity, new_parent):
"""
Move a Synapse entity to a new container.
:param entity: A Synapse ID, a Synapse Entity object, or a local file that is stored in Synapse
:param new_parent: The new parent container (Folder or Project) to which the entity should be moved.
:returns: The Synapse Entity object that has been moved.
Example::
entity = syn.move('syn456', 'syn123')
"""
entity = self.get(entity, downloadFile=False)
entity.parentId = id_of(new_parent)
entity = self.store(entity, forceVersion=False)
return entity
def _getWithEntityBundle(self, entityBundle, entity=None, **kwargs):
"""
Creates a :py:mod:`synapseclient.Entity` from an entity bundle returned by Synapse.
An existing Entity can be supplied in case we want to refresh a stale Entity.
:param entityBundle: Uses the given dictionary as the meta information of the Entity to get
:param entity: Optional, entity whose local state will be copied into the returned entity
:param submission: Optional, access associated files through a submission rather than
through an entity.
See :py:func:`synapseclient.Synapse.get`.
See :py:func:`synapseclient.Synapse._getEntityBundle`.
See :py:mod:`synapseclient.Entity`.
"""
# Note: This version overrides the version of 'entity' (if the object is Mappable)
kwargs.pop('version', None)
downloadFile = kwargs.pop('downloadFile', True)
downloadLocation = kwargs.pop('downloadLocation', None)
ifcollision = kwargs.pop('ifcollision', None)
submission = kwargs.pop('submission', None)
followLink = kwargs.pop('followLink', False)
path = kwargs.pop('path', None)
        # make sure the user didn't accidentally pass a kwarg that we don't handle
if kwargs: # if there are remaining items in the kwargs
raise TypeError('Unexpected **kwargs: %r' % kwargs)
# If Link, get target ID entity bundle
if entityBundle['entity']['concreteType'] == 'org.sagebionetworks.repo.model.Link' and followLink:
targetId = entityBundle['entity']['linksTo']['targetId']
targetVersion = entityBundle['entity']['linksTo'].get('targetVersionNumber')
entityBundle = self._getEntityBundle(targetId, targetVersion)
# TODO is it an error to specify both downloadFile=False and downloadLocation?
# TODO this matters if we want to return already cached files when downloadFile=False
# Make a fresh copy of the Entity
local_state = entity.local_state() if entity and isinstance(entity, Entity) else {}
if path is not None:
local_state['path'] = path
properties = entityBundle['entity']
annotations = from_synapse_annotations(entityBundle['annotations'])
entity = Entity.create(properties, annotations, local_state)
# Handle download of fileEntities
if isinstance(entity, File):
# update the entity with FileHandle metadata
file_handle = next((handle for handle in entityBundle['fileHandles']
if handle['id'] == entity.dataFileHandleId), None)
entity._update_file_handle(file_handle)
if downloadFile:
if file_handle:
self._download_file_entity(downloadLocation, entity, ifcollision, submission)
else: # no filehandle means that we do not have DOWNLOAD permission
warning_message = "WARNING: You have READ permission on this file entity but not DOWNLOAD " \
"permission. The file has NOT been downloaded."
self.logger.warning('\n' + '!'*len(warning_message)+'\n' + warning_message + '\n'
+ '!'*len(warning_message)+'\n')
return entity
def _ensure_download_location_is_directory(self, downloadLocation):
download_dir = os.path.expandvars(os.path.expanduser(downloadLocation))
if os.path.isfile(download_dir):
raise ValueError("Parameter 'downloadLocation' should be a directory, not a file.")
return download_dir
def _download_file_entity(self, downloadLocation, entity, ifcollision, submission):
# set the initial local state
entity.path = None
entity.files = []
entity.cacheDir = None
# check to see if an UNMODIFIED version of the file (since it was last downloaded) already exists
# this location could be either in .synapseCache or a user specified location to which the user previously
# downloaded the file
cached_file_path = self.cache.get(entity.dataFileHandleId, downloadLocation)
# location in .synapseCache where the file would be corresponding to its FileHandleId
synapseCache_location = self.cache.get_cache_dir(entity.dataFileHandleId)
file_name = entity._file_handle.fileName if cached_file_path is None else os.path.basename(cached_file_path)
# Decide the best download location for the file
if downloadLocation is not None:
# Make sure the specified download location is a fully resolved directory
downloadLocation = self._ensure_download_location_is_directory(downloadLocation)
elif cached_file_path is not None:
# file already cached so use that as the download location
downloadLocation = os.path.dirname(cached_file_path)
else:
# file not cached and no user-specified location so default to .synapseCache
downloadLocation = synapseCache_location
# resolve file path collisions by either overwriting, renaming, or not downloading, depending on the
# ifcollision value
downloadPath = self._resolve_download_path_collisions(downloadLocation, file_name, ifcollision,
synapseCache_location, cached_file_path)
if downloadPath is None:
return
if cached_file_path is not None: # copy from cache
if downloadPath != cached_file_path:
                # create the folder if it does not already exist
if not os.path.exists(downloadLocation):
os.makedirs(downloadLocation)
shutil.copy(cached_file_path, downloadPath)
else: # download the file from URL (could be a local file)
objectType = 'FileEntity' if submission is None else 'SubmissionAttachment'
objectId = entity['id'] if submission is None else submission
# reassign downloadPath because if url points to local file (e.g. file://~/someLocalFile.txt)
# it won't be "downloaded" and, instead, downloadPath will just point to '~/someLocalFile.txt'
# _downloadFileHandle may also return None to indicate that the download failed
downloadPath = self._downloadFileHandle(entity.dataFileHandleId, objectId, objectType, downloadPath)
if downloadPath is None or not os.path.exists(downloadPath):
return
# converts the path format from forward slashes back to backward slashes on Windows
entity.path = os.path.normpath(downloadPath)
entity.files = [os.path.basename(downloadPath)]
entity.cacheDir = os.path.dirname(downloadPath)
def _resolve_download_path_collisions(self, downloadLocation, file_name, ifcollision, synapseCache_location,
cached_file_path):
# always overwrite if we are downloading to .synapseCache
if utils.normalize_path(downloadLocation) == synapseCache_location:
if ifcollision is not None:
self.logger.warning('\n' + '!'*50+'\nifcollision=' + ifcollision
                                    + ' is being IGNORED because the download destination is Synapse\'s cache.'
' Instead, the behavior is "overwrite.local". \n'+'!'*50+'\n')
ifcollision = 'overwrite.local'
        # if ifcollision is not specified, default to keep.both
ifcollision = ifcollision or 'keep.both'
downloadPath = utils.normalize_path(os.path.join(downloadLocation, file_name))
        # resolve collisions
if os.path.exists(downloadPath):
if ifcollision == "overwrite.local":
pass
elif ifcollision == "keep.local":
# Don't want to overwrite the local file.
return None
elif ifcollision == "keep.both":
if downloadPath != cached_file_path:
return utils.unique_filename(downloadPath)
else:
raise ValueError('Invalid parameter: "%s" is not a valid value '
'for "ifcollision"' % ifcollision)
return downloadPath
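    # Collision handling summary (derived from the logic above):
    #   overwrite.local -> keep downloadPath, replacing the existing file
    #   keep.local      -> return None so the download is skipped
    #   keep.both       -> pick a unique filename, unless the existing file
    #                      is already the cached copy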
def store(self, obj, *, createOrUpdate=True, forceVersion=True, versionLabel=None, isRestricted=False,
activity=None, used=None, executed=None, activityName=None, activityDescription=None):
"""
Creates a new Entity or updates an existing Entity, uploading any files in the process.
:param obj: A Synapse Entity, Evaluation, or Wiki
:param used: The Entity, Synapse ID, or URL used to create the object (can also be a list of
these)
:param executed: The Entity, Synapse ID, or URL representing code executed to create the object
(can also be a list of these)
:param activity: Activity object specifying the user's provenance.
:param activityName: Activity name to be used in conjunction with *used* and *executed*.
:param activityDescription: Activity description to be used in conjunction with *used* and *executed*.
:param createOrUpdate: Indicates whether the method should automatically perform an update if the 'obj'
conflicts with an existing Synapse object. Defaults to True.
:param forceVersion: Indicates whether the method should increment the version of the object even if
nothing has changed. Defaults to True.
:param versionLabel: Arbitrary string used to label the version.
:param isRestricted: If set to true, an email will be sent to the Synapse access control team to start
the process of adding terms-of-use or review board approval for this entity.
You will be contacted with regards to the specific data being restricted and the
requirements of access.
:returns: A Synapse Entity, Evaluation, or Wiki
Example::
from synapseclient import Project
project = Project('My uniquely named project')
project = syn.store(project)
Adding files with `provenance <Activity.html>`_::
from synapseclient import File, Activity
# A synapse entity *syn1906480* contains data
# entity *syn1917825* contains code
activity = Activity(
'Fancy Processing',
description='No seriously, really fancy processing',
used=['syn1906480', 'http://data_r_us.com/fancy/data.txt'],
executed='syn1917825')
test_entity = File('/path/to/data/file.xyz', description='Fancy new data', parent=project)
test_entity = syn.store(test_entity, activity=activity)
"""
# _before_store hook
# give objects a chance to do something before being stored
if hasattr(obj, '_before_synapse_store'):
obj._before_synapse_store(self)
# _synapse_store hook
# for objects that know how to store themselves
if hasattr(obj, '_synapse_store'):
obj = obj._synapse_store(self)
return self._apply_provenance(
obj,
activity=activity,
used=used,
executed=executed,
activityName=activityName,
activityDescription=activityDescription,
)
# Handle all non-Entity objects
if not (isinstance(obj, Entity) or type(obj) == dict):
if isinstance(obj, Wiki):
return self._storeWiki(obj, createOrUpdate)
if 'id' in obj: # If ID is present, update
return type(obj)(**self.restPUT(obj.putURI(), obj.json()))
try: # If no ID is present, attempt to POST the object
return type(obj)(**self.restPOST(obj.postURI(), obj.json()))
except SynapseHTTPError as err:
# If already present and we want to update attempt to get the object content
if createOrUpdate and err.response.status_code == 409:
newObj = self.restGET(obj.getByNameURI(obj.name))
newObj.update(obj)
obj = type(obj)(**newObj)
obj.update(self.restPUT(obj.putURI(), obj.json()))
return obj
raise
# If the input object is an Entity or a dictionary
entity = obj
properties, annotations, local_state = split_entity_namespaces(entity)
bundle = None
# Explicitly set an empty versionComment property if none is supplied,
# otherwise an existing entity bundle's versionComment will be copied to the update.
properties['versionComment'] = properties['versionComment'] if 'versionComment' in properties else None
# Anything with a path is treated as a cache-able item
if entity.get('path', False):
if 'concreteType' not in properties:
properties['concreteType'] = File._synapse_entity_type
# Make sure the path is fully resolved
entity['path'] = os.path.expanduser(entity['path'])
# Check if the File already exists in Synapse by fetching metadata on it
bundle = self._getEntityBundle(entity)
if bundle:
if createOrUpdate:
# update our properties from the existing bundle so that we have
# enough to process this as an entity update.
properties = {**bundle['entity'], **properties}
# Check if the file should be uploaded
fileHandle = find_data_file_handle(bundle)
if fileHandle \
and fileHandle['concreteType'] == "org.sagebionetworks.repo.model.file.ExternalFileHandle":
# switching away from ExternalFileHandle or the url was updated
needs_upload = entity['synapseStore'] or (fileHandle['externalURL'] != entity['externalURL'])
else:
# Check if we need to upload a new version of an existing
# file. If the file referred to by entity['path'] has been
# modified, we want to upload the new version.
                # If synapseStore is false then we must upload an ExternalFileHandle
needs_upload = not entity['synapseStore'] \
or not self.cache.contains(bundle['entity']['dataFileHandleId'], entity['path'])
elif entity.get('dataFileHandleId', None) is not None:
needs_upload = False
else:
needs_upload = True
if needs_upload:
local_state_fh = local_state.get('_file_handle', {})
synapseStore = local_state.get('synapseStore', True)
fileHandle = upload_file_handle(self,
entity['parentId'],
local_state['path'] if (synapseStore
or local_state_fh.get('externalURL') is None)
else local_state_fh.get('externalURL'),
synapseStore=synapseStore,
md5=local_state_fh.get('contentMd5'),
file_size=local_state_fh.get('contentSize'),
mimetype=local_state_fh.get('contentType'),
max_threads=self.max_threads)
properties['dataFileHandleId'] = fileHandle['id']
local_state['_file_handle'] = fileHandle
elif 'dataFileHandleId' not in properties:
# Handle the case where the Entity lacks an ID
# But becomes an update() due to conflict
properties['dataFileHandleId'] = bundle['entity']['dataFileHandleId']
# update the file_handle metadata if the FileEntity's FileHandle id has changed
local_state_fh_id = local_state.get('_file_handle', {}).get('id')
if local_state_fh_id and properties['dataFileHandleId'] != local_state_fh_id:
local_state['_file_handle'] = find_data_file_handle(
self._getEntityBundle(properties['id'], requestedObjects={'includeEntity': True,
'includeFileHandles': True})
)
# check if we already have the filehandleid cached somewhere
cached_path = self.cache.get(properties['dataFileHandleId'])
if cached_path is None:
local_state['path'] = None
local_state['cacheDir'] = None
local_state['files'] = []
else:
local_state['path'] = cached_path
local_state['cacheDir'] = os.path.dirname(cached_path)
local_state['files'] = [os.path.basename(cached_path)]
# Create or update Entity in Synapse
if 'id' in properties:
properties = self._updateEntity(properties, forceVersion, versionLabel)
else:
# If Link, get the target name, version number and concrete type and store in link properties
if properties['concreteType'] == "org.sagebionetworks.repo.model.Link":
target_properties = self._getEntity(properties['linksTo']['targetId'],
version=properties['linksTo'].get('targetVersionNumber'))
if target_properties['parentId'] == properties['parentId']:
raise ValueError("Cannot create a Link to an entity under the same parent.")
properties['linksToClassName'] = target_properties['concreteType']
if target_properties.get('versionNumber') is not None \
and properties['linksTo'].get('targetVersionNumber') is not None:
properties['linksTo']['targetVersionNumber'] = target_properties['versionNumber']
properties['name'] = target_properties['name']
try:
properties = self._createEntity(properties)
except SynapseHTTPError as ex:
if createOrUpdate and ex.response.status_code == 409:
# Get the existing Entity's ID via the name and parent
existing_entity_id = self.findEntityId(properties['name'], properties.get('parentId', None))
if existing_entity_id is None:
raise
# get existing properties and annotations
if not bundle:
bundle = self._getEntityBundle(existing_entity_id,
requestedObjects={'includeEntity': True,
'includeAnnotations': True})
properties = {**bundle['entity'], **properties}
# we additionally merge the annotations under the assumption that a missing annotation
                    # from a resolved conflict represents a newer annotation that should be preserved
# rather than an intentionally deleted annotation.
annotations = {**from_synapse_annotations(bundle['annotations']), **annotations}
properties = self._updateEntity(properties, forceVersion, versionLabel)
else:
raise
# Deal with access restrictions
if isRestricted:
self._createAccessRequirementIfNone(properties)
# Update annotations
if not bundle or check_annotations_changed(bundle['annotations'], annotations):
annotations = self.set_annotations(Annotations(properties['id'], properties['etag'], annotations))
properties['etag'] = annotations.etag
properties = self._apply_provenance(
properties,
activity=activity,
used=used,
executed=executed,
activityName=activityName,
activityDescription=activityDescription,
)
# Return the updated Entity object
entity = Entity.create(properties, annotations, local_state)
return self.get(entity, downloadFile=False)
def _apply_provenance(
self,
entity,
activity=None,
used=None,
executed=None,
activityName=None,
activityDescription=None
):
# apply any provenance passed to via the store method to the entity
# If the parameters 'used' or 'executed' are given, create an Activity object
if used or executed:
if activity is not None:
raise SynapseProvenanceError(
'Provenance can be specified as an Activity object or as used/executed'
' item(s), but not both.'
)
activity = Activity(name=activityName, description=activityDescription, used=used, executed=executed)
# If we have an Activity, set it as the Entity's provenance record
if activity:
self.setProvenance(entity, activity)
# 'etag' has changed, so get the new Entity
entity = self._getEntity(entity)
return entity
def _createAccessRequirementIfNone(self, entity):
"""
Checks to see if the given entity has access requirements.
If not, then one is added
"""
existingRestrictions = self.restGET('/entity/%s/accessRequirement?offset=0&limit=1' % id_of(entity))
if len(existingRestrictions['results']) <= 0:
self.restPOST('/entity/%s/lockAccessRequirement' % id_of(entity), body="")
def _getEntityBundle(self, entity, version=None, requestedObjects=None):
"""
Gets some information about the Entity.
:parameter entity: a Synapse Entity or Synapse ID
:parameter version: the entity's version (defaults to None meaning most recent version)
:parameter requestedObjects: A dict indicating settings for what to include
default value for requestedObjects is::
requestedObjects = {'includeEntity': True,
'includeAnnotations': True,
'includeFileHandles': True,
'includeRestrictionInformation': True}
Keys available for requestedObjects::
includeEntity
includeAnnotations
includePermissions
includeEntityPath
includeHasChildren
includeAccessControlList
includeFileHandles
includeTableBundle
includeRootWikiId
includeBenefactorACL
includeDOIAssociation
includeFileName
includeThreadCount
includeRestrictionInformation
Keys with values set to False may simply be omitted.
For example, we might ask for an entity bundle containing file handles, annotations, and properties::
            requested_objects = {'includeEntity': True,
                                 'includeAnnotations': True,
                                 'includeFileHandles': True}
            bundle = syn._getEntityBundle('syn111111', requestedObjects=requested_objects)
:returns: An EntityBundle with the requested fields or by default Entity header, annotations, unmet access
requirements, and file handles
"""
# If 'entity' is given without an ID, try to find it by 'parentId' and 'name'.
# Use case:
# If the user forgets to catch the return value of a syn.store(e)
# this allows them to recover by doing: e = syn.get(e)
if requestedObjects is None:
requestedObjects = {'includeEntity': True,
'includeAnnotations': True,
'includeFileHandles': True,
'includeRestrictionInformation': True}
if isinstance(entity, collections.abc.Mapping) and 'id' not in entity and 'name' in entity:
entity = self.findEntityId(entity['name'], entity.get('parentId', None))
# Avoid an exception from finding an ID from a NoneType
try:
id_of(entity)
except ValueError:
return None
if version is not None:
uri = f'/entity/{id_of(entity)}/version/{int(version):d}/bundle2'
else:
uri = f'/entity/{id_of(entity)}/bundle2'
bundle = self.restPOST(uri, body=json.dumps(requestedObjects))
return bundle
def delete(self, obj, version=None):
"""
Removes an object from Synapse.
:param obj: An existing object stored on Synapse such as
Evaluation, File, Project, or Wiki
:param version: For entities, specify a particular version to
delete.
"""
# Handle all strings as the Entity ID for backward compatibility
if isinstance(obj, str):
if version:
self.restDELETE(uri=f'/entity/{id_of(obj)}/version/{version}')
else:
self.restDELETE(uri=f'/entity/{id_of(obj)}')
elif hasattr(obj, "_synapse_delete"):
return obj._synapse_delete(self)
else:
try:
if isinstance(obj, Versionable):
self.restDELETE(obj.deleteURI(versionNumber=version))
else:
self.restDELETE(obj.deleteURI())
except AttributeError:
raise SynapseError(f"Can't delete a {type(obj)}. Please specify a Synapse object or id")
_user_name_cache = {}
def _get_user_name(self, user_id):
if user_id not in self._user_name_cache:
self._user_name_cache[user_id] = utils.extract_user_name(self.getUserProfile(user_id))
return self._user_name_cache[user_id]
def _list(self, parent, recursive=False, long_format=False, show_modified=False, indent=0, out=sys.stdout):
"""
List child objects of the given parent, recursively if requested.
"""
fields = ['id', 'name', 'nodeType']
if long_format:
fields.extend(['createdByPrincipalId', 'createdOn', 'versionNumber'])
if show_modified:
fields.extend(['modifiedByPrincipalId', 'modifiedOn'])
results = self.getChildren(parent)
results_found = False
for result in results:
results_found = True
fmt_fields = {'name': result['name'],
'id': result['id'],
'padding': ' ' * indent,
'slash_or_not': '/' if is_container(result) else ''}
fmt_string = "{id}"
if long_format:
fmt_fields['createdOn'] = utils.iso_to_datetime(result['createdOn'])\
.strftime("%Y-%m-%d %H:%M")
fmt_fields['createdBy'] = self._get_user_name(result['createdBy'])[:18]
fmt_fields['version'] = result['versionNumber']
fmt_string += " {version:3} {createdBy:>18} {createdOn}"
if show_modified:
fmt_fields['modifiedOn'] = utils.iso_to_datetime(result['modifiedOn'])\
.strftime("%Y-%m-%d %H:%M")
fmt_fields['modifiedBy'] = self._get_user_name(result['modifiedBy'])[:18]
fmt_string += " {modifiedBy:>18} {modifiedOn}"
fmt_string += " {padding}{name}{slash_or_not}\n"
out.write(fmt_string.format(**fmt_fields))
if (indent == 0 or recursive) and is_container(result):
self._list(result['id'], recursive=recursive, long_format=long_format,
show_modified=show_modified, indent=indent+2, out=out)
if indent == 0 and not results_found:
out.write('No results visible to {username} found for id {id}\n'.format(username=self.credentials.username,
id=id_of(parent)))
def uploadFileHandle(self, path, parent, synapseStore=True, mimetype=None, md5=None, file_size=None):
"""Uploads the file in the provided path (if necessary) to a storage location based on project settings.
Returns a new FileHandle as a dict to represent the stored file.
:param parent: parent of the entity to which we upload.
:param path: file path to the file being uploaded
:param synapseStore: If False, will not upload the file, but instead create an ExternalFileHandle that
references the file on the local machine.
If True, will upload the file based on StorageLocation determined by the
entity_parent_id
:param mimetype: The MIME type metadata for the uploaded file
:param md5: The MD5 checksum for the file, if known. Otherwise if the file is a local file, it will
be calculated automatically.
        :param file_size: The size of the file, if known. Otherwise if the file is a local file, it will be
                             calculated automatically.
        :returns: a dict representing the new FileHandle for the uploaded file
"""
return upload_file_handle(self, parent, path, synapseStore, md5, file_size, mimetype)
############################################################
# Get / Set Annotations #
############################################################
def _getRawAnnotations(self, entity, version=None):
"""
Retrieve annotations for an Entity returning them in the native Synapse format.
"""
# Note: Specifying the version results in a zero-ed out etag,
# even if the version is the most recent.
# See `PLFM-1874 <https://sagebionetworks.jira.com/browse/PLFM-1874>`_ for more details.
if version:
uri = f'/entity/{id_of(entity)}/version/{str(version)}/annotations2'
else:
uri = f'/entity/{id_of(entity)}/annotations2'
return self.restGET(uri)
@deprecated.sphinx.deprecated(version='2.1.0', reason='deprecated and replaced with :py:meth:`get_annotations`')
def getAnnotations(self, entity, version=None):
return self.get_annotations(entity, version=version)
def get_annotations(self, entity: typing.Union[str, Entity], version: typing.Union[str, int] = None) -> Annotations:
"""
Retrieve annotations for an Entity from the Synapse Repository as a Python dict.
Note that collapsing annotations from the native Synapse format to a Python dict may involve some loss of
information. See :py:func:`_getRawAnnotations` to get annotations in the native format.
:param entity: An Entity or Synapse ID to lookup
:param version: The version of the Entity to retrieve.
:returns: A :py:class:`synapseclient.annotations.Annotations` object, \
a dict that also has id and etag attributes
:rtype: :py:class:`synapseclient.annotations.Annotations`
"""
return from_synapse_annotations(self._getRawAnnotations(entity, version))
@deprecated.sphinx.deprecated(version='2.1.0', reason='deprecated and replaced with :py:meth:`set_annotations` '
'This method is UNSAFE and may overwrite existing annotations'
' without confirming that you have retrieved and'
' updated the latest annotations')
def setAnnotations(self, entity, annotations=None, **kwargs):
"""
Store annotations for an Entity in the Synapse Repository.
:param entity: The Entity or Synapse Entity ID whose annotations are to be updated
:param annotations: A dictionary of annotation names and values
:param kwargs: annotation names and values
:returns: the updated annotations for the entity
"""
if not annotations:
annotations = {}
annotations.update(kwargs)
id = id_of(entity)
etag = annotations.etag if hasattr(annotations, 'etag') else annotations.get('etag')
if not etag:
if 'etag' in entity:
etag = entity['etag']
else:
uri = '/entity/%s/annotations2' % id_of(entity)
old_annos = self.restGET(uri)
etag = old_annos['etag']
return self.set_annotations(Annotations(id, etag, annotations))
def set_annotations(self, annotations: Annotations):
"""
Store annotations for an Entity in the Synapse Repository.
:param annotations: A :py:class:`synapseclient.annotations.Annotations` of annotation names and values,
with the id and etag attribute set
:returns: the updated :py:class:`synapseclient.annotations.Annotations` for the entity
Example::
annos = syn.get_annotations('syn123')
# annos will contain the id and etag associated with the entity upon retrieval
print(annos.id)
# syn123
print(annos.etag)
# 7bdb83e9-a50a-46e4-987a-4962559f090f (Usually some UUID in the form of a string)
# returned annos object from get_annotations() can be used as if it were a dict
# set key 'foo' to have value of 'bar' and 'baz'
annos['foo'] = ['bar', 'baz']
# single values will automatically be wrapped in a list once stored
annos['qwerty'] = 'asdf'
# store the annotations
annos = syn.set_annotations(annos)
print(annos)
            # {'foo':['bar','baz'], 'qwerty':['asdf']}
"""
if not isinstance(annotations, Annotations):
raise TypeError("Expected a synapseclient.Annotations object")
synapseAnnos = to_synapse_annotations(annotations)
return from_synapse_annotations(self.restPUT(f'/entity/{id_of(annotations)}/annotations2',
body=json.dumps(synapseAnnos)))
############################################################
# Querying #
############################################################
def getChildren(self, parent, includeTypes=["folder", "file", "table", "link", "entityview", "dockerrepo"],
sortBy="NAME", sortDirection="ASC"):
"""
Retrieves all of the entities stored within a parent such as folder or project.
:param parent: An id or an object of a Synapse container or None to retrieve all projects
        :param includeTypes:   Must be a list of entity types (i.e. ["folder", "file"]) which can be found here:
http://docs.synapse.org/rest/org/sagebionetworks/repo/model/EntityType.html
:param sortBy: How results should be sorted. Can be NAME, or CREATED_ON
:param sortDirection: The direction of the result sort. Can be ASC, or DESC
:returns: An iterator that shows all the children of the container.
Also see:
- :py:func:`synapseutils.walk`
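        Example (a minimal sketch; 'syn123' is a hypothetical container)::

            for child in syn.getChildren('syn123', includeTypes=['folder', 'file']):
                print(child['id'], child['name'])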
"""
parentId = id_of(parent) if parent is not None else None
entityChildrenRequest = {'parentId': parentId,
'includeTypes': includeTypes,
'sortBy': sortBy,
'sortDirection': sortDirection,
'nextPageToken': None}
entityChildrenResponse = {"nextPageToken": "first"}
while entityChildrenResponse.get('nextPageToken') is not None:
entityChildrenResponse = self.restPOST('/entity/children', body=json.dumps(entityChildrenRequest))
for child in entityChildrenResponse['page']:
yield child
if entityChildrenResponse.get('nextPageToken') is not None:
entityChildrenRequest['nextPageToken'] = entityChildrenResponse['nextPageToken']
def md5Query(self, md5):
"""
Find the Entities which have attached file(s) which have the given MD5 hash.
:param md5: The MD5 to query for (hexadecimal string)
:returns: A list of Entity headers
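        Example (a minimal sketch; the hash shown is hypothetical)::

            for header in syn.md5Query('5b4f9aae07f4f5853ba0dc4a542d5b4f'):
                print(header['id'])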
"""
return self.restGET('/entity/md5/%s' % md5)['results']
############################################################
# ACL manipulation #
############################################################
def _getBenefactor(self, entity):
"""An Entity gets its ACL from its benefactor."""
if utils.is_synapse_id(entity) or is_synapse_entity(entity):
return self.restGET('/entity/%s/benefactor' % id_of(entity))
return entity
def _getACL(self, entity):
"""Get the effective ACL for a Synapse Entity."""
if hasattr(entity, 'getACLURI'):
uri = entity.getACLURI()
else:
# Get the ACL from the benefactor (which may be the entity itself)
benefactor = self._getBenefactor(entity)
uri = '/entity/%s/acl' % (benefactor['id'])
return self.restGET(uri)
def _storeACL(self, entity, acl):
"""
Create or update the ACL for a Synapse Entity.
:param entity: An entity or Synapse ID
:param acl: An ACl as a dict
:returns: the new or updated ACL
.. code-block:: python
{'resourceAccess': [
{'accessType': ['READ'],
'principalId': 222222}
]}
"""
if hasattr(entity, 'putACLURI'):
return self.restPUT(entity.putACLURI(), json.dumps(acl))
else:
# Get benefactor. (An entity gets its ACL from its benefactor.)
entity_id = id_of(entity)
uri = '/entity/%s/benefactor' % entity_id
benefactor = self.restGET(uri)
# Update or create new ACL
uri = '/entity/%s/acl' % entity_id
if benefactor['id'] == entity_id:
return self.restPUT(uri, json.dumps(acl))
else:
return self.restPOST(uri, json.dumps(acl))
def _getUserbyPrincipalIdOrName(self, principalId=None):
"""
        Given a string, int, or None, finds the corresponding user, where None implies PUBLIC
:param principalId: Identifier of a user or group
:returns: The integer ID of the user
"""
if principalId is None or principalId == 'PUBLIC':
return PUBLIC
try:
return int(principalId)
# If principalId is not a number assume it is a name or email
except ValueError:
userProfiles = self.restGET('/userGroupHeaders?prefix=%s' % principalId)
totalResults = len(userProfiles['children'])
if totalResults == 1:
return int(userProfiles['children'][0]['ownerId'])
elif totalResults > 1:
for profile in userProfiles['children']:
if profile['userName'] == principalId:
return int(profile['ownerId'])
supplementalMessage = 'Please be more specific' if totalResults > 1 else 'No matches'
raise SynapseError('Unknown Synapse user (%s). %s.' % (principalId, supplementalMessage))
def getPermissions(self, entity, principalId=None):
"""Get the permissions that a user or group has on an Entity.
:param entity: An Entity or Synapse ID to lookup
:param principalId: Identifier of a user or group (defaults to PUBLIC users)
:returns: An array containing some combination of
['READ', 'CREATE', 'UPDATE', 'DELETE', 'CHANGE_PERMISSIONS', 'DOWNLOAD']
or an empty array
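        Example (a minimal sketch; the entity ID is hypothetical)::

            # permissions granted to PUBLIC users on the entity
            print(syn.getPermissions('syn123'))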
"""
# TODO: what if user has permissions by membership in a group?
principalId = self._getUserbyPrincipalIdOrName(principalId)
acl = self._getACL(entity)
for permissions in acl['resourceAccess']:
if 'principalId' in permissions and permissions['principalId'] == int(principalId):
return permissions['accessType']
return []
def setPermissions(self, entity, principalId=None, accessType=['READ', 'DOWNLOAD'], modify_benefactor=False,
warn_if_inherits=True, overwrite=True):
"""
Sets permission that a user or group has on an Entity.
An Entity may have its own ACL or inherit its ACL from a benefactor.
:param entity: An Entity or Synapse ID to modify
:param principalId: Identifier of a user or group
:param accessType: Type of permission to be granted. One or more of CREATE, READ, DOWNLOAD, UPDATE,
DELETE, CHANGE_PERMISSIONS
:param modify_benefactor: Set as True when modifying a benefactor's ACL
        :param warn_if_inherits: Set as False to suppress the warning that is logged when creating a new ACL for an
                                 Entity that formerly inherited its ACL from a benefactor
:param overwrite: By default this function overwrites existing permissions for the specified user.
Set this flag to False to add new permissions non-destructively.
:returns: an Access Control List object
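        Example (a minimal sketch; the entity and principal IDs are hypothetical)::

            # grant READ and DOWNLOAD to a user without clobbering their existing permissions
            syn.setPermissions('syn123', principalId=222222,
                               accessType=['READ', 'DOWNLOAD'], overwrite=False)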
"""
benefactor = self._getBenefactor(entity)
if benefactor['id'] != id_of(entity):
if modify_benefactor:
entity = benefactor
elif warn_if_inherits:
self.logger.warning('Creating an ACL for entity %s, which formerly inherited access control from a'
' benefactor entity, "%s" (%s).\n'
% (id_of(entity), benefactor['name'], benefactor['id']))
acl = self._getACL(entity)
principalId = self._getUserbyPrincipalIdOrName(principalId)
# Find existing permissions
permissions_to_update = None
for permissions in acl['resourceAccess']:
if 'principalId' in permissions and permissions['principalId'] == principalId:
permissions_to_update = permissions
break
if accessType is None or accessType == []:
# remove permissions
if permissions_to_update and overwrite:
acl['resourceAccess'].remove(permissions_to_update)
else:
# add a 'resourceAccess' entry, if necessary
if not permissions_to_update:
permissions_to_update = {u'accessType': [], u'principalId': principalId}
acl['resourceAccess'].append(permissions_to_update)
if overwrite:
permissions_to_update['accessType'] = accessType
else:
permissions_to_update['accessType'] = list(set(permissions_to_update['accessType']) | set(accessType))
return self._storeACL(entity, acl)
############################################################
# Provenance #
############################################################
# TODO: rename these to Activity
def getProvenance(self, entity, version=None):
"""
Retrieve provenance information for a Synapse Entity.
:param entity: An Entity or Synapse ID to lookup
:param version: The version of the Entity to retrieve.
Gets the most recent version if omitted
:returns: An Activity object or
raises exception if no provenance record exists
"""
# Get versionNumber from Entity
if version is None and 'versionNumber' in entity:
version = entity['versionNumber']
if version:
uri = '/entity/%s/version/%d/generatedBy' % (id_of(entity), version)
else:
uri = '/entity/%s/generatedBy' % id_of(entity)
return Activity(data=self.restGET(uri))
def setProvenance(self, entity, activity):
"""
Stores a record of the code and data used to derive a Synapse entity.
:param entity: An Entity or Synapse ID to modify
:param activity: a :py:class:`synapseclient.activity.Activity`
:returns: An updated :py:class:`synapseclient.activity.Activity` object
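        Example (a minimal sketch; the IDs and URL are hypothetical)::

            from synapseclient import Activity
            activity = Activity(name='clustering', used=['syn456'],
                                executed=['https://github.com/example/analysis'])
            activity = syn.setProvenance('syn123', activity)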
"""
        # Save the activity, creating or updating it as necessary
        activity = self._saveActivity(activity)
        # Assert that the entity was generated by the given activity
uri = '/entity/%s/generatedBy?generatedBy=%s' % (id_of(entity), activity['id'])
activity = Activity(data=self.restPUT(uri))
return activity
def deleteProvenance(self, entity):
"""
Removes provenance information from an Entity and deletes the associated Activity.
:param entity: An Entity or Synapse ID to modify
"""
activity = self.getProvenance(entity)
if not activity:
return
uri = '/entity/%s/generatedBy' % id_of(entity)
self.restDELETE(uri)
# TODO: what happens if the activity is shared by more than one entity?
uri = '/activity/%s' % activity['id']
self.restDELETE(uri)
def _saveActivity(self, activity):
if 'id' in activity:
# We're updating provenance
uri = '/activity/%s' % activity['id']
activity = Activity(data=self.restPUT(uri, json.dumps(activity)))
else:
activity = self.restPOST('/activity', body=json.dumps(activity))
return activity
def updateActivity(self, activity):
"""
Modifies an existing Activity.
:param activity: The Activity to be updated.
:returns: An updated Activity object
"""
if 'id' not in activity:
raise ValueError("The activity you want to update must exist on Synapse")
return self._saveActivity(activity)
def _convertProvenanceList(self, usedList, limitSearch=None):
"""Convert a list of synapse Ids, URLs and local files by replacing local files with Synapse Ids"""
if usedList is None:
return None
        usedList = [self.get(target, limitSearch=limitSearch)
                    if (isinstance(target, str) and os.path.isfile(target))
                    else target
                    for target in usedList]
return usedList
############################################################
# File handle service calls #
############################################################
def _getFileHandleDownload(self, fileHandleId, objectId, objectType=None):
"""
Gets the URL and the metadata as filehandle object for a filehandle or fileHandleId
:param fileHandleId: ID of fileHandle to download
:param objectId: The ID of the object associated with the file e.g. syn234
:param objectType: Type of object associated with a file e.g. FileEntity, TableEntity
:returns: dictionary with keys: fileHandle, fileHandleId and preSignedURL
"""
body = {'includeFileHandles': True, 'includePreSignedURLs': True,
'requestedFiles': [{'fileHandleId': fileHandleId,
'associateObjectId': objectId,
'associateObjectType': objectType or 'FileEntity'}]}
response = self.restPOST('/fileHandle/batch', body=json.dumps(body),
endpoint=self.fileHandleEndpoint)
result = response['requestedFiles'][0]
failure = result.get('failureCode')
if failure == 'NOT_FOUND':
raise SynapseFileNotFoundError("The fileHandleId %s could not be found" % fileHandleId)
elif failure == "UNAUTHORIZED":
raise SynapseError(
"You are not authorized to access fileHandleId %s associated with the Synapse"
" %s: %s" % (fileHandleId, objectType, objectId)
)
return result
@staticmethod
def _is_retryable_download_error(ex):
# some exceptions caught during download indicate non-recoverable situations that
# will not be remedied by a repeated download attempt.
return not (
(isinstance(ex, OSError) and ex.errno == errno.ENOSPC) or # out of disk space
isinstance(ex, SynapseMd5MismatchError)
)
def _downloadFileHandle(self, fileHandleId, objectId, objectType, destination, retries=5):
"""
Download a file from the given URL to the local file system.
:param fileHandleId: id of the FileHandle to download
:param objectId: id of the Synapse object that uses the FileHandle e.g. "syn123"
:param objectType: type of the Synapse object that uses the FileHandle e.g. "FileEntity"
:param destination: destination on local file system
:param retries: (default=5) Number of download retries attempted before throwing an exception.
:returns: path to downloaded file
"""
os.makedirs(os.path.dirname(destination), exist_ok=True)
while retries > 0:
try:
fileResult = self._getFileHandleDownload(fileHandleId, objectId, objectType)
fileHandle = fileResult['fileHandle']
concreteType = fileHandle['concreteType']
storageLocationId = fileHandle.get('storageLocationId')
if concreteType == concrete_types.EXTERNAL_OBJECT_STORE_FILE_HANDLE:
profile = self._get_client_authenticated_s3_profile(fileHandle['endpointUrl'], fileHandle['bucket'])
downloaded_path = S3ClientWrapper.download_file(fileHandle['bucket'], fileHandle['endpointUrl'],
fileHandle['fileKey'], destination,
profile_name=profile,
show_progress=not self.silent)
elif sts_transfer.is_boto_sts_transfer_enabled(self) and \
sts_transfer.is_storage_location_sts_enabled(self, objectId, storageLocationId) and \
concreteType == concrete_types.S3_FILE_HANDLE:
def download_fn(credentials):
return S3ClientWrapper.download_file(
fileHandle['bucketName'],
None,
fileHandle['key'],
destination,
credentials=credentials,
show_progress=not self.silent,
# pass through our synapse threading config to boto s3
transfer_config_kwargs={'max_concurrency': self.max_threads},
)
downloaded_path = sts_transfer.with_boto_sts_credentials(
download_fn,
self,
objectId,
'read_only',
)
elif self.multi_threaded and \
concreteType == concrete_types.S3_FILE_HANDLE and \
fileHandle.get('contentSize', 0) > multithread_download.SYNAPSE_DEFAULT_DOWNLOAD_PART_SIZE:
# run the download multi threaded if the file supports it, we're configured to do so,
# and the file is large enough that it would be broken into parts to take advantage of
# multiple downloading threads. otherwise it's more efficient to run the download as a simple
# single threaded URL download.
downloaded_path = self._download_from_url_multi_threaded(fileHandleId,
objectId,
objectType,
destination,
expected_md5=fileHandle.get('contentMd5'))
else:
downloaded_path = self._download_from_URL(fileResult['preSignedURL'],
destination,
fileHandle['id'],
expected_md5=fileHandle.get('contentMd5'))
self.cache.add(fileHandle['id'], downloaded_path)
return downloaded_path
except Exception as ex:
if not self._is_retryable_download_error(ex):
raise
exc_info = sys.exc_info()
ex.progress = 0 if not hasattr(ex, 'progress') else ex.progress
self.logger.debug("\nRetrying download on error: [%s] after progressing %i bytes" %
(exc_info[0], ex.progress), exc_info=True) # this will include stack trace
                if ex.progress == 0:  # No progress was made; reduce remaining retries.
retries -= 1
if retries <= 0:
# Re-raise exception
raise
raise Exception("should not reach this line")
def _download_from_url_multi_threaded(self,
file_handle_id,
object_id,
object_type,
destination,
*,
expected_md5=None):
destination = os.path.abspath(destination)
temp_destination = utils.temp_download_filename(destination, file_handle_id)
request = multithread_download.DownloadRequest(file_handle_id=int(file_handle_id),
object_id=object_id,
object_type=object_type,
path=temp_destination)
multithread_download.download_file(self, request)
        if expected_md5:
            # verify the downloaded file matches the expected md5, if one was given
            actual_md5 = utils.md5_for_file(temp_destination).hexdigest()
            if actual_md5 != expected_md5:
try:
os.remove(temp_destination)
except FileNotFoundError:
# file already does not exist. nothing to do
pass
raise SynapseMd5MismatchError(
"Downloaded file {filename}'s md5 {md5} does not match expected MD5 of"
" {expected_md5}".format(
filename=temp_destination, md5=actual_md5, expected_md5=expected_md5
)
)
# once download completed, rename to desired destination
shutil.move(temp_destination, destination)
return destination
def _is_synapse_uri(self, uri):
# check whether the given uri is hosted at the configured synapse repo endpoint
uri_domain = urllib_urlparse.urlparse(uri).netloc
synapse_repo_domain = urllib_urlparse.urlparse(self.repoEndpoint).netloc
return uri_domain.lower() == synapse_repo_domain.lower()
def _download_from_URL(self, url, destination, fileHandleId=None, expected_md5=None):
"""
Download a file from the given URL to the local file system.
:param url: source of download
:param destination: destination on local file system
:param fileHandleId: (optional) if given, the file will be given a temporary name that includes the file
handle id which allows resuming partial downloads of the same file from previous
sessions
:param expected_md5: (optional) if given, check that the MD5 of the downloaded file matched the expected MD5
:returns: path to downloaded file
"""
destination = os.path.abspath(destination)
actual_md5 = None
redirect_count = 0
delete_on_md5_mismatch = True
while redirect_count < REDIRECT_LIMIT:
redirect_count += 1
scheme = urllib_urlparse.urlparse(url).scheme
if scheme == 'file':
delete_on_md5_mismatch = False
destination = utils.file_url_to_path(url, verify_exists=True)
if destination is None:
raise IOError("Local file (%s) does not exist." % url)
break
elif scheme == 'sftp':
username, password = self._getUserCredentials(url)
destination = SFTPWrapper.download_file(url, destination, username, password,
show_progress=not self.silent)
break
elif scheme == 'ftp':
urllib_request.urlretrieve(url, destination)
break
elif scheme == 'http' or scheme == 'https':
# if a partial download exists with the temporary name,
temp_destination = utils.temp_download_filename(destination, fileHandleId)
range_header = {"Range": "bytes={start}-".format(start=os.path.getsize(temp_destination))} \
if os.path.exists(temp_destination) else {}
# pass along synapse auth credentials only if downloading directly from synapse
auth = self.credentials if self._is_synapse_uri(url) else None
response = with_retry(
lambda: self._requests_session.get(
url,
headers=self._generate_headers(range_header),
stream=True,
allow_redirects=False,
auth=auth,
),
verbose=self.debug, **STANDARD_RETRY_PARAMS)
try:
exceptions._raise_for_status(response, verbose=self.debug)
except SynapseHTTPError as err:
if err.response.status_code == 404:
raise SynapseError("Could not download the file at %s" % url)
                    elif err.response.status_code == 416:  # Requested Range Not Satisfiable
# this is a weird error when the client already finished downloading but the loop continues
# When this exception occurs, the range we request is guaranteed to be >= file size so we
# assume that the file has been fully downloaded, rename it to destination file
# and break out of the loop to perform the MD5 check.
# If it fails the user can retry with another download.
shutil.move(temp_destination, destination)
break
raise
# handle redirects
if response.status_code in [301, 302, 303, 307, 308]:
url = response.headers['location']
# don't break, loop again
else:
# get filename from content-disposition, if we don't have it already
if os.path.isdir(destination):
filename = utils.extract_filename(
content_disposition_header=response.headers.get('content-disposition', None),
default_filename=utils.guess_file_name(url))
destination = os.path.join(destination, filename)
# Stream the file to disk
if 'content-length' in response.headers:
toBeTransferred = float(response.headers['content-length'])
else:
toBeTransferred = -1
transferred = 0
# Servers that respect the Range header return 206 Partial Content
if response.status_code == 206:
mode = 'ab'
previouslyTransferred = os.path.getsize(temp_destination)
toBeTransferred += previouslyTransferred
transferred += previouslyTransferred
sig = utils.md5_for_file(temp_destination)
else:
mode = 'wb'
previouslyTransferred = 0
sig = hashlib.md5()
try:
with open(temp_destination, mode) as fd:
t0 = time.time()
for nChunks, chunk in enumerate(response.iter_content(FILE_BUFFER_SIZE)):
fd.write(chunk)
sig.update(chunk)
# the 'content-length' header gives the total number of bytes that will be transferred
# to us len(chunk) cannot be used to track progress because iter_content automatically
# decodes the chunks if the response body is encoded so the len(chunk) could be
                                # different from the total number of bytes we've read from the response body
# response.raw.tell() is the total number of response body bytes transferred over the
# wire so far
transferred = response.raw.tell() + previouslyTransferred
self._print_transfer_progress(
transferred,
toBeTransferred,
'Downloading ',
os.path.basename(destination),
dt=time.time() - t0
)
except Exception as ex: # We will add a progress parameter then push it back to retry.
ex.progress = transferred-previouslyTransferred
raise
# verify that the file was completely downloaded and retry if it is not complete
if toBeTransferred > 0 and transferred < toBeTransferred:
self.logger.warning("\nRetrying download because the connection ended early.\n")
continue
actual_md5 = sig.hexdigest()
# rename to final destination
shutil.move(temp_destination, destination)
break
else:
self.logger.error('Unable to download URLs of type %s' % scheme)
return None
else: # didn't break out of loop
raise SynapseHTTPError('Too many redirects')
if actual_md5 is None: # if md5 not set (should be the case for all except http download)
actual_md5 = utils.md5_for_file(destination).hexdigest()
# check md5 if given
if expected_md5 and actual_md5 != expected_md5:
if delete_on_md5_mismatch and os.path.exists(destination):
os.remove(destination)
raise SynapseMd5MismatchError(
"Downloaded file {filename}'s md5 {md5} does not match expected MD5 of"
" {expected_md5}".format(filename=destination, md5=actual_md5, expected_md5=expected_md5)
)
return destination
def _createExternalFileHandle(self, externalURL, mimetype=None, md5=None, fileSize=None):
"""Create a new FileHandle representing an external URL."""
fileName = externalURL.split('/')[-1]
externalURL = utils.as_url(externalURL)
fileHandle = {
'concreteType': concrete_types.EXTERNAL_FILE_HANDLE,
'fileName': fileName,
'externalURL': externalURL,
'contentMd5': md5,
'contentSize': fileSize
}
if mimetype is None:
(mimetype, enc) = mimetypes.guess_type(externalURL, strict=False)
if mimetype is not None:
fileHandle['contentType'] = mimetype
return self.restPOST('/externalFileHandle', json.dumps(fileHandle), self.fileHandleEndpoint)
def _createExternalObjectStoreFileHandle(self, s3_file_key, file_path, storage_location_id, mimetype=None):
if mimetype is None:
mimetype, enc = mimetypes.guess_type(file_path, strict=False)
file_handle = {
'concreteType': concrete_types.EXTERNAL_OBJECT_STORE_FILE_HANDLE,
'fileKey': s3_file_key,
'fileName': os.path.basename(file_path),
'contentMd5': utils.md5_for_file(file_path).hexdigest(),
'contentSize': os.stat(file_path).st_size,
'storageLocationId': storage_location_id,
'contentType': mimetype
}
return self.restPOST('/externalFileHandle', json.dumps(file_handle), self.fileHandleEndpoint)
def create_external_s3_file_handle(self, bucket_name, s3_file_key, file_path, *,
parent=None, storage_location_id=None, mimetype=None):
"""
Create an external S3 file handle for e.g. a file that has been uploaded directly to
an external S3 storage location.
:param bucket_name: Name of the S3 bucket
:param s3_file_key: S3 key of the uploaded object
:param file_path: Local path of the uploaded file
:param parent: Parent entity to create the file handle in, the file handle will be created
in the default storage location of the parent. Mutually exclusive with
storage_location_id
:param storage_location_id: Explicit storage location id to create the file handle in, mutually exclusive
with parent
:param mimetype: Mimetype of the file, if known
"""
if storage_location_id:
if parent:
raise ValueError("Pass parent or storage_location_id, not both")
elif not parent:
raise ValueError("One of parent or storage_location_id is required")
else:
upload_destination = self._getDefaultUploadDestination(parent)
storage_location_id = upload_destination['storageLocationId']
if mimetype is None:
mimetype, enc = mimetypes.guess_type(file_path, strict=False)
file_handle = {
'concreteType': concrete_types.S3_FILE_HANDLE,
'key': s3_file_key,
'bucketName': bucket_name,
'fileName': os.path.basename(file_path),
'contentMd5': utils.md5_for_file(file_path).hexdigest(),
'contentSize': os.stat(file_path).st_size,
'storageLocationId': storage_location_id,
'contentType': mimetype
}
return self.restPOST('/externalFileHandle/s3', json.dumps(file_handle), endpoint=self.fileHandleEndpoint)
def _get_file_handle_as_creator(self, fileHandle):
"""Retrieve a fileHandle from the fileHandle service.
You must be the creator of the filehandle to use this method. Otherwise, an 403-Forbidden error will be raised
"""
uri = "/fileHandle/%s" % (id_of(fileHandle),)
return self.restGET(uri, endpoint=self.fileHandleEndpoint)
def _deleteFileHandle(self, fileHandle):
"""
Delete the given file handle.
Note: Only the user that created the FileHandle can delete it. Also, a FileHandle cannot be deleted if it is
associated with a FileEntity or WikiPage
"""
uri = "/fileHandle/%s" % (id_of(fileHandle),)
self.restDELETE(uri, endpoint=self.fileHandleEndpoint)
return fileHandle
############################################################
# SFTP #
############################################################
def _getDefaultUploadDestination(self, parent_entity):
return self.restGET('/entity/%s/uploadDestination' % id_of(parent_entity),
endpoint=self.fileHandleEndpoint)
def _getUserCredentials(self, url, username=None, password=None):
"""Get user credentials for a specified URL by either looking in the configFile or querying the user.
:param username: username on server (optionally specified)
:param password: password for authentication on the server (optionally specified)
:returns: tuple of username, password
"""
# Get authentication information from configFile
parsedURL = urllib_urlparse.urlparse(url)
baseURL = parsedURL.scheme+'://'+parsedURL.hostname
config = self.getConfigFile(self.configPath)
if username is None and config.has_option(baseURL, 'username'):
username = config.get(baseURL, 'username')
if password is None and config.has_option(baseURL, 'password'):
password = config.get(baseURL, 'password')
# If I still don't have a username and password prompt for it
if username is None:
username = getpass.getuser() # Default to login name
# Note that if we hit the following line from within nosetests in
# Python 3, we get "TypeError: bad argument type for built-in operation".
# Luckily, this case isn't covered in our test suite!
user = input('Username for %s (%s):' % (baseURL, username))
username = username if user == '' else user
if password is None:
password = getpass.getpass('Password for %s:' % baseURL)
return username, password
############################################
# Project/Folder storage location settings #
############################################
def createStorageLocationSetting(self, storage_type, **kwargs):
"""
Creates an IMMUTABLE storage location based on the specified type.
For each storage_type, the following kwargs should be specified:
ExternalObjectStorage: (S3-like (e.g. AWS S3 or Openstack) bucket not accessed by Synapse)
- endpointUrl: endpoint URL of the S3 service (for example: 'https://s3.amazonaws.com')
- bucket: the name of the bucket to use
ExternalS3Storage: (Amazon S3 bucket accessed by Synapse)
- bucket: the name of the bucket to use
ExternalStorage: (SFTP or FTP storage location not accessed by Synapse)
- url: the base URL for uploading to the external destination
- supportsSubfolders(optional): does the destination support creating subfolders under the base url
(default: false)
ProxyStorage: (a proxy server that controls access to a storage)
- secretKey: The encryption key used to sign all pre-signed URLs used to communicate with the proxy.
- proxyUrl: The HTTPS URL of the proxy used for upload and download.
Optional kwargs for ALL types:
- banner: The optional banner to show every time a file is uploaded
- description: The description to show the user when the user has to choose which upload destination to use
:param storage_type: the type of the StorageLocationSetting to create
:param kwargs: fields necessary for creation of the specified storage_type
:return: a dict of the created StorageLocationSetting
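        Example (a minimal sketch; the bucket name is hypothetical)::

            destination = syn.createStorageLocationSetting('ExternalS3Storage',
                                                           bucket='my-example-bucket')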
"""
upload_type_dict = {"ExternalObjectStorage": "S3",
"ExternalS3Storage": "S3",
"ExternalStorage": "SFTP",
"ProxyStorage": "PROXYLOCAL"}
if storage_type not in upload_type_dict:
raise ValueError("Unknown storage_type: %s", storage_type)
# ProxyStorageLocationSettings has an extra 's' at the end >:(
kwargs['concreteType'] = 'org.sagebionetworks.repo.model.project.' + storage_type + 'LocationSetting' \
+ ('s' if storage_type == 'ProxyStorage' else '')
kwargs['uploadType'] = upload_type_dict[storage_type]
return self.restPOST('/storageLocation', body=json.dumps(kwargs))
def getMyStorageLocationSetting(self, storage_location_id):
"""
Get a StorageLocationSetting by its id.
:param storage_location_id: id of the StorageLocationSetting to retrieve.
The corresponding StorageLocationSetting must have been created by this user.
:return: a dict describing the StorageLocationSetting retrieved by its id
"""
return self.restGET('/storageLocation/%s' % storage_location_id)
def setStorageLocation(self, entity, storage_location_id):
"""
Sets the storage location for a Project or Folder
:param entity: a Project or Folder to which the StorageLocationSetting is set
:param storage_location_id: a StorageLocation id or a list of StorageLocation ids. Pass in None for the default
Synapse storage.
:return: The created or updated settings as a dict
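        Example (a minimal sketch; the project and storage location IDs are hypothetical)::

            setting = syn.setStorageLocation('syn123', 12345)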
"""
if storage_location_id is None:
storage_location_id = DEFAULT_STORAGE_LOCATION_ID
locations = storage_location_id if isinstance(storage_location_id, list) else [storage_location_id]
existing_setting = self.getProjectSetting(entity, 'upload')
if existing_setting is not None:
existing_setting['locations'] = locations
self.restPUT('/projectSettings', body=json.dumps(existing_setting))
return self.getProjectSetting(entity, 'upload')
else:
project_destination = {'concreteType':
'org.sagebionetworks.repo.model.project.UploadDestinationListSetting',
'settingsType': 'upload',
'locations': locations,
'projectId': id_of(entity)
}
return self.restPOST('/projectSettings', body=json.dumps(project_destination))
def getProjectSetting(self, project, setting_type):
"""
Gets the ProjectSetting for a project.
:param project: Project entity or its id as a string
:param setting_type: type of setting. Choose from: {'upload', 'external_sync', 'requester_pays'}
:return: The ProjectSetting as a dict or None if no settings of the specified type exist.
"""
if setting_type not in {'upload', 'external_sync', 'requester_pays'}:
raise ValueError("Invalid project_type: %s" % setting_type)
response = self.restGET('/projectSettings/{projectId}/type/{type}'.format(projectId=id_of(project),
type=setting_type))
        return response if response else None  # if there is no project setting, an empty string is returned as the response
def get_sts_storage_token(self, entity, permission, *, output_format='json', min_remaining_life=None):
"""Get STS credentials for the given entity_id and permission, outputting it in the given format
:param entity: the entity or entity id whose credentials are being returned
:param permission: one of 'read_only' or 'read_write'
:param output_format: one of 'json', 'boto', 'shell', 'bash', 'cmd', 'powershell'
json: the dictionary returned from the Synapse STS API including expiration
boto: a dictionary compatible with a boto session (aws_access_key_id, etc)
shell: output commands for exporting credentials appropriate for the detected shell
bash: output commands for exporting credentials into a bash shell
cmd: output commands for exporting credentials into a windows cmd shell
powershell: output commands for exporting credentials into a windows powershell
        :param min_remaining_life: the minimum allowable remaining life on a cached token to return. If a cached token
                                   has less than this amount of time left, a fresh token will be fetched
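        Example (a minimal sketch; 'syn123' is a hypothetical STS-enabled entity)::

            credentials = syn.get_sts_storage_token('syn123', 'read_only', output_format='boto')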
"""
return sts_transfer.get_sts_credentials(
self, id_of(entity), permission,
output_format=output_format, min_remaining_life=min_remaining_life
)
def create_s3_storage_location(self, *,
parent=None, folder_name=None,
folder=None,
bucket_name=None, base_key=None,
sts_enabled=False):
"""
Create a storage location in the given parent, either in the given folder or by creating a new
folder in that parent with the given name. This will both create a StorageLocationSetting,
and a ProjectSetting together, optionally creating a new folder in which to locate it,
and optionally enabling this storage location for access via STS. If enabling an existing folder for STS,
it must be empty.
:param parent: The parent in which to locate the storage location (mutually exclusive with folder)
:param folder_name: The name of a new folder to create (mutually exclusive with folder)
:param folder: The existing folder in which to create the storage location
(mutually exclusive with folder_name)
:param bucket_name: The name of an S3 bucket, if this is an external storage location,
if None will use Synapse S3 storage
        :param base_key: The base key within the bucket, or None to use the bucket root,
                                    only applicable if bucket_name is passed
:param sts_enabled: Whether this storage location should be STS enabled
        :return: a 3-tuple of the synapse Folder, the storage location setting, and the project setting dictionaries
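        Example (a minimal sketch; the parent ID and folder name are hypothetical)::

            folder, storage_setting, project_setting = syn.create_s3_storage_location(
                parent='syn123', folder_name='sts_enabled_folder', sts_enabled=True)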
"""
if folder_name and parent:
if folder:
raise ValueError("folder and folder_name are mutually exclusive, only one should be passed")
folder = self.store(Folder(name=folder_name, parent=parent))
elif not folder:
raise ValueError("either folder or folder_name should be required")
storage_location_kwargs = {
'uploadType': 'S3',
'stsEnabled': sts_enabled,
}
if bucket_name:
storage_location_kwargs['concreteType'] = concrete_types.EXTERNAL_S3_STORAGE_LOCATION_SETTING
storage_location_kwargs['bucket'] = bucket_name
if base_key:
storage_location_kwargs['baseKey'] = base_key
else:
storage_location_kwargs['concreteType'] = concrete_types.SYNAPSE_S3_STORAGE_LOCATION_SETTING
storage_location_setting = self.restPOST('/storageLocation', json.dumps(storage_location_kwargs))
storage_location_id = storage_location_setting['storageLocationId']
project_setting = self.setStorageLocation(
folder,
storage_location_id,
)
return folder, storage_location_setting, project_setting
############################################################
# CRUD for Evaluations #
############################################################
def getEvaluation(self, id):
"""
Gets an Evaluation object from Synapse.
:param id: The ID of the :py:class:`synapseclient.evaluation.Evaluation` to return.
:return: an :py:class:`synapseclient.evaluation.Evaluation` object
See: :py:mod:`synapseclient.evaluation`
Example::
evaluation = syn.getEvaluation(2005090)
"""
evaluation_id = id_of(id)
uri = Evaluation.getURI(evaluation_id)
return Evaluation(**self.restGET(uri))
# TODO: Should this be combined with getEvaluation?
def getEvaluationByName(self, name):
"""
Gets an Evaluation object from Synapse.
:param name: The name of the :py:class:`synapseclient.evaluation.Evaluation` to return.
:return: an :py:class:`synapseclient.evaluation.Evaluation` object
See: :py:mod:`synapseclient.evaluation`
"""
uri = Evaluation.getByNameURI(urllib_urlparse.quote(name))
return Evaluation(**self.restGET(uri))
def getEvaluationByContentSource(self, entity):
"""
Returns a generator over evaluations that derive their content from the given entity
:param entity: The :py:class:`synapseclient.entity.Project` whose Evaluations are to be fetched.
:return: a Generator over the :py:class:`synapseclient.evaluation.Evaluation` objects for the given
:py:class:`synapseclient.entity.Project`
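        Example (a minimal sketch; 'syn123' is a hypothetical project)::

            for evaluation in syn.getEvaluationByContentSource('syn123'):
                print(evaluation.name)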
"""
entityId = id_of(entity)
url = "/entity/%s/evaluation" % entityId
for result in self._GET_paginated(url):
yield Evaluation(**result)
def _findTeam(self, name):
"""
        Retrieve Teams matching the supplied name fragment
"""
for result in self._GET_paginated("/teams?fragment=%s" % name):
yield Team(**result)
def getTeam(self, id):
"""
Finds a team with a given ID or name.
:param id: The ID or name of the team or a Team object to retrieve
:return: An object of type :py:class:`synapseclient.team.Team`
"""
# Retrieves team id
teamid = id_of(id)
try:
int(teamid)
except (TypeError, ValueError):
if isinstance(id, str):
for team in self._findTeam(id):
if team.name == id:
teamid = team.id
break
else:
raise ValueError("Can't find team \"{}\"".format(teamid))
else:
raise ValueError("Can't find team \"{}\"".format(teamid))
return Team(**self.restGET('/team/%s' % teamid))
def getTeamMembers(self, team):
"""
Lists the members of the given team.
:parameter team: A :py:class:`synapseclient.team.Team` object or a team's ID.
:returns: a generator over :py:class:`synapseclient.team.TeamMember` objects.
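        Example (a minimal sketch; the team ID is hypothetical)::

            for member in syn.getTeamMembers(3456789):
                print(member)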
"""
for result in self._GET_paginated('/teamMembers/{id}'.format(id=id_of(team))):
yield TeamMember(**result)
def _get_docker_digest(self, entity, docker_tag="latest"):
        """
        Get matching Docker sha-digest of a DockerRepository given a Docker tag
        :param entity:      Synapse id or entity of Docker repository
        :param docker_tag:  Docker tag
        :returns: Docker digest matching Docker tag
        """
entityid = id_of(entity)
uri = '/entity/{entityId}/dockerTag'.format(entityId=entityid)
docker_commits = self._GET_paginated(uri)
docker_digest = None
for commit in docker_commits:
if docker_tag == commit['tag']:
docker_digest = commit['digest']
if docker_digest is None:
raise ValueError("Docker tag {docker_tag} not found. Please specify a "
"docker tag that exists. 'latest' is used as "
"default.".format(docker_tag=docker_tag))
        return docker_digest
def get_team_open_invitations(self, team):
"""Retrieve the open requests submitted to a Team
https://docs.synapse.org/rest/GET/team/id/openInvitation.html
:param team: A :py:class:`synapseclient.team.Team` object or a
team's ID.
        :returns: generator of MembershipInvitation
"""
teamid = id_of(team)
request = "/team/{team}/openInvitation".format(team=teamid)
open_requests = self._GET_paginated(request)
return open_requests
def get_membership_status(self, userid, team):
"""Retrieve a user's Team Membership Status bundle.
https://docs.synapse.org/rest/GET/team/id/member/principalId/membershipStatus.html
        :param userid: Synapse user ID
:param team: A :py:class:`synapseclient.team.Team` object or a
team's ID.
:returns: dict of TeamMembershipStatus"""
teamid = id_of(team)
request = "/team/{team}/member/{user}/membershipStatus".format(
team=teamid,
user=userid)
membership_status = self.restGET(request)
return membership_status
def _delete_membership_invitation(self, invitationid):
"""Delete open membership invitation
:param invitationid: Open invitation id
"""
self.restDELETE("/membershipInvitation/{id}".format(id=invitationid))
def send_membership_invitation(self, teamId, inviteeId=None,
inviteeEmail=None,
message=None):
"""Create a membership invitation and send an email notification
to the invitee.
:param teamId: Synapse teamId
:param inviteeId: Synapse username or profile id of user
:param inviteeEmail: Email of user
:param message: Additional message for the user getting invited to the
team. Default to None.
:returns: MembershipInvitation
"""
invite_request = {'teamId': str(teamId),
'message': message}
if inviteeEmail is not None:
invite_request['inviteeEmail'] = str(inviteeEmail)
if inviteeId is not None:
invite_request['inviteeId'] = str(inviteeId)
response = self.restPOST("/membershipInvitation",
body=json.dumps(invite_request))
return response
def invite_to_team(self, team, user=None, inviteeEmail=None,
message=None, force=False):
"""Invite user to a Synapse team via Synapse username or email
(choose one or the other)
:param team: A :py:class:`synapseclient.team.Team` object or a
team's ID.
:param user: Synapse username or profile id of user
:param inviteeEmail: Email of user
:param message: Additional message for the user getting invited to the
team. Default to None.
:param force: If an open invitation exists for the invitee,
the old invite will be cancelled. Default to False.
:returns: MembershipInvitation or None if user is already a member
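        Example (a minimal sketch; the team ID and username are hypothetical)::

            invite = syn.invite_to_team(3456789, user='jane_doe',
                                        message='Welcome aboard!')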
"""
        # Throw an error if both user and email are specified,
        # or if neither is specified
id_email_specified = inviteeEmail is not None and user is not None
id_email_notspecified = inviteeEmail is None and user is None
if id_email_specified or id_email_notspecified:
raise ValueError("Must specify either 'user' or 'inviteeEmail'")
teamid = id_of(team)
is_member = False
open_invitations = self.get_team_open_invitations(teamid)
if user is not None:
inviteeId = self.getUserProfile(user)['ownerId']
membership_status = self.get_membership_status(inviteeId, teamid)
is_member = membership_status['isMember']
open_invites_to_user = [invitation
for invitation in open_invitations
if invitation.get('inviteeId') == inviteeId]
else:
inviteeId = None
open_invites_to_user = [invitation
for invitation in open_invitations
if invitation.get('inviteeEmail') == inviteeEmail]
# Only invite if the invitee is not a member and
# if invitee doesn't have an open invitation unless force=True
if not is_member and (not open_invites_to_user or force):
# Delete all old invitations
for invite in open_invites_to_user:
self._delete_membership_invitation(invite['id'])
return self.send_membership_invitation(teamid, inviteeId=inviteeId,
inviteeEmail=inviteeEmail,
message=message)
if is_member:
not_sent_reason = "invitee is already a member"
else:
not_sent_reason = ("invitee already has an open invitation "
"Set force=True to send new invite.")
self.logger.warning("No invitation sent: {}".format(not_sent_reason))
# Return None if no invite is sent.
return None
def submit(self, evaluation, entity, name=None, team=None,
silent=False, submitterAlias=None, teamName=None,
dockerTag="latest"):
"""
Submit an Entity for `evaluation <Evaluation.html>`_.
:param evaluation: Evaluation queue to submit to
:param entity: The Entity containing the Submission
:param name: A name for this submission.
                              In the absence of this parameter, the entity name will be used.
:param team: (optional) A :py:class:`Team` object, ID or name of a Team that is registered for the
challenge
:param silent: Set to True to suppress output.
:param submitterAlias: (optional) A nickname, possibly for display in leaderboards in place of the submitter's
name
:param teamName: (deprecated) A synonym for submitterAlias
:param dockerTag: (optional) The Docker tag must be specified if the entity is a DockerRepository.
Defaults to "latest".
:returns: A :py:class:`synapseclient.evaluation.Submission` object
In the case of challenges, a team can optionally be provided to give credit to members of the team that
contributed to the submission. The team must be registered for the challenge with which the given evaluation is
associated. The caller must be a member of the submitting team.
Example::
evaluation = syn.getEvaluation(123)
entity = syn.get('syn456')
submission = syn.submit(evaluation, entity, name='Our Final Answer', team='Blue Team')
"""
require_param(evaluation, "evaluation")
require_param(entity, "entity")
evaluation_id = id_of(evaluation)
entity_id = id_of(entity)
if isinstance(entity, synapseclient.DockerRepository):
# Edge case if dockerTag is specified as None
if dockerTag is None:
raise ValueError('A dockerTag is required to submit a DockerEntity. Cannot be None')
docker_repository = entity['repositoryName']
else:
docker_repository = None
if 'versionNumber' not in entity:
entity = self.get(entity, downloadFile=False)
# version defaults to 1 to hack around required version field and allow submission of files/folders
entity_version = entity.get('versionNumber', 1)
# default name of submission to name of entity
if name is None and 'name' in entity:
name = entity['name']
team_id = None
if team:
team = self.getTeam(team)
team_id = id_of(team)
contributors, eligibility_hash = self._get_contributors(evaluation_id, team)
# for backward compatible until we remove supports for teamName
if not submitterAlias:
if teamName:
submitterAlias = teamName
elif team and 'name' in team:
submitterAlias = team['name']
if isinstance(entity, synapseclient.DockerRepository):
docker_digest = self._get_docker_digest(entity, dockerTag)
else:
docker_digest = None
submission = {'evaluationId': evaluation_id,
'name': name,
'entityId': entity_id,
'versionNumber': entity_version,
'dockerDigest': docker_digest,
'dockerRepositoryName': docker_repository,
'teamId': team_id,
'contributors': contributors,
'submitterAlias': submitterAlias}
submitted = self._submit(submission, entity['etag'], eligibility_hash)
# if we want to display the receipt message, we need the full object
if not silent:
            if not isinstance(evaluation, Evaluation):
evaluation = self.getEvaluation(evaluation_id)
if 'submissionReceiptMessage' in evaluation:
self.logger.info(evaluation['submissionReceiptMessage'])
return Submission(**submitted)
def _submit(self, submission, entity_etag, eligibility_hash):
require_param(submission, "submission")
require_param(entity_etag, "entity_etag")
# URI requires the etag of the entity and, in the case of a team submission, requires an eligibilityStateHash
uri = '/evaluation/submission?etag=%s' % entity_etag
if eligibility_hash:
uri += "&submissionEligibilityHash={0}".format(eligibility_hash)
submitted = self.restPOST(uri, json.dumps(submission))
return submitted
def _get_contributors(self, evaluation_id, team):
if not evaluation_id or not team:
return None, None
team_id = id_of(team)
# see http://docs.synapse.org/rest/GET/evaluation/evalId/team/id/submissionEligibility.html
eligibility = self.restGET('/evaluation/{evalId}/team/{id}/submissionEligibility'
.format(evalId=evaluation_id, id=team_id))
if not eligibility['teamEligibility']['isEligible']:
# Check team eligibility and raise an exception if not eligible
if not eligibility['teamEligibility']['isRegistered']:
raise SynapseError('Team "{team}" is not registered.'.format(team=team.name))
if eligibility['teamEligibility']['isQuotaFilled']:
raise SynapseError(
'Team "{team}" has already submitted the full quota of submissions.'.format(team=team.name))
raise SynapseError('Team "{team}" is not eligible.'.format(team=team.name))
# Include all team members who are eligible.
contributors = [{'principalId': member['principalId']}
for member in eligibility['membersEligibility']
if member['isEligible'] and not member['hasConflictingSubmission']]
return contributors, eligibility['eligibilityStateHash']
def _allowParticipation(self, evaluation, user, rights=["READ", "PARTICIPATE", "SUBMIT", "UPDATE_SUBMISSION"]):
"""
Grants the given user the minimal access rights to join and submit to an Evaluation.
        Note: The specification of this method has not been decided yet, so it is likely to change in the future.
:param evaluation: An Evaluation object or Evaluation ID
:param user: Either a user group or the principal ID of a user to grant rights to.
To allow all users, use "PUBLIC".
To allow authenticated users, use "AUTHENTICATED_USERS".
:param rights: The access rights to give to the users.
Defaults to "READ", "PARTICIPATE", "SUBMIT", and "UPDATE_SUBMISSION".
"""
# Check to see if the user is an ID or group
userId = -1
try:
# TODO: is there a better way to differentiate between a userID and a group name?
# What if a group is named with just numbers?
userId = int(user)
# Verify that the user exists
try:
self.getUserProfile(userId)
except SynapseHTTPError as err:
if err.response.status_code == 404:
raise SynapseError("The user (%s) does not exist" % str(userId))
raise
except ValueError:
# Fetch the ID of the user group
userId = self._getUserbyPrincipalIdOrName(user)
if not isinstance(evaluation, Evaluation):
evaluation = self.getEvaluation(id_of(evaluation))
self.setPermissions(evaluation, userId, accessType=rights, overwrite=False)
def getSubmissions(self, evaluation, status=None, myOwn=False, limit=20, offset=0):
"""
:param evaluation: Evaluation to get submissions from.
:param status: Optionally filter submissions for a specific status.
                            One of {OPEN, CLOSED, SCORED, INVALID, VALIDATED,
                            EVALUATION_IN_PROGRESS, RECEIVED, REJECTED, ACCEPTED}
:param myOwn: Determines if only your Submissions should be fetched.
Defaults to False (all Submissions)
        :param limit:      Limits the number of submissions in a single response.
                           Because this method returns a generator and repeatedly
                           fetches submissions, this argument limits the size of a
                           single request and NOT the total number of submissions
                           returned.
:param offset: Start iterating at a submission offset from the first
submission.
:returns: A generator over :py:class:`synapseclient.evaluation.Submission` objects for an Evaluation
Example::
for submission in syn.getSubmissions(1234567):
print(submission['entityId'])
See: :py:mod:`synapseclient.evaluation`
"""
evaluation_id = id_of(evaluation)
uri = "/evaluation/%s/submission%s" % (evaluation_id, "" if myOwn else "/all")
if status is not None:
uri += "?status=%s" % status
for result in self._GET_paginated(uri, limit=limit, offset=offset):
yield Submission(**result)
def _getSubmissionBundles(self, evaluation, status=None, myOwn=False, limit=20, offset=0):
"""
:param evaluation: Evaluation to get submissions from.
:param status: Optionally filter submissions for a specific status.
One of {OPEN, CLOSED, SCORED, INVALID}
:param myOwn: Determines if only your Submissions should be fetched.
Defaults to False (all Submissions)
:param limit: Limits the number of submissions coming back from the
service in a single response.
:param offset: Start iterating at a submission offset from the first
submission.
:returns: A generator over dictionaries with keys 'submission' and 'submissionStatus'.
Example::
for sb in syn._getSubmissionBundles(1234567):
print(sb['submission']['name'], \\
sb['submission']['submitterAlias'], \\
sb['submissionStatus']['status'], \\
sb['submissionStatus']['score'])
This may later be changed to return objects, pending some thought on how submissions along with related status
and annotations should be represented in the clients.
See: :py:mod:`synapseclient.evaluation`
"""
evaluation_id = id_of(evaluation)
url = "/evaluation/%s/submission/bundle%s" % (evaluation_id, "" if myOwn else "/all")
if status is not None:
url += "?status=%s" % status
return self._GET_paginated(url, limit=limit, offset=offset)
def getSubmissionBundles(self, evaluation, status=None, myOwn=False, limit=20, offset=0):
"""
Retrieve submission bundles (submission and submissions status) for an evaluation queue, optionally filtered by
submission status and/or owner.
:param evaluation: Evaluation to get submissions from.
:param status: Optionally filter submissions for a specific status.
One of {OPEN, CLOSED, SCORED, INVALID}
:param myOwn: Determines if only your Submissions should be fetched.
Defaults to False (all Submissions)
:param limit: Limits the number of submissions coming back from the
service in a single response.
:param offset: Start iterating at a submission offset from the first
submission.
:returns: A generator over tuples containing a :py:class:`synapseclient.evaluation.Submission`
and a :py:class:`synapseclient.evaluation.SubmissionStatus`.
Example::
for submission, status in syn.getSubmissionBundles(evaluation):
print(submission.name, \\
submission.submitterAlias, \\
status.status, \\
status.score)
This may later be changed to return objects, pending some thought on how submissions along with related status
and annotations should be represented in the clients.
See: :py:mod:`synapseclient.evaluation`
"""
for bundle in self._getSubmissionBundles(evaluation, status=status, myOwn=myOwn, limit=limit, offset=offset):
yield (Submission(**bundle['submission']), SubmissionStatus(**bundle['submissionStatus']))
def _GET_paginated(self, uri, limit=20, offset=0):
"""
:param uri: A URI that returns paginated results
:param limit: How many records should be returned per request
:param offset: At what record offset from the first should iteration start
:returns: A generator over some paginated results
The limit parameter is set at 20 by default. Using a larger limit results in fewer calls to the service, but if
responses are large enough to be a burden on the service they may be truncated.
"""
prev_num_results = sys.maxsize
while prev_num_results > 0:
uri = utils._limit_and_offset(uri, limit=limit, offset=offset)
page = self.restGET(uri)
results = page['results'] if 'results' in page else page['children']
prev_num_results = len(results)
for result in results:
offset += 1
yield result
def getSubmission(self, id, **kwargs):
"""
Gets a :py:class:`synapseclient.evaluation.Submission` object by its id.
:param id: The id of the submission to retrieve
:return: a :py:class:`synapseclient.evaluation.Submission` object
See: :py:func:`synapseclient.Synapse.get` for information
on the *downloadFile*, *downloadLocation*, and *ifcollision* parameters
"""
submission_id = id_of(id)
uri = Submission.getURI(submission_id)
submission = Submission(**self.restGET(uri))
# Pre-fetch the Entity tied to the Submission, if there is one
if 'entityId' in submission and submission['entityId'] is not None:
entityBundleJSON = json.loads(submission['entityBundleJSON'])
# getWithEntityBundle expects a bundle services v2 style
# annotations dict, but the evaluations API may return
# an older format annotations object in the encoded JSON
# depending on when the original submission was made.
annotations = entityBundleJSON.get('annotations')
if annotations:
entityBundleJSON['annotations'] = convert_old_annotation_json(annotations)
related = self._getWithEntityBundle(
entityBundle=entityBundleJSON,
entity=submission['entityId'],
submission=submission_id, **kwargs)
submission.entity = related
submission.filePath = related.get('path', None)
return submission
def getSubmissionStatus(self, submission):
"""
Downloads the status of a Submission.
:param submission: The Submission to lookup
:returns: A :py:class:`synapseclient.evaluation.SubmissionStatus` object
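        Example (a minimal sketch; the submission ID is hypothetical)::

            status = syn.getSubmissionStatus(9876543)
            print(status.status)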
"""
submission_id = id_of(submission)
uri = SubmissionStatus.getURI(submission_id)
val = self.restGET(uri)
return SubmissionStatus(**val)
############################################################
# CRUD for Wikis #
############################################################
def getWiki(self, owner, subpageId=None, version=None):
"""
Get a :py:class:`synapseclient.wiki.Wiki` object from Synapse. Uses wiki2 API which supports versioning.
:param owner: The entity to which the Wiki is attached
:param subpageId: The id of the specific sub-page or None to get the root Wiki page
:param version: The version of the page to retrieve or None to retrieve the latest
:return: a :py:class:`synapseclient.wiki.Wiki` object
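        Example (a minimal sketch; 'syn123' is a hypothetical entity with a wiki)::

            wiki = syn.getWiki('syn123')
            print(wiki.markdown)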
"""
uri = "/entity/{ownerId}/wiki2".format(ownerId=id_of(owner))
if subpageId is not None:
uri += "/{wikiId}".format(wikiId=subpageId)
if version is not None:
uri += "?wikiVersion={version}".format(version=version)
wiki = self.restGET(uri)
wiki['owner'] = owner
wiki = Wiki(**wiki)
path = self.cache.get(wiki.markdownFileHandleId)
if not path:
cache_dir = self.cache.get_cache_dir(wiki.markdownFileHandleId)
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
path = self._downloadFileHandle(wiki['markdownFileHandleId'], wiki['id'], 'WikiMarkdown',
os.path.join(cache_dir, str(wiki.markdownFileHandleId) + ".md"))
try:
import gzip
with gzip.open(path) as f:
markdown = f.read().decode('utf-8')
        except IOError:
            # plain-text fallback; a text-mode read already yields str, so no decode is needed
            with open(path) as f:
                markdown = f.read()
wiki.markdown = markdown
wiki.markdown_path = path
return wiki
def getWikiHeaders(self, owner):
"""
Retrieves the headers of all Wikis belonging to the owner (the entity to which the Wiki is attached).
:param owner: An Entity
:returns: A list of Objects with three fields: id, title and parentId.
"""
uri = '/entity/%s/wikiheadertree' % id_of(owner)
return [DictObject(**header) for header in self._GET_paginated(uri)]
def _storeWiki(self, wiki, createOrUpdate): # type: (Wiki, bool) -> Wiki
"""
Stores or updates the given Wiki.
        :param wiki: A Wiki object
        :param createOrUpdate: Indicates whether to fall back to updating an existing Wiki
                               if creating it conflicts with one already in Synapse
        :returns: An updated Wiki object
"""
# Make sure the file handle field is a list
if 'attachmentFileHandleIds' not in wiki:
wiki['attachmentFileHandleIds'] = []
# Convert all attachments into file handles
if wiki.get('attachments') is not None:
for attachment in wiki['attachments']:
fileHandle = upload_synapse_s3(self, attachment)
wiki['attachmentFileHandleIds'].append(fileHandle['id'])
del wiki['attachments']
# Perform an update if the Wiki has an ID
if 'id' in wiki:
updated_wiki = Wiki(owner=wiki.ownerId, **self.restPUT(wiki.putURI(), wiki.json()))
# Perform a create if the Wiki has no ID
else:
try:
updated_wiki = Wiki(owner=wiki.ownerId, **self.restPOST(wiki.postURI(), wiki.json()))
except SynapseHTTPError as err:
# If already present we get an unhelpful SQL error
if createOrUpdate and ((err.response.status_code == 400 and "DuplicateKeyException" in err.message)
or err.response.status_code == 409):
existing_wiki = self.getWiki(wiki.ownerId)
# overwrite everything except for the etag (this will keep unmodified fields in the existing wiki)
etag = existing_wiki['etag']
existing_wiki.update(wiki)
existing_wiki.etag = etag
updated_wiki = Wiki(owner=wiki.ownerId, **self.restPUT(existing_wiki.putURI(),
existing_wiki.json()))
else:
raise
return updated_wiki
def getWikiAttachments(self, wiki):
"""
Retrieve the attachments to a wiki page.
:param wiki: the Wiki object for which the attachments are to be returned.
:return: A list of file handles for the files attached to the Wiki.
"""
uri = "/entity/%s/wiki/%s/attachmenthandles" % (wiki.ownerId, wiki.id)
results = self.restGET(uri)
file_handles = list(WikiAttachment(**fh) for fh in results['list'])
return file_handles
############################################################
# Tables #
############################################################
def _waitForAsync(self, uri, request, endpoint=None):
if endpoint is None:
endpoint = self.repoEndpoint
async_job_id = self.restPOST(uri+'/start', body=json.dumps(request), endpoint=endpoint)
# http://docs.synapse.org/rest/org/sagebionetworks/repo/model/asynch/AsynchronousJobStatus.html
sleep = self.table_query_sleep
start_time = time.time()
lastMessage, lastProgress, lastTotal, progressed = '', 0, 1, False
while time.time()-start_time < self.table_query_timeout:
result = self.restGET(uri+'/get/%s' % async_job_id['token'], endpoint=endpoint)
if result.get('jobState', None) == 'PROCESSING':
progressed = True
message = result.get('progressMessage', lastMessage)
progress = result.get('progressCurrent', lastProgress)
total = result.get('progressTotal', lastTotal)
if message != '':
self._print_transfer_progress(progress, total, message, isBytes=False)
# Reset the time if we made progress (fix SYNPY-214)
if message != lastMessage or lastProgress != progress:
start_time = time.time()
lastMessage, lastProgress, lastTotal = message, progress, total
sleep = min(self.table_query_max_sleep, sleep * self.table_query_backoff)
doze(sleep)
else:
break
else:
raise SynapseTimeoutError('Timeout waiting for query results: %0.1f seconds ' % (time.time()-start_time))
        if result.get('jobState', None) == 'FAILED':
            # guard with `or ''` so a missing message/details doesn't raise a TypeError
            raise SynapseError(
                (result.get('errorMessage') or '') + '\n' + (result.get('errorDetails') or ''),
                asynchronousJobStatus=result
            )
if progressed:
self._print_transfer_progress(total, total, message, isBytes=False)
return result
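    # Internal usage sketch (illustrative only): asynchronous services funnel
    # through this poll-with-backoff loop, e.g.
    #
    #     result = self._waitForAsync('/entity/syn123/table/query/async',
    #                                 request=query_bundle_request)
    #
    # The sleep interval grows by table_query_backoff up to table_query_max_sleep,
    # and the timeout clock restarts whenever the job reports fresh progress.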
def getColumn(self, id):
"""
Gets a Column object from Synapse by ID.
See: :py:mod:`synapseclient.table.Column`
:param id: the ID of the column to retrieve
:return: an object of type :py:class:`synapseclient.table.Column`
Example::
column = syn.getColumn(123)
"""
return Column(**self.restGET(Column.getURI(id)))
def getColumns(self, x, limit=100, offset=0):
"""
Get the columns defined in Synapse either (1) corresponding to a set of column headers, (2) those for a given
schema, or (3) those whose names start with a given prefix.
:param x: a list of column headers, a Table Entity object (Schema/EntityViewSchema), a Table's Synapse ID,
or a string prefix
:param limit: maximum number of columns to return (pagination parameter)
:param offset: the index of the first column to return (pagination parameter)
:return: a generator of Column objects
"""
if x is None:
uri = '/column'
for result in self._GET_paginated(uri, limit=limit, offset=offset):
yield Column(**result)
elif isinstance(x, (list, tuple)):
for header in x:
try:
# if header is an integer, it's a columnID, otherwise it's an aggregate column, like "AVG(Foo)"
int(header)
yield self.getColumn(header)
except ValueError:
# ignore aggregate column
pass
elif isinstance(x, SchemaBase) or utils.is_synapse_id(x):
for col in self.getTableColumns(x):
yield col
elif isinstance(x, str):
uri = '/column?prefix=' + x
for result in self._GET_paginated(uri, limit=limit, offset=offset):
yield Column(**result)
        else:
            raise ValueError("Can't get columns for a %s" % type(x))
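    # Example (hypothetical usage sketch; ids and names are placeholders):
    #
    #     table_cols = list(syn.getColumns("syn123"))   # columns of a table/view
    #     age_cols = list(syn.getColumns("age"))        # columns named "age*"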
def create_snapshot_version(self, table: typing.Union[EntityViewSchema, Schema, str, SubmissionViewSchema],
comment: str = None, label: str = None, activity: typing.Union[Activity, str] = None,
wait: bool = True) -> int:
"""Create a new Table Version or a new View version.
:param table: The schema of the Table/View, or its ID.
:param comment: Optional snapshot comment.
:param label: Optional snapshot label.
:param activity: Optional activity ID applied to snapshot version.
:param wait: True if this method should return the snapshot version after waiting for any necessary
asynchronous table updates to complete. If False this method will return
as soon as any updates are initiated.
:return: the snapshot version number if wait=True, None if wait=False
"""
ent = self.get(id_of(table), downloadFile=False)
if isinstance(ent, (EntityViewSchema, SubmissionViewSchema)):
result = self._async_table_update(
table,
create_snapshot=True,
comment=comment,
label=label,
activity=activity,
wait=wait,
)
elif isinstance(ent, Schema):
result = self._create_table_snapshot(
table,
comment=comment,
label=label,
activity=activity,
)
else:
raise ValueError("This function only accepts Synapse ids of Tables or Views")
# for consistency we return nothing if wait=False since we can't
# supply the snapshot version on an async table update without waiting
return result['snapshotVersionNumber'] if wait else None
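    # Example (hypothetical usage sketch; "syn123" is a placeholder table id):
    #
    #     version = syn.create_snapshot_version("syn123",
    #                                           comment="freeze for analysis",
    #                                           label="v1")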
def _create_table_snapshot(self, table: typing.Union[Schema, str], comment: str = None,
label: str = None, activity: typing.Union[Activity, str] = None) -> dict:
"""Creates Table snapshot
:param table: The schema of the Table
:param comment: Optional snapshot comment.
:param label: Optional snapshot label.
:param activity: Optional activity ID or activity instance applied to snapshot version.
:return: Snapshot Response
"""
# check the activity id or object is provided
activity_id = None
if isinstance(activity, collections.abc.Mapping):
if 'id' not in activity:
activity = self._saveActivity(activity)
activity_id = activity['id']
elif activity is not None:
activity_id = str(activity)
snapshot_body = {"snapshotComment": comment,
"snapshotLabel": label,
"snapshotActivityId": activity_id}
new_body = {key: value for key, value in snapshot_body.items() if value is not None}
snapshot = self.restPOST("/entity/{}/table/snapshot".format(id_of(table)),
body=json.dumps(new_body))
return snapshot
def _async_table_update(self, table: typing.Union[EntityViewSchema, Schema, str, SubmissionViewSchema],
changes: typing.List[dict] = [], create_snapshot: bool = False,
comment: str = None, label: str = None, activity: str = None,
wait: bool = True) -> dict:
"""Creates view updates and snapshots
:param table: The schema of the EntityView or its ID.
:param changes: Array of Table changes
:param create_snapshot: Create snapshot
:param comment: Optional snapshot comment.
:param label: Optional snapshot label.
:param activity: Optional activity ID applied to snapshot version.
:param wait: True to wait for async table update to complete
:return: Snapshot Response
"""
snapshot_options = {'snapshotComment': comment,
'snapshotLabel': label,
'snapshotActivityId': activity}
new_snapshot = {key: value for key, value in snapshot_options.items() if value is not None}
table_update_body = {'changes': changes,
'createSnapshot': create_snapshot,
'snapshotOptions': new_snapshot}
uri = "/entity/{}/table/transaction/async".format(id_of(table))
if wait:
result = self._waitForAsync(uri, table_update_body)
else:
result = self.restPOST("{}/start".format(uri), body=json.dumps(table_update_body))
return result
def getTableColumns(self, table):
"""
Retrieve the column models used in the given table schema.
:param table: the schema of the Table whose columns are to be retrieved
:return: a Generator over the Table's columns
"""
uri = '/entity/{id}/column'.format(id=id_of(table))
        # The returned object type for this service, PaginatedColumnModels, is a misnomer.
        # This service always returns the full list of results so the pagination does not actually matter.
for result in self.restGET(uri)['results']:
yield Column(**result)
def tableQuery(self, query, resultsAs="csv", **kwargs):
"""
Query a Synapse Table.
:param query: query string in a `SQL-like syntax \
<http://docs.synapse.org/rest/org/sagebionetworks/repo/web/controller/TableExamples.html>`_, for example
"SELECT * from syn12345"
:param resultsAs: select whether results are returned as a CSV file ("csv") or incrementally downloaded as
sets of rows ("rowset").
You can receive query results either as a generator over rows or as a CSV file. For smallish tables, either
method will work equally well. Use of a "rowset" generator allows rows to be processed one at a time and
processing may be stopped before downloading the entire table.
Optional keyword arguments differ for the two return types. For the "rowset" option,
:param limit: specify the maximum number of rows to be returned, defaults to None
:param offset: don't return the first n rows, defaults to None
:param isConsistent: defaults to True. If set to False, return results based on current state of the index
without waiting for pending writes to complete.
Only use this if you know what you're doing.
For CSV files, there are several parameters to control the format of the resulting file:
:param quoteCharacter: default double quote
:param escapeCharacter: default backslash
:param lineEnd: defaults to os.linesep
:param separator: defaults to comma
:param header: True by default
:param includeRowIdAndRowVersion: True by default
:param downloadLocation: directory path to download the CSV file to
:return: A Table object that serves as a wrapper around a CSV file (or generator over Row objects if
resultsAs="rowset").
NOTE: When performing queries on frequently updated tables, the table can be inaccessible for a period leading
to a timeout of the query. Since the results are guaranteed to eventually be returned you can change the
max timeout by setting the table_query_timeout variable of the Synapse object::
# Sets the max timeout to 5 minutes.
syn.table_query_timeout = 300
"""
if resultsAs.lower() == "rowset":
return TableQueryResult(self, query, **kwargs)
elif resultsAs.lower() == "csv":
return CsvFileTable.from_table_query(self, query, **kwargs)
else:
raise ValueError("Unknown return type requested from tableQuery: " + str(resultsAs))
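    # Example (hypothetical usage sketch; the table id is a placeholder):
    #
    #     results = syn.tableQuery("SELECT * FROM syn12345 WHERE age > 40")
    #     df = results.asDataFrame()          # CSV-backed result as a DataFrame
    #     for row in syn.tableQuery("SELECT id FROM syn12345", resultsAs="rowset"):
    #         ...                             # rows streamed incrementally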
def _queryTable(self, query, limit=None, offset=None, isConsistent=True, partMask=None):
"""
Query a table and return the first page of results as a `QueryResultBundle \
<http://docs.synapse.org/rest/org/sagebionetworks/repo/model/table/QueryResultBundle.html>`_.
        If the result contains a *nextPageToken*, following pages are retrieved by calling :py:meth:`~._queryTableNext`.
:param partMask: Optional, default all. The 'partsMask' is a bit field for requesting
different elements in the resulting JSON bundle.
Query Results (queryResults) = 0x1
Query Count (queryCount) = 0x2
Select Columns (selectColumns) = 0x4
Max Rows Per Page (maxRowsPerPage) = 0x8
"""
# See: http://docs.synapse.org/rest/org/sagebionetworks/repo/model/table/QueryBundleRequest.html
query_bundle_request = {
"concreteType": "org.sagebionetworks.repo.model.table.QueryBundleRequest",
"query": {
"sql": query,
"isConsistent": isConsistent,
"includeEntityEtag": True
}
}
if partMask:
query_bundle_request["partMask"] = partMask
if limit is not None:
query_bundle_request["query"]["limit"] = limit
if offset is not None:
query_bundle_request["query"]["offset"] = offset
uri = '/entity/{id}/table/query/async'.format(id=extract_synapse_id_from_query(query))
return self._waitForAsync(uri=uri, request=query_bundle_request)
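    # Illustrative note: the partMask bits may be OR-ed together, e.g. to request
    # the query results plus the row count in one call:
    #
    #     part_mask = 0x1 | 0x2   # queryResults + queryCount
    #     page = self._queryTable("SELECT * FROM syn12345", partMask=part_mask)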
def _queryTableNext(self, nextPageToken, tableId):
uri = '/entity/{id}/table/query/nextPage/async'.format(id=tableId)
return self._waitForAsync(uri=uri, request=nextPageToken)
def _uploadCsv(self, filepath, schema, updateEtag=None, quoteCharacter='"', escapeCharacter="\\",
lineEnd=os.linesep, separator=",", header=True, linesToSkip=0):
"""
Send an `UploadToTableRequest \
<http://docs.synapse.org/rest/org/sagebionetworks/repo/model/table/UploadToTableRequest.html>`_ to Synapse.
:param filepath: Path of a `CSV <https://en.wikipedia.org/wiki/Comma-separated_values>`_ file.
:param schema: A table entity or its Synapse ID.
:param updateEtag: Any RowSet returned from Synapse will contain the current etag of the change set.
To update any rows from a RowSet the etag must be provided with the POST.
:returns: `UploadToTableResult \
<http://docs.synapse.org/rest/org/sagebionetworks/repo/model/table/UploadToTableResult.html>`_
"""
fileHandleId = multipart_upload_file(self, filepath, content_type="text/csv")
uploadRequest = {
"concreteType": "org.sagebionetworks.repo.model.table.UploadToTableRequest",
"csvTableDescriptor": {
"isFirstLineHeader": header,
"quoteCharacter": quoteCharacter,
"escapeCharacter": escapeCharacter,
"lineEnd": lineEnd,
"separator": separator},
"linesToSkip": linesToSkip,
"tableId": id_of(schema),
"uploadFileHandleId": fileHandleId
}
if updateEtag:
uploadRequest["updateEtag"] = updateEtag
response = self._async_table_update(schema, changes=[uploadRequest], wait=True)
self._check_table_transaction_response(response)
return response
def _check_table_transaction_response(self, response):
for result in response['results']:
result_type = result['concreteType']
if result_type in {concrete_types.ROW_REFERENCE_SET_RESULTS,
concrete_types.TABLE_SCHEMA_CHANGE_RESPONSE,
concrete_types.UPLOAD_TO_TABLE_RESULT}:
                # if these fail, we would have gotten an HTTPError before the results came back
pass
elif result_type == concrete_types.ENTITY_UPDATE_RESULTS:
# TODO: output full response to error file when the logging JIRA issue gets pulled in
successful_updates = []
failed_updates = []
for update_result in result['updateResults']:
failure_code = update_result.get('failureCode')
failure_message = update_result.get('failureMessage')
entity_id = update_result.get('entityId')
if failure_code or failure_message:
failed_updates.append(update_result)
else:
successful_updates.append(entity_id)
if failed_updates:
raise SynapseError(
"Not all of the entities were updated."
" Successful updates: %s. Failed updates: %s" % (successful_updates, failed_updates))
else:
warnings.warn("Unexpected result from a table transaction of type [%s]."
" Please check the result to make sure it is correct. %s" % (result_type, result))
def _queryTableCsv(self, query, quoteCharacter='"', escapeCharacter="\\", lineEnd=os.linesep, separator=",",
header=True, includeRowIdAndRowVersion=True, downloadLocation=None):
"""
Query a Synapse Table and download a CSV file containing the results.
Sends a `DownloadFromTableRequest \
<http://docs.synapse.org/rest/org/sagebionetworks/repo/model/table/DownloadFromTableRequest.html>`_ to Synapse.
:return: a tuple containing a `DownloadFromTableResult \
<http://docs.synapse.org/rest/org/sagebionetworks/repo/model/table/DownloadFromTableResult.html>`_
The DownloadFromTableResult object contains these fields:
* headers: ARRAY<STRING>, The list of ColumnModel IDs that describes the rows of this set.
* resultsFileHandleId: STRING, The resulting file handle ID can be used to download the CSV file created by
this query.
* concreteType: STRING
* etag: STRING, Any RowSet returned from Synapse will contain the current etag of the change
set.
To update any rows from a RowSet the etag must be provided with the POST.
* tableId: STRING, The ID of the table identified in the from clause of the table query.
"""
download_from_table_request = {
"concreteType": "org.sagebionetworks.repo.model.table.DownloadFromTableRequest",
"csvTableDescriptor": {
"isFirstLineHeader": header,
"quoteCharacter": quoteCharacter,
"escapeCharacter": escapeCharacter,
"lineEnd": lineEnd,
"separator": separator},
"sql": query,
"writeHeader": header,
"includeRowIdAndRowVersion": includeRowIdAndRowVersion,
"includeEntityEtag": True
}
uri = "/entity/{id}/table/download/csv/async".format(id=extract_synapse_id_from_query(query))
download_from_table_result = self._waitForAsync(uri=uri, request=download_from_table_request)
file_handle_id = download_from_table_result['resultsFileHandleId']
cached_file_path = self.cache.get(file_handle_id=file_handle_id, path=downloadLocation)
if cached_file_path is not None:
return download_from_table_result, cached_file_path
if downloadLocation:
download_dir = self._ensure_download_location_is_directory(downloadLocation)
else:
download_dir = self.cache.get_cache_dir(file_handle_id)
os.makedirs(download_dir, exist_ok=True)
filename = f'SYNAPSE_TABLE_QUERY_{file_handle_id}.csv'
path = self._downloadFileHandle(file_handle_id, extract_synapse_id_from_query(query),
'TableEntity', os.path.join(download_dir, filename))
return download_from_table_result, path
# This is redundant with syn.store(Column(...)) and will be removed unless people prefer this method.
def createColumn(self, name, columnType, maximumSize=None, defaultValue=None, enumValues=None):
columnModel = Column(name=name, columnType=columnType, maximumSize=maximumSize, defaultValue=defaultValue,
enumValue=enumValues)
return Column(**self.restPOST('/column', json.dumps(columnModel)))
def createColumns(self, columns):
"""
Creates a batch of :py:class:`synapseclient.table.Column` s within a single request.
:param columns: a list of :py:class:`synapseclient.table.Column` objects
:return: a list of :py:class:`synapseclient.table.Column` objects that have been created in Synapse
"""
request_body = {'concreteType': 'org.sagebionetworks.repo.model.ListWrapper',
'list': list(columns)}
response = self.restPOST('/column/batch', json.dumps(request_body))
return [Column(**col) for col in response['list']]
def _getColumnByName(self, schema, column_name):
"""
Given a schema and a column name, get the corresponding py:class:`Column` object.
"""
for column in self.getColumns(schema):
if column.name == column_name:
return column
return None
def downloadTableColumns(self, table, columns, downloadLocation=None, **kwargs):
"""
Bulk download of table-associated files.
:param table: table query result
:param columns: a list of column names as strings
:param downloadLocation: directory into which to download the files
:returns: a dictionary from file handle ID to path in the local file system.
For example, consider a Synapse table whose ID is "syn12345" with two columns of type FILEHANDLEID named 'foo'
and 'bar'. The associated files are JSON encoded, so we might retrieve the files from Synapse and load for the
second 100 of those rows as shown here::
import json
results = syn.tableQuery('SELECT * FROM syn12345 LIMIT 100 OFFSET 100')
file_map = syn.downloadTableColumns(results, ['foo', 'bar'])
for file_handle_id, path in file_map.items():
with open(path) as f:
data[file_handle_id] = f.read()
"""
RETRIABLE_FAILURE_CODES = ["EXCEEDS_SIZE_LIMIT"]
MAX_DOWNLOAD_TRIES = 100
max_files_per_request = kwargs.get('max_files_per_request', 2500)
# Rowset tableQuery result not allowed
        if isinstance(table, TableQueryResult):
            raise ValueError("downloadTableColumns doesn't work with rowsets. Please use default tableQuery settings.")
if isinstance(columns, str):
columns = [columns]
if not isinstance(columns, collections.abc.Iterable):
raise TypeError('Columns parameter requires a list of column names')
file_handle_associations, file_handle_to_path_map = self._build_table_download_file_handle_list(
table,
columns,
downloadLocation,
)
self.logger.info("Downloading %d files, %d cached locally" % (len(file_handle_associations),
len(file_handle_to_path_map)))
permanent_failures = collections.OrderedDict()
attempts = 0
while len(file_handle_associations) > 0 and attempts < MAX_DOWNLOAD_TRIES:
attempts += 1
file_handle_associations_batch = file_handle_associations[:max_files_per_request]
# ------------------------------------------------------------
# call async service to build zip file
# ------------------------------------------------------------
# returns a BulkFileDownloadResponse:
# http://docs.synapse.org/rest/org/sagebionetworks/repo/model/file/BulkFileDownloadResponse.html
request = dict(
concreteType="org.sagebionetworks.repo.model.file.BulkFileDownloadRequest",
requestedFiles=file_handle_associations_batch)
response = self._waitForAsync(uri='/file/bulk/async', request=request, endpoint=self.fileHandleEndpoint)
# ------------------------------------------------------------
# download zip file
# ------------------------------------------------------------
temp_dir = tempfile.mkdtemp()
zipfilepath = os.path.join(temp_dir, "table_file_download.zip")
try:
zipfilepath = self._downloadFileHandle(response['resultZipFileHandleId'], table.tableId, 'TableEntity',
zipfilepath)
# TODO handle case when no zip file is returned
# TODO test case when we give it partial or all bad file handles
# TODO test case with deleted fileHandleID
# TODO return null for permanent failures
# ------------------------------------------------------------
# unzip into cache
# ------------------------------------------------------------
if downloadLocation:
download_dir = self._ensure_download_location_is_directory(downloadLocation)
with zipfile.ZipFile(zipfilepath) as zf:
# the directory structure within the zip follows that of the cache:
# {fileHandleId modulo 1000}/{fileHandleId}/{fileName}
for summary in response['fileSummary']:
if summary['status'] == 'SUCCESS':
if not downloadLocation:
download_dir = self.cache.get_cache_dir(summary['fileHandleId'])
filepath = extract_zip_file_to_directory(zf, summary['zipEntryName'], download_dir)
self.cache.add(summary['fileHandleId'], filepath)
file_handle_to_path_map[summary['fileHandleId']] = filepath
elif summary['failureCode'] not in RETRIABLE_FAILURE_CODES:
permanent_failures[summary['fileHandleId']] = summary
finally:
if os.path.exists(zipfilepath):
os.remove(zipfilepath)
# Do we have remaining files to download?
file_handle_associations = [fha for fha in file_handle_associations
if fha['fileHandleId'] not in file_handle_to_path_map
and fha['fileHandleId'] not in permanent_failures.keys()]
# TODO if there are files we still haven't downloaded
return file_handle_to_path_map
def _build_table_download_file_handle_list(self, table, columns, downloadLocation):
# ------------------------------------------------------------
# build list of file handles to download
# ------------------------------------------------------------
cols_not_found = [c for c in columns if c not in [h.name for h in table.headers]]
if len(cols_not_found) > 0:
raise ValueError("Columns not found: " + ", ".join('"' + col + '"' for col in cols_not_found))
col_indices = [i for i, h in enumerate(table.headers) if h.name in columns]
# see: http://docs.synapse.org/rest/org/sagebionetworks/repo/model/file/BulkFileDownloadRequest.html
file_handle_associations = []
file_handle_to_path_map = collections.OrderedDict()
seen_file_handle_ids = set() # ensure not sending duplicate requests for the same FileHandle IDs
for row in table:
for col_index in col_indices:
file_handle_id = row[col_index]
if is_integer(file_handle_id):
path_to_cached_file = self.cache.get(file_handle_id, path=downloadLocation)
if path_to_cached_file:
file_handle_to_path_map[file_handle_id] = path_to_cached_file
elif file_handle_id not in seen_file_handle_ids:
file_handle_associations.append(dict(
associateObjectType="TableEntity",
fileHandleId=file_handle_id,
associateObjectId=table.tableId))
seen_file_handle_ids.add(file_handle_id)
else:
warnings.warn("Weird file handle: %s" % file_handle_id)
return file_handle_associations, file_handle_to_path_map
def _get_default_view_columns(self, view_type, view_type_mask=None):
"""Get default view columns"""
uri = f"/column/tableview/defaults?viewEntityType={view_type}"
if view_type_mask:
uri += f"&viewTypeMask={view_type_mask}"
return [Column(**col)
for col in self.restGET(uri)['list']]
def _get_annotation_view_columns(self, scope_ids: list, view_type: str,
view_type_mask: str = None) -> list:
"""Get all the columns of a submission of entity view based on existing annotations
:param scope_ids: List of Evaluation Queue or Project/Folder Ids
:param view_type: submissionview or entityview
:param view_type_mask: Bit mask representing the types to include in the view.
:returns: list of columns
"""
columns = []
next_page_token = None
while True:
view_scope = {
'concreteType': 'org.sagebionetworks.repo.model.table.ViewColumnModelRequest',
'viewScope': {
'scope': scope_ids,
'viewEntityType': view_type,
'viewTypeMask': view_type_mask
}
}
if next_page_token:
view_scope['nextPageToken'] = next_page_token
response = self._waitForAsync(
uri='/column/view/scope/async',
request=view_scope
)
columns.extend(Column(**column) for column in response['results'])
next_page_token = response.get('nextPageToken')
if next_page_token is None:
break
return columns
############################################################
# CRUD for Entities (properties) #
############################################################
def _getEntity(self, entity, version=None):
"""
Get an entity from Synapse.
:param entity: A Synapse ID, a dictionary representing an Entity, or a Synapse Entity object
:param version: The version number to fetch
:returns: A dictionary containing an Entity's properties
"""
uri = '/entity/'+id_of(entity)
if version:
uri += '/version/%d' % version
return self.restGET(uri)
def _createEntity(self, entity):
"""
Create a new entity in Synapse.
:param entity: A dictionary representing an Entity or a Synapse Entity object
:returns: A dictionary containing an Entity's properties
"""
return self.restPOST(uri='/entity', body=json.dumps(get_properties(entity)))
def _updateEntity(self, entity, incrementVersion=True, versionLabel=None):
"""
Update an existing entity in Synapse.
:param entity: A dictionary representing an Entity or a Synapse Entity object
:param incrementVersion: whether to increment the entity version (if Versionable)
:param versionLabel: a label for the entity version (if Versionable)
:returns: A dictionary containing an Entity's properties
"""
uri = '/entity/%s' % id_of(entity)
params = {}
if is_versionable(entity):
if versionLabel:
# a versionLabel implicitly implies incrementing
incrementVersion = True
elif incrementVersion and 'versionNumber' in entity:
versionLabel = str(entity['versionNumber'] + 1)
if incrementVersion:
entity['versionLabel'] = versionLabel
params['newVersion'] = 'true'
return self.restPUT(uri, body=json.dumps(get_properties(entity)), params=params)
def findEntityId(self, name, parent=None):
"""
Find an Entity given its name and parent.
:param name: name of the entity to find
:param parent: An Entity object or the Id of an entity as a string. Omit if searching for a Project by name
:return: the Entity ID or None if not found
"""
        # When searching for a Project by name, set parentId to None instead of ROOT_ENTITY
entity_lookup_request = {"parentId": id_of(parent) if parent else None,
"entityName": name}
try:
return self.restPOST("/entity/child", body=json.dumps(entity_lookup_request)).get("id")
except SynapseHTTPError as e:
if e.response.status_code == 404: # a 404 error is raised if the entity does not exist
return None
raise
############################################################
# Send Message #
############################################################
def sendMessage(self, userIds, messageSubject, messageBody, contentType="text/plain"):
"""
        Sends a message via Synapse.
:param userIds: A list of user IDs to which the message is to be sent
:param messageSubject: The subject for the message
:param messageBody: The body of the message
:param contentType: optional contentType of message body (default="text/plain")
Should be one of "text/plain" or "text/html"
:returns: The metadata of the created message
"""
fileHandleId = multipart_upload_string(self, messageBody, content_type=contentType)
message = dict(
recipients=userIds,
subject=messageSubject,
fileHandleId=fileHandleId)
return self.restPOST(uri='/message', body=json.dumps(message))
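    # Example (hypothetical usage sketch; the user id is a placeholder):
    #
    #     syn.sendMessage(userIds=["1234567"],
    #                     messageSubject="Run finished",
    #                     messageBody="<b>All jobs completed.</b>",
    #                     contentType="text/html")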
############################################################
# Low level Rest calls #
############################################################
def _generate_headers(self, headers=None):
"""Generate headers (auth headers produced separately by credentials object)"""
if headers is None:
headers = dict(self.default_headers)
headers.update(synapseclient.USER_AGENT)
return headers
def _handle_synapse_http_error(self, response):
"""Raise errors as appropriate for returned Synapse http status codes"""
try:
exceptions._raise_for_status(response, verbose=self.debug)
except exceptions.SynapseHTTPError as ex:
            # if we get an unauthenticated or forbidden error and the user is not logged in
# then we raise it as an authentication error.
# we can't know for certain that logging in to their particular account will grant them
# access to this resource but more than likely it's the cause of this error.
if response.status_code in (401, 403) and not self.credentials:
raise SynapseAuthenticationError(
"You are not logged in and do not have access to a requested resource."
) from ex
raise
def _rest_call(self, method, uri, data, endpoint, headers, retryPolicy, requests_session, **kwargs):
uri, headers = self._build_uri_and_headers(uri, endpoint=endpoint, headers=headers)
retryPolicy = self._build_retry_policy(retryPolicy)
requests_session = requests_session or self._requests_session
auth = kwargs.pop('auth', self.credentials)
requests_method_fn = getattr(requests_session, method)
response = with_retry(
lambda: requests_method_fn(
uri,
data=data,
headers=headers,
auth=auth,
**kwargs,
),
verbose=self.debug, **retryPolicy
)
self._handle_synapse_http_error(response)
return response
def restGET(self, uri, endpoint=None, headers=None, retryPolicy={}, requests_session=None, **kwargs):
"""
Sends an HTTP GET request to the Synapse server.
:param uri: URI on which get is performed
:param endpoint: Server endpoint, defaults to self.repoEndpoint
:param headers: Dictionary of headers to use rather than the API-key-signed default set of headers
:param requests_session: an external requests.Session object to use when making this specific call
:param kwargs: Any other arguments taken by a
`requests <http://docs.python-requests.org/en/latest/>`_ method
:returns: JSON encoding of response
"""
response = self._rest_call('get', uri, None, endpoint, headers, retryPolicy, requests_session, **kwargs)
return self._return_rest_body(response)
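    # Example (hypothetical usage sketch; "syn123" is a placeholder id):
    #
    #     profile = syn.restGET("/userProfile")      # the caller's own profile
    #     entity_json = syn.restGET("/entity/syn123")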
def restPOST(self, uri, body, endpoint=None, headers=None, retryPolicy={}, requests_session=None, **kwargs):
"""
Sends an HTTP POST request to the Synapse server.
:param uri: URI on which get is performed
:param endpoint: Server endpoint, defaults to self.repoEndpoint
:param body: The payload to be delivered
:param headers: Dictionary of headers to use rather than the API-key-signed default set of headers
:param requests_session: an external requests.Session object to use when making this specific call
:param kwargs: Any other arguments taken by a
`requests <http://docs.python-requests.org/en/latest/>`_ method
:returns: JSON encoding of response
"""
response = self._rest_call('post', uri, body, endpoint, headers, retryPolicy, requests_session, **kwargs)
return self._return_rest_body(response)
def restPUT(self, uri, body=None, endpoint=None, headers=None, retryPolicy={}, requests_session=None, **kwargs):
"""
Sends an HTTP PUT request to the Synapse server.
:param uri: URI on which get is performed
:param endpoint: Server endpoint, defaults to self.repoEndpoint
:param body: The payload to be delivered
:param headers: Dictionary of headers to use rather than the API-key-signed default set of headers
:param requests_session: an external requests.session object to use when making this specific call
:param kwargs: Any other arguments taken by a
`requests <http://docs.python-requests.org/en/latest/>`_ method
:returns: JSON encoding of response
"""
response = self._rest_call('put', uri, body, endpoint, headers, retryPolicy, requests_session, **kwargs)
return self._return_rest_body(response)
def restDELETE(self, uri, endpoint=None, headers=None, retryPolicy={}, requests_session=None, **kwargs):
"""
Sends an HTTP DELETE request to the Synapse server.
:param uri: URI of resource to be deleted
:param endpoint: Server endpoint, defaults to self.repoEndpoint
:param headers: Dictionary of headers to use rather than the API-key-signed default set of headers
:param requests_session: an external requests.session object to use when making this specific call
:param kwargs: Any other arguments taken by a
`requests <http://docs.python-requests.org/en/latest/>`_ method
"""
self._rest_call('delete', uri, None, endpoint, headers, retryPolicy, requests_session, **kwargs)
def _build_uri_and_headers(self, uri, endpoint=None, headers=None):
"""Returns a tuple of the URI and headers to request with."""
if endpoint is None:
endpoint = self.repoEndpoint
# Check to see if the URI is incomplete (i.e. a Synapse URL)
# In that case, append a Synapse endpoint to the URI
parsedURL = urllib_urlparse.urlparse(uri)
if parsedURL.netloc == '':
uri = endpoint + uri
if headers is None:
headers = self._generate_headers()
return uri, headers
    def _build_retry_policy(self, retryPolicy={}):
        """Returns a retry policy to be passed on to with_retry."""
defaults = dict(STANDARD_RETRY_PARAMS)
defaults.update(retryPolicy)
return defaults
def _return_rest_body(self, response):
"""Returns either a dictionary or a string depending on the 'content-type' of the response."""
if is_json(response.headers.get('content-type', None)):
return response.json()
return response.text
| apache-2.0 | 6,079,077,354,349,527,000 | 45.814722 | 120 | 0.585914 | false |
tiagoantao/mega-analysis | haploStats/doHaploStats.py | 1 | 4169 | #!/usr/bin/env python3
import os
import sys
import MEGA
from MEGA import karyo
if len(sys.argv) not in [3, 4, 5, 6, 7, 8]:
print("python3 %s COMMAND study [file] [source] [refPop] [chro]" % (sys.argv[0]))
print("""COMMAND is
prepareData
iHS
statIHS
XPEHH
statXPEHH
    if command is prepareData, source is the phased dataset and
    refPop the reference population
""")
sys.exit(-1)
command = sys.argv[1]
myKaryo = sys.argv[2]
karyo.loadBuiltIn(myKaryo)
lexec = MEGA.executor
maxChro = 23 # Needs refactor... No hard max
def removeFixed(tmp, final):
fhap = open("%s.hap" % tmp)
haplo = fhap.readline().rstrip().split(' ')
alleles = [set([a]) for a in haplo]
for l in fhap:
haplo = l.rstrip().split(' ')
for i in range(len(haplo)):
alleles[i].add(haplo[i])
fhap.close()
fmap = open("%s.map" % tmp)
wmap = open("%s.map" % final, 'w')
i = 0
for l in fmap:
if len(alleles[i]) > 1:
wmap.write(l)
i += 1
fmap.close()
wmap.close()
fhap = open("%s.hap" % tmp)
whap = open("%s.hap" % final, 'w')
for l in fhap:
haplo = l.rstrip().split(' ')
cleanHaplo = []
for i in range(len(alleles)):
if len(alleles[i]) > 1:
cleanHaplo.append(haplo[i])
whap.write(' '.join(cleanHaplo))
whap.write('\n')
fhap.close()
whap.close()
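# Illustrative note (assumed file layout): removeFixed expects one haplotype per
# line in "<tmp>.hap" with space-separated alleles and a matching "<tmp>.map"
# with one marker per line. A column whose allele never varies, e.g.
#
#     1 0 1
#     1 1 0      <- column 0 is fixed at "1"
#     1 0 0
#
# is dropped from both output files, since fixed sites carry no haplotype signal.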
def prepPop2(allInds, database, refPop, isXPEHH):
f = open(allInds)
w = open(allInds + "_", "w")
for l in f:
toks = l.rstrip().replace(" ", "\t").split("\t")
w.write(toks[1] + "\n")
w.close()
for chro in range(1, 23 + 1): # Human dependent
if not karyo.accept(chro, 1):
continue
if refPop != "shapeIt":
lexec.submit("python3",
"%s/beagle2ihs.py %s/%s/%s-%d.gz %s tmp-%d %s/37-%d.map %d"
% (MEGA.phasingScripts, MEGA.phaseDB, database,
refPop, chro, allInds, chro, MEGA.geneticMapDB, chro, chro))
else:
lexec.submit("python3",
"%s/beagle2ihs.py %s/%s/%d.gz %s tmp-%d %s/37-%d.map %d"
% (MEGA.phasingScripts, MEGA.phaseDB, database,
chro, allInds, chro, MEGA.geneticMapDB, chro, chro))
lexec.wait(True)
for chro in range(1, 23 + 1): # Human dependent
if not karyo.accept(chro, 1):
continue
if isXPEHH:
os.rename('tmp-%d.hap' % chro, '%d.hap' % chro)
os.rename('tmp-%d.map' % chro, '%d.map' % chro)
else:
removeFixed('tmp-%d' % chro, '%d' % chro)
if command == 'prepareData':
studyName = sys.argv[3]
allInds = sys.argv[4]
source = sys.argv[5]
refPop = sys.argv[6]
if len(sys.argv) > 7:
isXPEHH = True
else:
isXPEHH = False
prepPop2(allInds, source, refPop, isXPEHH)
elif command == 'iHS':
for k in range(1, maxChro + 1):
if not karyo.accept(k, 1): # needs whole chromosome accepted
continue
input = "%d.hap" % (k,)
inmap = "%d.map" % (k,)
out = "%d.uiHS" % (k,)
lexec.out = out
lexec.submit("ihs", "%s %s" % (inmap, input))
lexec.wait(True)
elif command == 'statIHS':
maxBin = sys.argv[3]
winSize = sys.argv[4]
minSNPs = sys.argv[5]
os.system("python3 %s/statIHS.py %s %s %s" % (
MEGA.haploScripts, maxBin, winSize, minSNPs))
elif command in ['XPEHH', 'XPEHH2']:
for k in range(1, maxChro + 1):
if not karyo.accept(k, 1): # needs whole chromosome accepted
continue
input = "%d.hap" % k
sinput = "s%d.hap" % k
inmap = "%d.map" % k
out = "%d.xpEHH" % k
lexec.out = out
print("-m %s -h %s %s" % (inmap, input, sinput))
lexec.submit("xpehh", "-m %s -h %s %s" % (inmap, input, sinput))
lexec.wait(True)
elif command == 'statXPEHH':
winSize = sys.argv[3]
os.system("python3 %s/statXPEHH.py 4 %s" % (MEGA.haploScripts, winSize))
else:
print("ERROR: Command not known!")
| agpl-3.0 | -8,782,770,987,302,517,000 | 28.359155 | 88 | 0.528184 | false |
johnarban/arban | schmidt_funcs.py | 1 | 26232 | import numpy as np
from PIL import Image, ImageDraw
from scipy import interpolate, ndimage, stats, signal, integrate, misc, special  # special is needed by sigconf1d
from astropy.io import ascii, fits
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
import astropy.units as u
import astropy.constants as c
import corner as triangle # formerly dfm/triangle
# from astropy.modeling import models, fitting
from astropy.modeling.models import custom_model
from astropy.modeling.fitting import LevMarLSQFitter # , SimplexLSQFitter
import matplotlib.pyplot as plt
import matplotlib as mpl
import emcee
#import ipdb;
import pdb
# # # # # # # # # # # # # # # # # # # # # #
# make iPython print immediately
import sys
oldsysstdout = sys.stdout
class flushfile():
def __init__(self, f):
self.f = f
def __getattr__(self, name):
return object.__getattribute__(self.f, name)
def write(self, x):
self.f.write(x)
self.f.flush()
def flush(self):
self.f.flush()
# sys.stdout = flushfile(sys.stdout)
# sys.stdout = oldsysstdout
def rot_matrix(theta):
'''
rot_matrix(theta)
2D rotation matrix for theta in radians
returns numpy matrix
'''
c, s = np.cos(theta), np.sin(theta)
return np.matrix([[c, -s], [s, c]])
def rectangle(c, w, h, angle=0, center=True):
'''
create rotated rectangle
for input into PIL ImageDraw.polygon
to make a rectangle polygon mask
    Rectangle is created and rotated with center
    at zero, and then translated to center position
    accepted centers:
    Default : center
    tl, tr, bl, br
'''
cx, cy = c
# define initial polygon irrespective of center
x = -w / 2., +w / 2., +w / 2., -w / 2.
y = +h / 2., +h / 2., -h / 2., -h / 2.
# correct center if starting from corner
if center is not True:
if center[0] == 'b':
# y = tuple([i + h/2. for i in y])
cy = cy + h / 2.
else:
# y = tuple([i - h/2. for i in y])
cy = cy - h / 2.
if center[1] == 'l':
# x = tuple([i + w/2 for i in x])
cx = cx + w / 2.
else:
# x = tuple([i - w/2 for i in x])
cx = cx - w / 2.
R = rot_matrix(angle * np.pi / 180.)
c = []
for i in range(4):
xr, yr = np.dot(R, np.asarray([x[i], y[i]])).A.ravel()
# coord switch to match ordering of FITs dimensions
c.append((cx + xr, cy + yr))
# print (cx,cy)
return c
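# A minimal sketch of rasterizing a rotated rectangular mask with rectangle()
# and PIL (sizes, center, and angle below are made-up values):
#
#     img = Image.new('L', (200, 100), 0)
#     poly = rectangle((100, 50), 80, 30, angle=25)
#     ImageDraw.Draw(img).polygon(poly, outline=1, fill=1)
#     mask = np.array(img, dtype=bool)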
def comp(arr):
'''
returns the compressed version
of the input array if it is a
numpy MaskedArray
'''
try:
return arr.compressed()
except:
return arr
def mavg(arr, n=2, mode='valid'):
'''
returns the moving average of an array.
returned array is shorter by (n-1)
'''
if len(arr) > 400:
return signal.fftconvolve(arr, [1. / float(n)] * n, mode=mode)
else:
return signal.convolve(arr, [1. / float(n)] * n, mode=mode)
def mgeo(arr, n=2):
'''
    Returns array of length len(arr) - (n-1)
# # written by me
# # slower for short loops
# # faster for n ~ len(arr) and large arr
a = []
for i in xrange(len(arr)-(n-1)):
a.append(stats.gmean(arr[i:n+i]))
# # Original method# #
# # written by me ... ~10x faster for short arrays
b = np.array([np.roll(np.pad(arr,(0,n),mode='constant',constant_values=1),i)
for i in xrange(n)])
return np.product(b,axis=0)[n-1:-n]**(1./float(n))
'''
a = []
for i in range(len(arr) - (n - 1)):
a.append(stats.gmean(arr[i:n + i]))
return np.asarray(a)
def avg(arr, n=2):
'''
NOT a general averaging function
return bin centers (lin and log)
'''
diff = np.diff(arr)
# 2nd derivative of linear bin is 0
if np.allclose(diff, diff[::-1]):
return mavg(arr, n=n)
else:
return np.power(10., mavg(np.log10(arr), n=n))
# return mgeo(arr, n=n) # equivalent methods, only easier
def shift_bins(arr,phase=0,nonneg=False):
# assume original bins are nonneg
if phase != 0:
diff = np.diff(arr)
if np.allclose(diff,diff[::-1]):
diff = diff[0]
arr = arr + phase*diff
#pre = arr[0] + phase*diff
return arr
else:
arr = np.log10(arr)
diff = np.diff(arr)[0]
arr = arr + phase * diff
return np.power(10.,arr)
else:
return arr
def llspace(xmin, xmax, n=None, log=False, dx=None, dex=None):
'''
llspace(xmin, xmax, n = None, log = False, dx = None, dex = None)
get values evenly spaced in linear or log spaced
n [10] -- Optional -- number of steps
log [false] : switch for log spacing
dx : spacing for linear bins
dex : spacing for log bins (in base 10)
dx and dex override n
'''
xmin, xmax = float(xmin), float(xmax)
nisNone = n is None
dxisNone = dx is None
dexisNone = dex is None
if nisNone & dxisNone & dexisNone:
        print('Error: Defaulting to 10 linear steps')
n = 10.
nisNone = False
# either user specifies log or gives dex and not dx
log = log or (dxisNone and (not dexisNone))
if log:
if xmin == 0:
print("log(0) is -inf. xmin must be > 0 for log spacing")
xmin, xmax = np.log10(xmin), np.log10(xmax)
# print nisNone, dxisNone, dexisNone, log # for debugging logic
if not nisNone: # this will make dex or dx if they are not specified
if log and dexisNone: # if want log but dex not given
dex = (xmax - xmin) / n
# print dex
elif (not log) and dxisNone: # else if want lin but dx not given
dx = (xmax - xmin) / n # takes floor
#print dx
if log:
#return np.power(10, np.linspace(xmin, xmax , (xmax - xmin)/dex + 1))
return np.power(10, np.arange(xmin, xmax + dex, dex))
else:
#return np.linspace(xmin, xmax, (xmax-xmin)/dx + 1)
return np.arange(xmin, xmax + dx, dx)
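# Quick llspace examples (values verified by hand):
#
#     llspace(1, 10, n=9)              # 1, 2, ..., 10 (linear, step 1)
#     llspace(1, 100, dex=0.5)         # 1, ~3.16, 10, ~31.6, 100
#     llspace(0.1, 10, n=4, log=True)  # 5 log-spaced values over 2 dex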
def nametoradec(name):
'''
Get names formatted as
hhmmss.ss+ddmmss to Decimal Degree
only works for dec > 0 (splits on +, not -)
Will fix this eventually...
'''
if 'string' not in str(type(name)):
rightascen = []
declinatio = []
for n in name:
ra, de = n.split('+')
ra = ra[0:2] + ':' + ra[2:4] + ':' + ra[4:6] + '.' + ra[6:8]
de = de[0:2] + ':' + de[2:4] + ':' + de[4:6]
coord = SkyCoord(ra, de, frame='icrs',
unit=('hourangle', 'degree'))
rightascen.append(coord.ra.value)
declinatio.append(coord.dec.value)
return np.array(rightascen), np.array(declinatio)
else:
ra, de = name.split('+')
ra = ra[0:2] + ':' + ra[2:4] + ':' + ra[4:6] + '.' + ra[6:8]
de = de[0:2] + ':' + de[2:4] + ':' + de[4:6]
coord = SkyCoord(ra, de, frame='icrs', unit=('hourangle', 'degree'))
return np.array(coord.ra.value), np.array(coord.dec.value)
def get_ext(extmap, errmap, extwcs, ra, de):
'''
Get the extinction (errors) for a particular position or
list of positions
More generally get the value (error) for a particular
position given a wcs and world coordinates
'''
try:
xp, yp = extwcs.all_world2pix(
np.array([ra]).flatten(), np.array([de]).flatten(), 0)
except:
xp, yp = WCS(extwcs).all_world2pix(
np.array([ra]).flatten(), np.array([de]).flatten(), 0)
ext = []
err = []
    for i in range(len(np.array(xp))):
        try:
            # round the pixel coordinates (not the loop index) before indexing
            ext.append(extmap[int(round(yp[i])), int(round(xp[i]))])
            if errmap is not None:
                err.append(errmap[int(round(yp[i])), int(round(xp[i]))])
        except IndexError:
            ext.append(np.nan)
            if errmap is not None:
                err.append(np.nan)
if errmap is not None:
return np.array(ext), np.array(err)
else:
return np.array(ext), None
def pdf(values, bins):
'''
** Normalized differential area function. **
    (statistical) probability density function
normalized so that the integral is 1
and. The integral over a range is the
probability of the value is within
that range.
Returns array of size len(bins)-1
Plot versus bins[:-1]
'''
if hasattr(bins,'__getitem__'):
range=(np.nanmin(bins),np.nanmax(bins))
else:
range = None
h, x = np.histogram(values, bins=bins, range=range, density=False)
# From the definition of Pr(x) = dF(x)/dx this
# is the correct form. It returns the correct
# probabilities when tested
pdf = h / (np.sum(h, dtype=float) * np.diff(x))
return pdf, avg(x)
def pdf2(values, bins):
'''
The ~ PDF normalized so that
the integral is equal to the
total amount of a quantity.
The integral over a range is the
total amount within that range.
Returns array of size len(bins)-1
Plot versus bins[:-1]
'''
if hasattr(bins,'__getitem__'):
range=(np.nanmin(bins),np.nanmax(bins))
else:
range = None
pdf, x = np.histogram(values, bins=bins, range=range, density=False)
pdf = pdf.astype(float) / np.diff(x)
return pdf, avg(x)
def edf(data, pdf=False):
y = np.arange(len(data), dtype=float)
x = np.sort(data).astype(float)
return y, x
def cdf(values, bins):
'''
(statistical) cumulative distribution function
Integral on [-inf, b] is the fraction below b.
CDF is invariant to binning.
This assumes you are using the entire range in the binning.
Returns array of size len(bins)
Plot versus bins[:-1]
'''
if hasattr(bins,'__getitem__'):
range = (np.nanmin(bins),np.nanmax(bins))
else:
range = None
h, bins = np.histogram(values, bins=bins, range=range, density=False) # returns int
c = np.cumsum(h / np.sum(h, dtype=float)) # cumulative fraction below bin_k
# append 0 to beginning because P( X < min(x)) = 0
return np.append(0, c), bins
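# Sanity-check sketch for the pdf/cdf pair (random data; bin choice is arbitrary):
#
#     x = np.random.lognormal(size=1000)
#     bins = llspace(x.min(), x.max(), n=20)
#     p, mid = pdf(x, bins)
#     np.sum(p * np.diff(bins))   # ~= 1 by construction
#     c, b = cdf(x, bins)         # c[0] == 0 and c[-1] == 1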
def cdf2(values, bins):
'''
# # Exclusively for area_function which needs to be unnormalized
(statistical) cumulative distribution function
Value at b is total amount below b.
    CDF is invariant to binning
Plot versus bins[:-1]
Not normalized to 1
'''
if hasattr(bins,'__getitem__'):
range=(np.nanmin(bins),np.nanmax(bins))
else:
range = None
h, bins = np.histogram(values, bins=bins, range=range, density=False)
c = np.cumsum(h).astype(float)
return np.append(0., c), bins
def area_function(extmap, bins):
'''
    Complementary CDF for cdf2 (not normalized to 1)
Value at b is total amount above b.
'''
c, bins = cdf2(extmap, bins)
return c.max() - c, bins
def diff_area_function(extmap, bins,scale=1):
'''
See pdf2
'''
s, bins = area_function(extmap, bins)
dsdx = -np.diff(s) / np.diff(bins)
return dsdx*scale, avg(bins)
def log_diff_area_function(extmap, bins):
'''
See pdf2
'''
s, bins = diff_area_function(extmap, bins)
g=s>0
dlnsdlnx = np.diff(np.log(s[g])) / np.diff(np.log(bins[g]))
return dlnsdlnx, avg(bins[g])
def mass_function(values, bins, scale=1, aktomassd=183):
'''
    M(>Ak), mass weighted complementary cdf
'''
if hasattr(bins,'__getitem__'):
range=(np.nanmin(bins),np.nanmax(bins))
else:
range = None
h, bins = np.histogram(values, bins=bins, range=range, density=False, weights=values*aktomassd*scale)
c = np.cumsum(h).astype(float)
return c.max() - c, bins
def hist(values, bins, err=False, density=False, **kwargs):
'''
really just a wrapper for numpy.histogram
'''
if hasattr(bins,'__getitem__'):
range=(np.nanmin(bins),np.nanmax(bins))
else:
range = None
hist, x = np.histogram(values, bins=bins, range=range, density=density, **kwargs)
if (err is None) or (err is False):
return hist.astype(np.float), avg(x)
else:
return hist.astype(np.float), avg(x), np.sqrt(hist)
def bootstrap(X, X_err=None, n=None, smooth=False):
'''
(smooth) bootstrap
bootstrap(X,Xerr,n,smooth=True)
X : array to be resampled
X_err [optional]: errors to perturb data for smooth bootstrap
                      only provide if doing smooth bootstrapping
n : number of samples. Default - len(X)
smooth: optionally use smooth bootstrapping.
will be set to False if no X_err is provided
'''
if X_err is None:
smooth = False
if n is None: # default n
n = len(X)
resample_i = np.random.randint(0,len(X),size=(n,))
X_resample = np.asarray(X)[resample_i]
if smooth:
X_resample = np.random.normal(X_resample, \
np.asarray(X_err)[resample_i])
return X_resample
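# A minimal smooth-bootstrap sketch (illustrative values):
#
#     x = np.random.normal(10., 1., size=500)
#     xerr = np.full_like(x, 0.2)
#     means = [np.mean(bootstrap(x, xerr, smooth=True)) for _ in range(1000)]
#     np.std(means)   # bootstrap standard error of the mean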
def num_above(values, level):
return np.sum((values >= level) & np.isfinite(values), dtype=np.float)
def num_below(values, level):
return np.sum((values < level) & np.isfinite(values), dtype=np.float)
def alpha_ML(data, xmin,xmax):
'''
    uses maximum likelihood estimation
    to determine the power-law index and its error
From Clauset et al. 2010
'''
data = data[np.isfinite(data)]
data = data[(data >= xmin) & (data <= xmax)]
alpha = 1 + len(data) * (np.sum(np.log(data / xmin))**(-1))
error = (alpha -1 )/np.sqrt(len(data))
#loglike = np.sum((-1+alpha)*np.log(xmin)-alpha*np.log(data)+np.log(-1+alpha))
N = len(data)
loglike = N*np.log(alpha-1) - N*np.log(xmin) - alpha * np.sum(np.log(data/xmin))
return alpha , error, loglike, xmin, xmax
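# Worked sketch: recover the index of a pure power law p(x) ~ x**-2.5 drawn by
# inverse-transform sampling (alpha_hat should come out close to 2.5):
#
#     u = np.random.uniform(size=10000)
#     x = 1.0 * (1 - u) ** (-1. / (2.5 - 1.))   # Pareto samples with xmin = 1
#     alpha_hat, err, loglike, _, _ = alpha_ML(x, 1.0, np.inf)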
def sigconf1d(n):
cdf = (1/2.)*(1+special.erf(n/np.sqrt(2)))
return (1-cdf)*100,100* cdf,100*special.erf(n/np.sqrt(2))
def surfd(X, Xmap, bins, Xerr = None, Xmaperr = None, boot=False, scale=1., return_err=False, smooth=False):
'''
    call: surfd(X, Xmap, bins,
                Xerr = None, Xmaperr = None, boot = False, scale = 1.)
calculates H(X)/H(M) = Nx pdf(x) dx / Nm pdf(m) dm ; dm = dx
so it is independent of whether dx or dlog(x)
'''
# get dn/dx
if boot:
n = np.histogram(bootstrap(X,Xerr,smooth=True), bins = bins, range=(bins.min(),bins.max()))[0]
s = np.histogram(bootstrap(Xmap,Xmaperr,smooth=True), bins = bins, range=(bins.min(),bins.max()))[0] * scale
else:
n = np.histogram(X, bins = bins, range=(bins.min(),bins.max()))[0]
s = np.histogram(Xmap, bins = bins, range=(bins.min(),bins.max()))[0] * scale
if not return_err:
return n / s
else:
return n / s, n / s * np.sqrt(1. / n - scale / s)
def alpha(y, x, err=None, return_kappa=False, cov=False):
'''
this returns -1*alpha, and optionally kappa and errors
'''
    a1 = set(np.nonzero(np.multiply(x, y))[0])
    # np.add(x, y, err) would store x + y into err (the third argument is the
    # `out` parameter); sum explicitly so non-finite errors are also masked
    if err is None:
        a2 = set(np.where(np.isfinite(x + y))[0])
    else:
        a2 = set(np.where(np.isfinite(x + y + err))[0])
a = np.asarray(list(a1 & a2))
y = np.log(y[a])
x = np.log(x[a])
if err is None:
p, covar = np.polyfit(x, y, 1, cov=True)
m, b = p
me, be = np.sqrt(np.sum(covar * [[1, 0], [0, 1]], axis=1))
me, be
else:
err = err[a]
err = err / y
p, covar = np.polyfit(x, y, 1, w=1. / err**2, cov=True)
m, b = p
me, be = np.sqrt(np.sum(covar * [[1, 0], [0, 1]], axis=1))
me, be
if return_kappa:
if cov:
return m, np.exp(b), me, be
else:
return m, np.exp(b)
else:
if cov:
return m, me
else:
return m
def Heaviside(x):
return 0.5 * (np.sign(x) + 1.)
def schmidt_law(Ak, theta):
'''
schmidt_law(Ak,(beta,kappa))
beta is the power law index (same as alpha)
'''
if len(theta) == 2:
beta, kappa = theta
return kappa * (Ak ** beta)
elif len(theta) == 3:
beta, kappa, Ak0 = theta
sfr = Heaviside(Ak - Ak0) * kappa * (Ak ** beta)
        sfr[Ak < Ak0] = 0  # alternatives: np.nan, or kappa * (Ak0 ** beta)
return sfr
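# Quick sketch of the two parameterizations of schmidt_law (made-up values):
#
#     ak = np.linspace(0.1, 2.0, 5)
#     schmidt_law(ak, (2.0, 1.5))         # kappa * Ak**beta
#     schmidt_law(ak, (2.0, 1.5, 0.8))    # same, but zero below Ak0 = 0.8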
def lmfit_powerlaw(x, y, yerr=None, xmin=-np.inf, xmax=np.inf, init=None, maxiter=1000000):
@custom_model
def model(x, beta=init[0], kappa=init[1]):
return np.log(kappa * (np.exp(x) ** beta))
keep = np.isfinite(1. / y) & (x >= xmin) & (x <= xmax)
if yerr is not None:
keep = keep & np.isfinite(1. / yerr)
m_init = model()
fit = LevMarLSQFitter()
#weights = (yerr / y)[keep]**(-2.)
m = fit(m_init, np.log(x[keep]), np.log(y[keep]), maxiter=maxiter)
return m, fit
def fit_lmfit_schmidt(x, y, yerr, init=None):
m, _ = lmfit_powerlaw(x,y,yerr,init=init)
return m.parameters
def emcee_schmidt(x, y, yerr, pos=None, pose=None,
nwalkers=None, nsteps=None, burnin=200,verbose=True):
'''
    emcee_schmidt provides a convenient wrapper for fitting the Schmidt law
    to binned x, log(y) data. Generally, it fits a normalization and a slope
'''
def model(x, theta):
'''
theta = (beta, kappa)
'''
return np.log(schmidt_law(x, theta))
def lnlike(theta, x, y, yerr):
mod = model(x, theta)
inv_sigma2 = 1 / yerr**2
# Poisson statistics -- not using this
#mu = (yerr)**2 # often called lambda = poisson variance for bin x_i
#resid = np.abs(y - mod) # where w calculate the poisson probability
#return np.sum(resid * np.log(mu) - mu) - np.sum(np.log(misc.factorial(resid)))
#######################################################
########## CHI^2 log-likelihood #######################
return -0.5 * (np.sum((y - mod)**2 * inv_sigma2))# - 0.5 * 3 * np.log(np.sum(k))
def lnprior(theta):
# different priors for different version of
# the schmidt law
if len(theta) == 3:
beta, kappa, Ak0 = theta
c3 = 0. < Ak0 <= 5.
c4 = True
else:
beta, kappa = theta
c3 = True
c4 = True
        c1 = 0 <= beta <= 6   # sampler never runs into this bound
        c2 = 0 <= kappa       # sampler never runs into this bound
if c1 and c2 and c3 and c4:
return 0.0
return -np.inf
def lnprob(theta, x, y, yerr):
## update likelihood
lp = lnprior(theta)
if not np.isfinite(lp):
return -np.inf
return lp + lnlike(theta, x, y, yerr)
ndim, nwalkers = len(pos), nwalkers
pos = [np.array(pos) + np.array(pose) * 0.5 *
(0.5 - np.random.rand(ndim)) for i in range(nwalkers)]
sampler = emcee.EnsembleSampler(
nwalkers, ndim, lnprob, args=(x, y, yerr))
sampler.run_mcmc(pos, nsteps)
# Get input values
# x, y, yerr = sampler.args
samples = sampler.chain[:, burnin:, :].reshape((-1, sampler.ndim))
# # Print out final values # #
theta_mcmc = np.percentile(samples, [16, 50, 84], axis=0).T
if verbose: print(sampler.acor)
if verbose:
for i, item in enumerate(theta_mcmc):
j = ['beta', 'kappa', 'A_{K,0}', 'A_{K,f}']
inserts = (j[i], item[1], item[2] - item[1], item[1] - item[0])
print('%s = %0.2f (+%0.2f,-%0.2f)' % inserts)
return sampler, np.median(samples, axis=0), np.std(samples, axis=0)
def fit(bins, samp, samperr, maps, mapserr, scale=1., sampler=None, log=False,
pos=None, pose=None, nwalkers=100, nsteps=1e4, boot=1000, burnin=200,
threshold=False, threshold2=False,verbose=True):
'''
# # # A Schmidt Law fitting Function using EMCEE by D.F.M.
fit(bins, samp, samperr, maps, mapserr, scale=1.,
pos=None, pose=None, nwalkers=100, nsteps=1e4)
bins: bin edges for binning data (I know it's bad to bin)
samp : values for your sample
samperr : errors on values for you sample
maps: map of values from which you drew your sample
mapserr: error on maps...
pos : initial location of ball of walkers
pose : initial spread of walkers
'''
#print 'Hi!. It\'s hammer time...'
# x values are bin midpoints
x = avg(bins) # assume if log=True, then bins are already log
# x = bins[:-1]
# y = np.asarray([surfd(samp,maps,bins,boot=True,scale=scale) for i in xrange(boot)])
# yerr = np.nanstd(y,axis=0)
#if log:
# samp = np.log10(samp)
# maps = np.log10(maps)
# bins = np.log10(bins) # because bins doesn't get used again after surfd
y, yerr = surfd(samp, maps, bins, scale=scale, return_err=True)
###########################################+
###### ADDED FOR SHIFTING EXPERIMENT ######+
###########################################+
bins2 = shift_bins(bins,0.5)
x2 = avg(bins2)
y2, yerr2 = surfd(samp, maps, bins2, scale=scale, return_err=True)
concatx = np.concatenate((x,x2))
concaty = np.concatenate((y,y2))
concatyerr = np.concatenate((yerr,yerr2))
srt = np.argsort(concatx)
x = concatx[srt]
y = concaty[srt]
yerr = concatyerr[srt]
nonzero = np.isfinite(1. / y) & np.isfinite(yerr) & np.isfinite(1./yerr)
y = y[nonzero]
yerr = yerr[nonzero]
x = x[nonzero]
# initialize walker positions and walker bundle size
init = alpha(y, x, return_kappa=True, cov=True)
if pos is None:
pos = init[:2]
if pose is None:
if np.isnan(init[2] + init[3]):
pose = (1, 1)
else:
pose = (init[2], init[3])
if threshold | threshold2:
pos = pos + (0.4,)
pose = pose + (0.2,)
if threshold2:
pos = pos + (8.,)
pose = pose + (.5,)
#print pos
#print pose
pos = np.asarray(pos)
pose = .1*pos#np.asarray(pose)
# This function only fits sources, it doesn't plot, so don't pass
# and emcee sampler type. it will spit it back out
# # # # # # # RUN EMCEE # # # # # # #
# pdb.set_trace()
if sampler is None:
if verbose: print('Sampler autocorrelation times . . .')
sampler, theta, theta_std = emcee_schmidt(x, np.log(y), yerr/y,
pos=pos, pose=pose,
nwalkers=nwalkers,
nsteps=nsteps, burnin=burnin,verbose=verbose)
else:
print('Next time don\'t give me a ' + str(type(sampler)) + '.')
#
    try:
        return sampler, x, y, yerr, theta, theta_std
    except NameError:
        # theta/theta_std are undefined when a pre-built sampler was passed in
        return sampler, x, y, yerr
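# A minimal usage sketch for fit() (hypothetical inputs -- `samp`/`samperr`
# would be your source measurements and `maps`/`mapserr` the map they were
# drawn from; the names below are placeholders, not data shipped with this
# module):
#
#   bins = np.logspace(-1, 1, 10)  # bin edges
#   sampler, x, y, yerr, theta, theta_std = fit(
#       bins, samp, samperr, maps, mapserr,
#       scale=1., nwalkers=100, nsteps=1e4, burnin=200)
#   schmidt_results_plots(sampler, schmidt_law, x, y, yerr, burnin=200)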
def schmidt_results_plots(sampler, model, x, y, yerr, burnin=200, akmap=None,
bins=None, scale=None, triangle_plot=True):
'''
model: should pass schmidt_law()
'''
try:
mpl.style.use('john')
except:
None
# Get input values
# x, y, yerr = sampler.args
if hasattr(sampler,'__getitem__'):
chain = sampler
dim = chain.shape[-1]
else:
chain = sampler.chain
dim = sampler.dim
samples = chain[:, burnin:, :].reshape((-1, dim))
# # Print out final values # #
theta_mcmc = np.percentile(samples, [16, 50, 84], axis=0).T # Get percentiles for each parameter
n_params = len(theta_mcmc[:,1])
#print n_params
for i, item in enumerate(theta_mcmc):
j = ['beta', 'kappa', 'A_{K,0}','A_{K,f}']
inserts = (j[i], item[1], item[2] - item[1], item[1] - item[0])
print('%s = %0.2f (+%0.2f,-%0.2f)' % inserts)
# Plot corner plot
if triangle_plot:
if n_params == 3:
labels = ['beta', 'kappa', 'A_{K,0}']
elif n_params == 4:
labels = ['beta', 'kappa', 'A_{K,0}', 'A_{K,f}']
else:
labels = ['beta', 'kappa']
#print labels
_ = triangle.corner(samples, labels=labels,
truths=theta_mcmc[:, 1], quantiles=[.16, .84],
verbose=False)
# generate schmidt laws from parameter samples
xln = np.logspace(np.log10(x.min()*.5),np.log10(x.max()*2.),100)
smlaw_samps = np.asarray([schmidt_law(xln, samp) for samp in samples])
# get percentile bands
percent = lambda x: np.nanpercentile(smlaw_samps, x, interpolation='linear', axis=0)
# Plot fits
fig = plt.figure()
# Plot data with errorbars
    plt.plot(xln, percent(50), 'k')  # median model line
# yperr = np.abs(np.exp(np.log(y)+yerr/y) - y)
# ynerr = np.abs(np.exp(np.log(y)-yerr/y) - y)
plt.errorbar(x, y, yerr, fmt='rs', alpha=0.7, mec='none')
plt.legend(['Median', 'Data'],
loc='upper left', fontsize=12)
# draw 1,2,3 sigma bands
    plt.fill_between(xln, percent(1), percent(99), color='0.9')   # ~3 sigma band
    plt.fill_between(xln, percent(2), percent(98), color='0.75')  # ~2 sigma band
    plt.fill_between(xln, percent(16), percent(84), color='0.5')  # 1 sigma band
plt.loglog(nonposy='clip')
return plt.gca()
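# Note: `triangle` used above is the corner-plot package (later renamed and
# published as `corner`); triangle.corner() draws the pairwise posterior
# projections with the median parameter values marked as truths.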
def flatchain(chain):
return chain.reshape((-1,chain.shape[-1]))
def norm_chain(chain, axis=0):
std = np.std(flatchain(chain), axis=axis)
med = np.median(flatchain(chain), axis=axis)
return (chain-med)/std
def plot_walkers(sampler,limits = None, bad = None):
'''
sampler : emcee Sampler class
'''
if hasattr(sampler,'__getitem__'):
chain = sampler
ndim = chain.shape[-1]
else:
chain = sampler.chain
ndim = sampler.ndim
fig = plt.figure(figsize=(8 * ndim, 4 * ndim))
if hasattr(limits,'__getitem__'):
        limits = list(limits) + [None] * (3 - len(limits))
slices = slice(limits[0],limits[1],limits[2])
else:
slices = slice(None,limits,None)
for w,walk in enumerate(chain[:,slices,:]):
if bad is None:
color = 'k'
elif bad[w]:
color = 'r'
else:
color = 'k'
for p, param in enumerate(walk.T):
ax = plt.subplot(ndim, 1, p + 1)
ax.plot(param, color, alpha=.75, lw=0.75)
# ax.set_ylim(param.min()*0.5,param.max()*1.5)
# ax.semilogy()
plt.tight_layout()
return fig
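# Example (hypothetical sampler from an earlier fit): inspect convergence by
# plotting the walker traces after the burn-in phase.
#
#   fig = plot_walkers(sampler, limits=[200])  # show steps 200 onward
#   fig.savefig('walkers.png')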
def tester():
print('hi ya\'ll')
| mit | -1,129,073,763,768,099,700 | 30.007092 | 116 | 0.559126 | false |
sangwook236/SWDT | sw_dev/python/ext/test/gui/wxpython/wx_basic.py | 1 | 8474 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import wx
class SimpleFrame(wx.Frame):
def __init__(self, *args, **kwargs):
# Ensure the parent's __init__() is called.
super(SimpleFrame, self).__init__(*args, **kwargs)
# Create a panel in the frame.
panel = wx.Panel(self)
# Put some text with a larger bold font on it.
st = wx.StaticText(panel, label="Hello World!")
font = st.GetFont()
font.PointSize += 10
font = font.Bold()
st.SetFont(font)
# Create a sizer to manage the layout of child widgets.
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(st, wx.SizerFlags().Border(wx.TOP | wx.LEFT, 25))
panel.SetSizer(sizer)
# Initialize UI.
self.InitUI()
#--------------------
self.SetSize((450, 350))
self.SetTitle("Simple Example")
self.Centre()
def InitUI(self):
"""
A menu bar is composed of menus, which are composed of menu items.
This method builds a set of menus and binds handlers to be called
when the menu item is selected.
"""
# Create a menu bar.
fileMenu = wx.Menu()
newItem = fileMenu.Append(wx.ID_NEW, "&New")
openItem = fileMenu.Append(wx.ID_OPEN, "&Open...")
saveAsItem = fileMenu.Append(wx.ID_SAVE, "&Save As...")
fileMenu.AppendSeparator()
importMenu = wx.Menu()
importMenu.Append(wx.ID_ANY, "Import Newsfeed List...")
importMenu.Append(wx.ID_ANY, "mport Bookmarks...")
importMenu.Append(wx.ID_ANY, "Import Mail...")
fileMenu.AppendMenu(wx.ID_ANY, "I&mport", importMenu)
fileMenu.AppendSeparator()
if True:
# When using a stock ID we don't need to specify the menu item's label.
exitItem = fileMenu.Append(wx.ID_EXIT)
else:
exitItem = wx.MenuItem(fileMenu, 1, "&Quit\tCtrl+Q")
exitItem.SetBitmap(wx.Bitmap("./exit.png"))
fileMenu.Append(exitItem)
viewMenu = wx.Menu()
self.showStatusbarItem = viewMenu.Append(wx.ID_ANY, "Show Statusbar", "Show Statusbar", kind=wx.ITEM_CHECK)
self.showToolbarItem = viewMenu.Append(wx.ID_ANY, "Show Toolbar", "Show Toolbar", kind=wx.ITEM_CHECK)
viewMenu.Check(self.showStatusbarItem.GetId(), True)
viewMenu.Check(self.showToolbarItem.GetId(), True)
messageMenu = wx.Menu()
# The "\t..." syntax defines an accelerator key that also triggers the same event.
helloItem = messageMenu.Append(wx.ID_ANY, "&Hello...\tCtrl-H", "Help string shown in status bar for this menu item")
messageMenu.AppendSeparator()
messageItem = messageMenu.Append(wx.ID_ANY, "&Message...\tCtrl-M", "Message")
errorItem = messageMenu.Append(wx.ID_ANY, "&Error...\tCtrl-E", "Error")
questionItem = messageMenu.Append(wx.ID_ANY, "&Question...\tCtrl-U", "Question")
exclamationItem = messageMenu.Append(wx.ID_ANY, "&Exclamation...\tCtrl-C", "Exclamation")
# Now a help menu for the about item.
helpMenu = wx.Menu()
aboutItem = helpMenu.Append(wx.ID_ABOUT)
# Make the menu bar and add the two menus to it. The '&' defines
# that the next letter is the "mnemonic" for the menu item. On the
# platforms that support it those letters are underlined and can be
# triggered from the keyboard.
menuBar = wx.MenuBar()
menuBar.Append(fileMenu, "&File")
menuBar.Append(viewMenu, "&View")
menuBar.Append(messageMenu, "&Message")
menuBar.Append(helpMenu, "&Help")
# Give the menu bar to the frame.
self.SetMenuBar(menuBar)
#--------------------
# Create a status bar.
self.statusbar = self.CreateStatusBar()
self.SetStatusText("Welcome to wxPython!")
#self.statusbar.SetStatusText("Welcome to wxPython!")
#--------------------
# Create a toolbar.
self.toolbar = self.CreateToolBar()
        self.toolbar.AddTool(1, "Tool 1", wx.Bitmap("./right.png"), wx.Bitmap("./wrong.png"), kind=wx.ITEM_RADIO, shortHelp="Simple Tool 1")
        #self.toolbar.AddStretchableSpace()
        self.toolbar.AddTool(2, "Tool 2", wx.Bitmap("./right.png"), wx.Bitmap("./wrong.png"), kind=wx.ITEM_CHECK, shortHelp="Simple Tool 2")
        #self.toolbar.AddStretchableSpace()
        self.toolbar.AddTool(3, "Tool 3", wx.Bitmap("./right.png"), wx.Bitmap("./wrong.png"), kind=wx.ITEM_NORMAL, shortHelp="Simple Tool 3")
self.toolbar.Realize()
#--------------------
# Finally, associate a handler function with the EVT_MENU event for each of the menu items.
# That means that when that menu item is activated then the associated handler function will be called.
self.Bind(wx.EVT_MENU, self.OnNew, newItem)
self.Bind(wx.EVT_MENU, self.OnOpen, openItem)
self.Bind(wx.EVT_MENU, self.OnSaveAs, saveAsItem)
self.Bind(wx.EVT_MENU, self.OnExit, exitItem)
self.Bind(wx.EVT_MENU, self.OnToggleStatusBar, self.showStatusbarItem)
self.Bind(wx.EVT_MENU, self.OnToggleToolBar, self.showToolbarItem)
self.Bind(wx.EVT_MENU, self.OnHello, helloItem)
self.Bind(wx.EVT_MENU, self.OnMessage, messageItem)
self.Bind(wx.EVT_MENU, self.OnError, errorItem)
self.Bind(wx.EVT_MENU, self.OnQuestion, questionItem)
self.Bind(wx.EVT_MENU, self.OnExclamation, exclamationItem)
self.Bind(wx.EVT_MENU, self.OnAbout, aboutItem)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnNew(self, event):
wx.MessageBox("New MenuItem Clicked")
def OnOpen(self, event):
# REF [site] >> https://docs.wxpython.org/wx.FileDialog.html
with wx.FileDialog(self, "Open File", wildcard="PNG files (*.png)|*.png|JPG files (*.jpg)|*.jpg|BMP and GIF files (*.bmp;*.gif)|*.bmp;*.gif|All files (*.*)|*.*", style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as dlg:
if dlg.ShowModal() == wx.ID_CANCEL:
return
filepath = dlg.GetPath()
try:
with open(filepath, "r") as fd:
wx.MessageBox("{} opened".format(filepath))
except IOError as ex:
wx.LogError("Cannot open {}: {}.".filepath(filepath, ex))
def OnSaveAs(self, event):
# REF [site] >> https://docs.wxpython.org/wx.FileDialog.html
with wx.FileDialog(self, "Save File", wildcard="PNG files (*.png)|*.png|JPG files (*.jpg)|*.jpg|BMP and GIF files (*.bmp;*.gif)|*.bmp;*.gif|All files (*.*)|*.*", style=wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT) as dlg:
if dlg.ShowModal() == wx.ID_CANCEL:
return
filepath = dlg.GetPath()
try:
with open(filepath, "w") as fd:
wx.MessageBox("{} saved".format(filepath))
except IOError as ex:
wx.LogError("Cannot save to {}: {}.".format(filepath, ex))
def OnExit(self, event):
self.Close(True)
def OnToggleStatusBar(self, event):
if self.showStatusbarItem.IsChecked():
self.statusbar.Show()
else:
self.statusbar.Hide()
def OnToggleToolBar(self, event):
if self.showToolbarItem.IsChecked():
self.toolbar.Show()
else:
self.toolbar.Hide()
def OnHello(self, event):
wx.MessageBox("Hello again from wxPython")
def OnMessage(self, event):
dial = wx.MessageDialog(None, "Download completed", "Info", wx.OK)
dial.ShowModal()
def OnError(self, event):
dlg = wx.MessageDialog(None, "Error loading file", "Error", wx.OK | wx.ICON_ERROR)
dlg.ShowModal()
def OnQuestion(self, event):
dlg = wx.MessageDialog(None, "Are you sure to quit?", "Question", wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
dlg.ShowModal()
def OnExclamation(self, event):
dlg = wx.MessageDialog(None, "Unallowed operation", "Exclamation", wx.OK | wx.ICON_EXCLAMATION)
dlg.ShowModal()
def OnAbout(self, event):
wx.MessageBox("This is a simple wxPython sample",
"About Simple Example",
wx.OK | wx.ICON_INFORMATION)
def OnPaint(self, event):
dc = wx.PaintDC(self)
dc.SetPen(wx.Pen("#d4d4d4"))
dc.SetBrush(wx.Brush("#c56c00"))
dc.DrawRectangle(10, 15, 90, 60)
dc.SetBrush(wx.Brush("#1ac500"))
dc.DrawRectangle(130, 15, 90, 60)
dc.SetBrush(wx.Brush("#539e47"))
dc.DrawRectangle(250, 15, 90, 60)
dc.SetBrush(wx.Brush("#004fc5"))
dc.DrawRectangle(10, 105, 90, 60)
dc.SetBrush(wx.Brush("#c50024"))
dc.DrawRectangle(130, 105, 90, 60)
dc.SetBrush(wx.Brush("#9e4757"))
dc.DrawRectangle(250, 105, 90, 60)
dc.SetBrush(wx.Brush("#5f3b00"))
dc.DrawRectangle(10, 195, 90, 60)
dc.SetBrush(wx.Brush("#4c4c4c"))
dc.DrawRectangle(130, 195, 90, 60)
dc.SetBrush(wx.Brush("#785f36"))
dc.DrawRectangle(250, 195, 90, 60)
# REF [site] >>
# https://www.wxpython.org/pages/overview/
# https://zetcode.com/wxpython/
def simple_example():
# When this module is run (not imported) then create the app, the frame, show it, and start the event loop.
app = wx.App()
frame = SimpleFrame(None, title="Simple Example !!!")
frame.Show()
app.MainLoop()
def main():
simple_example()
#--------------------------------------------------------------------
if '__main__' == __name__:
main()
| gpl-3.0 | -2,242,071,532,905,609,000 | 33.587755 | 214 | 0.675006 | false |
rossica/assignmentcollectorgrader | collector/tests.py | 1 | 53463 | # Assignment Collector/Grader - a Django app for collecting and grading code
# Copyright (C) 2010,2011,2012 Anthony Rossi <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test import TestCase
from django.test.client import Client
from django import forms
from django.core.files import File
from django.core.files.uploadedfile import SimpleUploadedFile
from collector.models import *
from settings import PROJECT_ROOT
import random, re, datetime, shutil, os
class ScenarioTests(TestCase):
fixtures = ['collector.json', 'users.json']
longMessage=True
def setUp(self):
pass
def tearDown(self):
pass
"""
"""
def test_view_about(self):
cli = Client()
response = cli.get('/about/')
#Verify the response is a success
self.assertEqual(response.status_code, 200, "Non-success HTTP status")
self.assertRegexpMatches(response.content, r'Version.*?{0}\.{1}'.format(MAJOR_VERSION, MINOR_VERSION))
"""
"""
def test_view_course_index(self):
courses = Course.objects.all()
cli = Client()
response = cli.get('/')
#Verify the response is a success
self.assertEqual(response.status_code, 200, "Non-success HTTP status")
#Check the text of the page for course numbers
#Make sure URLs exist for each course
for c in courses:
self.assertRegexpMatches(response.content, '{0}\s+{1}'.format(c.course_num, c.course_title), "Course_num not found")
self.assertRegexpMatches(response.content, r'href=\"{0}\"'.format(c.get_absolute_url()), "Incorrect absolute URL returned by Course " + str(c))
"""
"""
def test_view_specific_term_course_index(self):
cli = Client()
course = Course.objects.get(pk=1)
response = cli.get('/{0}/{1}'.format(course.year, course.term), follow=True)
#Verify the response is a success
self.assertEqual(response.status_code, 200, "Non-success HTTP status")
#Verify the course object is listed in this page
self.assertRegexpMatches(response.content, '{0}\s+{1}'.format(course.course_num, course.course_title), "Course_num not found")
self.assertRegexpMatches(response.content, r'href=\"{0}\"'.format(course.get_absolute_url()), "Incorrect absolute URL returned by Course " + str(course))
"""
"""
def test_view_course(self):
course = Course.objects.get(pk=1)
c = Client()
response = c.get(course.get_absolute_url(), follow=True)
#verify the response is a success
self.assertEqual(response.status_code, 200)
#Check the text of the page for course number
self.assertRegexpMatches(response.content, '{0}'.format(course.course_num), "Course_num not found")
#Check the text for the term and year of the course
self.assertRegexpMatches(response.content, '{0}\s+{1}'.format(course.term.capitalize(), course.year))
#Check to make sure all assignments are listed by name, at least
for assn in course.javaassignment_set.all():
self.assertRegexpMatches(response.content, '{0}'.format(assn.name))
"""
"""
def test_view_assignment(self):
assn = JavaAssignment.objects.get(pk=1)
cli = Client()
response = cli.get(assn.get_absolute_url(), follow=True)
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#verify the Assignment name is listed somewhere
self.assertRegexpMatches(response.content, "{0}<br>".format(assn.name))
#Verify the form submit link is correct
self.assertRegexpMatches(response.content, r'action="{0}submit/"'.format(assn.get_absolute_url()))
#verify the parameters of the form are displayed: first name, last name, and file
self.assertRegexpMatches(response.content, r'id="id_first_name" type="text" name="first_name"')
self.assertRegexpMatches(response.content, r'id="id_last_name" type="text" name="last_name"')
self.assertRegexpMatches(response.content, r'type="file" name="file" id="id_file"')
"""
"""
def test_view_assignment_early(self):
assn = JavaAssignment.objects.get(pk=8)
cli = Client()
response = cli.get(assn.get_absolute_url(), follow=True)
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#verify the Assignment name is listed somewhere
self.assertRegexpMatches(response.content, "{0}<br>".format(assn.name))
self.assertRegexpMatches(response.content, r'(?s)(?!<form.+?>.+?</form>)', "Found a submission form when there shouldn't be one")
"""
"""
def test_view_assignment_late(self):
assn = JavaAssignment.objects.get(pk=6)
cli = Client()
response = cli.get(assn.get_absolute_url(), follow=True)
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#verify the Assignment name is listed somewhere
self.assertRegexpMatches(response.content, "{0}<br>".format(assn.name))
self.assertRegexpMatches(response.content, r'(?s)(?!<form.+?>.+?</form>)', "Found a submission form when there shouldn't be one")
"""
"""
def test_view_assignment_password(self):
assn = JavaAssignment.objects.get(pk=2)
cli = Client()
response = cli.get(assn.get_absolute_url(), follow=True)
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify a form is shown
self.assertRegexpMatches(response.content, r'(?s)<form.+?>.+?</form>', "Didn't find a submission form when there should be one")
#Verify the parameters of the form are displayed: first name, last name, file, and passkey
self.assertRegexpMatches(response.content, r'id="id_first_name" type="text" name="first_name"')
self.assertRegexpMatches(response.content, r'id="id_last_name" type="text" name="last_name"')
self.assertRegexpMatches(response.content, r'type="file" name="file" id="id_file"')
self.assertRegexpMatches(response.content, r'id="id_passkey" type="text" name="passkey"')
"""
"""
def test_view_assignment_course_password(self):
assn = JavaAssignment.objects.get(pk=4)
cli = Client()
response = cli.get(assn.get_absolute_url(), follow=True)
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify a form is shown
self.assertRegexpMatches(response.content, r'(?s)<form.+?>.+?</form>', "Didn't find a submission form when there should be one")
#Verify the parameters of the form are displayed: first name, last name, file, and passkey
self.assertRegexpMatches(response.content, r'id="id_first_name" type="text" name="first_name"')
self.assertRegexpMatches(response.content, r'id="id_last_name" type="text" name="last_name"')
self.assertRegexpMatches(response.content, r'type="file" name="file" id="id_file"')
self.assertRegexpMatches(response.content, r'id="id_passkey" type="text" name="passkey"')
"""
"""
def test_view_submission(self):
s = JavaSubmission.objects.get(pk=1)
cli = Client()
response = cli.get(s.get_absolute_url())
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify the link back to this page is available
self.assertRegexpMatches(response.content, r'href={0}'.format(s.get_absolute_url()))
#Verify the grade is displayed
self.assertRegexpMatches(response.content, r'(?m)Grade:\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests))
#Verify the name is displayed
self.assertRegexpMatches(response.content, r'Submitted by {0} {1}'.format(s.first_name, s.last_name))
"""
"""
def test_view_submission_no_grade_log(self):
s = JavaSubmission.objects.get(pk=2)
assn = s.assignment
cli = Client()
response = cli.get(s.get_absolute_url())
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify the link back to this page is available
self.assertRegexpMatches(response.content, r'href={0}'.format(s.get_absolute_url()))
#Verify the grade is displayed
self.assertRegexpMatches(response.content, r'(?m)Grade:\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests))
#Verify the name is displayed
self.assertRegexpMatches(response.content, r'Submitted by {0} {1}'.format(s.first_name, s.last_name))
#Verify the gradelog is not displayed and a message telling the user is
self.assertRegexpMatches(response.content, r'No grade results to display.',)
"""
"""
def test_view_submission_large_grade_log(self):
cli = Client()
s = JavaSubmission.objects.get(pk=3)
response = cli.get(s.get_absolute_url())
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify the link back to this page is available
self.assertRegexpMatches(response.content, r'href={0}'.format(s.get_absolute_url()))
#Verify the grade is displayed
self.assertRegexpMatches(response.content, r'(?m)Grade:\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests))
#Verify the name is displayed
self.assertRegexpMatches(response.content, r'Submitted by {0} {1}'.format(s.first_name, s.last_name))
#Verify the gradelog is truncated and a message telling the user is displayed
self.assertRegexpMatches(response.content, r'Grade results too long, truncating',)
"""
"""
def test_submit_assignment_late_not_allowed(self):
cli = Client()
a = JavaAssignment.objects.get(pk=6)
self.f = open(PROJECT_ROOT+'/testdata/EmptyJar.jar', 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify the submission is rejected
self.assertRegexpMatches(response.content, r'It is past the due date and late assignments are not accepted. Sorry. :\(', "Submission was not rejected")
"""
"""
def test_submit_assignment_late_allowed(self):
cli = Client()
a = JavaAssignment.objects.get(pk=9)
self.f = open(PROJECT_ROOT+'/testdata/SimpleClass.jar', 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify that we inform the user they are turning in late.
self.assertRegexpMatches(response.content, r'You are turning in this assignment past the due date. But it will be accepted anyway. :\)')
#Delete the submission created for this test
JavaSubmission.objects.filter(assignment__pk=a.id).delete()
"""
"""
def test_submit_assignment_max_submissions_reached(self):
cli = Client()
a = JavaAssignment.objects.get(pk=5)
self.f = open(PROJECT_ROOT+'/testdata/EmptyJar.jar', 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify we inform the user they have reached maximum number of submissions
        self.assertRegexpMatches(response.content, r"I'm sorry, but you've reached the maximum number of attempts.")
"""
"""
def test_submit_assignment_GET(self):
cli = Client()
a = JavaAssignment.objects.get(pk=1)
response = cli.get("{0}submit/".format(a.get_absolute_url()), follow=True)
        #Verify the client was redirected
self.assertRedirects(response, a.get_absolute_url())
#verify the Assignment name is listed somewhere
self.assertRegexpMatches(response.content, "{0}<br>".format(a.name))
#Verify the form submit link is correct
self.assertRegexpMatches(response.content, r'action="{0}submit/"'.format(a.get_absolute_url()))
"""
"""
def test_submit_assignment_early(self):
cli = Client()
a = JavaAssignment.objects.get(pk=8)
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':"asdfasdfasdfasdf"
})
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Inform the user that submission is not available for this assignment yet
self.assertRegexpMatches(response.content, r'Submission has not opened for this assignment. Please wait until the assignment is available to submit your code')
"""
"""
def test_submit_assignment_invalid_form(self):
cli = Client()
a = JavaAssignment.objects.get(pk=1)
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':"asdfasdfasdfasdf"
})
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify that form errors are returned to the user
self.assertRegexpMatches(response.content, r'<ul class="errorlist"><li>This field is required.</li></ul>', "Did not produce error")
"""
"""
def test_submit_assignment_no_test_file(self):
cli = Client()
a = JavaAssignment.objects.get(pk=10)
f = open(PROJECT_ROOT+'/testdata/EmptyJar.jar', 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':f
})
f.close()
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Not really certain how to test this.
#Clean up after the test
JavaSubmission.objects.get(assignment__pk=a.id).delete()
"""
"""
def test_submit_assignment_password(self):
cli = Client()
a = JavaAssignment.objects.get(pk=2)
f = open(PROJECT_ROOT+'/testdata/SimpleClass.jar', 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'passkey':a.passkey,
'file':f
})
f.close()
#Verify the response is a success
self.assertEqual(response.status_code, 200)
#Verify that we don't have an error for bad password
self.assertRegexpMatches(response.content, r'(?!The passkey is incorrect)')
#Clean up after the test
JavaSubmission.objects.get(assignment__pk=a.id).delete()
"""
These tests verify that the various states the grader can be in are covered and handled.
When the grader is moved to its own app, these tests will still remain here, but will
also serve as basis for the tests written for the grader.
"""
class GraderTests(TestCase):
fixtures = ['collector.json', 'users.json']
longMessage = True
unable_to_parse_regex = re.compile(r'Grade[^A-Z]+Unable to parse grade\.')
def setUp(self):
pass
def tearDown(self):
#self.f.close() # always close files, regardless of success or failure
#shutil.rmtree(,ignore_errors=True)
JavaSubmission.objects.filter(pk__gt=3).delete() # delete the files from the disk too
"""
Catch the case where there are no files to compile
"""
def test_compile_failure_no_src(self):
cli = Client()
a = JavaAssignment.objects.get(pk=1)
self.f = open(PROJECT_ROOT+'/testdata/EmptyJar.jar', 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
# verify the correct error message
self.assertRegexpMatches(response.content, r'Grade\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't find grade")
self.assertRegexpMatches(response.content, r'Reason\D+?\d+ compiler errors', "Didn't find reason for failure")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
"""
Test the case where there is a syntax error in the student-uploaded file
"""
def test_compile_failure_syntax_error(self):
cli = Client()
a = JavaAssignment.objects.get(pk=1)
self.f = open(PROJECT_ROOT + "/testdata/SyntaxError.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
# verify the correct error message
self.assertRegexpMatches(response.content, r'Grade\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't find grade")
self.assertRegexpMatches(response.content, r'Reason\D+?\d+ compiler errors', "Didn't find reason for failure")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
"""
Test compilation still fails because we are deleting .class files and there are no source files
"""
def test_compile_failure_only_class_files(self):
cli = Client()
a = JavaAssignment.objects.get(pk=1)
self.f = open(PROJECT_ROOT + "/testdata/ClassFileOnly.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
# verify the correct error message
self.assertRegexpMatches(response.content, r'Grade\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't find grade")
self.assertRegexpMatches(response.content, r'Reason\D+?\d+ compiler errors', "Didn't find reason for failure")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
"""
Test that the watchdog timer kicks in after 30 seconds
"""
def test_watchdog_timer(self):
#self.skipTest("Long test, need to move to its own queue")
cli = Client()
a = JavaAssignment.objects.get(pk=1)
self.f = open(PROJECT_ROOT + "/testdata/WatchdogTimer.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
# verify the correct error message
self.assertRegexpMatches(response.content, r'Grade\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't hit watchdog timer")
self.assertRegexpMatches(response.content, r'Reason[^A-Z]+?Watchdog timer killed the test after \d+ seconds', "Didn't hit watchdog timer")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
"""
Test that we handle code exceptions gracefully
"""
@unittest.expectedFailure
def test_junit_exception(self):
#self.skipTest("Need to figure out how to throw this specific error.")
cli = Client()
a = JavaAssignment.objects.get(pk=1)
self.f = open(PROJECT_ROOT + "/testdata/Exception.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
# verify the correct error message
self.assertRegexpMatches(response.content, r'Grade\D+?{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't produce exception errors")
self.assertRegexpMatches(response.content, r'Reason[^A-Z]+?Exception in thread \"main\"', "Didn't hit exception")
"""
Test picking up more than one test case failure
"""
def test_junit_failures(self):
cli = Client()
a = JavaAssignment.objects.get(pk=7)
self.f = open(PROJECT_ROOT + "/testdata/ThreeTestFailures.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
# verify the failures occurred
self.assertRegexpMatches(response.content, r'Failures:\s+2', "Didn't produce multiple failures")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
Test picking up a single test case failure
"""
def test_junit_failure(self):
cli = Client()
a = JavaAssignment.objects.get(pk=7)
self.f = open(PROJECT_ROOT + "/testdata/ThreeTestFailure.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
        # verify a single failure occurred
self.assertRegexpMatches(response.content, r'Failures:\s+1', "Didn't produce a single failure")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
Test multiple test case errors
"""
def test_junit_errors(self):
cli = Client()
a = JavaAssignment.objects.get(pk=7)
self.f = open(PROJECT_ROOT + "/testdata/ThreeTestErrors.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
        # verify multiple errors occurred
self.assertRegexpMatches(response.content, r'Errors:\s+2', "Didn't produce multiple errors")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
Test a single test case error out of three tests
"""
def test_junit_error(self):
cli = Client()
a = JavaAssignment.objects.get(pk=7)
self.f = open(PROJECT_ROOT + "/testdata/ThreeTestError.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
        # verify a single error occurred
self.assertRegexpMatches(response.content, r'Errors:\s+1', "Didn't produce a single error")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
"""
def test_junit_one_error(self):
cli = Client()
a = JavaAssignment.objects.get(pk=1)
self.f = open(PROJECT_ROOT + "/testdata/SingleError.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
# verify the correct error message
self.assertRegexpMatches(response.content, r'Errors:\s+1', "Didn't produce a single error")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
"""
def test_junit_one_of_each_type(self):
cli = Client()
a = JavaAssignment.objects.get(pk=7)
c = a.course
self.f = open(PROJECT_ROOT + "/testdata/ThreeTestOneOfEach.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
        # verify a single error occurred
self.assertRegexpMatches(response.content, r'Errors:\s+1', "Didn't produce a single error")
        # verify a single failure occurred
self.assertRegexpMatches(response.content, r'Failures:\s+1', "Didn't produce a single failure")
self.assertRegexpMatches(response.content, r'Tests\s+run:\s+3', "Didn't run 3 tests")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
Test a single test case that passes (no failures)
"""
def test_junit_single_pass(self):
cli = Client()
a = JavaAssignment.objects.get(pk=1)
self.f = open(PROJECT_ROOT + "/testdata/SimpleClass.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
        # verify the single test passed
self.assertRegexpMatches(response.content, r'OK\s+\(1\s+test\)', "Didn't pass a single test")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
Test multiple passing test cases with no failures
"""
def test_junit_multiple_pass(self):
cli = Client()
a = JavaAssignment.objects.get(pk=7)
self.f = open(PROJECT_ROOT + "/testdata/ThreeTestClass.jar", 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
        # verify all three tests passed
self.assertRegexpMatches(response.content, r'OK\s+\(3\s+tests\)', "Didn't pass all three tests")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
print response.content
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
"""
def test_generate_large_grade_log(self):
cli = Client()
a = JavaAssignment.objects.get(pk=7)
c = a.course
self.f = open(PROJECT_ROOT+'/testdata/GenerateLargeGradeLog.jar', 'rb')
response = cli.post("{0}submit/".format(a.get_absolute_url()),
{'first_name':"tester",
'last_name':"test",
'file':self.f
})
self.f.close()
# check the upload succeeded
self.assertEqual(response.status_code, 200)
        # verify the grade log was truncated
self.assertRegexpMatches(response.content, r'Grade results too long, truncating', "Didn't truncate grade log")
#Should not be unable to parse the grade (should be able to parse the grade)
if self.unable_to_parse_regex.search(response.content): #pragma: no branch
self.fail("Should have been able to parse grade.")
s = JavaSubmission.objects.get(assignment=a, pk__gt=3)
#Verify the proper grade is given
self.assertRegexpMatches(response.content, r'(?m)Grade\D+{0} / {1}'.format(s.javagrade.tests_passed, s.javagrade.total_tests), "Didn't grade properly")
"""
Generate some random valid data to populate a CourseForm.
Then attempt to validate it.
"""
class CourseFormTests(TestCase):
fixtures = ['users.json']
longMessage = True
def setUp(self):
self.data = {}
course_title = ""
#generate a random valid course title
for i in range(0, random.randint(1,24)):
course_title += random.choice("abcdefghijklmnopqrstuvwxyz")
self.data['course_title']= course_title
self.data['year']= random.randint(0000,9999) # pick a random year
self.data['term']= Course.TERM_CHOICES[random.randint(0,3)][0] # random term
self.data['creator'] = 1;
def tearDown(self):
pass
def test_valid_course_num(self):
course_num = ""
letters = random.randint(1,4)
num_letters = 0
# generate course letters
while num_letters < letters:
course_num += random.choice("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
num_letters +=1
# generate course number
for i in range(3):
course_num += random.choice("0123456789")
# decide whether to include a special letter at the end
if random.randint(0,1): #pragma: no branch
course_num += random.choice("CDW")
self.data['course_num'] = course_num
c = CourseForm(self.data)
# Validate the data. this should pass
self.assertTrue(c.is_valid(),
"""CourseForm failed on valid input:
course_num: {0}
course_title: {1}
year: {2}
term: {3}
""".format(course_num, self.data['course_title'], str(self.data['year']), self.data['term']))
def test_invalid_course_num(self):
# Now generate an invalid course number
bad_course_num = ''.join(random.sample("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789CDW", random.randint(1,8)))
while re.match(r'^[A-Z]{1,4}\d{3}[CDW]?$', bad_course_num): #pragma: no branch
bad_course_num = ''.join(random.sample("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789CDW", random.randint(1,8)))
self.data['course_num'] = bad_course_num
c = CourseForm(self.data)
# Validate the data. This should fail
self.assertFalse(c.is_valid(),
"""CourseForm succeeded on invalid input:
course_num: {0}
course_title: {1}
year: {2}
term: {3}
""".format(bad_course_num, self.data['course_title'], str(self.data['year']), self.data['term']))
"""
These tests verify that the assignment form validation is functioning properly,
both by rejecting invalid inputs, and by accepting valid inputs.
"""
class AssignmentFormTests(TestCase):
fixtures = ['collector.json', 'users.json']
longMessage=True
def setUp(self):
name = random.choice("abdcefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
name += ''.join(random.sample("aBcDeFgHiJkLmNoPqRsTuVwXyZ0123456789-_", random.randint(0,24)))
self.data = {
'course':1,
'name':name,
'start_date':datetime.datetime.now(),
'due_date':datetime.datetime.now() + datetime.timedelta(hours=2),
'max_submissions': 10,
'java_cmd':"-Xms32m -Xmx32m junit.textui.TestRunner",
'javac_cmd':"-g",
'options': 1,
'creator': 1,
'watchdog_wait':30,
}
def tearDown(self):
pass
"""
Verify that we accept a valid form
"""
def test_valid_form(self):
f = open(PROJECT_ROOT+"/testdata/SimpleJUnitTest.java")
file_data = {
'test_file': SimpleUploadedFile('SimpleJUnitTest.java', f.read())
}
f.close()
asgnmt_form = JavaAssignmentForm(self.data, file_data)
# validate name and JAR file support
self.assertTrue(asgnmt_form.is_valid(), # "Fields in error: " + ', '.join(asgnmt_form.errors.keys()))
"""JavaAssignmentForm failed on valid input
course: {0}
name: {1}
start_date: {2}
due_date: {3}
test_file: {4}
errors: {5}
""".format(self.data['course'], self.data['name'], self.data['start_date'], self.data['due_date'], file_data['test_file'], asgnmt_form.errors))
"""
Verify that the form validation correctly detects a valid .jar file
"""
def test_valid_jar(self):
#Try again with a valid Jar file uploaded
f = open(PROJECT_ROOT+"/testdata/SimpleJUnitTest.jar", 'rb')
file_data= {
'test_file': SimpleUploadedFile('SimpleJUnitTest.jar', f.read())
}
f.close()
asgnmt_form = JavaAssignmentForm(self.data, file_data)
#Validate the valid Jar
self.assertTrue(asgnmt_form.is_valid(), "Failed to accept valid Jar file")
"""
Verify that invalid assignment names are rejected
"""
def test_invalid_name(self):
#Now generate a bad assignment name and see if we catch that
name = ''.join(random.sample("aBcDeFgHiJkLmNoPqRsTuVwXyZ0123456789-_!@#$%^&*()", random.randint(0,25)))
while re.match(r'^[a-zA-Z][\w\-]{,24}$', name): #pragma: no branch
name = ''.join(random.sample("aBcDeFgHiJkLmNoPqRsTuVwXyZ0123456789-_!@#$%^&*()", random.randint(0,25)))
self.data['name'] = name
file_data = {
'test_file': SimpleUploadedFile('ValidJavaFile.java', "ffffff")
}
asgnmt_form = JavaAssignmentForm(self.data, file_data)
#Validate that we catch the bad name
self.assertFalse(asgnmt_form.is_valid(), "Failed to catch bad name: {0}".format(name))
"""
Verify that fake or corrupt .jar files are caught
"""
def test_invalid_jar(self):
# Generate a bad JAR file and see if we catch that
f = open(PROJECT_ROOT+"/testdata/FakeJarFile.jar", 'rb')
file_data = {
'test_file': SimpleUploadedFile('FakeJarFile.jar', f.read())
}
f.close()
asgnmt_form = JavaAssignmentForm(self.data, file_data)
self.assertFalse(asgnmt_form.is_valid(), "Failed to catch bad JAR: {0}".format(file_data['test_file']))
"""
Verify that files without a .java or .jar extension are rejected.
"""
def test_invalid_ext(self):
#Now try a file that is neither Jar nor Java
fake_ext = ''.join(random.sample("aBcDeFgHikLmNoPqRsTuVwXyZ0123456789-_", random.randint(0,5)))
file_data = {
'test_file': SimpleUploadedFile('NotAJarOrJavaFile.'+fake_ext, "ffffff")
}
asgnmt_form = JavaAssignmentForm(self.data, file_data)
self.assertFalse(asgnmt_form.is_valid(), "Failed to catch non-java/jar file: {0}".format(file_data['test_file']))
"""
Verify that files without an extension are rejected.
"""
def test_no_ext(self):
#Test with no file extension
file_data = {
'test_file': SimpleUploadedFile('NoFileExtension', "ffffff")
}
asgnmt_form = JavaAssignmentForm(self.data, file_data)
self.assertFalse(asgnmt_form.is_valid(), "Failed to catch no file extension")
"""
Verify that we provide an error when there is no test file
"""
def test_no_test_file(self):
asgnmt_form = JavaAssignmentForm(self.data)
self.assertFalse(asgnmt_form.is_valid(), "Validated a form with no test file")
self.assertIn(u"This assignment must include a JUnit test script.", asgnmt_form.errors['test_file'])
"""
These tests validate the operation of submission forms.
"""
class SubmissionFormTests(TestCase):
fixtures = ['collector.json', 'users.json']
longMessage = True
def setUp(self):
self.data = {
'first_name': ''.join(random.sample("abcdefghijklmnopqrstuvwxyz", random.randint(1,25))),
'last_name': ''.join(random.sample("abcdefghijklmnopqrstuvwxyz", random.randint(1,25)))
}
f = open(PROJECT_ROOT+'/testdata/SimpleClass.jar', 'rb')
self.file_data= {
'file': SimpleUploadedFile('SimpleClass.jar', f.read())
}
f.close()
def tearDown(self):
pass
"""
Accept a valid form
"""
def test_valid_jar(self):
s = JavaSubmissionForm(self.data, self.file_data)
self.assertTrue(s.is_valid(),
"""Failed to validate a valid submission form.
first name: {0}
last name: {1}
file: {2}
""".format(self.data['first_name'], self.data['last_name'], self.file_data['file']))
"""
Don't accept invalid jar files
"""
def test_invalid_jar(self):
f = open(PROJECT_ROOT+'/testdata/FakeJarFile.jar', 'rb')
file_data = {
'file': SimpleUploadedFile('FakeJar.jar', f.read())
}
f.close()
s = JavaSubmissionForm(self.data, file_data)
self.assertFalse(s.is_valid(), "Accepted an invalid jar.")
"""
Don't accept jar files without an extension
"""
def test_file_ext(self):
f = open(PROJECT_ROOT+'/testdata/SimpleClass.jar')
file_data= {
'file': SimpleUploadedFile('SimpleClass', f.read())
}
f.close()
s = JavaSubmissionForm(self.data, file_data)
self.assertFalse(s.is_valid(), "Accepted a valid jar without an extension.")
"""
Reject empty name strings
"""
def test_empty_name(self):
self.data['first_name'] = ''
self.data['last_name'] = ''
s = JavaSubmissionForm(self.data, self.file_data)
self.assertFalse(s.is_valid(), "Failed to reject empty names")
"""
Reject names that are entirely made up of symbols
"""
def test_invalid_names(self):
self.data['first_name'] = ''.join(random.sample(" `~!@#$%^&*()-_=+0123456789,.<>?|{}[]\\/\t", random.randint(1,25)))
self.data['last_name'] = ''.join(random.sample(" `~!@#$%^&*()-_=+0123456789,.<>?|{}[]\\/\t", random.randint(1,25)))
s = JavaSubmissionForm(self.data, self.file_data)
self.assertFalse(s.is_valid(), "Failed to reject invalid names")
"""
Verify that names with symbols and letters are properly cleaned
"""
def test_name_cleaning(self):
self.data['first_name'] = random.choice("abcdefghijklmnopqrstuvwxyz").join(random.sample(" abcdefghijklmnopqrstuvwxyz`~!@#$%^&*0123456789", random.randint(2,12)))
self.data['last_name'] = random.choice("abcdefghijklmnopqrstuvwxyz").join(random.sample(" abcdefghijklmnopqrstuvwxyz`~!@#$%^&*0123456789", random.randint(2,12)))
s = JavaSubmissionForm(self.data, self.file_data)
self.assertTrue(s.is_valid(),
"""Failed to clean symbols from name.
first name: {0}
last name: {1}
""".format(self.data['first_name'], self.data['last_name']))
fn = re.sub(r'[^a-z\-]', '', self.data['first_name'])
ln = re.sub(r'[^a-z\-]', '', self.data['last_name'])
self.assertEqual(s.cleaned_data['first_name'], fn, "Didn't clean first name")
self.assertEqual(s.cleaned_data['last_name'], ln, "Didn't clean last name")
self.assertIsNotNone(re.match(r'\w+(\-\w+)?', fn), "First name doesn't begin/end with a letter or has more than one hyphen")
self.assertIsNotNone(re.match(r'\w+(\-\w+)?', ln), "last name doesn't begin/end with a letter or has more than one hyphen")
"""
Put the course password on the form and validate
"""
def test_valid_course_password(self):
a = JavaAssignment.objects.get(pk=4) # get the assignment object with no password
c = Course.objects.get(pk=2) # get the course object with a password
s = JavaSubmission(assignment=a)
self.data['passkey'] = c.passkey # set the password to the course password
sfp = JavaSubmissionFormP(self.data, self.file_data, instance=s)
self.assertTrue(sfp.is_valid(), "Did not validate course password")
"""
Put the assignment password on the form and validate
"""
def test_valid_assn_password(self):
a = JavaAssignment.objects.get(pk=2) # get the assignment object with a password
c = Course.objects.get(pk=1) # get the course object with no password
s = JavaSubmission(assignment=a)
self.data['passkey'] = a.passkey # set the password to the assignment password
sfp = JavaSubmissionFormP(self.data, self.file_data, instance=s)
self.assertTrue(sfp.is_valid(), "Did not validate assignment password")
"""
Put the assignment password on the form, see if validation still succeeds
"""
def test_assn_course_password(self):
a = JavaAssignment.objects.get(pk=3) # get the assignment object with a password
c = Course.objects.get(pk=2) # get the course object with a password
s = JavaSubmission(assignment=a)
self.data['passkey'] = a.passkey # set the password to the assignment password
sfp = JavaSubmissionFormP(self.data, self.file_data, instance=s)
self.assertTrue(sfp.is_valid(), "Did not validate assignment password")
"""
Put the course password on the form, see if validation still succeeds
"""
def test_course_assn_password(self):
a = JavaAssignment.objects.get(pk=3) # get the assignment object with a password
c = Course.objects.get(pk=2) # get the course object with a password
s = JavaSubmission(assignment=a)
self.data['passkey'] = c.passkey # set the password to the assignment password
sfp = JavaSubmissionFormP(self.data, self.file_data, instance=s)
self.assertTrue(sfp.is_valid(), "Did not validate course password")
"""
Put the course password on the form, see if validation still succeeds
"""
def test_invalid_password(self):
a = JavaAssignment.objects.get(pk=3) # get the assignment object with a password
c = Course.objects.get(pk=2) # get the course object with a password
s = JavaSubmission(assignment=a)
self.data['passkey'] = 'c.passkey' # set the password to something false
sfp = JavaSubmissionFormP(self.data, self.file_data, instance=s)
self.assertFalse(sfp.is_valid(), "Should not validate either password")
class AdminTests(TestCase):
fixtures = ['users.json', ]
class MiscTests(TestCase):
longMessage = True
fixtures = ['collector.json', 'users.json']
def setUp(self):
pass
def tearDown(self):
pass
"""
"""
@unittest.expectedFailure
def test_display_grades(self):
from collector.admin import AssignmentAdmin
admin = AssignmentAdmin(Assignment, None)
a = JavaAssignment.objects.get(pk=1)
response = admin.display_grades(None, JavaAssignment.objects.filter(pk=1))
if datetime.datetime.now() < a.due_date:
self.assertRegexpMatches(response.__str__(), r'These grades are preliminary\. The assignment is not due yet\.',)
s = a.submission_set.latest('submission_time')
self.assertRegexpMatches(response.__str__(), s.grade, "Didn't find grade listed")
| agpl-3.0 | -2,733,859,677,076,381,700 | 40.976688 | 170 | 0.577745 | false |
afolmert/mentor | src/utils_qt.py | 1 | 15420 | #!/usr/bin/env python
# -*- coding: iso-8859-2 -*-
#
# Copyright (C) 2007 Adam Folmert <[email protected]>
#
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
#
"""Here are misc python utilities for PyQt development.
They are collected from different sources and some are written from scratch by
me.
"""
# TODO: import only the necessary widgets
import release
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
__version__ = release.version
#
#----------------------------------------------------------
# Misc routines
def tr(text):
return qApp.tr(text)
#
#----------------------------------------------------------
# dialog boxes
# shortcut for displaying message box
def msgbox(aMesg, parent = None):
QMessageBox.information( parent
, "Info"
, aMesg )
def show_info(message, parent=None):
class InfoWidget(QDialog):
def __init__(self, parent=None):
QDialog.__init__(self, parent)
self.setWindowTitle('Information')
self.setGeometry(400, 300, 200, 200)
self.lbl = QLabel()
self.btn = QPushButton('OK')
self.btn.setStyle(Styles.windowsStyle())
layout = QVBoxLayout()
layout.addWidget(self.lbl)
layout.addWidget(self.btn)
self.setLayout(layout)
self.connect(self.btn, SIGNAL("clicked()"), SLOT("accept()"))
propagate_fonts(self, QFont("Fixed", 8))
widget = InfoWidget(parent)
widget.lbl.setText(message)
widget.exec_()
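# Typical usage: show_info('Operation completed.', parent=main_window) pops up
# a small modal dialog with a single OK button.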
#
#----------------------------------------------------------
# styles classes and routines
class Styles(object):
"""Singleton object for retrieving styles."""
_windowsStyle = None
_cdeStyle = None
_motifStyle = None
_plastiqueStyle = None
@staticmethod
def windowsStyle():
if Styles._windowsStyle is None:
Styles._windowsStyle = QStyleFactory.create('Windows')
return Styles._windowsStyle
@staticmethod
def cdeStyle():
if Styles._cdeStyle is None:
Styles._cdeStyle = QStyleFactory.create('Cde')
return Styles._cdeStyle
@staticmethod
def motifStyle():
if Styles._motifStyle is None:
Styles._motifStyle = QStyleFactory.create('Motif')
return Styles._motifStyle
@staticmethod
def plastiqueStyle():
if Styles._plastiqueStyle is None:
Styles._plastiqueStyle = QStyleFactory.create('Plastique')
return Styles._plastiqueStyle
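# Example: restyle a single widget without touching the rest of the
# application, e.g. button.setStyle(Styles.plastiqueStyle())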
#
#----------------------------------------------------------
# border layout
class ItemWrapper(object):
def __init__(self, i, p):
self.item = i
self.position = p
class BorderLayout(QLayout):
West, North, South, East, Center = range(5)
MinimumSize, SizeHint = range(2)
def __init__(self, parent=None, margin=0, spacing=-1):
QLayout.__init__(self, parent)
self.setMargin(margin)
self.setSpacing(spacing)
self.list = []
def __del__(self):
l = self.takeAt(0)
while l:
l = self.takeAt(0)
def addItem(self, item):
self.add(item, BorderLayout.West)
def addWidget(self, widget, position):
self.add(QWidgetItem(widget), position)
def expandingDirections(self):
return Qt.Horizontal | Qt.Vertical
def hasHeightForWidth(self):
return False
def count(self):
return len(self.list)
def itemAt(self, index):
if index < len(self.list):
return self.list[index].item
return None
def minimumSize(self):
return self.calculateSize(BorderLayout.MinimumSize)
def setGeometry(self, rect):
center = 0
eastWidth = 0
westWidth = 0
northHeight = 0
southHeight = 0
centerHeight = 0
QLayout.setGeometry(self, rect)
for wrapper in self.list:
item = wrapper.item
position = wrapper.position
if position == BorderLayout.North:
item.setGeometry(QRect(rect.x(), northHeight, rect.width(), item.sizeHint().height()))
northHeight += item.geometry().height() + self.spacing()
elif position == BorderLayout.South:
item.setGeometry(QRect(item.geometry().x(), item.geometry().y(), rect.width(), item.sizeHint().height()))
southHeight += item.geometry().height() + self.spacing()
item.setGeometry(QRect(rect.x(), rect.y() + rect.height() - southHeight + self.spacing(), item.geometry().width(), item.geometry().height()))
elif position == BorderLayout.Center:
center = wrapper
centerHeight = rect.height() - northHeight - southHeight
for wrapper in self.list:
item = wrapper.item
position = wrapper.position
if position == BorderLayout.West:
item.setGeometry(QRect(rect.x() + westWidth, northHeight, item.sizeHint().width(), centerHeight))
westWidth += item.geometry().width() + self.spacing()
elif position == BorderLayout.East:
item.setGeometry(QRect(item.geometry().x(), item.geometry().y(), item.sizeHint().width(), centerHeight))
eastWidth += item.geometry().width() + self.spacing()
item.setGeometry(QRect(rect.x() + rect.width() - eastWidth + self.spacing(), northHeight, item.geometry().width(), item.geometry().height()))
if center:
center.item.setGeometry(QRect(westWidth, northHeight, rect.width() - eastWidth - westWidth, centerHeight))
def sizeHint(self):
return self.calculateSize(BorderLayout.SizeHint)
def takeAt(self, index):
if index >= 0 and index < len(self.list):
layoutStruct = self.list.pop(index)
return layoutStruct.item
return None
def add(self, item, position):
self.list.append(ItemWrapper(item, position))
def calculateSize(self, sizeType):
totalSize = QSize()
for wrapper in self.list:
position = wrapper.position
itemSize = QSize()
if sizeType == BorderLayout.MinimumSize:
itemSize = wrapper.item.minimumSize()
else: # sizeType == BorderLayout.SizeHint
itemSize = wrapper.item.sizeHint()
if position == BorderLayout.North or position == BorderLayout.South or position == BorderLayout.Center:
totalSize.setHeight(totalSize.height() + itemSize.height())
if position == BorderLayout.West or position == BorderLayout.East or position == BorderLayout.Center:
totalSize.setWidth(totalSize.width() + itemSize.width())
return totalSize
def demoBorderLayout():
class Window(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
centralWidget = QTextBrowser()
centralWidget.setPlainText(self.tr("Central widget"))
layout = BorderLayout()
layout.addWidget(centralWidget, BorderLayout.Center)
# Qt takes ownership of the widgets in the layout when setLayout() is
# called. Therefore we keep a local reference to each label to prevent
# it being garbage collected until the call to setLayout().
label_n = self.createLabel("North")
layout.addWidget(label_n, BorderLayout.North)
label_w = self.createLabel("West")
layout.addWidget(label_w, BorderLayout.West)
label_e1 = self.createLabel("East 1")
layout.addWidget(label_e1, BorderLayout.East)
label_e2 = self.createLabel("East 2")
layout.addWidget(label_e2, BorderLayout.East)
label_s = self.createLabel("South")
layout.addWidget(label_s, BorderLayout.South)
self.setLayout(layout)
self.setWindowTitle(self.tr("Border Layout"))
def createLabel(self, text):
label = QLabel(text)
label.setFrameStyle(QFrame.Box | QFrame.Raised)
return label
app = QApplication(sys.argv)
window = Window()
window.show()
sys.exit(app.exec_())
#
#----------------------------------------------------------
# flow layout
class FlowLayout(QLayout):
def __init__(self, parent=None, margin=0, spacing=-1):
QLayout.__init__(self, parent)
if parent is not None:
self.setMargin(margin)
self.setSpacing(spacing)
self.itemList = []
def addItem(self, item):
self.itemList.append(item)
def count(self):
return len(self.itemList)
def itemAt(self, index):
if index >= 0 and index < len(self.itemList):
return self.itemList[index]
def takeAt(self, index):
if index >= 0 and index < len(self.itemList):
return self.itemList.pop(index)
def expandingDirections(self):
return Qt.Orientations(Qt.Orientation(0))
def hasHeightForWidth(self):
return True
def heightForWidth(self, width):
height = self.doLayout(QRect(0, 0, width, 0), True)
return height
def setGeometry(self, rect):
QLayout.setGeometry(self, rect)
self.doLayout(rect, False)
def sizeHint(self):
return self.minimumSize()
def minimumSize(self):
size = QSize()
for item in self.itemList:
size = size.expandedTo(item.minimumSize())
size += QSize(2 * self.margin(), 2 * self.margin())
return size
def doLayout(self, rect, testOnly):
x = rect.x()
y = rect.y()
lineHeight = 0
for item in self.itemList:
nextX = x + item.sizeHint().width() + self.spacing()
if nextX - self.spacing() > rect.right() and lineHeight > 0:
x = rect.x()
y = y + lineHeight + self.spacing()
nextX = x + item.sizeHint().width() + self.spacing()
lineHeight = 0
if not testOnly:
item.setGeometry(QRect(QPoint(x, y), item.sizeHint()))
x = nextX
lineHeight = max(lineHeight, item.sizeHint().height())
return y + lineHeight - rect.y()
def demoFlowLayout():
class Window(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
flowLayout = FlowLayout()
flowLayout.addWidget(QPushButton(self.tr("Short")))
flowLayout.addWidget(QPushButton(self.tr("Longer")))
flowLayout.addWidget(QPushButton(self.tr("Different text")))
flowLayout.addWidget(QPushButton(self.tr("More text")))
flowLayout.addWidget(QPushButton(self.tr("Even longer button text")))
self.setLayout(flowLayout)
self.setWindowTitle(self.tr("Flow Layout"))
app = QApplication(sys.argv)
mainWin = Window()
mainWin.show()
sys.exit(app.exec_())
#---------------------------------------------------
# This is hackish workaround for smoother displaying dialog boxes and windows in qt
# Basically it delays showing of a window until it is fully drawn.
class MyDesktopFragment(QWidget):
"""This is widget which displays fragment of desktop screen.
It can grab the screen contents and then display it on itself. It may be
useful if we want to simulate buffered dialogs which are initially hidden.
"""
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self._label = QLabel(self)
self._borderWidth = 0
self._initialPalette = self.palette()
self._borderPalette = QPalette(QColor(255, 0, 0))
self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
def resizeEvent(self, event):
b = self._borderWidth
self._label.setGeometry(b, b, self.width() - b * 2, self.height() - b * 2)
def setBorderEnabled(self, enabled=True):
"""This enabled or disables widget border for debugging purposes."""
if enabled:
self.setAutoFillBackground(True)
self.setPalette(self._borderPalette)
self._borderWidth = 1
else:
self.setAutoFillBackground(False)
self.setPalette(self._initialPalette)
self._borderWidth = 0
def grabDesktop(self, rect):
"""Grabs desktop fragment which should be displayed."""
p = QPixmap.grabWindow(QApplication.desktop().winId(), rect.x(), rect.y(), rect.width(), rect.height())
self._label.setPixmap(p)
class LazyWidget(object):
"""Widget proxy which delays window showing until it is fully initialized."""
DelayTime = 100
def __init__(self):
self._widget = None
self._savedPos = QPoint(0, 0)
self._desktopFragment = MyDesktopFragment()
def setWidget(self, widget):
self._widget = widget
def _checkWidget(self):
assert isinstance(self._widget, QWidget), "Invalid widget set!"
def show(self):
self._checkWidget()
self._desktopFragment.grabDesktop(QRect(1000, 700, 1010, 710))
self._desktopFragment.setGeometry(QRect(1000, 700, 1010, 710))
self._desktopFragment.show()
self._moveOffScreen()
self._widget.show()
QTimer.singleShot(LazyWidget.DelayTime, self._moveOnScreen)
def _moveOffScreen(self):
"""Moves widget off screen, so it can initialize without flicker."""
self._checkWidget()
self._savedPos = QPoint(self._widget.x(), self._widget.y())
self._widget.move(1019, 716)
def _moveOnScreen(self):
"""Moves widget on screen, after it has initialized."""
self._checkWidget()
self._widget.move(self._savedPos.x(), self._savedPos.y())
self._desktopFragment.hide()
_lazyWidget = None
def lazyshow(widget):
"""Convenience function for showing windows fully initialized."""
# must initialize here, because QApplication must be constructed first
# this works only for not maximized windows
if widget.isMaximized():
widget.show()
else:
global _lazyWidget
if _lazyWidget is None:
_lazyWidget = LazyWidget()
_lazyWidget.setWidget(widget)
_lazyWidget.show()
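# Minimal usage sketch for lazyshow (not part of the original file; the window
# contents are illustrative). It follows the demo convention used above:
def demoLazyShow():
    app = QApplication(sys.argv)
    window = QWidget()
    window.setWindowTitle('Lazy show demo')
    window.resize(300, 200)
    # lazyshow() shows a grab of the desktop, initializes the window off
    # screen, then moves it on screen after LazyWidget.DelayTime ms.
    lazyshow(window)
    sys.exit(app.exec_())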
# FIXME there must be a way to configure another way!
def propagate_fonts(widget, font):
for c in widget.children():
if isinstance(c, QWidget):
c.setFont(font)
propagate_fonts(c, font)
| gpl-2.0 | -325,066,226,027,989,600 | 28.76834 | 157 | 0.596239 | false |
UCSD-CCAL/ccal | ccal/plot_context.py | 1 | 6286 | from numpy import absolute
from pandas import Series
from .compute_context import compute_context
from .plot_and_save import plot_and_save
def plot_context(
_1d_array_or_series,
text=None,
n_data=None,
location=None,
scale=None,
degree_of_freedom=None,
shape=None,
fit_fixed_location=None,
fit_fixed_scale=None,
fit_initial_location=None,
fit_initial_scale=None,
n_grid=1e3,
degree_of_freedom_for_tail_reduction=1e8,
minimum_kl=1e-2,
scale_with_kl=True,
multiply_distance_from_reference_argmax=False,
global_location=None,
global_scale=None,
global_degree_of_freedom=None,
global_shape=None,
y_max_is_pdf_max=False,
plot_rug=True,
layout_width=None,
layout_height=None,
title=None,
xaxis_title=None,
html_file_path=None,
plotly_html_file_path=None,
):
if isinstance(_1d_array_or_series, Series):
if title is None:
title = _1d_array_or_series.name
if xaxis_title is None:
xaxis_title = "Value"
if text is None:
text = _1d_array_or_series.index
_1d_array = _1d_array_or_series.values
else:
_1d_array = _1d_array_or_series
context_dict = compute_context(
_1d_array,
n_data=n_data,
location=location,
scale=scale,
degree_of_freedom=degree_of_freedom,
shape=shape,
fit_fixed_location=fit_fixed_location,
fit_fixed_scale=fit_fixed_scale,
fit_initial_location=fit_initial_location,
fit_initial_scale=fit_initial_scale,
n_grid=n_grid,
degree_of_freedom_for_tail_reduction=degree_of_freedom_for_tail_reduction,
minimum_kl=minimum_kl,
scale_with_kl=scale_with_kl,
multiply_distance_from_reference_argmax=multiply_distance_from_reference_argmax,
global_location=global_location,
global_scale=global_scale,
global_degree_of_freedom=global_degree_of_freedom,
global_shape=global_shape,
)
pdf_max = context_dict["pdf"].max()
context_indices = context_dict["context_indices"]
absolute_context_indices = absolute(context_indices)
absolute_context_indices_max = absolute_context_indices.max()
if y_max_is_pdf_max:
y_max = pdf_max
if y_max < absolute_context_indices_max:
absolute_context_indices = (
absolute_context_indices / absolute_context_indices_max * y_max
)
else:
y_max = max(pdf_max, absolute_context_indices_max)
if plot_rug:
yaxis_max = 0.16
yaxis2_min = yaxis_max + 0.08
else:
yaxis_max = 0
yaxis2_min = 0
layout = dict(
width=layout_width,
height=layout_height,
title=title,
xaxis=dict(anchor="y", title=xaxis_title),
yaxis=dict(
domain=(0, yaxis_max), dtick=1, zeroline=False, showticklabels=False
),
yaxis2=dict(domain=(yaxis2_min, 1)),
legend=dict(orientation="h", xanchor="center", x=0.5, y=-0.2),
)
annotations = []
for i, (template, fit_parameter) in enumerate(
zip(
(
"N = {:.0f}",
"Location = {:.2f}",
"Scale = {:.2f}",
"DF = {:.2f}",
"Shape = {:.2f}",
),
context_dict["fit"],
)
):
annotations.append(
dict(
xref="paper",
yref="paper",
x=(i + 1) / (5 + 1),
y=1.064,
xanchor="center",
text=template.format(fit_parameter),
showarrow=False,
)
)
layout.update(annotations=annotations)
data = []
data.append(
dict(
yaxis="y2",
type="histogram",
name="Data",
legendgroup="Data",
x=_1d_array,
marker=dict(color="#20d9ba"),
histnorm="probability density",
hoverinfo="x+y",
)
)
if plot_rug:
data.append(
dict(
type="scatter",
legendgroup="Data",
showlegend=False,
x=_1d_array,
y=(0,) * _1d_array.size,
text=text,
mode="markers",
marker=dict(symbol="line-ns-open", color="#20d9ba"),
hoverinfo="x+text",
)
)
grid = context_dict["grid"]
line_width = 3.2
pdf = context_dict["pdf"]
data.append(
dict(
yaxis="y2",
type="scatter",
name="PDF",
x=grid,
y=pdf,
line=dict(width=line_width, color="#24e7c0"),
)
)
shape_pdf_reference = context_dict["shape_pdf_reference"]
shape_pdf_reference[pdf <= shape_pdf_reference] = None
data.append(
dict(
yaxis="y2",
type="scatter",
name="Shape Reference",
x=grid,
y=shape_pdf_reference,
line=dict(width=line_width, color="#9017e6"),
)
)
location_pdf_reference = context_dict["location_pdf_reference"]
if location_pdf_reference is not None:
location_pdf_reference[pdf <= location_pdf_reference] = None
data.append(
dict(
yaxis="y2",
type="scatter",
name="Location Reference",
x=grid,
y=location_pdf_reference,
line=dict(width=line_width, color="#4e40d8"),
)
)
is_negative = context_dict["context_indices"] < 0
for name, indices, color in (
("- Context", is_negative, "#0088ff"),
("+ Context", ~is_negative, "#ff1968"),
):
data.append(
dict(
yaxis="y2",
type="scatter",
name=name,
x=grid[indices],
y=absolute_context_indices[indices],
line=dict(width=line_width, color=color),
fill="tozeroy",
)
)
plot_and_save(dict(layout=layout, data=data), html_file_path, plotly_html_file_path)
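# Usage sketch (not part of the original module): assumes the ccal package and
# its plotly dependencies are installed; the data and file name are illustrative.
if __name__ == '__main__':
    from numpy.random import normal
    from pandas import Series
    _series = Series(normal(0, 1, 1000), name='Example Signal')
    plot_context(_series, html_file_path='example_context.html')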
| mit | -5,359,194,133,037,782,000 | 23.65098 | 88 | 0.522431 | false |
JonW27/labob | viewer.py | 1 | 1519 | #! /usr/bin/python
import cgi
import cgitb
import urllib, cStringIO
import hmac
import hashlib
cgitb.enable()
from bs4 import BeautifulSoup
HTML_HEADER = 'Content-type: text/html\n'
HEAD = '''
<!DOCTYPE html>
<html lang="en">
<head>
<title>Papers</title>
<meta charset="UTF-8">
</head>
<body>
'''
END = '''
</body>
</html>
'''
def screenshotlayer(access_key, secret_keyword, url, args):
# encode URL
query = urllib.urlencode(dict(url=url, **args))
# generate md5 secret key
secret_key = hashlib.md5('{}{}'.format(url, secret_keyword)).hexdigest()
return "https://api.screenshotlayer.com/api/capture?access_key=%s&secret_key=%s&%s" % (access_key, query)
params = {
'fullpage': '1',
'width': '',
'viewport': '',
'format': '',
'css_url': '',
'delay': '',
'ttl': '',
'force': '',
'placeholder': '',
'user_agent': '',
'accept_lang': '',
'export': ''
};
access_key = "b2b1a6a29159797f73e852ab0e012372"
secret_keyword = "hob"
url = ''
def main():
print HTML_HEADER
print HEAD
d = urllib.urlopen('http://marge.stuy.edu/~jonathan.wong/labob/contribs')
links = []
soup = BeautifulSoup(d)
for i in soup.find_all('a', href=True):
links.append(i['href'])
for i in range(len(links)):
url = "http://marge.stuy.edu/~jonathan.wong/labob/contribs" + links[i]
print screenshotlayer(access_key, secret_keyword, url, params)
print links[i]
print END
main() | gpl-3.0 | -4,429,456,980,170,782,000 | 20.408451 | 109 | 0.593812 | false |
largetalk/tenbagger | capital/reactor/cc/migrations/0003_auto_20180228_1145.py | 1 | 1210 | # Generated by Django 2.0.2 on 2018-02-28 03:45
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cc', '0002_auto_20180224_0348'),
]
operations = [
migrations.AddField(
model_name='creditcard',
name='lines',
            field=models.PositiveIntegerField(default=0, help_text='credit limit'),
),
migrations.AlterField(
model_name='cashout',
name='amount',
            field=models.DecimalField(decimal_places=2, help_text='amount', max_digits=11),
),
migrations.AlterField(
model_name='cashout',
name='fee',
            field=models.DecimalField(decimal_places=2, help_text='fee', max_digits=9),
),
migrations.AlterField(
model_name='cashout',
name='pos_rate',
            field=models.FloatField(default=0.6, help_text='card swipe rate', verbose_name='rate'),
),
migrations.AlterField(
model_name='cashout',
name='swipe_day',
            field=models.DateField(default=datetime.date.today, help_text='swipe date'),
),
]
| mit | 3,035,395,281,549,715,000 | 29.307692 | 88 | 0.563452 | false |
derrickorama/image_optim | image_optim/core.py | 1 | 5109 | # -*- coding: utf-8 -*-
import math
import os
import re
import subprocess
import sys
import traceback
class ImageOptim():
def __init__(self, config_path=None):
if config_path is not None:
print('load config')
# self.config_path = '"'
def get_bytes(self, number):
value = float(number[:-1])
if number.endswith('K'):
value = value * 1024
elif number.endswith('M'):
value = value * 1024 * 1024
return math.ceil(value)
def get_percent(self, number):
if number.endswith('%'):
number = number[:-1]
number = float(number)
return round(number, 2)
def split_output(self, line):
# Parse ratio
ratio_match = re.search(r'^[^\s]+\s*', line)
ratio = ratio_match.group(0).strip()
# Parse size
size_match = re.search(r'^[^\s]+\s*', line[len(ratio_match.group(0)):])
size = size_match.group(0).strip()
# Consider the rest of the line as the file name
# - this captures file names that contains spaces
filename = line[(len(size_match.group(0)) + len(ratio_match.group(0))):]
return ratio, size, filename
def interpret(self, stdout):
# Split output into lines/columns & images vs totals
images = []
output = [line.strip() for line in re.split(r'\n', stdout.decode('utf-8').strip())]
total_output = output.pop(len(output) - 1)
# Gather results for each image
for line in output:
# Zero out image results if there are no savings
if line.find('------') > -1:
ratio = '0%'
size = '0B'
filename = line[6:].strip()
else:
# Parse image results
ratio, size, filename = self.split_output(line)
# Add to list of images
images.append({
'ratioSavings': self.get_percent(ratio),
'sizeSavings': self.get_bytes(size),
'path': filename
})
# Zero out totals when there are no savings
if total_output.find('------') > -1:
total_ratio = '0%'
total_size = '0B'
else:
# Parse totals
# - Note: starting at index 6 so "Total: " doesn't go through
total_ratio, total_size, total_filename = self.split_output(total_output[6:].strip())
totals = {
# Save ratio savings in totals
'ratioSavings': round(float(total_ratio[:-1]), 4),
# Set size savings equal to the # of bytes (based on suffix)
'sizeSavings': self.get_bytes(total_size)
}
return {
'images': images,
'totals': totals
}
def run_command(self, command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise subprocess.CalledProcessError(proc.returncode, ' '.join(command), 'Captured stdout/stderr:\n%s\n%s' % (stdout.decode('utf-8'), stderr.decode('utf-8')))
return stdout, stderr
def feature_detection(self):
utils = ['pngcrush', 'jpegoptim', 'gifsicle', 'jpegtran', 'pngout', 'advpng', 'optipng', 'pngquant', 'jhead', 'svgo']
disabled_utils = []
# Try getting the help docs for each utility
for util in utils:
try:
stdout, stderr = self.run_command([util, '-h'])
except FileNotFoundError:
# If a FileNotFoundError error is thrown, the utility is not available
disabled_utils.append('--no-%s' % util)
except subprocess.CalledProcessError:
pass # who cares
return disabled_utils
def optimize(self, path, exclude=None, callback=None):
command = ['image_optim', path]
# Recursively optimize images if a directory is given
if os.path.isdir(path):
command.append('--recursive')
# Exclude paths as defined by "exclude" glob
if exclude is not None:
command.append('--exclude')
command.append(exclude)
# Determine which optimization utilities are available
command += self.feature_detection()
# Run image_optim
try:
stdout, stderr = self.run_command(command)
except subprocess.CalledProcessError as e:
raise e
# If nothing comes through the stdout/stderr, nothing was optimized
if stdout == b'' and stderr == b'':
raise NoImagesOptimizedError(path)
# Convert result to JSON
results = self.interpret(stdout)
if callback is not None:
return callback(results)
else:
return results
class NoImagesOptimizedError(Exception):
def __init__(self, path):
self.path = path
def __str__(self):
return 'No images were optimized at the given path: %s' % os.path.abspath(self.path)
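# Usage sketch (not part of the original module): the path is illustrative and
# optimization requires at least one of the detected utilities to be installed.
if __name__ == '__main__':
    optimizer = ImageOptim()
    try:
        results = optimizer.optimize('images/')
        print('Saved %d bytes' % results['totals']['sizeSavings'])
    except NoImagesOptimizedError as error:
        print(error)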
| mit | -6,236,471,516,162,944,000 | 30.93125 | 169 | 0.558818 | false |
RNAcentral/rnacentral-webcode | rnacentral/portal/models/sequence_regions.py | 1 | 1612 | """
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.db import models
from django.contrib.postgres.fields import ArrayField
from portal.models import EnsemblAssembly, RnaPrecomputed
class SequenceRegion(models.Model):
id = models.AutoField(primary_key=True)
urs_taxid = models.ForeignKey(
RnaPrecomputed,
related_name='regions',
db_column='urs_taxid',
to_field='id',
on_delete=models.CASCADE
)
region_name = models.TextField()
chromosome = models.TextField()
strand = models.IntegerField()
region_start = models.IntegerField()
region_stop = models.IntegerField()
assembly = models.ForeignKey(
EnsemblAssembly,
related_name='regions',
db_column='assembly_id',
to_field='assembly_id',
on_delete=models.CASCADE
)
was_mapped = models.BooleanField()
identity = models.IntegerField()
providing_databases = ArrayField(models.TextField())
exon_count = models.IntegerField()
class Meta:
db_table = 'rnc_sequence_regions'
| apache-2.0 | 1,916,004,264,865,602,600 | 32.583333 | 72 | 0.715881 | false |
tobecontinued/onedrive-e | onedrivee/common/utils.py | 1 | 1602 | import os
import pkgutil
from pwd import getpwnam, getpwuid
def pretty_print_bytes(size, precision=2):
suffixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
index = 0
while size > 1024:
index += 1 # increment the index of the suffix
size /= 1024.0 # apply the division
return "%.*f %s" % (precision, size, suffixes[index])
def get_current_os_user():
"""
    Find the real user who runs the current process. Return a tuple of uid, username, home directory, and gid.
:rtype: (int, str, str, int)
"""
user_name = os.getenv('SUDO_USER')
if not user_name:
user_name = os.getenv('USER')
if user_name:
pw = getpwnam(user_name)
user_uid = pw.pw_uid
else:
# If cannot find the user, use ruid instead.
user_uid = os.getresuid()[0]
pw = getpwuid(user_uid)
user_name = pw.pw_name
user_gid = pw.pw_gid
user_home = pw.pw_dir
return user_uid, user_name, user_home, user_gid
OS_USER_ID, OS_USER_NAME, OS_USER_HOME, OS_USER_GID = get_current_os_user()
OS_HOSTNAME = os.uname()[1]
def get_content(file_name, pkg_name='onedrivee', is_text=True):
"""
Read a resource file in data/.
:param str file_name:
:param str pkg_name:
    :param bool is_text: True to indicate the text is UTF-8 encoded.
:return str | bytes: Content of the file.
"""
content = pkgutil.get_data(pkg_name, 'store/' + file_name)
if is_text:
content = content.decode('utf-8')
return content
def mkdir(path):
os.makedirs(path, mode=0o700)
os.chown(path, OS_USER_ID, OS_USER_GID)
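# Usage sketch (not part of the original module): exercises the pure helpers.
if __name__ == '__main__':
    print(pretty_print_bytes(5 * 1024 * 1024))  # -> '5.00 MB'
    print(OS_USER_NAME, OS_USER_HOME, OS_HOSTNAME)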
| gpl-3.0 | -919,203,654,911,786,500 | 28.666667 | 94 | 0.611111 | false |
preshing/junction | samples/MapMemoryBench/RenderGraphs.py | 1 | 7125 | #!/usr/bin/env python
import os
import cairo
import math
import glob
#---------------------------------------------------
# Cairo drawing helpers
#---------------------------------------------------
def createScaledFont(family, size, slant=cairo.FONT_SLANT_NORMAL, weight=cairo.FONT_WEIGHT_NORMAL):
""" Simple helper function to create a cairo ScaledFont. """
face = cairo.ToyFontFace(family, slant, weight)
DEFAULT_FONT_OPTIONS = cairo.FontOptions()
DEFAULT_FONT_OPTIONS.set_antialias(cairo.ANTIALIAS_SUBPIXEL)
return cairo.ScaledFont(face, cairo.Matrix(xx=size, yy=size), cairo.Matrix(), DEFAULT_FONT_OPTIONS)
def fillAlignedText(cr, x, y, scaledFont, text, alignment = 0):
""" Draw some aligned text at the specified co-ordinates.
alignment = 0: left-justify
alignment = 0.5: center
alignment = 1: right-justify """
ascent, descent = scaledFont.extents()[:2]
x_bearing, y_bearing, width, height = scaledFont.text_extents(text)[:4]
with Saved(cr):
cr.set_scaled_font(scaledFont)
cr.move_to(math.floor(x + 0.5 - width * alignment), math.floor(y + 0.5))
cr.text_path(text)
cr.fill()
class Saved():
""" Preserve cairo state inside the scope of a with statement. """
def __init__(self, cr):
self.cr = cr
def __enter__(self):
self.cr.save()
return self.cr
def __exit__(self, type, value, traceback):
self.cr.restore()
#---------------------------------------------------
# AxisAttribs
#---------------------------------------------------
class AxisAttribs:
""" Describes one axis on the graph. Can be linear or logarithmic. """
def __init__(self, size, min, max, step, logarithmic = False, labeler = lambda x: str(int(x + 0.5))):
self.size = float(size)
self.logarithmic = logarithmic
self.labeler = labeler
self.toAxis = lambda x: math.log(x) if logarithmic else float(x)
self.fromAxis = lambda x: math.exp(x) if logarithmic else float(x)
self.min = self.toAxis(min)
self.max = self.toAxis(max)
self.step = self.toAxis(step)
def mapAxisValue(self, x):
""" Maps x to a point along the axis.
x should already have been filtered through self.toAxis(), especially if logarithmic. """
return (x - self.min) / (self.max - self.min) * self.size
def iterLabels(self):
""" Helper to iterate through all the tick marks along the axis. """
lo = int(math.floor(self.min / self.step + 1 - 1e-9))
hi = int(math.floor(self.max / self.step + 1e-9))
for i in xrange(lo, hi + 1):
value = i * self.step
if self.min == 0 and i == 0:
continue
yield self.mapAxisValue(value), self.labeler(self.fromAxis(value))
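# Worked example (not from the original file): a logarithmic x-axis from 1 to
# 1000 rendered over 300 px maps each decade to one third of the width:
#   ax = AxisAttribs(300, 1, 1000, 10, logarithmic=True)
#   ax.mapAxisValue(ax.toAxis(10))   # -> 100.0
#   ax.mapAxisValue(ax.toAxis(100))  # -> 200.0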
#---------------------------------------------------
# Graph
#---------------------------------------------------
class Curve:
def __init__(self, name, points, color):
self.name = name
self.points = points
self.color = color
class Graph:
""" Renders a graph. """
def __init__(self, xAttribs, yAttribs):
self.xAttribs = xAttribs
self.yAttribs = yAttribs
self.curves = []
def addCurve(self, curve):
self.curves.append(curve)
def renderTo(self, fileName):
xAttribs = self.xAttribs
yAttribs = self.yAttribs
# Create the image surface and cairo context
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 140 + int(xAttribs.size + 0.5), 65 + int(yAttribs.size + 0.5))
cr = cairo.Context(surface)
cr.set_source_rgb(1, 1, 1)
cr.paint()
cr.set_miter_limit(1.414)
cr.translate(58, 11 + yAttribs.size)
# Draw axes
labelFont = createScaledFont('Arial', 11)
with Saved(cr):
cr.set_line_width(1)
cr.set_source_rgb(.4, .4, .4)
# Horizontal axis
cr.move_to(0, -0.5)
cr.rel_line_to(xAttribs.size + 1, 0)
for pos, label in xAttribs.iterLabels(): # Tick marks
x = math.floor(pos + 0.5) + 0.5
cr.move_to(x, -1)
cr.rel_line_to(0, 4)
cr.stroke()
for pos, label in xAttribs.iterLabels(): # Labels
x = math.floor(pos + 0.5)
with Saved(cr):
cr.translate(x - 1, 5)
cr.rotate(-math.pi / 4)
fillAlignedText(cr, 0, 6, labelFont, label, 1)
# Vertical axis
cr.move_to(0.5, 0)
cr.rel_line_to(0, -yAttribs.size - 0.5)
for pos, label in yAttribs.iterLabels(): # Tick marks
if label == '0':
continue
y = -math.floor(pos + 0.5) - 0.5
cr.move_to(1, y)
cr.rel_line_to(-4, 0)
cr.stroke()
for pos, label in yAttribs.iterLabels(): # Labels
if label == '0':
continue
fillAlignedText(cr, -4, -pos + 4, labelFont, label, 1)
# Draw curves
for curve in self.curves:
points = curve.points
width = 2.5
color = curve.color
with Saved(cr):
cr.set_line_width(width)
cr.set_source_rgba(*color)
with Saved(cr):
cr.rectangle(0, 5, xAttribs.size, -yAttribs.size - 15)
cr.clip()
cr.move_to(xAttribs.mapAxisValue(points[0][0]), -yAttribs.mapAxisValue(points[0][1]))
for x, y, yHi in points[1:]:
cr.line_to(xAttribs.mapAxisValue(x) + 0.5, -yAttribs.mapAxisValue(y) - 0.5)
cr.stroke()
# Label
labelFont = createScaledFont('Arial', 11)
label = curve.name
x, y, yHi = points[-1]
fillAlignedText(cr, xAttribs.mapAxisValue(x) + 3, -yAttribs.mapAxisValue(y) + 4, labelFont, label, 0)
# Draw axis names
cr.set_source_rgb(0, 0, 0)
axisFont = createScaledFont('Helvetica', 14, weight=cairo.FONT_WEIGHT_BOLD)
with Saved(cr):
cr.translate(-47, -yAttribs.size / 2.0)
cr.rotate(-math.pi / 2)
fillAlignedText(cr, 0, 0, axisFont, "Bytes In Use", 0.5)
fillAlignedText(cr, xAttribs.size / 2.0, 50, axisFont, "Population", 0.5)
# Save PNG file
surface.write_to_png(fileName)
#---------------------------------------------------
# main
#---------------------------------------------------
graph = Graph(AxisAttribs(600, 0, 1000000, 200000), AxisAttribs(320, 0, 50000000, 10000000))
COLORS = [
(1, 0, 0),
(1, 0.5, 0),
(0.5, 0.5, 0),
(0, 1, 0),
(0, 0.5, 1),
(0, 0, 1),
(1, 0, 1)
]
for i, fn in enumerate(glob.glob('build*/results.txt')):
points = eval(open(fn, 'r').read())
graph.addCurve(Curve(os.path.split(fn)[0], points, COLORS[i % len(COLORS)]))
graph.renderTo('out.png')
| bsd-2-clause | -3,046,255,733,022,424,000 | 35.917098 | 120 | 0.518035 | false |
uchicago-voth/cgmap | test/molecular_map_test/single_protein_explicit_mapping/test_single_protein_explicit_mapping.py | 1 | 3175 | #!/usr/bin/env python2
import sys
sys.path.append('../../../src/')
import cgmap as cg
import mdtraj as md
import md_check as check
############################### config #####################################
input_traj = "protein.trr"
input_top = "protein.pdb"
output_traj = "protein.trr"
output_top = "protein.pdb"
reference_traj = "protein.trr"
reference_top = "protein.pdb"
output_dir ='./output/'
input_dir ='./input/'
reference_dir ='./reference/'
############################### run ########################################
### pull in trajectories
trj = md.load(input_dir + input_traj,top=input_dir + input_top)
### define mapping based on knowledge of topology
### in this instance, map every residue into a single site
for a in trj.top.atoms: a.mass = a.element.mass
for a in trj.top.atoms: a.charge = 0
# first residue is SER148 (zero index'd)
name_lists = []
label_lists = []
molecule_types = []
resREF = 148
istart = 0
iend = 0
iname = "SER"
molnum = 0
maxSize = len(list(trj.top.atoms))
stopFlag = False
tempMol = []
tempCGL = []
name_lists_key = []
for i, a in enumerate(trj.top.atoms) :
resNAME = str(a.residue)[0:3]
resNUM = int(str(a.residue)[3:6])
aINDEX = a.index
if resNAME not in name_lists_key :
name_lists_key.append(resNAME)
if (resNUM != resREF) :
#first append name_lists and label
iend = aINDEX - 1
tempMol.append("index %d to %d" % (istart, iend))
tempCGL.append(iname)
#then update things for next residue
iname = resNAME
istart = aINDEX
if resNUM < resREF :
#stopFlag = True
molecule_types.append(int(molnum))
name_lists.append(tempMol)
label_lists.append(tempCGL)
tempMol = []
tempCGL = []
molnum += 1
resREF = resNUM
# special case if last item
if (i == (maxSize-1)) :
iend = aINDEX
tempMol.append("index %d to %d" % (istart, iend))
tempCGL.append(iname)
molecule_types.append(int(molnum))
name_lists.append(tempMol)
label_lists.append(tempCGL)
#actual map command
print name_lists
print label_lists
print molecule_types
print "Lengths of all three lists should be equivalent: %d = %d = %d" % (len(name_lists), len(label_lists), len(molecule_types))
cg_trj = cg.map_unfiltered_molecules( trj = trj,
selection_list = name_lists,
bead_label_list = label_lists,
molecule_types = molecule_types,
mapping_function = "com")
cg_trj.save(output_dir + output_traj)
cg_trj[0].save(output_dir + output_top)
############################### check results ###############################
# reloading results from disk.
cg_traj = cg_trj.load(output_dir + output_traj,top=output_dir + output_top)
ref_cg_traj = cg_trj.load(reference_dir + reference_traj,
top=reference_dir + reference_top)
result=check.md_content_equality(cg_traj,ref_cg_traj)
sys.exit(check.check_result_to_exitval(result))
| apache-2.0 | 3,298,537,481,466,733,600 | 27.348214 | 128 | 0.566299 | false |
Yelp/service_configuration_lib | tests/service_configuration_lib_test.py | 1 | 16529 | #!/usr/bin/env python
# Copyright 2015 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import service_configuration_lib
class TestServiceConfigurationLib:
fake_service_configuration = {
'fake_service1': {
'deployed_to': None,
'monitoring': {
'fake_monitoring_key': 'fake_monitoring_value',
},
'deploy': {},
'port': 11111,
'runs_on': [
'fake_hostname3',
'fake_hostname2',
'fake_hostname1',
],
},
'fake_service2': {
'deployed_to': [
'fake_deployed_hostname1',
'fake_deployed_hostname2',
'fake_hostname4',
],
'monitoring': {},
'port': 22222,
'runs_on': [
'fake_hostname2',
'fake_hostname3',
'fake_hostname4',
],
},
'fake_service3': {
'deployed_to': None,
'monitoring': {},
'port': 33333,
'runs_on': [
'fake_hostname3',
'fake_hostname4',
'fake_hostname5',
],
'env_runs_on': {
'fake_env1': ['fake_hostname3'],
'fake_env2': ['fake_hostname4', 'fake_hostname5'],
},
'needs_puppet_help': True,
},
'fake_service4': {
'deployed_to': True,
'runs_on': [],
'needs_puppet_help': True,
},
'fake_service5': {
'deployed_to': [],
'runs_on': [],
'needs_puppet_help': True,
},
}
def test_generate_service_info_should_have_all_keys(self):
"""I'm not entirely sure what this test is testing since I can add a
new value or remove an old value and the test passes without changing
any code. I simplified it to make it less misleading and focus on the
one thing it does to, which is test that the arg service_information is
updated.
"""
fake_service_information = {'fakekey2': 'fakevalue2'}
fake_port = 9999
actual = service_configuration_lib.generate_service_info(
fake_service_information,
port=fake_port,
)
expected = {
# Can't use the fake_service_information because it's an
# un-nested hash at this point
'fakekey2': 'fakevalue2',
'port': fake_port,
}
assert expected == actual
def test_read_monitoring_should_return_empty_when_file_doesnt_exist(self):
expected = {}
fake_monitoring_file = 'fake_monitoring_file'
# TODO: Mock open?
actual = service_configuration_lib.read_monitoring(
fake_monitoring_file,
)
assert expected == actual
def test_read_deploy_should_return_empty_when_file_doesnt_exist(self):
expected = {}
fake_deploy_file = 'fake_deploy_file'
# TODO: Mock open?
actual = service_configuration_lib.read_deploy(
fake_deploy_file,
)
assert expected == actual
def test_read_smartstack_should_return_empty_when_file_doesnt_exist(self):
expected = {}
fake_smartstack_file = 'fake_smartstack_file'
# TODO: Mock open?
actual = service_configuration_lib.read_smartstack(
fake_smartstack_file,
)
assert expected == actual
def test_read_dependencies_return_empty_when_file_doesnt_exist(self):
expected = {}
fake_dependencies_file = 'fake_dependencies_file'
# TODO: Mock open?
actual = service_configuration_lib.read_smartstack(
fake_dependencies_file,
)
assert expected == actual
def test_services_that_run_on_should_properly_read_configuration(self):
expected = ['fake_service1', 'fake_service2']
fake_hostname = 'fake_hostname2'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.services_that_run_on(fake_hostname, fake_service_configuration)
assert sorted(expected) == sorted(actual)
def test_services_that_run_on_should_return_an_empty_array_when_the_hostname_isnt_anywhere(self):
expected = []
fake_hostname = 'non_existent_fake_hostname2'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.services_that_run_on(fake_hostname, fake_service_configuration)
assert sorted(expected) == sorted(actual)
def test_services_deployed_to_should_return_deployed_and_running_services(self):
expected = ['fake_service1', 'fake_service2', 'fake_service3', 'fake_service4']
fake_hostname = 'fake_hostname3'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.services_deployed_on(fake_hostname, fake_service_configuration)
assert set(expected) == set(actual)
def test_services_needing_puppet_help_on_should_properly_read_configuration(self):
expected = ['fake_service3', 'fake_service4']
fake_hostname = 'fake_hostname4'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.services_needing_puppet_help_on(fake_hostname, fake_service_configuration)
assert expected == actual
def test_all_nodes_that_run_should_properly_return_the_right_nodes(self):
expected = ['fake_hostname3', 'fake_hostname4', 'fake_hostname5']
fake_service = 'fake_service3'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.all_nodes_that_run(fake_service, fake_service_configuration)
assert expected == actual
def test_all_nodes_that_receive_removes_duplicates(self):
expected = [
'fake_deployed_hostname1',
'fake_deployed_hostname2',
'fake_hostname2',
'fake_hostname3',
'fake_hostname4',
]
fake_service = 'fake_service2'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.all_nodes_that_receive(fake_service, fake_service_configuration)
assert expected == actual
def test_all_nodes_that_receive_with_no_deploys_to(self):
expected = ['fake_hostname3', 'fake_hostname4', 'fake_hostname5']
fake_service = 'fake_service3'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.all_nodes_that_receive(fake_service, fake_service_configuration)
assert expected == actual
def test_all_nodes_that_receive_is_sorted(self):
expected = ['fake_hostname1', 'fake_hostname2', 'fake_hostname3']
fake_service = 'fake_service1'
fake_service_configuration = self.fake_service_configuration
actual = service_configuration_lib.all_nodes_that_receive(fake_service, fake_service_configuration)
assert expected == actual
@mock.patch('os.path.abspath', return_value='nodir')
@mock.patch('os.listdir', return_value=['1', '2', '3'])
@mock.patch('service_configuration_lib.read_service_configuration_from_dir', return_value='hello')
def test_read_services_configuration(self, read_patch, listdir_patch, abs_patch):
expected = {'1': 'hello', '2': 'hello', '3': 'hello'}
actual = service_configuration_lib.read_services_configuration(soa_dir='testdir')
abs_patch.assert_called_once_with('testdir')
listdir_patch.assert_called_once_with('nodir')
read_patch.assert_has_calls(
[mock.call('nodir', '1'), mock.call('nodir', '2'), mock.call('nodir', '3')],
)
assert expected == actual
@mock.patch('os.path.abspath', return_value='nodir')
@mock.patch('os.listdir', return_value=['1', '2', '3'])
def test_list_services(self, listdir_patch, abs_patch):
expected = ['1', '2', '3']
actual = service_configuration_lib.list_services(soa_dir='testdir')
abs_patch.assert_called_once_with('testdir')
listdir_patch.assert_called_once_with('nodir')
assert expected == actual
@mock.patch('service_configuration_lib.read_service_configuration_from_dir', return_value='bye')
@mock.patch('os.path.abspath', return_value='cafe')
def test_read_service_configuration(self, abs_patch, read_patch):
expected = 'bye'
actual = service_configuration_lib.read_service_configuration('boba', soa_dir='tea')
abs_patch.assert_called_once_with('tea')
read_patch.assert_called_once_with('cafe', 'boba')
assert expected == actual
@mock.patch('os.path.join', return_value='forever_joined')
@mock.patch('service_configuration_lib.read_port', return_value='1111')
@mock.patch('service_configuration_lib.read_monitoring', return_value='no_monitoring')
@mock.patch('service_configuration_lib.read_deploy', return_value='no_deploy')
@mock.patch('service_configuration_lib.read_data', return_value='no_data')
@mock.patch('service_configuration_lib.read_smartstack', return_value={})
@mock.patch('service_configuration_lib.read_service_information', return_value='no_info')
@mock.patch('service_configuration_lib.read_dependencies', return_value='no_dependencies')
@mock.patch('service_configuration_lib.generate_service_info', return_value={'oof': 'ouch'})
def test_read_service_configuration_from_dir(
self,
gen_patch,
deps_patch,
info_patch,
smartstack_patch,
data_patch,
deploy_patch,
monitoring_patch,
port_patch,
join_patch,
):
expected = {'oof': 'ouch'}
actual = service_configuration_lib.read_service_configuration_from_dir('never', 'die')
join_patch.assert_has_calls([
mock.call('never', 'die', 'port'),
mock.call('never', 'die', 'monitoring.yaml'),
mock.call('never', 'die', 'deploy.yaml'),
mock.call('never', 'die', 'data.yaml'),
mock.call('never', 'die', 'smartstack.yaml'),
mock.call('never', 'die', 'service.yaml'),
mock.call('never', 'die', 'dependencies.yaml'),
])
port_patch.assert_called_once_with('forever_joined')
monitoring_patch.assert_called_once_with('forever_joined')
deploy_patch.assert_called_once_with('forever_joined')
data_patch.assert_called_once_with('forever_joined')
smartstack_patch.assert_called_once_with('forever_joined')
info_patch.assert_called_once_with('forever_joined')
deps_patch.assert_called_once_with('forever_joined')
gen_patch.assert_called_once_with(
'no_info', port='1111',
monitoring='no_monitoring',
deploy='no_deploy',
data='no_data',
dependencies='no_dependencies',
smartstack={},
)
assert expected == actual
@mock.patch('os.path.join', return_value='together_forever')
@mock.patch('os.path.abspath', return_value='real_soa_dir')
@mock.patch('service_configuration_lib.read_yaml_file', return_value={'what': 'info'})
def test_read_extra_service_information(self, info_patch, abs_patch, join_patch):
expected = {'what': 'info'}
actual = service_configuration_lib.read_extra_service_information(
'noname',
'noinfo', soa_dir='whatsadir',
)
abs_patch.assert_called_once_with('whatsadir')
join_patch.assert_called_once_with('real_soa_dir', 'noname', 'noinfo.yaml')
info_patch.assert_called_once_with('together_forever', deepcopy=True)
assert expected == actual
@mock.patch('io.open', autospec=True)
@mock.patch('service_configuration_lib.load_yaml', return_value={'data': 'mock'})
def testread_yaml_file_single(self, load_patch, open_patch):
expected = {'data': 'mock'}
filename = 'fake_fname_uno'
actual = service_configuration_lib.read_yaml_file(filename)
open_patch.assert_called_once_with(filename, 'r', encoding='UTF-8')
load_patch.assert_called_once_with(open_patch.return_value.__enter__().read())
assert expected == actual
@mock.patch('io.open', autospec=True)
@mock.patch('service_configuration_lib.load_yaml', return_value={'mmmm': 'tests'})
def testread_yaml_file_with_cache(self, load_patch, open_patch):
expected = {'mmmm': 'tests'}
filename = 'fake_fname_dos'
service_configuration_lib.enable_yaml_cache()
actual = service_configuration_lib.read_yaml_file(filename)
actual_two = service_configuration_lib.read_yaml_file(filename)
open_patch.assert_called_once_with(filename, 'r', encoding='UTF-8')
load_patch.assert_called_once_with(open_patch.return_value.__enter__().read())
assert expected == actual
assert expected == actual_two
# When we cache, we can NOT return a pointer to the original object
# because the caller might mutate it. We need to ensure that
# the returned object is a copy.
assert expected is not actual_two
@mock.patch('io.open', autospec=True)
@mock.patch('service_configuration_lib.load_yaml', return_value={'water': 'slide'})
def testread_yaml_file_no_cache(self, load_patch, open_patch):
expected = {'water': 'slide'}
filename = 'fake_fname_tres'
service_configuration_lib.disable_yaml_cache()
actual = service_configuration_lib.read_yaml_file(filename)
actual_two = service_configuration_lib.read_yaml_file(filename)
open_patch.assert_any_call(filename, 'r', encoding='UTF-8')
assert open_patch.call_count == 2
load_patch.assert_any_call(open_patch.return_value.__enter__().read())
assert load_patch.call_count == 2
assert expected == actual
assert expected == actual_two
def test_env_runs_on(self):
expected = ['fake_hostname3']
actual = service_configuration_lib.all_nodes_that_run_in_env(
'fake_service3',
'fake_env1',
service_configuration=self.fake_service_configuration,
)
assert expected == actual
expected = ['fake_hostname4', 'fake_hostname5']
actual = service_configuration_lib.all_nodes_that_run_in_env(
'fake_service3',
'fake_env2',
service_configuration=self.fake_service_configuration,
)
assert expected == actual
def test_bad_port_get_service_from_port(self):
'Test for bad inputs'
service_name = service_configuration_lib.get_service_from_port(None)
assert service_name is None
service_name = service_configuration_lib.get_service_from_port({})
assert service_name is None
def test_valid_port_get_service_from_port(self):
'Test that if there is a service for that port it returns it'
all_services = {
'Other Service': {
'port': 2352,
},
'Service 23': {
'port': 656,
},
'Test Service': {
'port': 100,
},
'Smart Service': {
'port': 345,
'smartstack': {
'main': {
'proxy_port': 3444,
},
},
},
'Service 36': {
'port': 636,
},
}
found_service_name = service_configuration_lib.get_service_from_port(100, all_services)
assert found_service_name == 'Test Service'
found_service_name = service_configuration_lib.get_service_from_port(3444, all_services)
assert found_service_name == 'Smart Service'
| apache-2.0 | -5,560,176,066,147,454,000 | 41.932468 | 117 | 0.612197 | false |
hal0x2328/neo-python | neo/Core/State/ValidatorState.py | 1 | 2648 | from .StateBase import StateBase
from neo.Core.IO.BinaryReader import BinaryReader
from neo.Core.IO.BinaryWriter import BinaryWriter
from neo.IO.MemoryStream import StreamManager
from neo.Core.Cryptography.ECCurve import EllipticCurve, ECDSA
from neo.Core.Size import Size as s
from neo.Core.Size import GetVarSize
from neo.Core.Fixed8 import Fixed8
class ValidatorState(StateBase):
def __init__(self, pub_key=None):
"""
Create an instance.
Args:
pub_key (EllipticCurve.ECPoint):
Raises:
Exception: if `pub_key` is not a valid ECPoint.
"""
if pub_key is not None and type(pub_key) is not EllipticCurve.ECPoint:
raise Exception("Pubkey must be ECPoint Instance")
self.PublicKey = pub_key
self.Registered = False
self.Votes = Fixed8.Zero()
def Size(self):
"""
Get the total size in bytes of the object.
Returns:
int: size.
"""
return super(ValidatorState, self).Size() + self.PublicKey.Size() + s.uint8 + self.Votes.Size()
def Deserialize(self, reader: BinaryReader):
"""
Deserialize full object.
Args:
reader (neo.Core.IO.BinaryReader):
"""
super(ValidatorState, self).Deserialize(reader)
self.PublicKey = ECDSA.Deserialize_Secp256r1(reader)
self.Registered = reader.ReadBool()
self.Votes = reader.ReadFixed8()
@staticmethod
def DeserializeFromDB(buffer):
"""
Deserialize full object.
Args:
buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.
Returns:
ValidatorState:
"""
m = StreamManager.GetStream(buffer)
reader = BinaryReader(m)
v = ValidatorState()
v.Deserialize(reader)
StreamManager.ReleaseStream(m)
return v
def Serialize(self, writer: BinaryWriter):
"""
Serialize full object.
Args:
writer (neo.IO.BinaryWriter):
"""
super(ValidatorState, self).Serialize(writer)
self.PublicKey.Serialize(writer)
writer.WriteBool(self.Registered)
writer.WriteFixed8(self.Votes)
def ToJson(self):
"""
Convert object members to a dictionary that can be parsed as JSON.
Returns:
dict:
"""
return {
'pubkey': self.PublicKey.ToString()
}
def Clone(self):
vs = ValidatorState(self.PublicKey)
vs.Registered = self.Registered
vs.Votes = self.Votes
return vs
| mit | -6,787,392,062,032,740,000 | 26.298969 | 103 | 0.603852 | false |
HuberTRoy/MusicPlayer | MusicPlayer/apis/qqApi.py | 1 | 8394 | # coding = utf-8
import json
import logging
import urllib.parse
from apiRequestsBase import HttpRequest, ignored
logger = logging.getLogger(__name__)
class QQApi(HttpRequest):
default_timeout = 3.05
def __init__(self):
super(QQApi, self).__init__()
self.headers['Host'] = 'c.y.qq.com'
self.headers['Referer'] = 'https://y.qq.com/portal/playlist.html'
self.playlistHeaders = self.headers.copy()
self.playlistHeaders['Host'] = 'shc.y.qq.com'
self.tokenHeaders = self.headers.copy()
self.tokenHeaders['Host'] = 'base.music.qq.com'
self.tokenHeaders.pop('Referer')
self.token = self._get_qqtoken()
self.key = self.token.get('key')
self.sip = self.token.get('sip')[0]
        # Any fixed value works here; originally it was derived from a time-varying cookie parameter.
self.guid = 3768717388
if not self.sip:
logger.info("获取QQToken失败。当前key: {0}, 当前sip: {1}".format(
self.key, self.sip))
print('QQ 播放地址获取失败,请勿播放QQ音乐。')
def httpRequest(self, *args, **kwargs):
html = super(QQApi, self).httpRequest(*args, **kwargs)
logger.info("进行QQ Url请求, args: {0}, kwargs: {1}".format(args, kwargs))
with ignored():
return html.text
logger.info("url: {0} 请求失败. Header: {1}".format(
args[0], kwargs.get('headers')))
return ''
def _get_qqtoken(self):
"""
        After the update no token fetch is needed; sip is now the fixed URL:
http://dl.stream.qqmusic.qq.com/
"""
# token_url = 'http://base.music.qq.com/fcgi-bin/fcg_musicexpress.fcg?' + \
# 'json=3&guid=3768717388&g_tk=938407465&loginUin=0&hostUin=0&' + \
# 'format=jsonp&inCharset=GB2312&outCharset=GB2312¬ice=0&' + \
# 'platform=yqq&jsonpCallback=jsonCallback&needNewCode=0'
# data = self.httpRequest(token_url, method='GET',
# headers=self.tokenHeaders)
# with ignored():
# data = data[len("jsonCallback("):-2]
# return json.loads(data)
return {'key': '1', 'sip': ['http://dl.stream.qqmusic.qq.com/']}
def _getImgUrl(self, mid):
imgUrl = 'https://y.gtimg.cn/music/photo_new/'
return imgUrl + 'T002R300x300M000' + mid + '.jpg'
def _getSongUrl(self, mid):
vkey = self._getSongUrlVkey(mid)
if not vkey:
vkey = '000'
return '{0}C400{1}.m4a?vkey={2}&guid={3}&uin=0&fromtag=66'.format(self.sip, mid, vkey, self.guid)
def _getSongUrlVkey(self, mid):
        # Fetch the vkey required to build a QQ Music song URL.
        # Returns the vkey, or False on failure.
vkey_url = 'https://c.y.qq.com/base/fcgi-bin/fcg_music_express_mobile3.fcg'
params = {
'g_tk': '5381',
'jsonpCallback': 'MusicJsonCallback8571665793949388',
'loginUin': '0',
'hostUin': '0',
'format': 'json',
'inCharset': 'utf8',
'outCharset': 'utf-8',
'notice': '0',
'platform': 'yqq',
'needNewCode': '0',
'cid': '205361747',
'callback': 'MusicJsonCallback8571665793949388',
'uin': '0',
'songmid': mid,
'filename': 'C400' + mid + '.m4a',
'guid': '{}'.format(self.guid)
}
response = self.httpRequest(vkey_url, method="GET", headers=self.headers, params=params)
with ignored():
data = json.loads(response[response.find("{"):-1])
return data['data']['items'][0]['vkey']
return False
def _fromSongUrlGetSongMid(self, songUrl):
        # Extract the mid from a full song URL.
# 'http://dl.stream.qqmusic.qq.com/
# C400 0000ASDASD.m4a
# ?vkey=' + vkey + '&guid=7133372870&uin=0&fromtag=66'
songUrl = songUrl.split("?")[0]
return songUrl[songUrl.find('C400')+4:-4]
def getSongUrl(self, songUrl):
        # songUrl formats:
        # 1. a song mid such as 000xkbLI2QEKE9.
        # 2. a full URL from a previous authorization; it has expired and must be re-fetched.
mid = songUrl
if 'http' in songUrl:
mid = self._fromSongUrlGetSongMid(songUrl)
return self._getSongUrl(mid)
def playList(self, ein=29):
"""
        ein controls which page of playlists is returned:
        29, 59, 89, ...
"""
url = 'https://c.y.qq.com/splcloud/fcgi-bin/' +\
'fcg_get_diss_by_tag.fcg?rnd=0.5136307078685405&g_tk=5381&' +\
'jsonpCallback=getPlaylist&loginUin=0&hostUin=0&format=jsonp&inCharset=utf8' +\
'&outCharset=utf-8¬ice=0&platform=yqq&needNewCode=0&categoryId=10000000&' +\
'sortId=5&sin=30&ein={0}'.format(ein)
response = self.httpRequest(url, method='GET', headers=self.headers)
with ignored():
data = json.loads(response[len('getPlaylist('):-1])
return data['data']['list']
return False
def getPlaylist(self, ids):
url = 'https://shc.y.qq.com/qzone/fcg-bin/fcg_ucc_getcdinfo_byids_cp.fcg?type=1&json=1&utf8=1&onlysong=0' +\
'&disstid={0}&format=jsonp&g_tk=5381&jsonpCallback=playlistinfoCallback&loginUin=0&hostUin=0&'.format(ids) +\
'format=jsonp&inCharset=utf8&outCharset=utf-8¬ice=0&platform=yqq&needNewCode=0'
response = self.httpRequest(
url, method='GET', headers=self.playlistHeaders)
with ignored():
data = json.loads(response[len('playlistinfoCallback('):-len(')')])
data = data['cdlist'][0]
newDatas = {}
newDatas['trackCount'] = data['total_song_num']
newDatas['name'] = data['dissname']
newDatas['creator'] = {'nickname': data['nick']}
newDatas['description'] = data['desc']
songs = data['songlist']
# imgUrl = 'https://y.gtimg.cn/music/photo_new/'
for i in songs:
i['name'] = i['songname']
i['artists'] = [
{'name': ';'.join([x['name'] for x in i['singer']])}]
i['duration'] = int(i['interval']) * 1000
# i['album'] = {'blurPicUrl': imgUrl + 'T002R300x300M000' + i['albummid'] + '.jpg'}
i['album'] = {'blurPicUrl': self._getImgUrl(i['albummid'])}
# i['mp3Url'] = '{0}C400{1}.m4a?vkey={2}&guid={3}'.format(self.sip, i['songmid'], self.key, self.guid)
i['mp3Url'] = self._getSongUrl(i['songmid'])
i['lyric'] = 'qq'
newDatas['tracks'] = songs
return newDatas
return False
def search(self, key):
url = 'https://c.y.qq.com/soso/fcgi-bin/client_search_cp?ct=24&qqmusic_ver=1298&' +\
'new_json=1&remoteplace=txt.yqq.center&searchid=43541888870417375&t=0&aggr=1' +\
'&cr=1&catZhida=1&lossless=0&flag_qc=0&p=1&n=50&' +\
'w={0}'.format(urllib.parse.quote(key)) +\
'&g_tk=5381&jsonpCallback=searchCallbacksong6064&loginUin=0&hostUin=0&' +\
'format=jsonp&inCharset=utf8&outCharset=utf-8¬ice=0&platform=yqq&needNewCode=0'
response = self.httpRequest(url, method='GET')
with ignored():
data = json.loads(
response[len('searchCallbacksong6064('):-1])
data = data['data']['song']
newDatas = {}
newDatas['songCount'] = data['curnum'] - 1
songs = []
for i in data['list']:
songs.append({'name': i['name'],
'ar': [{'name': ';'.join([x['name'] for x in i['singer']])}],
'al': {'picUrl': self._getImgUrl(i['album']['mid'])},
'dt': i['interval'] * 1000,
'id': i['id'],
                          # Not actually an mp3 URL; named this way to keep the interface uniform.
'mp3Url': i['mid'],
'lyric': 'qq'
})
newDatas['songs'] = songs
return newDatas
return False
qqApi = QQApi()
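# Usage sketch (not part of the original module; both calls perform live HTTP
# requests against QQ Music, so results depend on the service being reachable):
def _example_usage():
    results = qqApi.search('hello')  # unified search payload, or False on failure
    playlists = qqApi.playList()     # one page of public playlists, or False
    return results, playlists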
if __name__ == '__main__':
help(qqApi)
| mit | 7,373,061,869,558,684,000 | 33.566524 | 121 | 0.527688 | false |
zanaca/docker-dns | src/OSes/macos.py | 1 | 3483 | import os
import shutil
import time
import config
import dockerapi as docker
import util
FLAVOR = 'macos'
PLIST_PATH = '/Library/LaunchDaemons/com.zanaca.dockerdns-tunnel.plist'
KNOWN_HOSTS_FILE = f'{config.HOME_ROOT}/.ssh/known_hosts'
APP_DESTINATION = f'{config.HOME}/Applications/dockerdns-tunnel.app'
DOCKER_CONF_FOLDER = f'{config.HOME}/Library/Containers/com.docker.docker/Data/database/com.docker.driver.amd64-linux/etc/docker'
DOCKER_BUILD_TARGET = 'base'
def setup(tld=config.TOP_LEVEL_DOMAIN):
if not os.path.isdir('/etc/resolver'):
os.mkdir('/etc/resolver')
open(f'/etc/resolver/{tld}',
'w').write(f'nameserver {docker.NETWORK_GATEWAY}')
plist = open('src/templates/com.zanaca.dockerdns-tunnel.plist',
'r').read().replace('{PWD}', config.BASE_PATH)
open(PLIST_PATH, 'w').write(plist)
os.system(f'sudo launchctl load -w {PLIST_PATH} 1>/dev/null 2>/dev/null')
return True
def install(tld=config.TOP_LEVEL_DOMAIN):
print('Generating known_hosts backup for user "root", if necessary')
if not os.path.exists(f'{config.HOME_ROOT}/.ssh'):
os.mkdir(f'{config.HOME_ROOT}/.ssh')
os.chmod(f'{config.HOME_ROOT}/.ssh', 700)
if os.path.exists(KNOWN_HOSTS_FILE):
shutil.copy2(KNOWN_HOSTS_FILE,
f'{config.HOME_ROOT}/.ssh/known_hosts_pre_docker-dns')
time.sleep(3)
port = False
ports = docker.get_exposed_port(config.DOCKER_CONTAINER_NAME)
if '22/tcp' in ports:
port = int(ports['22/tcp'][0]['HostPort'])
if not port:
raise('Problem fetching ssh port')
os.system(
f'ssh-keyscan -H -t ecdsa-sha2-nistp256 -p {port} 127.0.0.1 2> /dev/null >> {KNOWN_HOSTS_FILE}')
if not os.path.exists(APP_DESTINATION):
uid = os.getuid()
gid = os.getgid()
if 'SUDO_UID' in os.environ:
uid = int(os.environ.get('SUDO_UID'))
gid = int(os.environ.get('SUDO_GID'))
shutil.copytree('src/templates/dockerdns-tunnel_app', APP_DESTINATION)
util.change_owner_recursive(APP_DESTINATION, uid, gid)
workflow = open(f'{APP_DESTINATION}/Contents/document.wflow', 'r').read()
workflow = workflow.replace(
'[PATH]', config.BASE_PATH)
open(f'{APP_DESTINATION}/Contents/document.wflow', 'w').write(workflow)
return True
def uninstall(tld=config.TOP_LEVEL_DOMAIN):
if os.path.exists(f'/etc/resolver/{tld}'):
print('Removing resolver file')
os.unlink(f'/etc/resolver/{tld}')
if os.path.exists(PLIST_PATH):
print('Removing tunnel service')
os.system(
f'sudo launchctl unload -w {PLIST_PATH} 1>/dev/null 2>/dev/null')
os.unlink(PLIST_PATH)
if os.path.exists(f'{config.HOME_ROOT}/.ssh/known_hosts_pre_docker-dns'):
print('Removing kwown_hosts backup')
os.unlink(f'{config.HOME_ROOT}/.ssh/known_hosts_pre_docker-dns')
if os.path.exists(APP_DESTINATION):
print('Removing tunnel app')
for filename in os.listdir(APP_DESTINATION):
file_path = os.path.join(APP_DESTINATION, filename)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
print('Failed to delete %s. Reason: %s' % (file_path, e))
        os.rmdir(APP_DESTINATION)  # the app bundle is a directory, so rmdir rather than unlink
| mit | -5,995,928,301,452,000,000 | 36.858696 | 129 | 0.631927 | false |
wonghoifung/learning-python | spider/extract_all_links_by_tag.py | 1 | 1254 | #-*-coding:utf8-*-
import sys
import urllib
import urllib2
import urlparse
import re
from lxml import etree
def tag_link(tag, startpage):
return 'http://www.douban.com/tag/' + tag + '/movie?start=' + str(startpage)
def trim_link(url):
mre=re.match('^https?://movie.douban.com/subject/([^/]*)',url,re.IGNORECASE)
if not mre:
print 'url:' + url + ' is not valid...'
return ''
url = mre.group(0)
return url
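# Example (hypothetical douban URL, for illustration only):
#
#   trim_link('https://movie.douban.com/subject/1234567/?from=showing')
#   # -> 'https://movie.douban.com/subject/1234567'
#
# URLs that do not match the subject pattern return '' and are tallied as
# bad links in process().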
class my_urlopener(urllib.FancyURLopener):
version = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36'
def process():
startpage = 0
links = []
badcount = 0
while True:
url = tag_link('日本', startpage)
startpage += 15
opener = my_urlopener()
page = opener.open(url)
text = page.read()
page.close()
selector = etree.HTML(text)
movielist = selector.xpath('//*[@id="content"]/div/div[@class="article"]/div[@class="mod movie-list"]/dl')
if len(movielist) == 0:
break
for movie in movielist:
movielink = movie.xpath('dd/a/@href')[0]
link = trim_link(movielink)
if len(link) > 0:
links.append(link)
print link
else:
badcount += 1
print len(links)
print badcount
def main():
process()
if __name__ == "__main__":
main()
| mit | 8,063,614,123,801,121,000 | 22.148148 | 122 | 0.6504 | false |
Afanc/parazite | src/trade_off.py | 1 | 2872 | # -*- coding: utf-8 -*-
from parazite1 import *
from random import uniform, sample
import matplotlib.pyplot as plt
from CONSTANTES import *
from CHANGING_CONST import *
def trade_off(para_i = None, effect_arg = None):
if effect_arg != None:
new_vir = 0
new_recov = 0
new_transmission = 0
effect = effect_arg
new_vir = (effect**2)/100
new_transmission = 1/(1+exp(-(effect/1.1-5)))
new_recov = 0.1 + 1/effect
if new_recov > 1:
new_recov = 1
return [new_vir,new_transmission,new_recov]
if isinstance(para_i, Parazite):
new_vir = 0
new_recov = 0
new_transmission = 0
effect = (para_i.getVir()*100)**0.5
effect += uniform(-2,2)
compteur = 0
        while (effect > 10 or effect < 0) and compteur < 3:
effect = (para_i.getVir()*100)**0.5
effect += uniform(-2,2)
compteur += 1
if effect > 10 or effect <0:
effect = (para_i.getVir()*100)**0.5
new_vir = (effect**2)/100
new_transmission = 1/(1+exp(-(effect/1.1-5)))
new_recov = 0.1 + 1/effect
if new_recov > 1:
new_recov = 1
para_i.setVir(new_vir)
para_i.setTransmRate(new_transmission)
para_i.setRecovProb(new_recov)
else :
new_vir = 0
new_recov = 0
new_transmission = 0
effect = uniform(0,10)
new_vir = (effect**2)/100
new_transmission = 1/(1+exp(-(effect/1.1-5)))
new_recov = 0.1 + 1/effect
if new_recov > 1:
new_recov = 1
return [new_vir,new_transmission,new_recov]
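# Minimal usage sketch (numbers follow from the formulas above, not from
# measured data):
#
#   vir, transmission, recov = trade_off(effect_arg=5.0)
#   # vir == 0.25, transmission ~= 0.39, recov == 0.3
#
# Passing a Parazite instance instead mutates it in place via its setters.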
"""
print trade_off(effect_arg = 2.6)
x, y1, y2, y3, y4 = [], [], [], [], []
R0 = 0
for i in arange(0.1,10.0,0.1):
x.append(i)
effect = i
new_vir = (effect**2)/100
new_transmission = 1/(1+exp(-(effect/1.1-5)))
new_recov = 0.1 + 1/effect
if new_recov > 1:
new_recov = 1
R0num = (200 * (1+new_transmission)*0.4)
R0den = ((1+new_vir)*DYING_PROB + (1+new_recov)*BASE_CHANCE_OF_HEALING)
R0 = R0num/R0den
y1.append(new_vir)
y2.append(new_transmission)
y3.append(new_recov)
y4.append(R0)
plt.plot(x,y1, label = 'Virulance')
plt.plot(x,y2, label = 'Transmission')
plt.plot(x,y3, label = 'Guerison')
#plt.plot(x,y4, label = 'infections secondaires',)
plt.xlabel('Charge parasitaire')
#plt.ylabel('Infections secondaires')
plt.legend(loc='best')
plt.show()
#print trade_off(effect_arg = 7.0) #correspond à une virulance de 0.49
'''
new_vir = []
for effect in range(0,10,1):
new_vir.append(1/(1+exp(-(effect/1.1-5))))
plt.scatter(range(0,10,1 ),new_vir)
plt.show()
new_recov = 1- (effect**2)/150
test = Parazite(0.7, 0.1, 0, 'ID23')
trade_off(test)
print test.getVir()
print test.getTransmRate()
print test.getRecovProb()
'''
"""
| gpl-2.0 | 816,983,121,264,258,700 | 25.33945 | 75 | 0.563915 | false |
broomyocymru/ditto | ditto/core/confluence_client.py | 1 | 3394 | import json
import urllib
import requests
class ConfluenceClient:
def __init__(self, url, username, password):
self.base_url = url
self.username = username
self.password = password
self.http_headers = {'Accept': 'application/json', 'Content-type': 'application/json'}
def page(self, page_id):
query = {'expand': 'version'}
url = self.base_url + '/rest/api/content/' + page_id + '?' + urllib.urlencode(query)
response = requests.get(url, auth=(self.username, self.password), headers=self.http_headers)
return self.error_check(page_id, response)
def get_page_id(self, space, title):
try:
url = self.base_url + '/rest/api/content/?title=' + title + '&spaceKey=' + space
response = requests.get(url, auth=(self.username, self.password), headers=self.http_headers)
obj = self.error_check(title, response)
return obj["results"][0]["id"]
        except (requests.exceptions.RequestException, LookupError):
            # LookupError covers the "no page with that title" case, where
            # the results list comes back empty.
            return None
def new_child_page(self, parent_page_id, space, title, content):
data = json.dumps({
'type': 'page',
'title': title,
'ancestors': [{"id": parent_page_id}],
'space': {"key": space},
'body': {
'storage': {
'value': content,
'representation': 'storage'
}
}
})
page_id = self.get_page_id(space, title)
if page_id is not None:
page = self.page(page_id)
data = json.dumps({
'id': page_id,
'type': 'page',
'title': title,
'version': {'number': page['version']['number'] + 1},
'space': {'key': space},
'body': {
'storage': {
'value': content,
'representation': 'storage'
}
}
})
url = self.base_url + '/rest/api/content/' + page_id
response = requests.put(url, auth=(self.username, self.password), headers=self.http_headers, data=data)
return self.error_check(page_id, response)
else:
url = self.base_url + '/rest/api/content'
response = requests.post(url, auth=(self.username, self.password), headers=self.http_headers, data=data)
return self.error_check(page_id, response)
def save_content(self, page_id, version, title, content):
data = json.dumps({
'type': 'page',
'title': title,
'version': {
'number': version
},
'body': {
'storage': {
'value': content,
'representation': 'storage'
}
}
})
url = self.base_url + '/rest/api/content/' + page_id
response = requests.put(url, auth=(self.username, self.password), headers=self.http_headers, data=data)
return self.error_check(page_id, response)
@staticmethod
def error_check(prefix, response):
response.raise_for_status()
obj = response.json()
if 'errorMessages' in obj:
raise ValueError(prefix + ': ' + ','.join(obj['errorMessages']))
return obj
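# Hypothetical usage sketch (URL, space key and credentials are placeholders):
#
#   client = ConfluenceClient('https://confluence.example.com', 'user', 'secret')
#   page_id = client.get_page_id('DOCS', 'Release Notes')
#   if page_id is None:
#       client.new_child_page('12345', 'DOCS', 'Release Notes', '<p>hello</p>')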
| mit | 1,831,143,497,517,647,400 | 34.354167 | 116 | 0.508544 | false |
kuralabs/flowbber | lib/flowbber/plugins/sources/lcov.py | 1 | 14017 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2019 KuraLabs S.R.L
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Lcov
====
This source calls lcov_ on a specified directory to generate a tracefile or
loads one directly, and process it with lcov_cobertura_ to create a standard
Cobertura_ XML file, which in turn is then parsed using the flowbber Cobertura
source.
.. note::
This source requires the ``lcov`` executable to be available in your system
to run.
.. _lcov: http://ltp.sourceforge.net/coverage/lcov.php
.. _lcov_cobertura: https://github.com/eriwen/lcov-to-cobertura-xml
.. _Cobertura: http://cobertura.github.io/cobertura/
**Data collected:**
.. code-block:: json
{
"files": {
"my_source_code.c": {
"total_statements": 40,
"total_misses": 20,
"branch_rate": 0.5,
"total_hits": 8,
"line_rate": 0.5
},
"another_source.c": {
"total_statements": 40,
"total_misses": 40,
"branch_rate": 0.5,
"total_hits": 8,
"line_rate": 0.0
}
},
"total": {
"total_statements": 80,
"total_misses": 20,
"line_rate": 0.75
},
"tracefile": "<path-to-tracefile.info>"
}
**Dependencies:**
.. code-block:: sh
pip3 install flowbber[lcov]
**Usage:**
.. code-block:: toml
[[sources]]
type = "lcov"
id = "..."
[sources.config]
source = "{pipeline.dir}"
rc_overrides = ["lcov_branch_coverage=1"]
remove = ["*hello2*"]
remove_files = [
"/file/with/remove/patterns",
".removepatterns"
]
extract = ["*hello1*"]
extract_files = [
"/file/with/extract/patterns",
".extractpatterns"
]
derive_func_data = false
.. code-block:: json
{
"sources": [
{
"type": "lcov",
"id": "...",
"config": {
"source": "{pipeline.dir}",
"rc_overrides": ["lcov_branch_coverage=1"],
"remove": ["*hello2*"]
"remove_files": [
"/file/with/remove/patterns",
".removepatterns"
],
"extract": ["*hello1*"],
"extract_files": [
"/file/with/extract/patterns",
".extractpatterns"
],
"derive_func_data": false,
}
}
]
}
source
------
Path to the directory containing gcov's ``.gcda`` files or path to a tracefile
``.info`` file.
- **Default**: ``N/A``
- **Optional**: ``False``
- **Schema**:
.. code-block:: python3
{
'type': 'string',
'empty': False
}
- **Secret**: ``False``
rc_overrides
------------
Override lcov configuration file settings.
Elements should have the form ``SETTING=VALUE``.
- **Default**: ``[]``
- **Optional**: ``False``
- **Schema**:
.. code-block:: python3
{
'type': 'list',
'schema': {
'type': 'string',
'empty': False
},
}
- **Secret**: ``False``
remove
------
List of patterns of files to remove from coverage computation.
Patterns will be interpreted as shell wild‐card patterns.
- **Default**: ``[]``
- **Optional**: ``True``
- **Schema**:
.. code-block:: python3
{
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
}
- **Secret**: ``False``
remove_files
------------
List of paths to files containing patterns of files to remove from coverage
computation.
Patterns will be interpreted as shell wild‐card patterns.
All unique patterns parsed from these files will be added to the ones defined
in the ``remove`` configuration option.
- **Default**: ``[]``
- **Optional**: ``True``
- **Schema**:
.. code-block:: python3
{
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
}
- **Secret**: ``False``
extract
-------
List of patterns of files to extract for coverage computation.
Use this option if you want to extract coverage data for only a particular
set of files from a tracefile. Patterns will be interpreted as shell wild‐card
patterns.
- **Default**: ``[]``
- **Optional**: ``True``
- **Schema**:
.. code-block:: python3
{
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
}
- **Secret**: ``False``
extract_files
-------------
List of paths to files containing patterns of files to extract for coverage
computation.
Patterns will be interpreted as shell wild‐card patterns.
All unique patterns parsed from these files will be added to the ones defined
in the ``extract`` configuration option.
- **Default**: ``[]``
- **Optional**: ``True``
- **Schema**:
.. code-block:: python3
{
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
}
- **Secret**: ``False``
derive_func_data
----------------
Allow lcov to calculate function coverage data from line coverage data.
If ``True`` then the ``--derive-func-data`` option is used on the lcov
commands. If ``False`` then the option is not used.
This option is used to collect function coverage data, even when this data is
not provided by the installed gcov tool. Instead, lcov will use line coverage
data and information about which lines belong to a function to derive function
coverage.
- **Default**: ``False``
- **Optional**: ``True``
- **Schema**:
.. code-block:: python3
schema={
'type': 'boolean',
},
- **Secret**: ``False``
"""
from shutil import which
from pathlib import Path
from tempfile import NamedTemporaryFile
from flowbber.components import Source
from flowbber.utils.command import run
from flowbber.logging import get_logger
from flowbber.utils.filter import load_filter_file
from flowbber.plugins.sources.cobertura import CoberturaSource
log = get_logger(__name__)
class LcovSource(Source):
def declare_config(self, config):
config.add_option(
'source',
schema={
'type': 'string',
'empty': False,
},
)
config.add_option(
'rc_overrides',
default=[],
optional=True,
schema={
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
},
)
config.add_option(
'remove',
default=[],
optional=True,
schema={
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
},
)
config.add_option(
'remove_files',
default=[],
optional=True,
schema={
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
},
)
config.add_option(
'extract',
default=[],
optional=True,
schema={
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
},
)
config.add_option(
'extract_files',
default=[],
optional=True,
schema={
'type': 'list',
'schema': {
'type': 'string',
'empty': False,
},
},
)
config.add_option(
'derive_func_data',
default=False,
optional=True,
schema={
'type': 'boolean',
},
)
def collect(self):
from lcov_cobertura import LcovCobertura
# Check if file exists
source = Path(self.config.source.value)
if not source.exists():
raise FileNotFoundError(
'No such file or directory {}'.format(source)
)
source = source.resolve()
# Check if lcov is available
lcov = which('lcov')
if lcov is None:
raise FileNotFoundError('lcov executable not found.')
# Transform from list to something like
# --rc setting1=value1 --rc setting2=value2
rc_overrides = ''
if self.config.rc_overrides.value:
rc_overrides = '--rc {}'.format(
' --rc '.join(self.config.rc_overrides.value)
)
# Load remove patterns
remove = self.config.remove.value
for remove_file in self.config.remove_files.value:
for pattern in load_filter_file(remove_file):
if pattern not in remove:
remove.append(pattern)
# Load extract patterns
extract = self.config.extract.value
for extract_file in self.config.extract_files.value:
for pattern in load_filter_file(extract_file):
if pattern not in extract:
extract.append(pattern)
# Check if --derive-func-data is needed
derive_func_data = '--derive-func-data' \
if self.config.derive_func_data.value else ''
if source.is_dir():
# Create a temporary file. Close it, we just need the name.
tmp_file = NamedTemporaryFile(suffix='.info')
tmp_file.close()
tracefile = Path(tmp_file.name)
cmd = (
'{lcov} '
'{rc_overrides} '
'{derive_func_data} '
'--directory {directory} --capture '
'--output-file {tracefile}'.format(
lcov=lcov,
rc_overrides=rc_overrides,
derive_func_data=derive_func_data,
directory=source,
tracefile=tracefile
)
)
log.info('Gathering coverage info: "{}"'.format(cmd))
status = run(cmd)
if status.returncode != 0:
raise RuntimeError(
'Lcov failed capturing data:\n{}'.format(status.stderr)
)
else:
# Check file extension
if source.suffix != '.info':
raise ValueError(
'Unknown file extension "{}" '
'for a tracefile. Must be ".info"'.format(source.suffix)
)
tracefile = source
result = {
'tracefile': str(tracefile),
}
# Remove files from patterns
if remove:
cmd = (
'{lcov} '
'{rc_overrides} '
'{derive_func_data} '
'--remove {tracefile} {remove} '
'--output-file {tracefile}'.format(
lcov=lcov,
rc_overrides=rc_overrides,
derive_func_data=derive_func_data,
tracefile=tracefile,
remove=' '.join(
'"{}"'.format(e) for e in remove
)
)
)
log.info('Removing files: "{}"'.format(cmd))
status = run(cmd)
if status.returncode != 0:
raise RuntimeError(
'Lcov failed removing files from coverage:\n{}'.format(
status.stderr
)
)
# Extract files from patterns
if extract:
cmd = (
'{lcov} '
'{rc_overrides} '
'{derive_func_data} '
'--extract {tracefile} {extract} '
'--output-file {tracefile}'.format(
lcov=lcov,
rc_overrides=rc_overrides,
derive_func_data=derive_func_data,
tracefile=tracefile,
extract=' '.join(
'"{}"'.format(e) for e in extract
)
)
)
log.info('Extracting files: "{}"'.format(cmd))
status = run(cmd)
if status.returncode != 0:
raise RuntimeError(
'Lcov failed extracting files from coverage:\n{}'.format(
status.stderr
)
)
# Create cobertura xml file and parse it
converter = LcovCobertura(tracefile.open().read())
cobertura_xml = converter.convert()
with NamedTemporaryFile(delete=False, suffix='.xml') as xml:
xml.write(cobertura_xml.encode('utf-8'))
cobertura_src = CoberturaSource(
self._index, 'cobertura', self._id,
config={
'xmlpath': str(xml.name)
}
)
result.update(cobertura_src.collect())
return result
__all__ = ['LcovSource']
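# For reference, collect() shells out to commands shaped roughly like the
# following (paths, patterns and rc settings below are placeholders):
#
#   lcov --rc lcov_branch_coverage=1 --directory build/ --capture \
#        --output-file /tmp/tmpXXXXXX.info
#   lcov --rc lcov_branch_coverage=1 --remove /tmp/tmpXXXXXX.info "*test*" \
#        --output-file /tmp/tmpXXXXXX.info
#
# before converting the tracefile to Cobertura XML and delegating the actual
# parsing to CoberturaSource.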
| apache-2.0 | -4,109,220,172,681,513,000 | 24.610603 | 78 | 0.484617 | false |
hvdieren/asap_operators | forth/clusterClassif.py | 1 | 4351 | Skip to content
Personal Open source Business Explore
Sign upSign inPricingBlogSupport
This repository
Search
Watch 2 Star 0 Fork 1 project-asap/telecom-analytics
Code Issues 0 Pull requests 0 Projects 0 Pulse Graphs
Branch: current Find file Copy pathtelecom-analytics/src/python/sociometer/stereo_type_classification.py
1f5e90a on Dec 9, 2016
@papagian papagian Fix stereotype classification
1 contributor
RawBlameHistory
108 lines (87 sloc) 3.8 KB
#
# Copyright 2015-2016 WIND,FORTH
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""Stereo Type Classification Module.
Given a set of user profiles for specific region and weeks of year and a set
of labeled calling behaviors, it returns the percentage of each label
on each spatial region.
E.g.:
Region 1, resident, 75%
Region 1, commuter, 20%
Usage:
$SPARK_HOME/bin/spark-submit sociometer/stereo_type_classification.py <profiles> <centroids>
Args:
profiles: The user profiles location. The expected location is expected to match
the following pattern:
/profiles/<region>-<start_week>-<end_week> where start_week and
end_week have the following format: <ISO_year>_<ISO_week>
centroids: The calling behavior dataset location. The expected location is expected to match
the following pattern:
/centroids/<region>-<start_week>-<end_week> where start_week and
end_week have the following format: <ISO_year>_<ISO_week>
Example:
$SPARK_HOME/bin/spark-submit sociometer/stereo_type_classification.py /profiles/aree_roma-2015_53-2016_3 \
/centroids/aree_roma-2015_53-2016_3
Results are stored into a local file: sociometer-<region>-<start_week>-<end_week>.
"""
from pyspark import SparkContext
from pyspark.mllib.clustering import KMeansModel
import re
import sys
def user_type(profile, model, centroids):
if len([x for x in profile if x != 0]) == 1 and sum(profile) < 0.5:
return 'passing by'
else:
idx = model.predict(profile)
        cluster = model.clusterCenters[idx]
        # cluster centers may come back as arrays; the lookup table built in
        # __main__ is keyed by tuples, so convert before indexing.
        return centroids[tuple(cluster)]
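# `centroids` is expected to map a cluster-center tuple to its label, e.g.
# (hypothetical two-entry table):
#
#   {(0.9, 0.8, 0.1, 0.0): 'resident', (0.1, 0.0, 0.2, 0.1): 'commuter'}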
if __name__ == '__main__':
sc = SparkContext()
    # user annotation
    d1 = sys.argv[1]
    d2 = sys.argv[2]
pattern = r'/profiles/(?P<region>\w+)-(?P<start_week>\w+)-(?P<end_week>\w+)'
m = re.search(pattern, d1)
region, start_week, end_week = m.groups()
pattern = r'/centroids/(?P<region>\w+)-(?P<start_week>\w+)-(?P<end_week>\w+)'
m = re.search(pattern, d2)
assert((region, start_week, end_week) == m.groups())
r = sc.pickleFile(d2)
centroids = {tuple(v.tolist()): k for k, v in r.collect()}
model = KMeansModel(centroids.keys())
r = sc.pickleFile(d1)
# format: (user_id, profile)
    r_auto = r.map(lambda (region, user_id, profile):
                   (region, user_type(profile, model, centroids), user_id, profile)) \
        .map(lambda (region, utype, user_id, profile): ((region, utype), 1)) \
        .reduceByKey(lambda x, y: x + y)
    # get (municipality, cluster_id) pairs
    # final result
#
lst = r_auto.collect()
sociometer = [(region,
user_type,
count * 1.0 / sum([count1 for ((region1, _), count1) in lst if region1 == region])
) for ((region, user_type), count) in lst]
with open("sociometer-%s-%s-%s" %
(region, start_week, end_week), 'w') as outfile:
print >>outfile, "region, profile, percentage"
        for region, user_type, count in sorted(sociometer, key=lambda x: x[0]):
print>>outfile, region, user_type.replace("\n", ""), count
| apache-2.0 | -4,051,820,865,351,946,000 | 37.166667 | 110 | 0.674787 | false |
dahlia/wand | tests/image_properties_test.py | 2 | 22124 | # -*- coding: utf-8 -*-
#
# These test cover the Image attributes that directly map to C-API functions.
#
import io
import numbers
from pytest import mark, raises
from wand.color import Color
from wand.compat import string_type
from wand.font import Font
from wand.image import Image
from wand.version import MAGICK_VERSION_NUMBER
def test_alpha_channel_get(fx_asset):
"""Checks if image has alpha channel."""
with Image(filename=str(fx_asset.join('watermark.png'))) as img:
assert img.alpha_channel is True
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.alpha_channel is False
def test_alpha_channel_set(fx_asset):
"""Sets alpha channel to off."""
with Image(filename=str(fx_asset.join('watermark.png'))) as img:
if MAGICK_VERSION_NUMBER < 0x700:
enable_option = 'on'
disable_option = False
else:
enable_option = 'associate'
disable_option = 'disassociate'
img.alpha_channel = enable_option
assert img.alpha_channel is True
img.alpha_channel = disable_option
assert img.alpha_channel is False
img.alpha_channel = 'opaque'
assert img[0, 0].alpha == 1.0
with raises(ValueError):
img.alpha_channel = 'watermark'
def test_artifacts():
with Image(filename='rose:') as img:
img.artifacts['key'] = 'value'
assert 'date:create' in img.artifacts
assert img.artifacts['key'] == 'value'
assert img.artifacts['not_a_value'] is None
_ = len(img.artifacts)
for _ in img.artifacts.items():
pass
del img.artifacts['key']
def test_background_color_get(fx_asset):
"""Gets the background color."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert Color('white') == img.background_color
def test_background_color_set(fx_asset):
"""Sets the background color."""
with Image(filename=str(fx_asset.join('croptest.png'))) as img:
with Color('red') as color:
img.background_color = color
assert img.background_color == color
img.background_color = 'green'
assert img.background_color == Color('green')
def test_border_color():
green = Color('green')
with Image(filename='rose:') as img:
img.border_color = 'green'
assert img.border_color == green
@mark.xfail(MAGICK_VERSION_NUMBER >= 0x700,
reason="Channel traits are not implemented in IM7.")
def test_channel_depths(fx_asset):
with Image(filename=str(fx_asset.join('beach.jpg'))) as i:
assert dict(i.channel_depths) == {
'blue': 8, 'gray': 8, 'true_alpha': 1, 'opacity': 1,
'undefined': 1, 'composite_channels': 8, 'index': 1,
'rgb_channels': 8, 'alpha': 1, 'yellow': 8, 'sync_channels': 1,
'default_channels': 8, 'black': 1, 'cyan': 8,
'all_channels': 8, 'green': 8, 'magenta': 8, 'red': 8,
'gray_channels': 8, 'rgb': 8
}
with Image(filename=str(fx_asset.join('google.ico'))) as i:
assert dict(i.channel_depths) == {
'blue': 8, 'gray': 8, 'true_alpha': 1, 'opacity': 1,
'undefined': 1, 'composite_channels': 8, 'index': 1,
'rgb_channels': 8, 'alpha': 1, 'yellow': 8, 'sync_channels': 1,
'default_channels': 8, 'black': 1, 'cyan': 8, 'all_channels': 8,
'green': 8, 'magenta': 8, 'red': 8, 'gray_channels': 8, 'rgb': 8
}
def test_channel_images(fx_asset):
with Image(filename=str(fx_asset.join('sasha.jpg'))) as i:
i.format = 'png'
channels = ('opacity', 'alpha',)
# Only include TrueAlphaChannel if IM6, as its deprecated & unused
# in IM7.
if MAGICK_VERSION_NUMBER < 0x700:
channels = channels + ('true_alpha',)
for name in channels:
expected_path = str(fx_asset.join('channel_images', name + '.png'))
with Image(filename=expected_path) as expected:
if MAGICK_VERSION_NUMBER >= 0x700:
# With IM7, channels are dynamic & influence signatures.
# We'll need to compare the first channel of the expected
# PNG with the extracted channel.
first_channel = expected.channel_images['red']
assert i.channel_images[name] == first_channel
else:
assert i.channel_images[name] == expected
def test_colors(fx_asset):
with Image(filename=str(fx_asset.join('trim-color-test.png'))) as img:
assert img.colors == 2
def test_colorspace_get(fx_asset):
"""Gets the image colorspace"""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.colorspace.endswith('rgb')
def test_colorspace_set(fx_asset):
"""Sets the image colorspace"""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.colorspace = 'cmyk'
assert img.colorspace == 'cmyk'
def test_compose(fx_asset):
with Image(filename=str(fx_asset.join('sasha.jpg'))) as img:
assert img.compose == 'over'
img.compose = 'blend'
assert img.compose == 'blend'
with raises(TypeError):
img.compose = 0xDEADBEEF
with raises(ValueError):
img.compose = 'none'
def test_compression(fx_asset):
with Image(filename=str(fx_asset.join('sasha.jpg'))) as img:
# Legacy releases/library asserted ``'group4'`` compression type.
# IM 7 will correctly report ``'jpeg'``, but ``'group4'`` should
# still be apart of regression acceptance.
assert img.compression in ('group4', 'jpeg')
img.compression = 'zip'
assert img.compression == 'zip'
with raises(TypeError):
img.compression = 0x60
def test_compression_quality_get(fx_asset):
"""Gets the image compression quality."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.compression_quality == 80
def test_compression_quality_set(fx_asset):
"""Sets the image compression quality."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.compression_quality = 50
assert img.compression_quality == 50
strio = io.BytesIO()
img.save(file=strio)
strio.seek(0)
with Image(file=strio) as jpg:
assert jpg.compression_quality == 50
with raises(TypeError):
img.compression_quality = 'high'
def test_delay_set_get(fx_asset):
with Image(filename=str(fx_asset.join('nocomments.gif'))) as img:
img.delay = 10
assert img.delay == 10
def test_depth_get(fx_asset):
"""Gets the image depth"""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.depth == 8
def test_depth_set(fx_asset):
"""Sets the image depth"""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.depth = 16
assert img.depth == 16
def test_dispose(fx_asset):
with Image(filename=str(fx_asset.join('nocomments.gif'))) as img:
assert img.dispose == 'none'
img.dispose = 'background'
assert img.dispose == 'background'
def test_font_set(fx_asset):
with Image(width=144, height=192, background=Color('#1e50a2')) as img:
font = Font(
path=str(fx_asset.join('League_Gothic.otf')),
color=Color('gold'),
size=12,
antialias=False
)
img.font = font
assert img.font_path == font.path
assert img.font_size == font.size
assert img.font_color == font.color
assert img.antialias == font.antialias
assert img.font == font
assert repr(img.font)
fontStroke = Font(
path=str(fx_asset.join('League_Gothic.otf')),
stroke_color=Color('ORANGE'),
stroke_width=1.5
)
img.font = fontStroke
assert img.stroke_color == fontStroke.stroke_color
assert img.stroke_width == fontStroke.stroke_width
img.font_color = 'gold'
assert img.font_color == Color('gold')
img.stroke_color = 'gold'
assert img.stroke_color == Color('gold')
fontColor = Font(
path=str(fx_asset.join('League_Gothic.otf')),
color='YELLOW',
stroke_color='PINK'
)
img.font = fontColor
assert img.font_color == Color('YELLOW')
assert img.stroke_color == Color('PINK')
with raises(ValueError):
img.font_size = -99
def test_format_get(fx_asset):
"""Gets the image format."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.format == 'JPEG'
with Image(filename=str(fx_asset.join('croptest.png'))) as img:
assert img.format == 'PNG'
def test_format_set(fx_asset):
"""Sets the image format."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.format = 'png'
assert img.format == 'PNG'
strio = io.BytesIO()
img.save(file=strio)
strio.seek(0)
with Image(file=strio) as png:
assert png.format == 'PNG'
with raises(ValueError):
img.format = 'HONG'
with raises(TypeError):
img.format = 123
def test_fuzz():
with Image(filename='rose:') as img:
assert img.fuzz == 0.0
img.fuzz = img.quantum_range
assert img.fuzz == img.quantum_range
def test_gravity_set():
with Image(width=144, height=192, background=Color('#1e50a2')) as img:
img.gravity = 'center'
assert img.gravity == 'center'
def test_histogram(fx_asset):
with Image(filename=str(fx_asset.join('trim-color-test.png'))) as a:
h = a.histogram
assert len(h) == 2
assert frozenset(h) == frozenset([
            Color('srgb(0,255,0)'),
            Color('srgb(0,0,255)')
])
assert dict(h) == {
            Color('srgb(0,255,0)'): 5000,
            Color('srgb(0,0,255)'): 5000,
}
assert Color('white') not in h
assert Color('srgb(0,255,0)') in h
assert Color('srgb(0,0,255)') in h
assert h[Color('srgb(0,255,0)')] == 5000
assert h[Color('srgb(0,0,255)')] == 5000
def test_interlace_scheme_get(fx_asset):
with Image(filename='rose:') as img:
expected = 'no'
assert img.interlace_scheme == expected
def test_interlace_scheme_set(fx_asset):
with Image(filename='rose:') as img:
expected = 'plane'
img.interlace_scheme = expected
assert img.interlace_scheme == expected
def test_interpolate_method_get(fx_asset):
with Image(filename='rose:') as img:
expected = 'undefined'
assert img.interpolate_method == expected
def test_interpolate_method_set(fx_asset):
with Image(filename='rose:') as img:
expected = 'spline'
img.interpolate_method = expected
assert img.interpolate_method == expected
def test_kurtosis():
with Image(filename='rose:') as img:
kurtosis = img.kurtosis
assert isinstance(kurtosis, numbers.Real)
assert kurtosis != 0.0
def test_length_of_bytes():
with Image(filename='rose:') as img:
assert img.length_of_bytes > 0
img.resample(300, 300)
assert img.length_of_bytes == 0
def test_loop(fx_asset):
with Image(filename=str(fx_asset.join('nocomments.gif'))) as img:
assert img.loop == 0
img.loop = 1
assert img.loop == 1
def test_matte_color(fx_asset):
with Image(filename='rose:') as img:
with Color('navy') as color:
img.matte_color = color
assert img.matte_color == color
with raises(TypeError):
img.matte_color = False
img.matte_color = 'orange'
assert img.matte_color == Color('orange')
def test_mean():
with Image(filename='rose:') as img:
mean = img.mean
assert isinstance(mean, numbers.Real)
assert mean != 0.0
def test_metadata(fx_asset):
"""Test metadata api"""
with Image(filename=str(fx_asset.join('beach.jpg'))) as img:
assert 52 <= len(img.metadata) <= 55
for key in img.metadata:
assert isinstance(key, string_type)
assert 'exif:ApertureValue' in img.metadata
assert 'exif:UnknownValue' not in img.metadata
assert img.metadata['exif:ApertureValue'] == '192/32'
assert img.metadata.get('exif:UnknownValue', "IDK") == "IDK"
def test_mimetype(fx_asset):
"""Gets mimetypes of the image."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.mimetype in ('image/jpeg', 'image/x-jpeg')
with Image(filename=str(fx_asset.join('croptest.png'))) as img:
assert img.mimetype in ('image/png', 'image/x-png')
def test_minima_maxima():
with Image(filename='rose:') as img:
min_q = img.minima
max_q = img.maxima
assert min_q < max_q
def test_orientation_get(fx_asset):
with Image(filename=str(fx_asset.join('sasha.jpg'))) as img:
assert img.orientation == 'undefined'
with Image(filename=str(fx_asset.join('beach.jpg'))) as img:
assert img.orientation == 'top_left'
def test_orientation_set(fx_asset):
with Image(filename=str(fx_asset.join('beach.jpg'))) as img:
img.orientation = 'bottom_right'
assert img.orientation == 'bottom_right'
def test_page_basic(fx_asset):
with Image(filename=str(fx_asset.join('watermark.png'))) as img1:
assert img1.page == (640, 480, 0, 0)
assert img1.page_width == 640
assert img1.page_height == 480
assert img1.page_x == 0
assert img1.page_y == 0
with raises(TypeError):
img1.page = 640
def test_page_offset(fx_asset):
with Image(filename=str(fx_asset.join('watermark-offset.png'))) as img1:
assert img1.page == (640, 480, 12, 13)
assert img1.page_width == 640
assert img1.page_height == 480
assert img1.page_x == 12
assert img1.page_y == 13
def test_page_setter(fx_asset):
with Image(filename=str(fx_asset.join('watermark.png'))) as img1:
assert img1.page == (640, 480, 0, 0)
img1.page = (640, 480, 0, 0)
assert img1.page == (640, 480, 0, 0)
img1.page = (640, 480, 12, 13)
assert img1.page == (640, 480, 12, 13)
img1.page = (640, 480, -12, 13)
assert img1.page == (640, 480, -12, 13)
img1.page = (640, 480, 12, -13)
assert img1.page == (640, 480, 12, -13)
img1.page = (6400, 4800, 2, 3)
assert img1.page == (6400, 4800, 2, 3)
def test_page_setter_items(fx_asset):
with Image(filename=str(fx_asset.join('watermark.png'))) as img1:
assert img1.page == (640, 480, 0, 0)
img1.page_width = 6400
assert img1.page == (6400, 480, 0, 0)
img1.page_height = 4800
assert img1.page == (6400, 4800, 0, 0)
img1.page_x = 12
assert img1.page == (6400, 4800, 12, 0)
img1.page_y = 13
assert img1.page == (6400, 4800, 12, 13)
img1.page_x = -12
assert img1.page == (6400, 4800, -12, 13)
img1.page_y = -13
assert img1.page == (6400, 4800, -12, -13)
def test_page_setter_papersize():
with Image(filename='rose:') as img:
img.page = 'a4'
assert img.page == (595, 842, 0, 0)
img.page = 'badvalue'
assert img.page == (0, 0, 0, 0)
def test_primary_points(fx_asset):
with Image(filename='rose:') as img:
blue = [d/2 for d in img.blue_primary]
img.blue_primary = blue
assert blue == list(img.blue_primary)
green = [d/2 for d in img.green_primary]
img.green_primary = green
assert green == list(img.green_primary)
red = [d/2 for d in img.red_primary]
img.red_primary = red
assert red == list(img.red_primary)
white = [d/2 for d in img.white_point]
img.white_point = white
assert white == list(img.white_point)
with raises(TypeError):
img.blue_primary = 0xDEADBEEF
with raises(TypeError):
img.green_primary = 0xDEADBEEF
with raises(TypeError):
img.red_primary = 0xDEADBEEF
with raises(TypeError):
img.white_point = 0xDEADBEEF
def test_profiles(fx_asset):
with Image(filename=str(fx_asset.join('beach.jpg'))) as img:
assert len(img.profiles) == 1
assert 'exif' in [d for d in img.profiles]
exif_data = img.profiles['exif']
assert exif_data is not None
del img.profiles['exif']
assert img.profiles['exif'] is None
img.profiles['exif'] = exif_data
assert img.profiles['exif'] == exif_data
with raises(TypeError):
img.profiles[0xDEADBEEF]
with raises(TypeError):
del img.profiles[0xDEADBEEF]
with raises(TypeError):
img.profiles[0xDEADBEEF] = 0xDEADBEEF
with raises(TypeError):
img.profiles['exif'] = 0xDEADBEEF
def test_rendering_intent(fx_asset):
with Image(filename=str(fx_asset.join('trimtest.png'))) as img:
assert img.rendering_intent == 'perceptual'
img.rendering_intent = 'relative'
assert img.rendering_intent == 'relative'
def test_resolution_get(fx_asset):
"""Gets image resolution."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.resolution == (72, 72)
def test_resolution_set_01(fx_asset):
"""Sets image resolution."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.resolution = (100, 100)
assert img.resolution == (100, 100)
def test_resolution_set_02(fx_asset):
"""Sets image resolution with integer as parameter."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.resolution = 100
assert img.resolution == (100, 100)
def test_resolution_set_03():
"""Sets image resolution on constructor"""
with Image(filename='rose:', resolution=(100, 100)) as img:
assert img.resolution == (100, 100)
def test_resolution_set_04():
"""Sets image resolution on constructor with integer as parameter."""
with Image(filename='rose:', resolution=100) as img:
assert img.resolution == (100, 100)
def test_sampling_factors():
with Image(filename='rose:') as img:
img.sampling_factors = "4:2:2"
assert img.sampling_factors == (2, 1)
with raises(TypeError):
img.sampling_factors = {}
def test_scene():
with Image(filename='rose:') as img:
img.scene = 4
assert img.scene == 4
def test_signature(fx_asset):
"""Gets the image signature."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
with fx_asset.join('mona-lisa.jpg').open('rb') as f:
with Image(file=f) as same:
assert img.signature == same.signature
with img.convert('png') as same:
assert img.signature == same.signature
with Image(filename=str(fx_asset.join('beach.jpg'))) as diff:
assert img.signature != diff.signature
def test_size(fx_asset):
"""Gets the image size."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.size == (402, 599)
assert img.width == 402
assert img.height == 599
assert len(img) == 599
def test_skewness():
with Image(filename='rose:') as img:
skewness = img.skewness
assert isinstance(skewness, numbers.Real)
assert skewness != 0.0
def test_standard_deviation():
with Image(filename='rose:') as img:
standard_deviation = img.standard_deviation
assert isinstance(standard_deviation, numbers.Real)
assert standard_deviation != 0.0
def test_stroke_color_user_error():
with Image(filename='rose:') as img:
img.stroke_color = 'green'
img.stroke_color = None
assert img.stroke_color is None
with raises(TypeError):
img.stroke_color = 0xDEADBEEF
def test_type_get(fx_asset):
"""Gets the image type."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.type == "truecolor"
img.alpha_channel = True
if MAGICK_VERSION_NUMBER < 0x700:
expected = "truecolormatte"
else:
expected = "truecoloralpha"
assert img.type == expected
def test_type_set(fx_asset):
"""Sets the image type."""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.type = "grayscale"
assert img.type == "grayscale"
def test_ticks_per_second(fx_asset):
with Image(filename=str(fx_asset.join('nocomments.gif'))) as img:
assert img.ticks_per_second == 100
img.ticks_per_second = 10
assert img.ticks_per_second == 10
def test_units_get(fx_asset):
"""Gets the image resolution units."""
with Image(filename=str(fx_asset.join('beach.jpg'))) as img:
assert img.units == "pixelsperinch"
with Image(filename=str(fx_asset.join('sasha.jpg'))) as img:
assert img.units == "undefined"
def test_units_set(fx_asset):
"""Sets the image resolution units."""
with Image(filename=str(fx_asset.join('watermark.png'))) as img:
img.units = "pixelspercentimeter"
assert img.units == "pixelspercentimeter"
def test_virtual_pixel_get(fx_asset):
"""Gets image virtual pixel"""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
assert img.virtual_pixel == "undefined"
def test_virtual_pixel_set(fx_asset):
"""Sets image virtual pixel"""
with Image(filename=str(fx_asset.join('mona-lisa.jpg'))) as img:
img.virtual_pixel = "tile"
assert img.virtual_pixel == "tile"
with raises(ValueError):
img.virtual_pixel = "nothing"
| mit | -6,858,445,241,132,757,000 | 32.369532 | 79 | 0.599349 | false |
vossman/ctfeval | pyami/mem.py | 1 | 2238 | #!/usr/bin/env python
import os
def meminfo2dict():
if not os.path.exists('/proc/meminfo'):
return None
f = open('/proc/meminfo', 'r')
lines = f.readlines()
f.close()
info = {}
for line in lines:
line = line[:-1]
parts = line.split(':')
key = parts[0]
value = parts[1].strip()
value = value.split()
value = int(value[0])
info[key] = value
return info
def stats(meminfo=meminfo2dict()):
    # note: the default argument is evaluated once, at import time
    if meminfo is None:
        return
    total = meminfo['MemTotal']
    free = meminfo['MemFree']
    used = total - free
    buffers = meminfo['Buffers']
    cached = meminfo['Cached']
    used2 = used - buffers - cached
    free2 = free + buffers + cached
    swaptotal = meminfo['SwapTotal']
    swapfree = meminfo['SwapFree']
    swapused = swaptotal - swapfree
    print '%10d%10d%10d%10d%10d' % (total, used, free, buffers, cached)
    print '%20d%10d' % (used2, free2)
    print '%10d%10d%10d' % (swaptotal, swapused, swapfree)
    return meminfo
def used():
meminfo = meminfo2dict()
used = meminfo['MemTotal'] - meminfo['MemFree']
return used
def active():
    # Short-circuited on purpose: always reports 0 "active" memory.
    # The original computation is kept below for reference but never runs.
    return 0
    meminfo = meminfo2dict()
    used = meminfo['MemTotal'] - meminfo['MemFree'] - meminfo['Cached']
    return used
def free():
meminfo = meminfo2dict()
free = meminfo['MemFree'] + meminfo['Cached']
return free
def total():
meminfo = meminfo2dict()
total = meminfo['MemTotal']
return total
def swapused():
meminfo = meminfo2dict()
used = meminfo['SwapTotal'] - meminfo['SwapFree']
return used
def swapfree():
meminfo = meminfo2dict()
free = meminfo['SwapFree']
return free
def swaptotal():
meminfo = meminfo2dict()
total = meminfo['SwapTotal']
return total
multdict = {
'b': 1,
'kb': 1024,
'mb': 1024*1024,
'gb': 1024*1024*1024,
}
def procStatus(pid=None):
if pid is None:
pid = os.getpid()
f = open('/proc/%d/status' % (pid,))
statuslines = f.readlines()
f.close()
vm = {}
for statusline in statuslines:
fields = statusline.split()
if fields[0][:2] == 'Vm':
name = fields[0][:-1]
value = int(fields[1])
mult = multdict[fields[2].lower()]
vm[name] = mult*value
return vm
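# Example of what procStatus() returns (values are illustrative):
#
#   {'VmPeak': 123456512, 'VmSize': 120000512, 'VmRSS': 45000704, ...}
#
# i.e. every Vm* entry from /proc/<pid>/status, converted to bytes.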
def mySize():
status = procStatus()
return status['VmRSS']
def test():
mypid = os.getpid()
print 'mypid', mypid
print mySize()
if __name__ == '__main__':
#print used()
test()
| apache-2.0 | -7,636,281,603,136,761,000 | 18.982143 | 68 | 0.650581 | false |
tgbugs/hypush | hyputils/subscribe.py | 1 | 9535 | #!/usr/bin/env python3.6
import os
import asyncio
import ssl
import uuid
import json
from os import environ
from socket import socketpair
from threading import Thread
import certifi
import websockets
class Handler:
def __init__(self, filter_handlers):
self.filter_handlers = filter_handlers # list of filterHandlers that should be run on every message
def process(self, message):
if message['type'] == 'annotation-notification':
for fh in self.filter_handlers:
fh(message)
else:
print('NOT ANNOTATION')
print(message)
class preFilter:
""" Create a filter that will run on the hypothes.is server
Make group empty to default to allow all groups the authed user
is a member of in the hypothes.is system.
"""
def __init__(self, groups=[], users=[], uris=[], tags=[],
create=True, update=True, delete=True,
match_policy='include_any'):
self.create = create
self.update = update
self.delete = delete
#include_all include_any
self.match_policy = match_policy
self.groups = groups
self.users = users
self.uris = uris # NOTE: uri filters must be exact :(
self.tags = tags
self.clause_map = [
('/group', self.groups), # __world__
('/user', self.users),
('/uri', self.uris),
('/tags', self.tags),
]
def _make_clauses(self):
clauses = []
for field, value in self.clause_map:
if value:
clauses.append(
{'field':field,
'case_sensitive':True,
'operator':'one_of',
'options':{},
'value':value,
}
)
return clauses
def export(self):
output = {
'filter':{
'actions':{
'create':self.create,
'update':self.update,
'delete':self.delete,
},
'match_policy':self.match_policy,
'clauses':self._make_clauses(),
},
}
return output
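# Example of the exported payload (single default group, for illustration):
#
#   preFilter(groups=['__world__']).export()
#   # -> {'filter': {'actions': {'create': True, 'update': True,
#   #                            'delete': True},
#   #                'match_policy': 'include_any',
#   #                'clauses': [{'field': '/group',
#   #                             'case_sensitive': True,
#   #                             'operator': 'one_of',
#   #                             'options': {},
#   #                             'value': ['__world__']}]}}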
def _ssl_context(verify=True):
ssl_context = ssl.create_default_context(cafile=certifi.where())
if not verify:
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
return ssl_context
async def setup_connection(websocket):
message = {'messageType': 'client_id',
'value': str(uuid.uuid4()),}
print('SETUP MESSAGE', message)
await websocket.send(json.dumps(message))
async def setup_filters(websocket, filters):
print('SETUP FILTERS\n', json.dumps(filters, indent=2))
await websocket.send(json.dumps(filters))
async def process_messages(websocket, handler):
while True:
response = await websocket.recv()
        msg = None
        try:
            msg = json.loads(response)
        except ValueError:
            pass  # ignore frames that are not valid JSON
        if msg:
            handler.process(msg)
class ExitLoop(Exception):
pass
async def listen_for_exit(reader):
# the managing process will send a message on exit
msg = await reader.readline()
raise ExitLoop(msg.decode())
async def process_or_exit(websock, handler, exit_reader):
process_task = asyncio.ensure_future(process_messages(websock, handler))
exit_task = asyncio.ensure_future(listen_for_exit(exit_reader))
done, pending = await asyncio.wait([process_task, exit_task],
return_when=asyncio.FIRST_EXCEPTION)
future = done.pop()
for task in pending:
task.cancel()
raise future.exception()
def setup_websocket(api_token, filters, filter_handlers,
websocket_endpoint='wss://hypothes.is/ws',
extra_headers=None):
if extra_headers is None:
extra_headers = {}
rsock, wsock = socketpair()
def exit_loop():
try:
# stop the current await
wsock.send(b'Parent processes sent exit\n')
# close the socket and make sure we don't start again
# or more simply, to avoid leaking resources
wsock.close()
except OSError:
pass # socket was already closed
async def ws_loop(loop):
#websocket_endpoint = 'wss://hypothes.is/ws'
#filter_handlers = getFilterHandlers()
handler = Handler(filter_handlers)
ssl_context = _ssl_context(verify=True)
headers = {'Authorization': 'Bearer ' + api_token}
extra_headers.update(headers)
exit_reader, _writer = await asyncio.open_connection(sock=rsock, loop=loop)
while True: # for insurance could also test on closed wsock
print('WE SHOULD GET HERE')
try:
async with websockets.connect(websocket_endpoint,
ssl=ssl_context,
extra_headers=extra_headers) as ws:
await setup_connection(ws)
print(f'websocket connected to {websocket_endpoint}')
await setup_filters(ws, filters)
print('subscribed')
await process_or_exit(ws, handler, exit_reader)
except ExitLoop as e: # for whatever reason the await proceess or exit doesn't work here :/
print(e)
break
except KeyboardInterrupt as e:
break
except (websockets.exceptions.ConnectionClosed, ConnectionResetError) as e:
pass
_writer.close() # prevents ResourceWarning
return ws_loop, exit_loop
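# Typical wiring (sketch; the token value and handler list are assumptions):
#
#   loop = asyncio.get_event_loop()
#   filters = preFilter(groups=['__world__']).export()
#   ws_loop, exit_loop = setup_websocket('MY_TOKEN', filters, [my_handler])
#   loop.run_until_complete(ws_loop(loop))  # call exit_loop() from another
#                                           # thread to shut down cleanly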
class AnnotationStream:
def __init__(self, annos, prefilter, *handler_classes, memoizer=None):
from .hypothesis import api_token
self.api_token = api_token
self.annos = annos
self.filters = prefilter
self.filter_handlers = [handler(self.annos, memoizer=memoizer) for handler in handler_classes]
@staticmethod
def loop_target(loop, ws_loop):
asyncio.set_event_loop(loop)
loop.run_until_complete(ws_loop(loop))
def __call__(self):
loop = asyncio.get_event_loop()
ws_loop, exit_loop = setup_websocket(self.api_token, self.filters, self.filter_handlers)
stream_thread = Thread(target=self.loop_target, args=(loop, ws_loop))
return stream_thread, exit_loop
def main():
from handlers import printHandler, websocketServerHandler
loop = asyncio.get_event_loop()
subscribed = {}
def send_message(d):
for send in subscribed.values():
send(json.dumps(d).encode())
wssh = websocketServerHandler(send_message)
async def incoming_handler(websocket, path):
try:
await websocket.recv() # do nothing except allow us to detect unsubscribe
except websockets.exceptions.ConnectionClosed as e:
pass # working as expected
async def outgoing_handler(websocket, path, reader):
while True:
message = await reader.readline()
await websocket.send(message.decode())
async def conn_handler(websocket, path, reader):
i_task = asyncio.ensure_future(incoming_handler(websocket, path))
o_task = asyncio.ensure_future(outgoing_handler(websocket, path, reader))
done, pending = await asyncio.wait([i_task, o_task], return_when=asyncio.FIRST_COMPLETED)
for task in pending:
task.cancel()
async def subscribe(websocket, path):
name = await websocket.recv() # this is not needed...
print(f"< {name}")
greeting = json.dumps(f"Hello {name}! You are now subscribed to cat facts!{{}}"
f"{list(subscribed)} are also subscribed to cat facts!")
greeting = greeting.format('\n')
rsock, wsock = socketpair()
reader, writer = await asyncio.open_connection(sock=rsock, loop=loop)
for send_something in subscribed.values():
msg = json.dumps(f'{name} also subscribed to cat facts!').encode()
send_something(msg)
def send(bytes_, s=wsock.send):
s(bytes_)
s(b'\n')
subscribed[name] = send # _very_ FIXME NOTE this is how we know where to route all our messages
await websocket.send(greeting)
print(f"> {greeting}")
# we now wait here for something else to happen, in this case
# either there is a subscription or an unsubscription
await conn_handler(websocket, path, reader) # when this completes the connection is closed
subscribed.pop(name)
for send_something in subscribed.values():
msg = json.dumps(f'{name} unsubscribed from cat facts!').encode()
send_something(msg)
start_server = websockets.serve(subscribe, 'localhost', 5050)
loop.run_until_complete(start_server) # TODO need this wrapped so that loop can be passed in
api_token = environ.get('HYP_API_TOKEN', 'TOKEN')
groups = environ.get('HYP_GROUPS', '__world__').split(' ')
filters = preFilter(groups=groups).export()
filter_handlers = [printHandler(), wssh]
print(groups)
ws_loop, exit_loop = setup_websocket(api_token, filters, filter_handlers)
loop.run_until_complete(ws_loop(loop))
if __name__ == '__main__':
main()
| mit | -8,656,371,062,877,171,000 | 32.45614 | 108 | 0.589512 | false |
cmr/tbot | tests/test_event_routing.py | 1 | 1818 | from mock import MagicMock
from tbot._common import EventRouter, IRCEvent
# The following two tests cover the basic matched/unmatched routing paths.
def test_routing_matched_event():
e = IRCEvent("foo", None, None)
er = EventRouter()
cb = MagicMock()
er.register(cb, {'type': 'foo'})
er.fire(e)
assert cb.called, "Callback not fired"
def test_routing_unmatched_event():
e = IRCEvent(None, None, None)
er = EventRouter()
cb = MagicMock()
er.register(cb, {'type': "not empty"})
er.fire(e)
assert not cb.called, "Callback fired when event doesn't match filter"
def test_routing_true_callable():
e = IRCEvent(None, None, None)
er = EventRouter()
event_filter = MagicMock(return_value=True)
cb = MagicMock()
er.register(cb, event_filter)
er.fire(e)
assert event_filter.called, "Event filter not called"
assert cb.called, "Callback not fired"
def test_routing_false_callable():
e = IRCEvent(None, None, None)
er = EventRouter()
event_filter = MagicMock(return_value=False)
cb = MagicMock()
er.register(cb, event_filter)
er.fire(e)
assert not cb.called, "Callback fired when event_filter is false"
def test_routing_true_event_callables():
e = IRCEvent(None, None, None)
er = EventRouter()
cb = MagicMock()
filter_ = MagicMock(return_value=True)
er.register(cb, {'type': filter_})
er.fire(e)
assert cb.called, "Callback not fired"
assert filter_.called, "Filter not called"
def test_routing_false_event_callables():
e = IRCEvent(None, None, None)
er = EventRouter()
cb = MagicMock()
filter_ = MagicMock(return_value=False)
er.register(cb, {'type': filter_})
er.fire(e)
assert not cb.called, "Callback fired when filter doesn't match"
assert filter_.called, "Filter not called"
| bsd-3-clause | 1,456,609,157,313,447,700 | 21.725 | 74 | 0.650715 | false |
mortbauer/verein | wsgi/login.py | 1 | 1799 | import bcrypt
from functools import wraps
from flask import Blueprint, current_app,request, session, Response,g
from utils import send_response
mod = Blueprint('login', __name__, url_prefix='/login')
def authenticate():
"""Sends a 401 response that enables basic auth"""
return Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
def check_valid_auth():
if not session.get('user_id'):
return authenticate()
else:
g.user = current_app.db.users.find_one({'user_id':session['user_id']})
def requires_auth(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        # check_valid_auth() returns a 401 response when no user is logged
        # in; short-circuit instead of calling the view anyway.
        error = check_valid_auth()
        if error is not None:
            return error
        return f(*args, **kwargs)
    return decorated
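# Hypothetical use of the decorator on another view:
#
#   @mod.route('/me')
#   @requires_auth
#   def me():
#       return send_response({'username': g.user['username']})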
@mod.route("/", methods=["GET", "POST"])
def login():
if session.get('user_id'):
return send_response({'message':'you are already logged in'})
auth = request.authorization
if auth:
username = auth.username
password = auth.password
user = current_app.db.users.find_one({'username':username})
if user:
if current_app.bcrypt.check_password_hash(user['password'], password):
session['user_id'] = user['user_id']
return send_response({'message':'login successful'})
else:
return send_response({'message':'wrong passphrase'},status=400)
else:
if not username:
return send_response({'message':'provide a username'},status=400)
else:
return send_response({'message':'unknown user "{0}"'.format(username)},status=400)
else:
return send_response({'message':'username and password required'},status=400)
| gpl-2.0 | 3,872,796,502,596,405,000 | 33.596154 | 98 | 0.620345 | false |
boto/s3transfer | tests/functional/test_upload.py | 1 | 21675 | # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import time
import tempfile
import shutil
import mock
from botocore.client import Config
from botocore.exceptions import ClientError
from botocore.awsrequest import AWSRequest
from botocore.stub import ANY
from tests import BaseGeneralInterfaceTest
from tests import RecordingSubscriber
from tests import RecordingOSUtils
from tests import NonSeekableReader
from s3transfer.compat import six
from s3transfer.manager import TransferManager
from s3transfer.manager import TransferConfig
from s3transfer.utils import ChunksizeAdjuster
class BaseUploadTest(BaseGeneralInterfaceTest):
def setUp(self):
super(BaseUploadTest, self).setUp()
# TODO: We do not want to use the real MIN_UPLOAD_CHUNKSIZE
# when we're adjusting parts.
# This is really wasteful and fails CI builds because self.contents
# would normally use 10MB+ of memory.
# Until there's an API to configure this, we're patching this with
# a min size of 1. We can't patch MIN_UPLOAD_CHUNKSIZE directly
# because it's already bound to a default value in the
# chunksize adjuster. Instead we need to patch out the
# chunksize adjuster class.
self.adjuster_patch = mock.patch(
's3transfer.upload.ChunksizeAdjuster',
lambda: ChunksizeAdjuster(min_size=1))
self.adjuster_patch.start()
self.config = TransferConfig(max_request_concurrency=1)
self._manager = TransferManager(self.client, self.config)
# Create a temporary directory with files to read from
self.tempdir = tempfile.mkdtemp()
self.filename = os.path.join(self.tempdir, 'myfile')
self.content = b'my content'
with open(self.filename, 'wb') as f:
f.write(self.content)
# Initialize some default arguments
self.bucket = 'mybucket'
self.key = 'mykey'
self.extra_args = {}
self.subscribers = []
# A list to keep track of all of the bodies sent over the wire
# and their order.
self.sent_bodies = []
self.client.meta.events.register(
'before-parameter-build.s3.*', self.collect_body)
def tearDown(self):
super(BaseUploadTest, self).tearDown()
shutil.rmtree(self.tempdir)
self.adjuster_patch.stop()
def collect_body(self, params, model, **kwargs):
# A handler to simulate the reading of the body including the
# request-created event that signals to simulate the progress
# callbacks
if 'Body' in params:
# TODO: This is not ideal. Need to figure out a better idea of
# simulating reading of the request across the wire to trigger
# progress callbacks
request = AWSRequest(
method='PUT', url='https://s3.amazonaws.com',
data=params['Body']
)
self.client.meta.events.emit(
'request-created.s3.%s' % model.name,
request=request, operation_name=model.name
)
self.sent_bodies.append(self._stream_body(params['Body']))
def _stream_body(self, body):
read_amt = 8 * 1024
data = body.read(read_amt)
collected_body = data
while data:
data = body.read(read_amt)
collected_body += data
return collected_body
@property
def manager(self):
return self._manager
@property
def method(self):
return self.manager.upload
def create_call_kwargs(self):
return {
'fileobj': self.filename,
'bucket': self.bucket,
'key': self.key
}
def create_invalid_extra_args(self):
return {
'Foo': 'bar'
}
def create_stubbed_responses(self):
return [{'method': 'put_object', 'service_response': {}}]
def create_expected_progress_callback_info(self):
return [{'bytes_transferred': 10}]
def assert_expected_client_calls_were_correct(self):
# We assert that expected client calls were made by ensuring that
# there are no more pending responses. If there are no more pending
# responses, then all stubbed responses were consumed.
self.stubber.assert_no_pending_responses()
class TestNonMultipartUpload(BaseUploadTest):
__test__ = True
def add_put_object_response_with_default_expected_params(
self, extra_expected_params=None):
expected_params = {
'Body': ANY, 'Bucket': self.bucket, 'Key': self.key
}
if extra_expected_params:
expected_params.update(extra_expected_params)
upload_response = self.create_stubbed_responses()[0]
upload_response['expected_params'] = expected_params
self.stubber.add_response(**upload_response)
def assert_put_object_body_was_correct(self):
self.assertEqual(self.sent_bodies, [self.content])
def test_upload(self):
self.extra_args['RequestPayer'] = 'requester'
self.add_put_object_response_with_default_expected_params(
extra_expected_params={'RequestPayer': 'requester'}
)
future = self.manager.upload(
self.filename, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assert_put_object_body_was_correct()
def test_upload_for_fileobj(self):
self.add_put_object_response_with_default_expected_params()
with open(self.filename, 'rb') as f:
future = self.manager.upload(
f, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assert_put_object_body_was_correct()
def test_upload_for_seekable_filelike_obj(self):
self.add_put_object_response_with_default_expected_params()
bytes_io = six.BytesIO(self.content)
future = self.manager.upload(
bytes_io, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assert_put_object_body_was_correct()
def test_upload_for_seekable_filelike_obj_that_has_been_seeked(self):
self.add_put_object_response_with_default_expected_params()
bytes_io = six.BytesIO(self.content)
seek_pos = 5
bytes_io.seek(seek_pos)
future = self.manager.upload(
bytes_io, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assertEqual(b''.join(self.sent_bodies), self.content[seek_pos:])
def test_upload_for_non_seekable_filelike_obj(self):
self.add_put_object_response_with_default_expected_params()
body = NonSeekableReader(self.content)
future = self.manager.upload(
body, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assert_put_object_body_was_correct()
def test_sigv4_progress_callbacks_invoked_once(self):
# Reset the client and manager to use sigv4
self.reset_stubber_with_new_client(
{'config': Config(signature_version='s3v4')})
self.client.meta.events.register(
'before-parameter-build.s3.*', self.collect_body)
self._manager = TransferManager(self.client, self.config)
# Add the stubbed response.
self.add_put_object_response_with_default_expected_params()
subscriber = RecordingSubscriber()
future = self.manager.upload(
self.filename, self.bucket, self.key, subscribers=[subscriber])
future.result()
self.assert_expected_client_calls_were_correct()
        # The number of bytes seen should be the same as the file size
self.assertEqual(subscriber.calculate_bytes_seen(), len(self.content))
def test_uses_provided_osutil(self):
osutil = RecordingOSUtils()
# Use the recording os utility for the transfer manager
self._manager = TransferManager(self.client, self.config, osutil)
self.add_put_object_response_with_default_expected_params()
future = self.manager.upload(self.filename, self.bucket, self.key)
future.result()
# The upload should have used the os utility. We check this by making
# sure that the recorded opens are as expected.
expected_opens = [(self.filename, 'rb')]
self.assertEqual(osutil.open_records, expected_opens)
def test_allowed_upload_params_are_valid(self):
op_model = self.client.meta.service_model.operation_model('PutObject')
for allowed_upload_arg in self._manager.ALLOWED_UPLOAD_ARGS:
self.assertIn(allowed_upload_arg, op_model.input_shape.members)
def test_upload_with_bandwidth_limiter(self):
self.content = b'a' * 1024 * 1024
with open(self.filename, 'wb') as f:
f.write(self.content)
self.config = TransferConfig(
max_request_concurrency=1, max_bandwidth=len(self.content)/2)
self._manager = TransferManager(self.client, self.config)
self.add_put_object_response_with_default_expected_params()
start = time.time()
future = self.manager.upload(self.filename, self.bucket, self.key)
future.result()
        # This is just a smoke test to make sure that the limiter is being
        # used, not a check of its exactness. We set the maximum bandwidth to
        # len(content)/2 per second and make sure the transfer is noticeably
        # slower. Ideally it would take more than two seconds, but since
        # bandwidth tracking is not entirely accurate at the very start of a
        # transfer, we allow some flexibility by expecting only half of the
        # theoretical time.
self.assertGreaterEqual(time.time() - start, 1)
self.assert_expected_client_calls_were_correct()
self.assert_put_object_body_was_correct()
def test_raise_exception_on_s3_object_lambda_resource(self):
s3_object_lambda_arn = (
'arn:aws:s3-object-lambda:us-west-2:123456789012:'
'accesspoint:my-accesspoint'
)
with self.assertRaisesRegexp(ValueError, 'methods do not support'):
self.manager.upload(self.filename, s3_object_lambda_arn, self.key)
class TestMultipartUpload(BaseUploadTest):
__test__ = True
def setUp(self):
super(TestMultipartUpload, self).setUp()
self.chunksize = 4
self.config = TransferConfig(
max_request_concurrency=1, multipart_threshold=1,
multipart_chunksize=self.chunksize)
self._manager = TransferManager(self.client, self.config)
self.multipart_id = 'my-upload-id'
def create_stubbed_responses(self):
return [
{'method': 'create_multipart_upload',
'service_response': {'UploadId': self.multipart_id}},
{'method': 'upload_part',
'service_response': {'ETag': 'etag-1'}},
{'method': 'upload_part',
'service_response': {'ETag': 'etag-2'}},
{'method': 'upload_part',
'service_response': {'ETag': 'etag-3'}},
{'method': 'complete_multipart_upload', 'service_response': {}}
]
def create_expected_progress_callback_info(self):
return [
{'bytes_transferred': 4},
{'bytes_transferred': 4},
{'bytes_transferred': 2}
]
def assert_upload_part_bodies_were_correct(self):
expected_contents = []
for i in range(0, len(self.content), self.chunksize):
end_i = i + self.chunksize
if end_i > len(self.content):
expected_contents.append(self.content[i:])
else:
expected_contents.append(self.content[i:end_i])
self.assertEqual(self.sent_bodies, expected_contents)
def add_create_multipart_response_with_default_expected_params(
self, extra_expected_params=None):
expected_params = {'Bucket': self.bucket, 'Key': self.key}
if extra_expected_params:
expected_params.update(extra_expected_params)
response = self.create_stubbed_responses()[0]
response['expected_params'] = expected_params
self.stubber.add_response(**response)
def add_upload_part_responses_with_default_expected_params(
self, extra_expected_params=None):
num_parts = 3
upload_part_responses = self.create_stubbed_responses()[1:-1]
for i in range(num_parts):
upload_part_response = upload_part_responses[i]
expected_params = {
'Bucket': self.bucket,
'Key': self.key,
'UploadId': self.multipart_id,
'Body': ANY,
'PartNumber': i + 1,
}
if extra_expected_params:
expected_params.update(extra_expected_params)
upload_part_response['expected_params'] = expected_params
self.stubber.add_response(**upload_part_response)
def add_complete_multipart_response_with_default_expected_params(
self, extra_expected_params=None):
expected_params = {
'Bucket': self.bucket,
'Key': self.key, 'UploadId': self.multipart_id,
'MultipartUpload': {
'Parts': [
{'ETag': 'etag-1', 'PartNumber': 1},
{'ETag': 'etag-2', 'PartNumber': 2},
{'ETag': 'etag-3', 'PartNumber': 3}
]
}
}
if extra_expected_params:
expected_params.update(extra_expected_params)
response = self.create_stubbed_responses()[-1]
response['expected_params'] = expected_params
self.stubber.add_response(**response)
def test_upload(self):
self.extra_args['RequestPayer'] = 'requester'
# Add requester pays to the create multipart upload and upload parts.
self.add_create_multipart_response_with_default_expected_params(
extra_expected_params={'RequestPayer': 'requester'})
self.add_upload_part_responses_with_default_expected_params(
extra_expected_params={'RequestPayer': 'requester'})
self.add_complete_multipart_response_with_default_expected_params(
extra_expected_params={'RequestPayer': 'requester'})
future = self.manager.upload(
self.filename, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
def test_upload_for_fileobj(self):
self.add_create_multipart_response_with_default_expected_params()
self.add_upload_part_responses_with_default_expected_params()
self.add_complete_multipart_response_with_default_expected_params()
with open(self.filename, 'rb') as f:
future = self.manager.upload(
f, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assert_upload_part_bodies_were_correct()
def test_upload_for_seekable_filelike_obj(self):
self.add_create_multipart_response_with_default_expected_params()
self.add_upload_part_responses_with_default_expected_params()
self.add_complete_multipart_response_with_default_expected_params()
bytes_io = six.BytesIO(self.content)
future = self.manager.upload(
bytes_io, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assert_upload_part_bodies_were_correct()
def test_upload_for_seekable_filelike_obj_that_has_been_seeked(self):
self.add_create_multipart_response_with_default_expected_params()
self.add_upload_part_responses_with_default_expected_params()
self.add_complete_multipart_response_with_default_expected_params()
bytes_io = six.BytesIO(self.content)
seek_pos = 1
bytes_io.seek(seek_pos)
future = self.manager.upload(
bytes_io, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assertEqual(b''.join(self.sent_bodies), self.content[seek_pos:])
def test_upload_for_non_seekable_filelike_obj(self):
self.add_create_multipart_response_with_default_expected_params()
self.add_upload_part_responses_with_default_expected_params()
self.add_complete_multipart_response_with_default_expected_params()
stream = NonSeekableReader(self.content)
future = self.manager.upload(
stream, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
self.assert_upload_part_bodies_were_correct()
def test_limits_in_memory_chunks_for_fileobj(self):
        # Limit the maximum in-memory chunks to one but make the number of
        # threads more than one. This means that the upload will have to
        # happen sequentially despite having many threads available, because
        # data is sequentially partitioned into chunks in memory and, since
        # there can only ever be one in-memory chunk, each upload part will
        # have to happen one at a time.
self.config.max_request_concurrency = 10
self.config.max_in_memory_upload_chunks = 1
self._manager = TransferManager(self.client, self.config)
# Add some default stubbed responses.
        # These responses are added in order of part number, so if the
        # multipart upload is not done sequentially (which it should be,
        # because we limit the in-memory upload chunks to one), the stubber
        # will raise exceptions for mismatching PartNumber parameters once
        # the upload() method is called on the transfer manager.
        # If there is a mismatch, the stubber error will propagate on
        # the future.result()
self.add_create_multipart_response_with_default_expected_params()
self.add_upload_part_responses_with_default_expected_params()
self.add_complete_multipart_response_with_default_expected_params()
with open(self.filename, 'rb') as f:
future = self.manager.upload(
f, self.bucket, self.key, self.extra_args)
future.result()
# Make sure that the stubber had all of its stubbed responses consumed.
self.assert_expected_client_calls_were_correct()
        # Ensure the contents were uploaded in sequential order by checking
        # that the sent contents were in order.
self.assert_upload_part_bodies_were_correct()
def test_upload_failure_invokes_abort(self):
self.stubber.add_response(
method='create_multipart_upload',
service_response={
'UploadId': self.multipart_id
},
expected_params={
'Bucket': self.bucket,
'Key': self.key
}
)
self.stubber.add_response(
method='upload_part',
service_response={
'ETag': 'etag-1'
},
expected_params={
'Bucket': self.bucket, 'Body': ANY,
'Key': self.key, 'UploadId': self.multipart_id,
'PartNumber': 1
}
)
# With the upload part failing this should immediately initiate
# an abort multipart with no more upload parts called.
self.stubber.add_client_error(method='upload_part')
self.stubber.add_response(
method='abort_multipart_upload',
service_response={},
expected_params={
'Bucket': self.bucket,
'Key': self.key, 'UploadId': self.multipart_id
}
)
future = self.manager.upload(self.filename, self.bucket, self.key)
        # The exception should get propagated to the future and not be
        # a cancelled error or something else.
with self.assertRaises(ClientError):
future.result()
self.assert_expected_client_calls_were_correct()
def test_upload_passes_select_extra_args(self):
self.extra_args['Metadata'] = {'foo': 'bar'}
# Add metadata to expected create multipart upload call
self.add_create_multipart_response_with_default_expected_params(
extra_expected_params={'Metadata': {'foo': 'bar'}})
self.add_upload_part_responses_with_default_expected_params()
self.add_complete_multipart_response_with_default_expected_params()
future = self.manager.upload(
self.filename, self.bucket, self.key, self.extra_args)
future.result()
self.assert_expected_client_calls_were_correct()
| apache-2.0 | 6,815,558,839,211,964,000 | 41.087379 | 79 | 0.634971 | false |
mcflugen/wmt-rest | wmt/flask/tests/test_sims.py | 1 | 2948 | import json
from uuid import uuid4
from wmt.flask import create_app
from nose.tools import (assert_equal, assert_is_instance, assert_dict_equal,
assert_list_equal, assert_less_equal)
from .tools import (assert_401_unauthorized, assert_404_not_found,
assert_403_forbidden, assert_200_success,
assert_204_empty, loads_if_assert_200,
assert_422_unprocessable_entity,
json_post, json_delete, login_or_fail,
AssertIsResourceResponse, AssertIsCollectionResponse)
from . import (app, FAKE_SIM, FAKE_SIM_NAME, FAKE_SIM_MODEL, FAKE_USER,
FAKE_USER_NAME, FAKE_USER1_NAME, FAKE_USER1_PASS)
class AssertIsSimResource(AssertIsResourceResponse):
__type__ = 'sim'
__fields__ = set(['href', 'id', 'name', 'owner', 'status', 'message',
'user', 'model'])
class AssertIsSimCollection(AssertIsCollectionResponse):
__validator__ = AssertIsSimResource()
assert_is_sim_resource = AssertIsSimResource()
assert_is_sim_collection = AssertIsSimCollection()
def test_show():
with app.test_client() as c:
resp = c.get('/sims/')
assert_200_success(resp)
assert_is_sim_collection(resp)
def test_get_existing():
with app.test_client() as c:
resp = c.get('/sims/1')
assert_200_success(resp)
assert_is_sim_resource(resp, name='foobar')
def test_get_non_existing():
with app.test_client() as c:
assert_404_not_found(c.get('/sims/0'))
def test_new_and_delete():
sim_name = str(uuid4())
with app.test_client() as c:
login_or_fail(c, **FAKE_USER)
resp = json_post(c, '/sims/', data=dict(name=sim_name, model=1))
assert_200_success(resp)
assert_is_sim_resource(resp, name=sim_name)
new_sim = json.loads(resp.data)
with app.test_client() as c:
login_or_fail(c, **FAKE_USER)
assert_204_empty(json_delete(c, new_sim['href']))
def test_new_not_logged_in():
with app.test_client() as c:
assert_401_unauthorized(
json_post(c, '/sims/', data=dict(name='a-new-sim')))
def test_new_existing():
with app.test_client() as c:
login_or_fail(c, **FAKE_USER)
assert_200_success(
json_post(c, '/sims/', data=dict(name=FAKE_SIM_NAME,
model=FAKE_SIM_MODEL)))
def test_delete_non_existing():
with app.test_client() as c:
login_or_fail(c, **FAKE_USER)
assert_404_not_found(json_delete(c, '/sims/999999'))
def test_delete_not_logged_in():
with app.test_client() as c:
assert_401_unauthorized(json_delete(c, '/sims/1'))
def test_delete_wrong_user():
with app.test_client() as c:
login_or_fail(c, username=FAKE_USER1_NAME, password=FAKE_USER1_PASS)
assert_403_forbidden(json_delete(c, '/sims/1'))
| mit | 3,351,741,665,556,969,500 | 30.031579 | 76 | 0.604478 | false |
ardi69/pyload-0.4.10 | pyload/plugin/hoster/FilepupNet.py | 1 | 1270 | # -*- coding: utf-8 -*-
#
# Test links:
# http://www.filepup.net/files/k5w4ZVoF1410184283.html
# http://www.filepup.net/files/R4GBq9XH1410186553.html
import re
from pyload.plugin.internal.SimpleHoster import SimpleHoster
class FilepupNet(SimpleHoster):
__name = "FilepupNet"
__type = "hoster"
__version = "0.03"
__pattern = r'http://(?:www\.)?filepup\.net/files/\w+'
__config = [("use_premium", "bool", "Use premium account if available", True)]
__description = """Filepup.net hoster plugin"""
__license = "GPLv3"
__authors = [("zapp-brannigan", "[email protected]"),
("Walter Purcaro", "[email protected]")]
NAME_PATTERN = r'>(?P<N>.+?)</h1>'
SIZE_PATTERN = r'class="fa fa-archive"></i> \((?P<S>[\d.,]+) (?P<U>[\w^_]+)'
OFFLINE_PATTERN = r'>This file has been deleted'
LINK_FREE_PATTERN = r'(http://www\.filepup\.net/get/.+?)\''
def setup(self):
self.multiDL = False
self.chunkLimit = 1
def handle_free(self, pyfile):
m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
self.error(_("Download link not found"))
dl_link = m.group(1)
self.download(dl_link, post={'task': "download"})
| gpl-3.0 | 4,836,978,921,758,395,000 | 27.222222 | 83 | 0.577953 | false |
datakortet/dk | tests/test_import_dk.py | 1 | 1056 | # -*- coding: utf-8 -*-
"""Test that all modules are importable.
"""
import dk.collections.invdict
import dk.collections.mmap
import dk.collections.OrderedSet
import dk.collections.pset
import dk.collections.sdict
import dk.collections.xmlrec
import dk.html.css
import dk.html.html
import dk.html.theme
import dk.html.uhtml
import dk.js.js
import dk.dkimport
import dk.dklogger
import dk.fstr
import dk.gchart
import dk.grid
import dk.proxy
import dk.text
import dk.utidy
import dk.utils
def test_import_dk():
"Test that all modules are importable."
assert dk.collections.invdict
assert dk.collections.mmap
assert dk.collections.OrderedSet
assert dk.collections.pset
assert dk.collections.sdict
assert dk.collections.xmlrec
assert dk.html.css
assert dk.html.html
assert dk.html.theme
assert dk.html.uhtml
assert dk.js.js
assert dk.dkimport
assert dk.dklogger
assert dk.fstr
assert dk.gchart
assert dk.grid
assert dk.proxy
assert dk.text
assert dk.utidy
assert dk.utils
| lgpl-3.0 | 2,760,848,373,896,228,400 | 21 | 43 | 0.741477 | false |
ratoaq2/deluge | deluge/tests/test_web_api.py | 1 | 7231 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2016 bendikro <[email protected]>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
from __future__ import unicode_literals
from io import BytesIO
from twisted.internet import defer, reactor
from twisted.python.failure import Failure
from twisted.web.client import Agent, FileBodyProducer
from twisted.web.http_headers import Headers
from twisted.web.static import File
import deluge.component as component
from deluge.ui.client import client
from . import common
from .common_web import WebServerTestBase
common.disable_new_release_check()
class WebAPITestCase(WebServerTestBase):
def test_connect_invalid_host(self):
d = self.deluge_web.web_api.connect('id')
d.addCallback(self.fail)
d.addErrback(self.assertIsInstance, Failure)
return d
def test_connect(self):
d = self.deluge_web.web_api.connect(self.host_id)
def on_connect(result):
self.assertEqual(type(result), tuple)
self.assertTrue(len(result) > 0)
self.addCleanup(client.disconnect)
return result
d.addCallback(on_connect)
d.addErrback(self.fail)
return d
def test_disconnect(self):
d = self.deluge_web.web_api.connect(self.host_id)
@defer.inlineCallbacks
def on_connect(result):
self.assertTrue(self.deluge_web.web_api.connected())
yield self.deluge_web.web_api.disconnect()
self.assertFalse(self.deluge_web.web_api.connected())
d.addCallback(on_connect)
d.addErrback(self.fail)
return d
def test_get_config(self):
config = self.deluge_web.web_api.get_config()
self.assertEqual(self.webserver_listen_port, config['port'])
def test_set_config(self):
config = self.deluge_web.web_api.get_config()
config['pwd_salt'] = 'new_salt'
config['pwd_sha1'] = 'new_sha'
config['sessions'] = {
'233f23632af0a74748bc5dd1d8717564748877baa16420e6898e17e8aa365e6e': {
'login': 'skrot',
'expires': 1460030877.0,
'level': 10
}
}
self.deluge_web.web_api.set_config(config)
web_config = component.get('DelugeWeb').config.config
self.assertNotEquals(config['pwd_salt'], web_config['pwd_salt'])
self.assertNotEquals(config['pwd_sha1'], web_config['pwd_sha1'])
self.assertNotEquals(config['sessions'], web_config['sessions'])
@defer.inlineCallbacks
def get_host_status(self):
host = list(self.deluge_web.web_api._get_host(self.host_id))
host[3] = 'Online'
host[4] = '2.0.0.dev562'
status = yield self.deluge_web.web_api.get_host_status(self.host_id)
self.assertEqual(status, tuple(status))
def test_get_host(self):
self.assertFalse(self.deluge_web.web_api._get_host('invalid_id'))
conn = list(self.deluge_web.web_api.hostlist.get_hosts_info()[0])
self.assertEqual(self.deluge_web.web_api._get_host(conn[0]), conn[0:4])
def test_add_host(self):
conn = ['abcdef', '10.0.0.1', 0, 'user123', 'pass123']
self.assertFalse(self.deluge_web.web_api._get_host(conn[0]))
# Add valid host
result, host_id = self.deluge_web.web_api.add_host(conn[1], conn[2], conn[3], conn[4])
self.assertEqual(result, True)
conn[0] = host_id
self.assertEqual(self.deluge_web.web_api._get_host(conn[0]), conn[0:4])
# Add already existing host
ret = self.deluge_web.web_api.add_host(conn[1], conn[2], conn[3], conn[4])
self.assertEqual(ret, (False, 'Host details already in hostlist'))
# Add invalid port
conn[2] = 'bad port'
ret = self.deluge_web.web_api.add_host(conn[1], conn[2], conn[3], conn[4])
self.assertEqual(ret, (False, 'Invalid port. Must be an integer'))
def test_remove_host(self):
conn = ['connection_id', '', 0, '', '']
self.deluge_web.web_api.hostlist.config['hosts'].append(conn)
self.assertEqual(self.deluge_web.web_api._get_host(conn[0]), conn[0:4])
# Remove valid host
self.assertTrue(self.deluge_web.web_api.remove_host(conn[0]))
self.assertFalse(self.deluge_web.web_api._get_host(conn[0]))
# Remove non-existing host
self.assertFalse(self.deluge_web.web_api.remove_host(conn[0]))
def test_get_torrent_info(self):
filename = common.get_test_data_file('test.torrent')
ret = self.deluge_web.web_api.get_torrent_info(filename)
self.assertEqual(ret['name'], 'azcvsupdater_2.6.2.jar')
self.assertEqual(ret['info_hash'], 'ab570cdd5a17ea1b61e970bb72047de141bce173')
self.assertTrue('files_tree' in ret)
def test_get_magnet_info(self):
ret = self.deluge_web.web_api.get_magnet_info('magnet:?xt=urn:btih:SU5225URMTUEQLDXQWRB2EQWN6KLTYKN')
self.assertEqual(ret['name'], '953bad769164e8482c7785a21d12166f94b9e14d')
self.assertEqual(ret['info_hash'], '953bad769164e8482c7785a21d12166f94b9e14d')
self.assertTrue('files_tree' in ret)
@defer.inlineCallbacks
def test_get_torrent_files(self):
yield self.deluge_web.web_api.connect(self.host_id)
filename = common.get_test_data_file('test.torrent')
torrents = [{'path': filename, 'options': {'download_location': '/home/deluge/'}}]
yield self.deluge_web.web_api.add_torrents(torrents)
ret = yield self.deluge_web.web_api.get_torrent_files('ab570cdd5a17ea1b61e970bb72047de141bce173')
self.assertEqual(ret['type'], 'dir')
self.assertEqual(
ret['contents'], {
'azcvsupdater_2.6.2.jar': {
'priority': 4, 'index': 0, 'offset': 0, 'progress': 0.0, 'path':
'azcvsupdater_2.6.2.jar', 'type': 'file', 'size': 307949}})
@defer.inlineCallbacks
def test_download_torrent_from_url(self):
filename = 'ubuntu-9.04-desktop-i386.iso.torrent'
self.deluge_web.top_level.putChild(filename, File(common.get_test_data_file(filename)))
url = 'http://localhost:%d/%s' % (self.webserver_listen_port, filename)
res = yield self.deluge_web.web_api.download_torrent_from_url(url)
self.assertTrue(res.endswith(filename))
@defer.inlineCallbacks
def test_invalid_json(self):
"""
If json_api._send_response does not return server.NOT_DONE_YET
this error is thrown when json is invalid:
exceptions.RuntimeError: Request.write called on a request after Request.finish was called.
"""
agent = Agent(reactor)
bad_body = b'{ method": "auth.login" }'
d = yield agent.request(
b'POST',
b'http://127.0.0.1:%s/json' % self.webserver_listen_port,
Headers({b'User-Agent': [b'Twisted Web Client Example'],
b'Content-Type': [b'application/json']}),
FileBodyProducer(BytesIO(bad_body)))
yield d
| gpl-3.0 | -6,263,807,956,593,385,000 | 40.085227 | 109 | 0.637809 | false |
bd808/striker | striker/labsauth/views.py | 1 | 4046 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Wikimedia Foundation and contributors.
# All Rights Reserved.
#
# This file is part of Striker.
#
# Striker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Striker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Striker. If not, see <http://www.gnu.org/licenses/>.
import logging
from django import shortcuts
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import views as auth_views
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _
from ratelimitbackend import views as ratelimit_views
import mwoauth
from striker.labsauth import forms
from striker.labsauth import utils
NEXT_PAGE = 'striker.oauth.next_page'
REQUEST_TOKEN_KEY = 'striker.oauth.request_token'
ACCESS_TOKEN_KEY = 'striker.oauth.access_token'
logger = logging.getLogger(__name__)
def login(req):
resp = ratelimit_views.login(
request=req,
template_name='labsauth/login.html',
authentication_form=forms.LabsAuthenticationForm)
if 'remember_me' in req.POST:
req.session.set_expiry(settings.REMEMBER_ME_TTL)
req.session.save()
return resp
def logout(req):
auth_views.logout(req)
return shortcuts.redirect(urlresolvers.reverse('index'))
def oauth_initiate(req):
"""Initiate an OAuth login."""
next_page = req.GET.get('next', None)
if next_page is not None:
req.session[NEXT_PAGE] = next_page
consumer_token = mwoauth.ConsumerToken(
settings.OAUTH_CONSUMER_KEY, settings.OAUTH_CONSUMER_SECRET)
try:
redirect, request_token = mwoauth.initiate(
settings.OAUTH_MWURL,
consumer_token,
req.build_absolute_uri(
urlresolvers.reverse('labsauth:oauth_callback')))
except Exception:
# FIXME: get upstream to use a narrower exception class
logger.exception('mwoauth.initiate failed')
messages.error(req, _("OAuth handshake failed."))
return shortcuts.redirect(next_page or '/')
else:
# Convert to unicode for session storage
req.session[REQUEST_TOKEN_KEY] = utils.tuple_to_unicode(request_token)
return shortcuts.redirect(redirect)
def oauth_callback(req):
"""OAuth handshake callback."""
serialized_token = req.session.get(REQUEST_TOKEN_KEY, None)
if serialized_token is None:
messages.error(req, _("Session invalid."))
return shortcuts.redirect(
urlresolvers.reverse('labsauth:oauth_initiate'))
# Convert from unicode stored in session to bytes expected by mwoauth
serialized_token = utils.tuple_to_bytes(serialized_token)
consumer_token = mwoauth.ConsumerToken(
settings.OAUTH_CONSUMER_KEY, settings.OAUTH_CONSUMER_SECRET)
request_token = mwoauth.RequestToken(*serialized_token)
access_token = mwoauth.complete(
settings.OAUTH_MWURL,
consumer_token,
request_token,
req.META['QUERY_STRING'])
# Convert to unicode for session storage
req.session[ACCESS_TOKEN_KEY] = utils.tuple_to_unicode(access_token)
req.user.set_accesstoken(access_token)
sul_user = mwoauth.identify(
settings.OAUTH_MWURL, consumer_token, access_token)
req.user.sulname = sul_user['username']
req.user.sulemail = sul_user['email']
req.user.realname = sul_user['realname']
req.user.save()
messages.info(req, _("Authenticated as OAuth user {user}".format(
user=sul_user['username'])))
return shortcuts.redirect(req.session.get(NEXT_PAGE, '/'))
| gpl-3.0 | 982,522,145,362,879,500 | 34.80531 | 78 | 0.703411 | false |
borg-project/borg | borg/domains/sat/__init__.py | 1 | 1212 | """@author: Bryan Silverthorn <[email protected]>"""
import contextlib
import borg
from . import solvers
from . import features
from . import instance
class SatisfiabilityTask(object):
def __init__(self, path):
self.path = path
def clean(self):
pass
@borg.named_domain
class Satisfiability(object):
name = "sat"
extensions = [".cnf"]
@contextlib.contextmanager
def task_from_path(self, task_path):
"""Clean up cached task resources on context exit."""
task = SatisfiabilityTask(task_path)
try:
yield task
except:
raise
finally:
task.clean()
def compute_features(self, task):
return features.get_features_for(task.path)
def is_final(self, task, answer):
"""Is the answer definitive for the task?"""
return answer is not None
def show_answer(self, task, answer):
if answer is None:
print "s UNKNOWN"
return 0
elif answer:
print "s SATISFIABLE"
print "v", " ".join(map(str, answer)), "0"
return 10
else:
print "s UNSATISFIABLE"
return 20
| mit | 4,675,229,339,373,902,000 | 20.263158 | 61 | 0.573432 | false |
kaushik94/unishark | unishark/runner.py | 1 | 12912 | # Copyright 2015 Twitter, Inc and other contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import traceback
import unittest
from unittest.signals import registerResult
import time
from unishark.util import (get_long_class_name, get_long_method_name, get_module_name)
import threading
import concurrent.futures
from collections import deque
from inspect import ismodule
import warnings
import logging
log = logging.getLogger(__name__)
_io = None
if sys.version_info[0] < 3: # python 2.7
_io = __import__('StringIO')
else:
_io = __import__('io')
if _io is None or not ismodule(_io):
raise ImportError
def _make_buffer():
return _io.StringIO()
class _PooledIOBuffer(object):
_lock = threading.RLock()
def __init__(self):
self.buff_queue = deque()
self.buff_queue.append(_make_buffer())
self.buff_dict = dict()
def _get_buff(self):
with _PooledIOBuffer._lock:
if not self.buff_queue:
return _make_buffer()
else:
return self.buff_queue.popleft()
def write(self, *args, **kwargs):
i = threading.current_thread().ident
if i not in self.buff_dict:
buff = self._get_buff()
self.buff_dict[i] = buff
self.buff_dict[i].write(*args, **kwargs)
def getvalue(self, *args, **kwargs):
i = threading.current_thread().ident
return self.buff_dict[i].getvalue(*args, **kwargs) if i in self.buff_dict else None
def flush(self, *args, **kwargs):
i = threading.current_thread().ident
if i in self.buff_dict:
self.buff_dict[i].flush(*args, **kwargs)
def seek(self, *args, **kwargs):
i = threading.current_thread().ident
if i in self.buff_dict:
self.buff_dict[i].seek(*args, **kwargs)
def truncate(self, *args, **kwargs):
i = threading.current_thread().ident
if i in self.buff_dict:
self.buff_dict[i].truncate(*args, **kwargs)
def free(self):
i = threading.current_thread().ident
if i in self.buff_dict:
buff = self.buff_dict.pop(threading.current_thread().ident)
buff.seek(0)
buff.truncate()
self.buff_queue.append(buff)
_io_buffer = _PooledIOBuffer()
out = _io_buffer
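# Usage sketch (not part of the original module): test code writes to the
# module-level ``out`` (e.g. ``unishark.out``, assuming the package re-exports
# it), so each worker thread's output is captured independently:
#
#   import logging, unishark
#   handler = logging.StreamHandler(unishark.out)
#   logging.getLogger('my_tests').addHandler(handler)
#
# BufferedTestResult then snapshots the calling thread's buffer with
# _io_buffer.getvalue() and releases it back to the pool in stopTest().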
PASS = 0
SKIPPED = 1
ERROR = 2
FAIL = 3
EXPECTED_FAIL = 4
UNEXPECTED_PASS = 5
class BufferedTestResult(unittest.TextTestResult):
def __init__(self, stream, descriptions, verbosity):
super(BufferedTestResult, self).__init__(stream, descriptions, verbosity)
self.buffer = False
# key = test class name, value = a list of results.
# One result is a tuple like (test method name, method doc, duration, status, output, traceback)
self.results = dict()
self.start_time = 0.0
self.sum_duration = 0.0
self.successes = 0
self.name = 'test'
self.descriptions = ''
def _add_result(self, test, duration, status, output, trace_back):
mod_name = get_module_name(test)
cls_name = get_long_class_name(test)
if mod_name not in self.results:
self.results[mod_name] = dict()
if cls_name not in self.results[mod_name]:
self.results[mod_name][cls_name] = []
test_name, test_doc = self.__class__._get_test_info(test)
output = output or 'No Log\n'
trace_back = trace_back or 'No Exception\n'
self.results[mod_name][cls_name].append((test_name, test_doc, duration, status, output, trace_back))
@staticmethod
def _get_test_info(test):
test_name = get_long_method_name(test)
test_doc = getattr(test, '_testMethodDoc')
return test_name, test_doc or 'No Method Doc\n'
def _exc_info_to_string(self, error, test):
"""Almost the same as its base class implementation, except eliminating the mirror output"""
exctype, value, tb = error
# Skip test runner traceback levels
while tb and self._is_relevant_tb_level(tb):
tb = tb.tb_next
if exctype is test.failureException:
# Skip assert*() traceback levels
length = self._count_relevant_tb_levels(tb)
msg_lines = traceback.format_exception(exctype, value, tb, length)
else:
msg_lines = traceback.format_exception(exctype, value, tb)
return ''.join(msg_lines)
def startTest(self, test):
super(BufferedTestResult, self).startTest(test)
self.start_time = time.time()
def stopTest(self, test):
self._mirrorOutput = False
_io_buffer.free()
def addSuccess(self, test):
duration = time.time() - self.start_time
super(BufferedTestResult, self).addSuccess(test)
self.successes += 1
self._add_result(test, duration, PASS, _io_buffer.getvalue(), '')
def addError(self, test, error):
duration = time.time() - self.start_time
super(BufferedTestResult, self).addError(test, error)
test_obj, exception_str = self.errors[-1]
self._add_result(test, duration, ERROR, _io_buffer.getvalue(), exception_str)
def addFailure(self, test, error):
duration = time.time() - self.start_time
super(BufferedTestResult, self).addFailure(test, error)
test_obj, exception_str = self.failures[-1]
self._add_result(test, duration, FAIL, _io_buffer.getvalue(), exception_str)
def addSkip(self, test, reason):
duration = time.time() - self.start_time
super(BufferedTestResult, self).addSkip(test, reason)
test_obj, reason = self.skipped[-1]
self._add_result(test, duration, SKIPPED, _io_buffer.getvalue(), 'Skipped: {0!r}'.format(reason))
def addExpectedFailure(self, test, error):
duration = time.time() - self.start_time
super(BufferedTestResult, self).addExpectedFailure(test, error)
test_obj, exception_str = self.expectedFailures[-1]
self._add_result(test, duration, EXPECTED_FAIL, _io_buffer.getvalue(), exception_str)
def addUnexpectedSuccess(self, test):
duration = time.time() - self.start_time
super(BufferedTestResult, self).addUnexpectedSuccess(test)
self._add_result(test, duration, UNEXPECTED_PASS, _io_buffer.getvalue(), '')
def wasSuccessful(self):
return len(self.failures) == len(self.errors) == len(self.unexpectedSuccesses) == 0
class BufferedTestRunner(unittest.TextTestRunner):
def __init__(self, reporters=None, verbosity=1, descriptions=False):
super(BufferedTestRunner, self).__init__(buffer=False,
verbosity=verbosity,
descriptions=descriptions,
resultclass=BufferedTestResult)
if reporters:
self.reporters = reporters
else:
self.reporters = []
from unishark.reporter import Reporter
for reporter in self.reporters:
if not isinstance(reporter, Reporter):
raise TypeError
def _before_run(self):
# Keep the same as lines 145-162 in unittest.TextTextRunner.run
result = self.resultclass(self.stream, self.descriptions, self.verbosity)
registerResult(result)
result.failfast = self.failfast
result.buffer = self.buffer
with warnings.catch_warnings():
warn = getattr(self, 'warnings', None)
if warn:
warnings.simplefilter(warn)
if warn in ['default', 'always']:
warnings.filterwarnings('module',
category=DeprecationWarning,
                                            message=r'Please use assert\w+ instead.')
return result
def _after_run(self, result):
# Almost the same as lines 175-213 in unittest.TextTextRunner.run,
# with small fix of counting unexpectedSuccesses into a FAILED run.
result.printErrors()
if hasattr(result, 'separator2'):
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", result.sum_duration))
self.stream.writeln()
expected = unexpected = skipped = 0
try:
results = map(len, (result.expectedFailures,
result.unexpectedSuccesses,
result.skipped))
except AttributeError:
pass
else:
expected, unexpected, skipped = results
infos = []
if not result.wasSuccessful():
self.stream.write("FAILED")
failed, errored = len(result.failures), len(result.errors)
if failed:
infos.append("failures=%d" % failed)
if errored:
infos.append("errors=%d" % errored)
if unexpected:
infos.append("unexpected successes=%d" % unexpected)
else:
self.stream.write("OK")
if skipped:
infos.append("skipped=%d" % skipped)
if expected:
infos.append("expected failures=%d" % expected)
if infos:
self.stream.writeln(" (%s)" % (", ".join(infos),))
else:
self.stream.write("\n")
@staticmethod
def _is_suite(test):
try:
iter(test)
except TypeError:
return False
return True
@staticmethod
def _combine_results(result, results):
for r in results:
result.failures.extend(r.failures)
result.errors.extend(r.errors)
result.testsRun += r.testsRun
result.skipped.extend(r.skipped)
result.expectedFailures.extend(r.expectedFailures)
result.unexpectedSuccesses.extend(r.unexpectedSuccesses)
result.successes += r.successes
for mod_name, mod in r.results.items():
if mod_name not in result.results:
result.results[mod_name] = dict()
for cls_name, tups in mod.items():
if cls_name not in result.results[mod_name]:
result.results[mod_name][cls_name] = []
result.results[mod_name][cls_name].extend(tups)
def _group_test_cases_by_class(self, test, dic):
if not self.__class__._is_suite(test):
if test.__class__ not in dic:
dic[test.__class__] = []
dic[test.__class__].append(test)
else:
for t in test:
self._group_test_cases_by_class(t, dic)
def _regroup_test_cases(self, test):
dic = dict()
self._group_test_cases_by_class(test, dic)
log.debug('Test cases grouped by class: %r' % dic)
suite = unittest.TestSuite()
for _, cases in dic.items():
cls_suite = unittest.TestSuite()
cls_suite.addTests(cases)
suite.addTest(cls_suite)
return suite
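    # Sketch: given cases [A.test1, B.test1, A.test2], the regrouped suite
    # holds two class-level sub-suites, [A.test1, A.test2] and [B.test1], so
    # the thread pool in run() schedules whole classes and never splits one
    # class across workers.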
def run(self, test, name='test', description='', max_workers=1):
result = self._before_run()
result.name = name
result.description = description
start_time = time.time()
start_test_run = getattr(result, 'startTestRun', None)
if start_test_run is not None:
start_test_run()
try:
if max_workers <= 1 or not self.__class__._is_suite(test):
test(result)
else:
test = self._regroup_test_cases(test)
log.debug('Regrouped test: %r' % test)
results = [self._before_run() for _ in test]
with concurrent.futures.ThreadPoolExecutor(max_workers) as executor:
for t, r in zip(test, results):
executor.submit(t, r)
self.__class__._combine_results(result, results)
finally:
stop_test_run = getattr(result, 'stopTestRun', None)
if stop_test_run is not None:
stop_test_run()
result.sum_duration = time.time() - start_time
self._after_run(result)
for reporter in self.reporters:
reporter.report(result)
        return result
| apache-2.0 | -2,083,779,264,028,304,100 | 36.428986 | 108 | 0.589607 | false |
ninchat/thumq | test.py | 1 | 3773 | #!/usr/bin/env python3
import argparse
import base64
import os
import signal
import socket
import subprocess
import sys
import tempfile
import time
import webbrowser
from contextlib import closing
from struct import pack, unpack
socket_path = "./test.socket"
imagedir = "exif-orientation-examples"
protoc = os.environ.get("PROTOC", "protoc")
compiledir = tempfile.mkdtemp()
try:
subprocess.check_call([protoc, "--python_out", compiledir, "thumq.proto"])
sys.path.insert(0, compiledir)
sys.dont_write_bytecode = True
import thumq_pb2
sys.path.pop(0)
finally:
for dirpath, dirnames, filenames in os.walk(compiledir, topdown=False):
for filename in filenames:
os.remove(os.path.join(dirpath, filename))
for dirname in dirnames:
os.rmdir(os.path.join(dirpath, dirname))
os.rmdir(compiledir)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--browser", action="store_true", help="open result images in web browser tabs")
parser.add_argument("--top-square", action="store_true", help="enable cropping")
parser.add_argument("scale", type=int, help="maximum width/height of result image")
args = parser.parse_args()
request = thumq_pb2.Request()
request.scale = args.scale
crop = "no-crop"
if args.top_square:
request.crop = thumq_pb2.Request.TOP_SQUARE
crop = "top-square"
request_data = request.SerializeToString()
service = subprocess.Popen(["./thumq", socket_path])
try:
for _ in range(10):
if os.path.exists(socket_path):
break
time.sleep(0.2)
files = []
for kind in ["Landscape", "Portrait"]:
for num in range(1, 8 + 1):
filename = "{}_{}.jpg".format(kind, num)
filepath = os.path.join(imagedir, filename)
files.append((filepath, "image/jpeg", True))
files.append(("test.pdf", "application/pdf", False))
for filepath, expect_type, expect_thumbnail in files:
print(filepath)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM | socket.SOCK_CLOEXEC)
with closing(sock):
sock.connect(socket_path)
send(sock, request_data)
with open(filepath, "rb") as f:
send(sock, f.read())
response = thumq_pb2.Response.FromString(receive(sock))
output_data = receive(sock)
if expect_thumbnail:
assert response.source_type == expect_type, response
assert response.nail_width in range(1, args.scale + 1), response
assert response.nail_height in range(1, args.scale + 1), response
assert output_data
if args.browser:
output_b64 = base64.standard_b64encode(output_data).decode()
webbrowser.open_new_tab("data:image/jpeg;base64," + output_b64)
else:
with open(filepath.replace(imagedir + "/", "test-output/" + crop + "/"), "wb") as f:
f.write(output_data)
else:
assert response.source_type == expect_type, response
assert not response.nail_width, response
assert not response.nail_height, response
assert not output_data
finally:
os.kill(service.pid, signal.SIGINT)
service.wait()
def send(sock, data):
sock.send(pack("<I", len(data)))
sock.send(data)
def receive(sock):
size, = unpack("<I", sock.recv(4))
return sock.recv(size)
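# Framing sketch: each message is a little-endian uint32 length prefix
# followed by the payload, so send(sock, b"abc") puts b"\x03\x00\x00\x00abc"
# on the wire, and receive() reads the 4-byte header before the body.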
if __name__ == "__main__":
main()
| bsd-2-clause | 3,513,826,372,980,710,400 | 30.705882 | 108 | 0.58362 | false |
google-research/language | language/capwap/synthetic/filter_round_trip.py | 1 | 6692 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Filter for round trip consistency in QA generations.
This uses an RC model with a no-answer option.
This code follows the round-trip consistency check from the paper:
Chris Alberti, Daniel Andor, Emily Pitler, Jacob Devlin, and Michael Collins.
2019. Synthetic QA Corpora Generation with Roundtrip Consistency. In ACL.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
from absl import app
from absl import flags
from language.capwap.utils import experiment_utils
from language.capwap.utils import reward_utils
from language.capwap.utils import text_utils
import tensorflow.compat.v1 as tf
import tensorflow_hub as hub
DATA_DIR = os.getenv("CAPWAP_DATA", "data")
flags.DEFINE_string("input_file", None, "Input TFRecord file.")
flags.DEFINE_string("output_file", None, "Where to write to.")
flags.DEFINE_integer("max_answer_length", 10,
"Maximum answer length for prediction.")
flags.DEFINE_integer("seq_length", 128, "Padded input length.")
flags.DEFINE_float("no_answer_bias", 0, "Bias for CLS prediction.")
flags.DEFINE_string("rc_model", os.path.join(DATA_DIR, "rc_model"),
"TF Hub handle for BERT QA model.")
flags.DEFINE_string("vocab_path", os.path.join(DATA_DIR, "uncased_vocab.txt"),
"Path to BERT directory.")
FLAGS = flags.FLAGS
def clean(text):
"""Postprocessing."""
text = text.strip()
text = " ".join(text.split())
return text
def input_fn(params, input_file):
"""tf.data.Dataset."""
def _parse_example(serialized_example):
"""Parse a serialized example proto."""
features = tf.io.parse_single_example(
serialized_example,
features={
"unique_ids": tf.FixedLenFeature([], tf.int64),
"input_ids": tf.FixedLenFeature([params["seq_length"]], tf.int64),
"input_mask": tf.FixedLenFeature([params["seq_length"]], tf.int64),
"segment_ids": tf.FixedLenFeature([params["seq_length"]], tf.int64),
"start_positions": tf.FixedLenFeature([], tf.int64),
"end_positions": tf.FixedLenFeature([], tf.int64),
"answer_types": tf.FixedLenFeature([], tf.int64),
})
# Remove special [Q] token inserted before start of question.
for k in ["input_ids", "input_mask", "segment_ids"]:
v = features[k]
features[k] = tf.concat([[v[0]], v[2:]], axis=0)
return features
dataset = tf.data.TFRecordDataset(input_file, buffer_size=16 * 1024 * 1024)
dataset = dataset.map(
_parse_example, num_parallel_calls=params["num_input_threads"])
dataset = dataset.batch(params["batch_size"], drop_remainder=True)
dataset = dataset.prefetch(params["prefetch_batches"])
return dataset
def model_fn(features, labels, mode, params):
"""A model function satisfying the tf.estimator API."""
del labels
assert mode == tf.estimator.ModeKeys.PREDICT, "Mode should be PREDICT."
rc_model = hub.Module(params["rc_model"])
outputs = rc_model(
inputs=dict(
input_ids=tf.cast(features["input_ids"], tf.int32),
input_mask=tf.cast(features["input_mask"], tf.int32),
segment_ids=tf.cast(features["segment_ids"], tf.int32)),
signature="extractive_qa",
as_dict=True)
start, end, _ = reward_utils.max_scoring_span(
start_scores=outputs["start_logits"],
end_scores=outputs["end_logits"],
max_length=params["max_answer_length"],
no_answer_bias=params["no_answer_bias"])
is_consistent = tf.logical_and(
tf.logical_and(tf.greater(start, 0), tf.greater(end, 0)),
tf.logical_and(
tf.equal(start, tf.cast(features["start_positions"] - 1, tf.int32)),
tf.equal(end, tf.cast(features["end_positions"] - 1, tf.int32))))
return tf.estimator.tpu.TPUEstimatorSpec(
mode=mode,
predictions=dict(
unique_ids=features["unique_ids"],
input_ids=features["input_ids"],
start=start,
end=end,
is_consistent=is_consistent))
def main(argv):
if len(argv) > 1:
raise app.UsageError("Too many command-line arguments.")
tf.logging.set_verbosity(tf.logging.INFO)
tf.logging.info("***** Generating captions *****")
# Load vocab
vocab = text_utils.Vocab.load(FLAGS.vocab_path)
# Update params.
params = dict(
seq_length=FLAGS.seq_length,
model_dir=os.path.dirname(FLAGS.output_file),
max_answer_length=FLAGS.max_answer_length,
batch_size=FLAGS.batch_size,
rc_model=FLAGS.rc_model,
eval_batch_size=FLAGS.eval_batch_size,
no_answer_bias=FLAGS.no_answer_bias,
num_input_threads=FLAGS.num_input_threads,
predict_batch_size=FLAGS.predict_batch_size,
prefetch_batches=FLAGS.prefetch_batches,
use_tpu=FLAGS.use_tpu,
)
# Get estimator.
estimator = experiment_utils.get_estimator(model_fn, params)
# Write predictions.
tf.logging.info("Writing predictions to disk...")
tf.io.gfile.makedirs(os.path.dirname(FLAGS.output_file))
with tf.io.gfile.GFile(FLAGS.output_file, "w") as f:
iterator = estimator.predict(
input_fn=functools.partial(input_fn, input_file=FLAGS.input_file),
yield_single_examples=True)
total = 0
for i, ex in enumerate(iterator, 1):
if ex["is_consistent"]:
tokens = [vocab.i2t(idx) for idx in ex["input_ids"]]
breakpoint = tokens.index(vocab.PAD)
question = clean(" ".join(vocab.clean(tokens[1:breakpoint])))
context = clean(" ".join(vocab.clean(tokens[breakpoint:])))
answer = clean(" ".join(tokens[ex["start"]:ex["end"] + 1]))
output = [str(ex["unique_ids"]), question, answer, context]
output = "\t".join(output)
f.write(output + "\n")
total += 1
if total % 10000 == 0:
tf.logging.info("Wrote %d predictions", total)
if i % 10000 == 0:
tf.logging.info("Processed %d examples", i)
tf.logging.info("Done.")
if __name__ == "__main__":
tf.disable_v2_behavior()
app.run(main)
| apache-2.0 | -8,093,537,959,721,813,000 | 34.978495 | 80 | 0.661984 | false |
bjthinks/orbital-explorer | radial_analyzer.py | 1 | 14230 | # This file is part of the Electron Orbital Explorer. The Electron
# Orbital Explorer is distributed under the Simplified BSD License
# (also called the "BSD 2-Clause License"), in hopes that these
# rendering techniques might be used by other programmers in
# applications such as scientific visualization, video gaming, and so
# on. If you find value in this software and use its technologies for
# another purpose, I would love to hear back from you at bjthinks (at)
# gmail (dot) com. If you improve this software and agree to release
# your modifications under the below license, I encourage you to fork
# the development tree on github and push your modifications. The
# Electron Orbital Explorer's development URL is:
# https://github.com/bjthinks/orbital-explorer
# (This paragraph is not part of the software license and may be
# removed.)
#
# Copyright (c) 2013, Brian W. Johnson
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# + Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# + Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import numbers
from math import exp, sqrt
from license import license
class Polynomial:
"""Polynomials, immutable, with floating point coefficients"""
# Paul's suggestion: make a list constructor.
def __init__(self, c = 0, n = 0):
'''Polynomial(c, n) creates the polynomial c*x^n.
Polynomial([c0, c1, ..., cn]) creates the polynomial c0 + c1 x + ...'''
# self.__coeffs[n] is the coefficient of x^n. Invariant:
# if len(self.__coeffs) > 0 then self.__coeffs[-1] != 0
if isinstance(c, list):
self.__coeffs = list(c)
else:
self.__coeffs = [0] * n + [c]
self.__standardize()
def __standardize(self):
while self.degree >= 0 and self.__coeffs[-1] == 0:
self.__coeffs.pop()
@property
def degree(self):
return len(self.__coeffs) - 1
@property
def constantTerm(self):
if self.degree == -1:
return 0
else:
return self.__coeffs[0]
@property
def leadingCoefficient(self):
if self.degree == -1:
return 0
else:
return self.__coeffs[-1]
def __eq__(self, other):
return self.__coeffs == other.__coeffs
def __ne__(self, other):
return not (self == other)
def __call__(self, x):
total = 0
for c in reversed(self.__coeffs):
total *= x
total += c
return total
def __add__(self, other):
if isinstance(other, numbers.Number):
return self + Polynomial(other)
if self.degree < other.degree:
sm = self.__coeffs
lg = other.__coeffs
else:
sm = other.__coeffs
lg = self.__coeffs
s = list(lg)
for i in range(len(sm)):
s[i] += sm[i]
return Polynomial(s)
def __radd__(self, other):
return self + other
def __pos__(self):
return self
def __neg__(self):
return Polynomial([-x for x in self.__coeffs])
def __sub__(self, other):
return self + (-other)
def __rsub__(self, other):
return (-self) + other
def __mul__(self, other):
if isinstance(other, int) or isinstance(other, float):
return self * Polynomial(other)
p = [0] * (self.degree + other.degree + 1)
for i in range(len(self.__coeffs)):
for j in range(len(other.__coeffs)):
p[i + j] += self.__coeffs[i] * other.__coeffs[j]
return Polynomial(p)
def __rmul__(self, other):
return self * other
def __truediv__(self, other):
return self * (1 / other)
def __pow__(self, e):
if e < 0:
raise ArithmeticError('Polynomial to a negative power')
if e == 0:
return Polynomial(1)
if e == 1:
return self
if e % 2 == 0:
return (self * self) ** (e >> 1)
return self * (self ** (e - 1))
def derivative(self):
return Polynomial([i * self.__coeffs[i]
for i in range(1, self.degree + 1)])
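# Quick usage sketch: Polynomial([1, 2, 3]) is 1 + 2x + 3x^2, so
#   p = Polynomial([1, 2, 3])
#   p(2)               # -> 17
#   p.derivative()     # -> Polynomial([2, 6]), i.e. 2 + 6x
#   (p * p).degree     # -> 4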
def factorial(n):
if n < 0:
raise ArithmeticError('Factorial of a negative number')
f = 1
for i in range(2, n + 1):
f *= i
return f
def choose(n, k):
return factorial(n) // (factorial(k) * factorial(n - k))
def laguerre(n, a):
x = Polynomial(1, 1)
f = 0
for i in range(n + 1):
f += ((-1) ** i) * choose(n + a, n - i) * (x ** i) / factorial(i)
return f
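# Sketch: laguerre(n, a) builds the generalized Laguerre polynomial L_n^a.
# For example laguerre(1, 1) == Polynomial([2, -1]), i.e. L_1^1(x) = 2 - x,
# whose root at x = 2 is the single radial node of the 2s orbital below.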
def bisect(f, lower, upper):
if not (lower < upper):
raise Exception('bisect: lower not less than upper')
f_lower = f(lower)
if f_lower == 0:
return lower
f_upper = f(upper)
if f_upper == 0:
return upper
if (f_lower < 0 and f_upper < 0) or (f_lower > 0 and f_upper > 0):
raise Exception('bisect: no sign change present')
while True:
mid = (lower + upper) / 2
if not (lower < mid < upper):
return mid
f_mid = f(mid)
if f_mid == 0:
return mid
if f_mid < 0:
if f_lower < 0:
lower = mid
f_lower = f_mid
else:
upper = mid
f_upper = f_mid
else:
if f_lower > 0:
lower = mid
f_lower = f_mid
else:
upper = mid
f_upper = f_mid
def roots(f):
if f.degree < 1:
if f.constantTerm != 0:
return []
raise Exception('roots called on the zero polynomial')
if f.degree == 1:
return [-f.constantTerm / f.leadingCoefficient]
df = f.derivative()
df_roots = roots(df)
leading_coeff_f = f.leadingCoefficient
degree_f = f.degree
# First, handle the case where df has no roots
if len(df_roots) == 0:
assert degree_f % 2 == 1
f0 = f(0)
if f0 == 0:
return [0]
if leading_coeff_f > 0 and f0 < 0:
upper = 1
while f(upper) <= 0:
upper += 1
return [bisect(f, 0, upper)]
if leading_coeff_f > 0 and f0 > 0:
lower = -1
while f(lower) >= 0:
lower -= 1
return [bisect(f, lower, 0)]
if leading_coeff_f < 0 and f0 > 0:
upper = 1
while f(upper) >= 0:
upper += 1
return [bisect(f, 0, upper)]
if leading_coeff_f < 0 and f0 < 0:
lower = -1
while f(lower) <= 0:
lower -= 1
return [bisect(f, lower, 0)]
raise Exception('Impossible monotonic polynomial')
r = []
# Check for a root to the left of the first root of df
first_df_root = df_roots[0]
f_at_first_df_root = f(first_df_root)
negative_behavior_f = leading_coeff_f * ((-1) ** degree_f)
if negative_behavior_f > 0 and f_at_first_df_root < 0:
lower_bound_on_first_root = first_df_root - 1
while f(lower_bound_on_first_root) <= 0:
lower_bound_on_first_root -= 1
r.append(bisect(f, lower_bound_on_first_root, first_df_root))
if negative_behavior_f < 0 and f_at_first_df_root > 0:
lower_bound_on_first_root = first_df_root - 1
while f(lower_bound_on_first_root) >= 0:
lower_bound_on_first_root -= 1
r.append(bisect(f, lower_bound_on_first_root, first_df_root))
# Look at each pair of roots of df
for i in range(len(df_roots) - 1):
dr1 = df_roots[i]
dr2 = df_roots[i + 1]
fdr1 = f(dr1)
fdr2 = f(dr2)
        if (fdr1 > 0 and fdr2 < 0) or (fdr1 < 0 and fdr2 > 0):
r.append(bisect(f, dr1, dr2))
if fdr1 == 0:
r.append(dr1)
# Last one -- just check if it's a root of f
if f(df_roots[-1]) == 0:
r.append(df_roots[-1])
# Check for a root to the right of the last root of df
last_df_root = df_roots[-1]
f_at_last_df_root = f(last_df_root)
positive_behavior_f = leading_coeff_f
if positive_behavior_f > 0 and f_at_last_df_root < 0:
upper_bound_on_last_root = last_df_root + 1
while f(upper_bound_on_last_root) <= 0:
upper_bound_on_last_root += 1
r.append(bisect(f, last_df_root, upper_bound_on_last_root))
if positive_behavior_f < 0 and f_at_last_df_root > 0:
upper_bound_on_last_root = last_df_root + 1
while f(upper_bound_on_last_root) >= 0:
upper_bound_on_last_root += 1
r.append(bisect(f, last_df_root, upper_bound_on_last_root))
return r
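# For example, roots(Polynomial([-6, 11, -6, 1])) -- i.e. (x - 1)(x - 2)(x - 3) --
# yields approximations of [1.0, 2.0, 3.0], found left to right.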
def list_to_cpp(nums):
    if not nums:
return ' {}'
return ' {\n ' + ',\n '.join([str(n) for n in nums]) + \
'\n }'
max_n = 16
def make_table3(name, func):
'''Make a C++ table of arrays for each n and L'''
sn = str(max_n)
print('const double ' + name + '[' + sn + '][' + sn + '][' + sn + '] = {')
for n in range(1, max_n + 1):
print(' // n ==', n)
print(' {')
for L in range(0, n):
print(' // L ==', L)
s = list_to_cpp(func(n, L))
if L != n - 1:
s += (',')
print(s)
if n != max_n:
print(' },')
else:
print(' }')
print('};')
def make_table2(name, func):
'''Make a C++ table of values for each n and L'''
sn = str(max_n)
print('const double ' + name + '[' + sn + '][' + sn + '] = {')
for n in range(1, max_n + 1):
print(' // n ==', n)
print(' {')
for L in range(0, n):
print(' // L ==', L)
s = ' ' + str(func(n, L))
if L != n - 1:
s += (',')
print(s)
if n != max_n:
print(' },')
else:
print(' }')
print('};')
'''
The radial factor of the wave function is of the form:
(x ^ L) * exp(-x / 2) * Laguerre(x)
To find radial nodes, we set this to zero, and look for nonzero
solutions. These occur iff the Laguerre polynomial factor is zero.
'''
def radial_nodes(n, L):
return roots(laguerre(n - L - 1, 2 * L + 1))
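# Sanity checks: radial_nodes(2, 1) == [] (the 2p orbital has no radial node),
# while radial_nodes(3, 1) == [4.0] (the single 3p node, in the scaled x coordinate).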
'''
To find radial maxima, we set the derivative of the radial factor to
zero, like so:
  (L * Laguerre(x) + x * (-1 / 2) * Laguerre(x) + x * Laguerre'(x))
* (x ^ (L-1)) * exp(-x / 2) = 0
Note that this is correct only for positive L, and we must handle the
case L=0 separately.
Simplifying, and ignoring the solution x=0, we get:
(L - x / 2) * Laguerre(x) + x * Laguerre'(x) = 0
For the special case L=0, we instead have:
(-1 / 2) * Laguerre(x) + Laguerre'(x) = 0,
which differs only in not having zero as a root. (Note that an extra
root at x=0 would confuse the C++ use of the table, where zero is
treated as an 'end of data' marker.)
'''
def radial_maxima(n, L):
    x = Polynomial(1, 1)
la = laguerre(n - L - 1, 2 * L + 1)
dla = la.derivative()
if L != 0:
f = (L - x / 2) * la + x * dla
else:
f = (-1 / 2) * la + dla
return roots(f)
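# Sanity check: radial_maxima(2, 1) == [2.0] -- the lone maximum of the 2p
# radial factor x * exp(-x / 2), again in the scaled x coordinate.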
def radial_extent(n, L):
maxes = radial_maxima(n, L)
maxes.append(0)
la = laguerre(n - L - 1, 2 * L + 1)
def f(r):
return abs((r ** L) * exp(-r / 2) * la(r))
big_f = max([f(r) for r in maxes])
upper_x = max(maxes) + 1
while f(upper_x) > big_f / 1e5:
upper_x += 1
return upper_x
def radial_extent2(n, L):
maxes = radial_maxima(n, L)
maxes.append(0)
la = laguerre(n - L - 1, 2 * L + 1)
def f(r):
return ((r ** L) * exp(-r / 2) * la(r)) ** 2
big_f = max([f(r) for r in maxes])
upper_x = max(maxes) + 1
while f(upper_x) > big_f / 1e5:
upper_x += 1
return upper_x
dx = 0.01
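# step size for the trapezoidal rule used in radial_integral/radial_integral2 below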
def radial_integral(n, L):
outer = radial_extent(n, L)
la = laguerre(n - L - 1, 2 * L + 1)
c = sqrt(factorial(n - L - 1) / (2 * n * factorial(n + L)))
def f(r):
return abs(c * (r ** L) * exp(-r / 2) * la(r))
tot = 0
for s in range(0, int(outer / dx - 0.5)):
x = s * dx
tot += dx * (f(x) + f(x + dx)) / 2
return tot
def radial_integral2(n, L):
outer = radial_extent2(n, L)
la = laguerre(n - L - 1, 2 * L + 1)
c = sqrt(factorial(n - L - 1) / (2 * n * factorial(n + L)))
def f(r):
return (c * (r ** L) * exp(-r / 2) * la(r)) ** 2
tot = 0
for s in range(0, int(outer / dx - 0.5)):
x = s * dx
tot += dx * (f(x) + f(x + dx)) / 2
return tot
if __name__ == '__main__':
for s in license('c'):
print(s)
print('')
print('#include "radial_data.hh"')
print('')
make_table3('radial_nodes', radial_nodes)
print('')
make_table3('radial_maxima', radial_maxima)
print('')
make_table2('radial_extent', radial_extent)
print('')
make_table2('radial_extent2', radial_extent2)
print('')
make_table2('radial_integral', radial_integral)
print('')
make_table2('radial_integral2', radial_integral2)
| bsd-2-clause | 112,988,447,810,036,910 | 29.276596 | 79 | 0.541813 | false |
redsolution/django-menu-proxy | example/models.py | 1 | 1829 | from django.db import models
from django.core.urlresolvers import reverse
class Page(models.Model):
class Meta:
ordering = ['id']
slug = models.CharField(max_length=100)
title = models.CharField(max_length=100)
text = models.TextField()
parent = models.ForeignKey('self', related_name='children', null=True, blank=True)
def get_absolute_url(self):
return reverse('page', kwargs={'slug': self.slug})
def __unicode__(self):
return self.slug
def get_ancestors(self):
ancestors = []
parent = self.parent
while parent is not None:
ancestors.append(parent)
parent = parent.parent
return self.__class__.objects.filter(id__in=[ancestor.id for ancestor in ancestors])
# Or use:
#mptt.register(Page)
class Catalog(models.Model):
class Meta:
ordering = ['id']
title = models.CharField(max_length=100)
parent = models.ForeignKey('self', related_name='children', null=True, blank=True)
visible = models.BooleanField()
def get_absolute_url(self):
return reverse('catalog', kwargs={'object_id': self.pk})
def __unicode__(self):
return unicode(self.pk)
def get_ancestors(self):
ancestors = []
parent = self.parent
while parent is not None:
ancestors.append(parent)
parent = parent.parent
return self.__class__.objects.filter(id__in=[ancestor.id for ancestor in ancestors])
# Or use:
#mptt.register(Catalog)
class News(models.Model):
class Meta:
ordering = ['id']
text = models.TextField()
def title(self):
return self.text
def get_absolute_url(self):
return reverse('news', kwargs={'object_id': self.pk})
def __unicode__(self):
return unicode(self.pk)
| gpl-3.0 | 8,239,604,442,253,999,000 | 26.298507 | 92 | 0.621104 | false |
VMatrixTeam/open-matrix | src/webservice/handlers/question/detail.py | 1 | 1460 | # coding=utf-8
from handlers.base import BaseController
from tornado.web import gen
from model.question.question import Question
from model.question.answer import Answer
from model.question.vote import Vote
from model.user import User
from model.question.tag import Tag
from model.question.comment import Comment
class QuestionDetailHandler(BaseController):
@gen.coroutine
def get(self, qid):
question = yield Question.get_question_by_qid(qid)
        if question is None:
self.redirect("/404")
raise gen.Return()
question.author = yield User.get_user_by_id(question.author)
question.votes = yield Vote.get_votes_by_qid(question.qid)
question.answers = yield Answer.get_answers_count_by_qid(question.qid)
question.tags = yield Tag.get_tags_by_qid(question.qid)
question.comments = yield Comment.get_comments_by_qid(question.qid)
answers = yield Answer.get_answers_by_qid(question.qid)
for answer in answers:
answer.author = yield User.get_user_by_id(answer.author)
answer.comments = yield Comment.get_comments_by_aid(answer.aid)
answer.votes = yield Vote.get_votes_by_aid(answer.aid)
data = {
"current_user": self.current_user,
'question': question,
'answers' : answers
}
self.render('question/question-detail.jade', **data)
def post(self):
pass
| mit | -599,517,309,497,890,200 | 32.181818 | 78 | 0.667123 | false |
MLAB-project/PyMeteostation | pymeteostation/MeteostationLib.py | 1 | 10344 | from pymlab import config
import time, json, urllib, urllib2, sys, os, ast, ConfigParser, base64
class Meteostation:
def __init__(self,configFileName):
self.settings = self.__getSettings(configFileName)
try:
cfg = config.Config(i2c={"port":1}, bus=self.settings["I2C_configuration"])
cfg.initialize()
self.NameTypeDict = self.__getTypes(self.settings["I2C_configuration"])
self.Devices = {}
for device in self.__getNames(self.settings["I2C_configuration"]):
self.Devices[device] = cfg.get_device(device)
except Exception, e:
sys.exit("Initialization of I2c failed: "+str(e))
time.sleep(0.5)
def getData(self,requestList="all"): # returns requested sensor data
outputList = {}
outputList["time"] = int(time.time())
if requestList == "all":
for device in self.Devices.keys():
outputList[device] = self.__getSensorData(device,self.NameTypeDict[device])
else:
            for request in requestList:
                outputList[request] = self.__getSensorData(request,self.NameTypeDict[request])
return outputList
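    # The returned dict maps each sensor name from the I2C configuration to its
    # reading list, plus a "time" key, e.g. roughly (names are illustrative):
    #   {"time": 1467000000, "sht25": [humidity, temperature]}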
def __getSensorData(self,sensorName,sensorType): # must return list
try:
if sensorType == "sht25":
self.Devices[sensorName].route()
return [self.Devices[sensorName].get_hum(),self.Devices[sensorName].get_temp()]
elif sensorType == "altimet01": # returns atmospheric pressure readings corrected to sea level altitude.
self.Devices[sensorName].route()
data = self.Devices[sensorName].get_tp()
return [data[0],data[1]/((1-((0.0065*self.settings["altitude"])/288.15))**5.255781292873008*100)]
except Exception, e:
print sensorName + " sensor error:",str(e)
return ["error",str(e)]
def log(self,dataDict,logFileName=""): # logging function
if logFileName == "":
logFileName = time.strftime("%Y-%m-%d:%H-", time.localtime()) + "meteoData.log"
FULLlogFileName = self.settings["logpath"] + time.strftime("%Y/", time.localtime()) + time.strftime("%m/", time.localtime()) + time.strftime("%d/", time.localtime()) + logFileName
if not os.path.exists(FULLlogFileName):
self.__generateLogFile(logFileName,self.settings["logpath"] + time.strftime("%Y/", time.localtime()) + time.strftime("%m/", time.localtime()) + time.strftime("%d/", time.localtime()))
try:
with open(FULLlogFileName,"r") as f:
savedData = json.load(f)
with open(FULLlogFileName,"w") as f:
savedData.append(dataDict)
f.write(json.dumps(savedData))
except Exception, e:
print "Logging failed:", str(e)
def __generateLogFile(self,logFileName,logPath): # generator of a log file
defaultLog = []
try:
if not logPath == "" and not os.path.exists(logPath):
os.makedirs(logPath)
with open(logPath+logFileName,"w") as f:
f.write(json.dumps(defaultLog))
except Exception, e:
print "Cannot generate log file:",str(e)
def sendData(self,username,password,sendDict): # sends data to openweathermap.com
sendData = self.translateToPOST(sendDict)
url = "http://openweathermap.org/data/post"
request = urllib2.Request(url,data=urllib.urlencode(sendData),headers={"Authorization":"Basic "+base64.encodestring(username+":"+password)[:-1]})
try:
result = urllib2.urlopen(request)
except urllib2.URLError as e:
if hasattr(e, "code"):
return (False, {"message":e.reason,"cod":e.code,"id":"0"})
else:
return (False, {"message":e.reason,"cod":"Failed to reach server","id":"0"})
except Exception as e:
return (False, {"message":str(e),"cod":"Network error","id":"0"})
else:
try:
result = result.read()
return (True, json.loads(result))
except Exception as e:
return (False, {"message":result,"cod":str(e),"id":"0"})
def translateToPOST(self,sendDict): # translates sensor values to POST request format
payload = {}
for itemKey in sendDict.keys():
if not itemKey == "time" and not sendDict[itemKey][0] == "error":
for transList in self.settings["Translation_Into_POST"]:
if itemKey == transList[1]:
payload[transList[0]] = str(round(sendDict[itemKey][transList[2]],2))
if self.settings["stationname"]:
payload["name"] = str(self.settings["stationname"])
if self.settings["latitude"] and self.settings["longitude"]:
payload["lat"] = str(self.settings["latitude"])
payload["long"] = str(self.settings["longitude"])
if self.settings["altitude"]:
payload["alt"] = str(self.settings["altitude"])
return payload
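    # Each settings["Translation_Into_POST"] entry is [POST_field, sensor_name, index],
    # parsed from a config line of the form "POST_field = sensor_name;index".
    # For instance (illustrative names), "temp = altimet;0" would publish
    # round(sendDict["altimet"][0], 2) as the POST parameter "temp".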
def __getNames(self,busConfig): # recursively searches for all "name" dictionary keys and returns their values: ["name1", "name2", ...]
names = []
for item in busConfig:
for key in item.keys():
if key == "name":
names.append(item[key])
if type(item[key]) == list:
names += self.__getNames(item[key])
return names
def __getTypes(self,busConfig): # recursively searches for all "name" and "type" dictionary keys and return their values: {name:type, ...}
names = {}
for item in busConfig:
for key in item.keys():
if key == "name":
names[item[key]] = item["type"]
if type(item[key]) == list:
names = dict(names.items() + self.__getTypes(item[key]).items())
return names
def __getSettings(self,fileName): # returns settings dictionary made of config file
parser = ConfigParser.SafeConfigParser()
try:
parser.read(fileName)
except Exception, e:
sys.exit("Unable to load configuration file. Error: "+str(e))
options = {}
for sectionName in ["Meteostation","I2C_Device","Translation_Into_POST"]:
if not parser.has_section(sectionName):
sys.exit("Unable to find \'%s\' section" % (sectionName))
else:
options[sectionName] = parser.options(sectionName)
        requiredOptions = ["username","password","uploadinterval","logpath"]
        missingOptions = list(requiredOptions)
        missingOptionsString = ""
        for requiredOptionID in range(len(requiredOptions)):
            for option in options["Meteostation"]:
                if option == requiredOptions[requiredOptionID]:
                    missingOptions[requiredOptionID] = ""
                    break
        for missingOption in missingOptions:
            if missingOption != "":
                missingOptionsString += "\'"+missingOption+"\', "
        if len(missingOptionsString) != 0:
            sys.exit("Unable to find %s option(s)." % (missingOptionsString[:len(missingOptionsString)-2]))
possibleOptions = ["username","password","uploadinterval","logpath","stationname","latitude","longitude","altitude"]
settings = {}
try:
for option in possibleOptions:
if parser.has_option("Meteostation",option):
try:
settings[option] = float(parser.get("Meteostation",option))
except ValueError:
settings[option] = parser.get("Meteostation",option)
else:
settings[option] = ""
if not settings["altitude"]:
settings["altitude"] = 0
settings["I2C_configuration"] = [self.__getI2CConfig(parser,"I2C_Device")]
settings["Translation_Into_POST"] = []
for option in options["Translation_Into_POST"]:
if parser.get("Translation_Into_POST",option) == "":
translationListPart = ['',0]
else:
try:
translationListPart = self.__getOptionList(parser.get("Translation_Into_POST",option))
if len(translationListPart) != 2:
print "Strange value set to option \'%s\'. Using default value." % (option)
translationListPart = ['',0]
except:
print "Strange value set to option \'%s\'. Using default value." % (option)
translationListPart = ['',0]
settings["Translation_Into_POST"].append([option,translationListPart[0],int(translationListPart[1])])
except Exception, e:
sys.exit("Bad format of configuration file. Error: "+str(e))
return settings
def __getI2CConfig(self,parser,section): # recursively generates I2C configuration from configuration file
result = {}
for option in parser.options(section):
if option == "children":
children = self.__getOptionList(parser.get(section,option))
result[option] = []
for child in children:
result[option].append(self.__getI2CConfig(parser,child))
elif option == "address":
result[option] = int(parser.get(section,option),base=16)
elif option == "channel":
result[option] = int(parser.get(section,option))
else:
result[option] = parser.get(section,option)
return result
def __getOptionList(self,string):
lastPosition = 0
optionList = []
for letterPos in range(len(string)):
if string[letterPos] == ";":
optionList.append(string[lastPosition:letterPos])
lastPosition = letterPos+1
if lastPosition < len(string):
optionList.append(string[lastPosition:len(string)])
return optionList | gpl-3.0 | -3,879,657,435,161,456,600 | 45.183036 | 195 | 0.563128 | false |
dimagi/loveseat | tests/test_aggregated_result.py | 1 | 1049 | from __future__ import absolute_import
import unittest
from datetime import timedelta
from loveseat.aggregated_result import AggregatedResult
from loveseat.result import Result
class TestAggregatedResult(unittest.TestCase):
def setUp(self):
self.resultOne = Result(database='a', elapsed=timedelta(0, 0, 2))
self.resultTwo = Result(database='a', elapsed=timedelta(0, 0, 4))
self.resultThree = Result(database='b', elapsed=timedelta(0, 0, 5))
def test_aggregated_result(self):
ag = AggregatedResult('example')
ag.add_results([self.resultOne, self.resultTwo, self.resultThree])
self.assertEqual(ag.results['a']['avg'], 3)
self.assertEqual(ag.results['a']['max'], 4)
self.assertEqual(ag.results['a']['min'], 2)
self.assertEqual(ag.results['a']['count'], 2)
self.assertEqual(ag.results['b']['avg'], 5)
self.assertEqual(ag.results['b']['max'], 5)
self.assertEqual(ag.results['b']['min'], 5)
self.assertEqual(ag.results['b']['count'], 1)
| mit | -1,915,400,627,183,827,700 | 37.851852 | 75 | 0.655863 | false |
CHBMB/LazyLibrarian | lazylibrarian/__init__.py | 1 | 67424 | # This file is part of Lazylibrarian.
#
# Lazylibrarian is free software':'you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lazylibrarian is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Lazylibrarian. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import calendar
import json
import locale
import os
import subprocess
import sys
import threading
import time
import webbrowser
import cherrypy
from lazylibrarian import logger, postprocess, searchnzb, searchtorrents, searchrss, \
librarysync, versioncheck, database, searchmag, magazinescan, bookwork, dbupgrade
from lazylibrarian.cache import fetchURL
from lazylibrarian.common import restartJobs
from lazylibrarian.formatter import getList, bookSeries, plural, unaccented
from lib.apscheduler.scheduler import Scheduler
FULL_PATH = None
PROG_DIR = None
ARGS = None
SIGNAL = None
DAEMON = False
PIDFILE = ''
SYS_ENCODING = ''
SCHED = Scheduler()
INIT_LOCK = threading.Lock()
__INITIALIZED__ = False
started = False
GIT_USER = None
GIT_REPO = None
GIT_BRANCH = None
INSTALL_TYPE = None
CURRENT_VERSION = None
LATEST_VERSION = None
COMMITS_BEHIND = None
COMMIT_LIST = None
DATADIR = None
DBFILE = None
UPDATE_MSG = None
CONFIGFILE = ''
CFG = ''
CURRENT_TAB = '1'
LOGDIR = None
CACHEDIR = None
LOGLIST = []
# Info 1, Debug 2, >2 don't toggle console/file
LOGLEVEL = 2
LOGLIMIT = 500
LOGFULL = False # include debug on screen if true
LOGFILES = 10 # 10 log files
LOGSIZE = 204800 # each up to 200K
CFGLOGLEVEL = None
MATCH_RATIO = 80
DLOAD_RATIO = 90
DISPLAYLENGTH = 5
HTTP_HOST = None
HTTP_PORT = 5299
HTTP_USER = None
HTTP_PASS = None
HTTP_PROXY = None
HTTP_ROOT = None
HTTP_LOOK = None
HTTPS_ENABLED = 0
HTTPS_CERT = None
HTTPS_KEY = None
LAUNCH_BROWSER = 0
API_ENABLED = 0
API_KEY = None
PROXY_HOST = None
PROXY_TYPE = None
SAB_HOST = None
SAB_PORT = 0
SAB_SUBDIR = None
SAB_USER = None
SAB_PASS = None
SAB_API = None
SAB_CAT = None
NZBGET_HOST = None
NZBGET_PORT = 0
NZBGET_USER = None
NZBGET_PASS = None
NZBGET_CATEGORY = None
NZBGET_PRIORITY = 0
DESTINATION_COPY = 0
DESTINATION_DIR = None
ALTERNATE_DIR = None
DOWNLOAD_DIR = None
IMP_PREFLANG = None
IMP_MONTHLANG = None
IMP_ONLYISBN = 0
IMP_SINGLEBOOK = 1
IMP_AUTOADD = None
IMP_AUTOSEARCH = 1
IMP_CONVERT = None
IMP_CALIBREDB = None
GIT_PROGRAM = None
BOOK_API = None
GR_API = None
GB_API = None
NZBMATRIX = 0
NZBMATRIX_USER = None
NZBMATRIX_API = None
NEWZBIN = 0
NEWZBIN_UID = None
NEWZBIN_PASS = None
EBOOK_TYPE = None
MAG_TYPE = None
REJECT_WORDS = None
REJECT_MAXSIZE = 0
REJECT_MAGSIZE = 0
MAG_AGE = 31
TOR_DOWNLOADER_BLACKHOLE = 0
TOR_CONVERT_MAGNET = 0
TOR_DOWNLOADER_UTORRENT = 0
TOR_DOWNLOADER_RTORRENT = 0
TOR_DOWNLOADER_QBITTORRENT = 0
TOR_DOWNLOADER_TRANSMISSION = 0
TOR_DOWNLOADER_SYNOLOGY = 0
TOR_DOWNLOADER_DELUGE = 0
NUMBEROFSEEDERS = 10
KEEP_SEEDING = 0
TORRENT_DIR = None
PREFER_MAGNET = 0
RTORRENT_HOST = None
RTORRENT_USER = None
RTORRENT_PASS = None
RTORRENT_LABEL = None
RTORRENT_DIR = None
UTORRENT_HOST = None
UTORRENT_PORT = 0
UTORRENT_USER = None
UTORRENT_PASS = None
UTORRENT_LABEL = None
SYNOLOGY_HOST = None
SYNOLOGY_PORT = 0
SYNOLOGY_USER = None
SYNOLOGY_PASS = None
SYNOLOGY_DIR = None
USE_SYNOLOGY = 0
QBITTORRENT_HOST = None
QBITTORRENT_PORT = 0
QBITTORRENT_USER = None
QBITTORRENT_PASS = None
QBITTORRENT_LABEL = None
TRANSMISSION_HOST = None
TRANSMISSION_PORT = 0
TRANSMISSION_USER = None
TRANSMISSION_PASS = None
DELUGE_PORT = 0
DELUGE_HOST = None
DELUGE_USER = None
DELUGE_PASS = None
DELUGE_LABEL = None
KAT = 0
KAT_HOST = None
TPB = 0
TPB_HOST = None
ZOO = 0
ZOO_HOST = None
EXTRA = 0
EXTRA_HOST = None
TDL = 0
TDL_HOST = None
GEN = 0
GEN_HOST = None
LIME = 0
LIME_HOST = None
NZB_DOWNLOADER_SABNZBD = 0
NZB_DOWNLOADER_NZBGET = 0
NZB_DOWNLOADER_SYNOLOGY = 0
NZB_DOWNLOADER_BLACKHOLE = 0
NZB_BLACKHOLEDIR = None
USENET_RETENTION = 0
VERSIONCHECK_INTERVAL = 24  # Every 24 hours
SEARCH_INTERVAL = 720 # Every 12 hours
SCAN_INTERVAL = 10 # Every 10 minutes
SEARCHRSS_INTERVAL = 20 # Every 20 minutes
FULL_SCAN = 0 # full scan would remove books from db
ADD_AUTHOR = 1 # auto add authors not found in db from goodreads
# value to mark missing books (deleted/removed) in db, can be 'Open', 'Ignored', 'Wanted','Skipped'
NOTFOUND_STATUS = 'Skipped'
# value to mark new books when importing a new author, can be 'Open', 'Ignored', 'Wanted','Skipped'
NEWAUTHOR_STATUS = 'Skipped'
# value to mark new books when rescanning existing author, can be 'Open', 'Ignored', 'Wanted','Skipped'
NEWBOOK_STATUS = 'Skipped'
EBOOK_DEST_FOLDER = None
EBOOK_DEST_FILE = None
MAG_DEST_FOLDER = None
MAG_DEST_FILE = None
MAG_RELATIVE = 1
USE_TWITTER = 0
TWITTER_NOTIFY_ONSNATCH = 0
TWITTER_NOTIFY_ONDOWNLOAD = 0
TWITTER_USERNAME = None
TWITTER_PASSWORD = None
TWITTER_PREFIX = 'LazyLibrarian'
USE_BOXCAR = 0
BOXCAR_TOKEN = None
BOXCAR_NOTIFY_ONSNATCH = 0
BOXCAR_NOTIFY_ONDOWNLOAD = 0
USE_PUSHBULLET = 0
PUSHBULLET_TOKEN = None
PUSHBULLET_DEVICEID = None
PUSHBULLET_NOTIFY_ONSNATCH = 0
PUSHBULLET_NOTIFY_ONDOWNLOAD = 0
USE_PUSHOVER = 0
PUSHOVER_APITOKEN = None
PUSHOVER_KEYS = None
PUSHOVER_DEVICE = None
PUSHOVER_ONSNATCH = 0
PUSHOVER_ONDOWNLOAD = 0
PUSHOVER_PRIORITY = 0
USE_ANDROIDPN = 0
ANDROIDPN_NOTIFY_ONSNATCH = 0
ANDROIDPN_NOTIFY_ONDOWNLOAD = 0
ANDROIDPN_URL = None
ANDROIDPN_BROADCAST = 0
ANDROIDPN_USERNAME = None
USE_NMA = 0
NMA_APIKEY = None
NMA_PRIORITY = 0
NMA_ONSNATCH = None
NMA_ONDOWNLOAD = None
USE_SLACK = 0
SLACK_TOKEN = None
SLACK_NOTIFY_ONSNATCH = 0
SLACK_NOTIFY_ONDOWNLOAD = 0
USE_EMAIL = 0
EMAIL_NOTIFY_ONSNATCH = 0
EMAIL_NOTIFY_ONDOWNLOAD = 0
EMAIL_FROM = None
EMAIL_TO = None
EMAIL_SSL = 0
EMAIL_SMTP_SERVER = None
EMAIL_SMTP_PORT = None
EMAIL_TLS = 0
EMAIL_SMTP_USER = None
EMAIL_SMTP_PASSWORD = None
NEWZNAB_PROV = []
TORZNAB_PROV = []
RSS_PROV = []
# Month names table to hold long/short month names for multiple languages
# which we can match against magazine issues
# Defined as global and initialised early, because locale changes are not thread safe
# This means changes to languages require a restart
MONTH0 = ['en_GB.UTF-8', 'en_GB.UTF-8'] # This holds the language code
MONTH1 = [u'january', u'jan'] # multiple names for first month
MONTH2 = [u'february', u'feb'] # etc...
MONTH3 = [u'march', u'mar']
MONTH4 = [u'april', u'apr']
MONTH5 = [u'may', u'may']
MONTH6 = [u'june', u'jun']
MONTH7 = [u'july', u'jul']
MONTH8 = [u'august', u'aug']
MONTH9 = [u'september', u'sep']
MONTH10 = [u'october', u'oct']
MONTH11 = [u'november', u'nov']
MONTH12 = [u'december', u'dec']
MONTHNAMES = [MONTH0, MONTH1, MONTH2, MONTH3, MONTH4, MONTH5, MONTH6,
MONTH7, MONTH8, MONTH9, MONTH10, MONTH11, MONTH12]
CACHE_HIT = 0
CACHE_MISS = 0
LAST_GOODREADS = 0
LAST_LIBRARYTHING = 0
CACHE_AGE = 30
TASK_AGE = 0
BOOKSTRAP_THEME = ''
BOOKSTRAP_THEMELIST = []
def check_section(sec):
""" Check if INI section exists, if not create it """
if CFG.has_section(sec):
return True
else:
CFG.add_section(sec)
return False
def check_setting_bool(config, cfg_name, item_name, def_val, log=True):
""" Check if option exists and coerce to boolean, if not create it """
try:
my_val = config.getboolean(cfg_name, item_name)
except Exception:
my_val = def_val
check_section(cfg_name)
config.set(cfg_name, item_name, my_val)
if log:
logger.debug(cfg_name + ":" + item_name + " -> " + str(my_val))
return my_val
def check_setting_int(config, cfg_name, item_name, def_val, log=True):
""" Check if option exists and coerce to int, if not create it """
try:
my_val = config.getint(cfg_name, item_name)
except Exception:
my_val = def_val
check_section(cfg_name)
config.set(cfg_name, item_name, my_val)
if log:
logger.debug(cfg_name + ":" + item_name + " -> " + str(my_val))
return my_val
def check_setting_str(config, cfg_name, item_name, def_val, log=True):
""" Check if option exists and coerce to string, if not create it """
try:
my_val = config.get(cfg_name, item_name)
# Old config file format had strings in quotes. ConfigParser doesn't.
if my_val.startswith('"'):
my_val = my_val[1:]
if my_val.endswith('"'):
my_val = my_val[:-1]
except Exception:
my_val = def_val
check_section(cfg_name)
config.set(cfg_name, item_name, my_val)
if log:
logger.debug(cfg_name + ":" + item_name + " -> " + my_val)
return my_val.decode(SYS_ENCODING)
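# Typical usage, as in config_read() below:
#   SAB_PORT = check_setting_int(CFG, 'SABnzbd', 'sab_port', 0)
# reads [SABnzbd] sab_port if present, otherwise writes the default back to the
# config and returns it.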
def initialize():
with INIT_LOCK:
global __INITIALIZED__, LOGDIR, LOGLIMIT, LOGFILES, LOGSIZE, CFG, CFGLOGLEVEL, LOGLEVEL, \
LOGFULL, CACHEDIR, DATADIR, LAST_LIBRARYTHING, LAST_GOODREADS, \
IMP_MONTHLANG, BOOKSTRAP_THEMELIST, CURRENT_TAB, UPDATE_MSG
if __INITIALIZED__:
return False
check_section('General')
LOGDIR = check_setting_str(CFG, 'General', 'logdir', '')
LOGLIMIT = check_setting_int(CFG, 'General', 'loglimit', 500)
LOGFILES = check_setting_int(CFG, 'General', 'logfiles', 10)
LOGSIZE = check_setting_int(CFG, 'General', 'logsize', 204800)
if not LOGDIR:
LOGDIR = os.path.join(DATADIR, 'Logs')
# Create logdir
if not os.path.exists(LOGDIR):
try:
os.makedirs(LOGDIR)
except OSError as e:
if LOGLEVEL:
print '%s : Unable to create folder for logs: %s. Only logging to console.' % (LOGDIR, str(e))
# Start the logger, silence console logging if we need to
CFGLOGLEVEL = check_setting_int(CFG, 'General', 'loglevel', 9)
if LOGLEVEL == 1: # default if no debug or quiet on cmdline
if CFGLOGLEVEL == 9: # default value if none in config
LOGLEVEL = 2 # If not set in Config or cmdline, then lets set to DEBUG
else:
LOGLEVEL = CFGLOGLEVEL # Config setting picked up
logger.lazylibrarian_log.initLogger(loglevel=LOGLEVEL)
logger.info("Log level set to [%s]- Log Directory is [%s] - Config level is [%s]" % (
LOGLEVEL, LOGDIR, CFGLOGLEVEL))
if LOGLEVEL > 2:
LOGFULL = True
logger.info("Screen Log set to DEBUG")
else:
LOGFULL = False
logger.info("Screen Log set to INFO/WARN/ERROR")
config_read()
logger.info('SYS_ENCODING is %s' % SYS_ENCODING)
# Put the cache dir in the data dir for now
CACHEDIR = os.path.join(DATADIR, 'cache')
if not os.path.exists(CACHEDIR):
try:
os.makedirs(CACHEDIR)
except OSError:
logger.error('Could not create cachedir. Check permissions of: ' + DATADIR)
# keep track of last api calls so we don't call more than once per second
# to respect api terms, but don't wait un-necessarily either
time_now = int(time.time())
LAST_LIBRARYTHING = time_now
LAST_GOODREADS = time_now
# Initialize the database
try:
curr_ver = db_needs_upgrade()
if curr_ver:
threading.Thread(target=dbupgrade.dbupgrade, name="DB_UPGRADE", args=[curr_ver]).start()
else:
myDB = database.DBConnection()
result = myDB.match('PRAGMA user_version')
if result:
version = result[0]
else:
version = 0
logger.info("Database is version %s" % version)
except Exception as e:
logger.error("Can't connect to the database: %s" % str(e))
build_monthtable()
BOOKSTRAP_THEMELIST = build_bookstrap_themes()
__INITIALIZED__ = True
return True
def config_read(reloaded=False):
global FULL_PATH, PROG_DIR, DAEMON, DISPLAYLENGTH, \
HTTP_HOST, HTTP_PORT, HTTP_USER, HTTP_PASS, HTTP_PROXY, HTTP_ROOT, HTTP_LOOK, API_KEY, API_ENABLED, \
LAUNCH_BROWSER, LOGDIR, CACHE_AGE, MATCH_RATIO, DLOAD_RATIO, PROXY_HOST, PROXY_TYPE, GIT_PROGRAM, \
IMP_ONLYISBN, IMP_SINGLEBOOK, IMP_PREFLANG, IMP_MONTHLANG, IMP_AUTOADD, IMP_CONVERT, IMP_CALIBREDB, \
IMP_AUTOSEARCH, MONTHNAMES, MONTH0, MONTH1, MONTH2, MONTH3, MONTH4, MONTH5, MONTH6, MONTH7, \
MONTH8, MONTH9, MONTH10, MONTH11, MONTH12, CONFIGFILE, CFG, LOGLIMIT, TASK_AGE, \
SAB_HOST, SAB_PORT, SAB_SUBDIR, SAB_API, SAB_USER, SAB_PASS, SAB_CAT, \
DESTINATION_DIR, DESTINATION_COPY, DOWNLOAD_DIR, USENET_RETENTION, NZB_BLACKHOLEDIR, \
ALTERNATE_DIR, GR_API, GB_API, BOOK_API, \
NZBGET_HOST, NZBGET_USER, NZBGET_PASS, NZBGET_CATEGORY, NZBGET_PRIORITY, \
NZBGET_PORT, NZB_DOWNLOADER_NZBGET, NZBMATRIX, NZBMATRIX_USER, NZBMATRIX_API, \
NEWZBIN, NEWZBIN_UID, NEWZBIN_PASS, EBOOK_TYPE, MAG_TYPE, \
KAT, KAT_HOST, TPB, TPB_HOST, ZOO, ZOO_HOST, TDL, TDL_HOST, GEN, GEN_HOST, EXTRA, EXTRA_HOST, \
LIME, LIME_HOST, NEWZNAB_PROV, TORZNAB_PROV, RSS_PROV, REJECT_WORDS, REJECT_MAXSIZE, REJECT_MAGSIZE, \
VERSIONCHECK_INTERVAL, SEARCH_INTERVAL, SCAN_INTERVAL, SEARCHRSS_INTERVAL, MAG_AGE, \
EBOOK_DEST_FOLDER, EBOOK_DEST_FILE, MAG_RELATIVE, MAG_DEST_FOLDER, MAG_DEST_FILE, \
USE_TWITTER, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, \
TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, TOR_CONVERT_MAGNET, \
USE_BOXCAR, BOXCAR_NOTIFY_ONSNATCH, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_TOKEN, \
TORRENT_DIR, TOR_DOWNLOADER_BLACKHOLE, TOR_DOWNLOADER_UTORRENT, TOR_DOWNLOADER_RTORRENT, \
TOR_DOWNLOADER_QBITTORRENT, NZB_DOWNLOADER_SABNZBD, NZB_DOWNLOADER_SYNOLOGY, NZB_DOWNLOADER_BLACKHOLE, \
SYNOLOGY_DIR, USE_SYNOLOGY, USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, \
PUSHBULLET_TOKEN, PUSHBULLET_DEVICEID, RTORRENT_HOST, RTORRENT_USER, RTORRENT_PASS, RTORRENT_DIR, \
RTORRENT_LABEL, UTORRENT_HOST, UTORRENT_PORT, UTORRENT_USER, UTORRENT_PASS, UTORRENT_LABEL, \
QBITTORRENT_HOST, QBITTORRENT_PORT, QBITTORRENT_USER, QBITTORRENT_PASS, QBITTORRENT_LABEL, \
SYNOLOGY_PORT, SYNOLOGY_HOST, SYNOLOGY_USER, SYNOLOGY_PASS, USE_PUSHOVER, PUSHOVER_ONSNATCH, \
PUSHOVER_KEYS, PUSHOVER_APITOKEN, PUSHOVER_PRIORITY, PUSHOVER_ONDOWNLOAD, PUSHOVER_DEVICE, \
USE_ANDROIDPN, ANDROIDPN_NOTIFY_ONSNATCH, ANDROIDPN_NOTIFY_ONDOWNLOAD, \
ANDROIDPN_URL, ANDROIDPN_USERNAME, ANDROIDPN_BROADCAST, \
USE_SLACK, SLACK_NOTIFY_ONSNATCH, SLACK_NOTIFY_ONDOWNLOAD, SLACK_TOKEN, \
USE_EMAIL, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_FROM, EMAIL_TO, \
EMAIL_SSL, EMAIL_SMTP_SERVER, EMAIL_SMTP_PORT, EMAIL_TLS, EMAIL_SMTP_USER, EMAIL_SMTP_PASSWORD, \
TOR_DOWNLOADER_TRANSMISSION, TRANSMISSION_HOST, TRANSMISSION_PORT, TRANSMISSION_PASS, TRANSMISSION_USER, \
TOR_DOWNLOADER_SYNOLOGY, TOR_DOWNLOADER_DELUGE, DELUGE_HOST, DELUGE_USER, DELUGE_PASS, DELUGE_PORT, \
DELUGE_LABEL, FULL_SCAN, ADD_AUTHOR, NOTFOUND_STATUS, NEWBOOK_STATUS, NEWAUTHOR_STATUS, \
USE_NMA, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, NMA_ONDOWNLOAD, \
GIT_USER, GIT_REPO, GIT_BRANCH, INSTALL_TYPE, CURRENT_VERSION, COMMIT_LIST, PREFER_MAGNET, \
LATEST_VERSION, COMMITS_BEHIND, NUMBEROFSEEDERS, KEEP_SEEDING, SCHED, CACHE_HIT, CACHE_MISS, \
BOOKSTRAP_THEME, LOGFILES, LOGSIZE, HTTPS_ENABLED, HTTPS_CERT, HTTPS_KEY
NEWZNAB_PROV = []
TORZNAB_PROV = []
RSS_PROV = []
# we read the log details earlier for starting the logger process,
# but read them again here so they get listed in the debug log
LOGDIR = check_setting_str(CFG, 'General', 'logdir', '')
LOGLIMIT = check_setting_int(CFG, 'General', 'loglimit', 500)
LOGFILES = check_setting_int(CFG, 'General', 'logfiles', 10)
LOGSIZE = check_setting_int(CFG, 'General', 'logsize', 204800)
HTTP_PORT = check_setting_int(CFG, 'General', 'http_port', 5299)
if HTTP_PORT < 21 or HTTP_PORT > 65535:
HTTP_PORT = 5299
MATCH_RATIO = check_setting_int(CFG, 'General', 'match_ratio', 80)
DLOAD_RATIO = check_setting_int(CFG, 'General', 'dload_ratio', 90)
DISPLAYLENGTH = check_setting_int(CFG, 'General', 'displaylength', 10)
HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
HTTP_USER = check_setting_str(CFG, 'General', 'http_user', '')
HTTP_PASS = check_setting_str(CFG, 'General', 'http_pass', '')
HTTP_PROXY = check_setting_bool(CFG, 'General', 'http_proxy', 0)
HTTP_ROOT = check_setting_str(CFG, 'General', 'http_root', '')
HTTP_LOOK = check_setting_str(CFG, 'General', 'http_look', 'default')
HTTPS_ENABLED = check_setting_bool(CFG, 'General', 'https_enabled', 0)
HTTPS_CERT = check_setting_str(CFG, 'General', 'https_cert', '')
HTTPS_KEY = check_setting_str(CFG, 'General', 'https_key', '')
BOOKSTRAP_THEME = check_setting_str(CFG, 'General', 'bookstrap_theme', 'slate')
LAUNCH_BROWSER = check_setting_bool(CFG, 'General', 'launch_browser', 1)
API_ENABLED = check_setting_bool(CFG, 'General', 'api_enabled', 0)
API_KEY = check_setting_str(CFG, 'General', 'api_key', '')
PROXY_HOST = check_setting_str(CFG, 'General', 'proxy_host', '')
PROXY_TYPE = check_setting_str(CFG, 'General', 'proxy_type', '')
IMP_PREFLANG = check_setting_str(CFG, 'General', 'imp_preflang', 'en, eng, en-US, en-GB')
IMP_MONTHLANG = check_setting_str(CFG, 'General', 'imp_monthlang', '')
IMP_AUTOADD = check_setting_str(CFG, 'General', 'imp_autoadd', '')
IMP_AUTOSEARCH = check_setting_bool(CFG, 'General', 'imp_autosearch', 0)
IMP_CALIBREDB = check_setting_str(CFG, 'General', 'imp_calibredb', '')
IMP_ONLYISBN = check_setting_bool(CFG, 'General', 'imp_onlyisbn', 0)
IMP_SINGLEBOOK = check_setting_bool(CFG, 'General', 'imp_singlebook', 0)
IMP_CONVERT = check_setting_str(CFG, 'General', 'imp_convert', '')
GIT_PROGRAM = check_setting_str(CFG, 'General', 'git_program', '')
CACHE_AGE = check_setting_int(CFG, 'General', 'cache_age', 30)
TASK_AGE = check_setting_int(CFG, 'General', 'task_age', 0)
GIT_USER = check_setting_str(CFG, 'Git', 'git_user', 'dobytang')
GIT_REPO = check_setting_str(CFG, 'Git', 'git_repo', 'lazylibrarian')
GIT_BRANCH = check_setting_str(CFG, 'Git', 'git_branch', 'master')
INSTALL_TYPE = check_setting_str(CFG, 'Git', 'install_type', '')
CURRENT_VERSION = check_setting_str(CFG, 'Git', 'current_version', '')
LATEST_VERSION = check_setting_str(CFG, 'Git', 'latest_version', '')
COMMITS_BEHIND = check_setting_str(CFG, 'Git', 'commits_behind', '')
SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
SAB_PORT = check_setting_int(CFG, 'SABnzbd', 'sab_port', 0)
SAB_SUBDIR = check_setting_str(CFG, 'SABnzbd', 'sab_subdir', '')
SAB_USER = check_setting_str(CFG, 'SABnzbd', 'sab_user', '')
SAB_PASS = check_setting_str(CFG, 'SABnzbd', 'sab_pass', '')
SAB_API = check_setting_str(CFG, 'SABnzbd', 'sab_api', '')
SAB_CAT = check_setting_str(CFG, 'SABnzbd', 'sab_cat', '')
# legacy name conversion, separate out nzbget host/port
if not CFG.has_option('NZBGet', 'nzbget_port'):
port = 0
host = check_setting_str(CFG, 'NZBGet', 'nzbget_host', '')
if host.startswith('http'):
hostpart = 2
else:
hostpart = 1
words = host.split(':')
if len(words) > hostpart:
host = ':'.join(words[:hostpart])
port = ':'.join(words[hostpart:])
CFG.set('NZBGet', 'nzbget_port', port)
CFG.set('NZBGet', 'nzbget_host', host)
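        # e.g. a legacy value of "http://localhost:6789" splits into
        # host "http://localhost" and port "6789"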
NZBGET_HOST = check_setting_str(CFG, 'NZBGet', 'nzbget_host', '')
    NZBGET_PORT = check_setting_int(CFG, 'NZBGet', 'nzbget_port', 0)
NZBGET_USER = check_setting_str(CFG, 'NZBGet', 'nzbget_user', '')
NZBGET_PASS = check_setting_str(CFG, 'NZBGet', 'nzbget_pass', '')
NZBGET_CATEGORY = check_setting_str(CFG, 'NZBGet', 'nzbget_cat', '')
    NZBGET_PRIORITY = check_setting_int(CFG, 'NZBGet', 'nzbget_priority', 0)
DESTINATION_COPY = check_setting_bool(CFG, 'General', 'destination_copy', 0)
DESTINATION_DIR = check_setting_str(CFG, 'General', 'destination_dir', '')
ALTERNATE_DIR = check_setting_str(CFG, 'General', 'alternate_dir', '')
DOWNLOAD_DIR = check_setting_str(CFG, 'General', 'download_dir', '')
NZB_DOWNLOADER_SABNZBD = check_setting_bool(CFG, 'USENET', 'nzb_downloader_sabnzbd', 0)
NZB_DOWNLOADER_NZBGET = check_setting_bool(CFG, 'USENET', 'nzb_downloader_nzbget', 0)
NZB_DOWNLOADER_SYNOLOGY = check_setting_bool(CFG, 'USENET', 'nzb_downloader_synology', 0)
NZB_DOWNLOADER_BLACKHOLE = check_setting_bool(CFG, 'USENET', 'nzb_downloader_blackhole', 0)
NZB_BLACKHOLEDIR = check_setting_str(CFG, 'USENET', 'nzb_blackholedir', '')
USENET_RETENTION = check_setting_int(CFG, 'USENET', 'usenet_retention', 0)
NZBMATRIX = check_setting_bool(CFG, 'NZBMatrix', 'nzbmatrix', 0)
NZBMATRIX_USER = check_setting_str(CFG, 'NZBMatrix', 'nzbmatrix_user', '')
NZBMATRIX_API = check_setting_str(CFG, 'NZBMatrix', 'nzbmatrix_api', '')
count = 0
while CFG.has_section('Newznab%i' % count):
newz_name = 'Newznab%i' % count
# legacy name conversions
if CFG.has_option(newz_name, 'newznab%i' % count):
CFG.set(newz_name, 'ENABLED', CFG.getboolean(newz_name, 'newznab%i' % count))
CFG.remove_option(newz_name, 'newznab%i' % count)
if CFG.has_option(newz_name, 'newznab_host%i' % count):
CFG.set(newz_name, 'HOST', CFG.get(newz_name, 'newznab_host%i' % count))
CFG.remove_option(newz_name, 'newznab_host%i' % count)
if CFG.has_option(newz_name, 'newznab_api%i' % count):
CFG.set(newz_name, 'API', CFG.get(newz_name, 'newznab_api%i' % count))
CFG.remove_option(newz_name, 'newznab_api%i' % count)
if CFG.has_option(newz_name, 'nzedb'):
CFG.remove_option(newz_name, 'nzedb')
NEWZNAB_PROV.append({"NAME": newz_name,
"ENABLED": check_setting_bool(CFG, newz_name, 'enabled', 0),
"HOST": check_setting_str(CFG, newz_name, 'host', ''),
"API": check_setting_str(CFG, newz_name, 'api', ''),
"GENERALSEARCH": check_setting_str(CFG, newz_name, 'generalsearch', 'search'),
"BOOKSEARCH": check_setting_str(CFG, newz_name, 'booksearch', 'book'),
"MAGSEARCH": check_setting_str(CFG, newz_name, 'magsearch', ''),
"BOOKCAT": check_setting_str(CFG, newz_name, 'bookcat', '7000,7020'),
"MAGCAT": check_setting_str(CFG, newz_name, 'magcat', '7010'),
"EXTENDED": check_setting_str(CFG, newz_name, 'extended', '1'),
"UPDATED": check_setting_str(CFG, newz_name, 'updated', ''),
"MANUAL": check_setting_bool(CFG, newz_name, 'manual', 0)
})
count += 1
# if the last slot is full, add an empty one on the end
add_newz_slot()
count = 0
while CFG.has_section('Torznab%i' % count):
torz_name = 'Torznab%i' % count
# legacy name conversions
if CFG.has_option(torz_name, 'torznab%i' % count):
CFG.set(torz_name, 'ENABLED', CFG.getboolean(torz_name, 'torznab%i' % count))
CFG.remove_option(torz_name, 'torznab%i' % count)
if CFG.has_option(torz_name, 'torznab_host%i' % count):
CFG.set(torz_name, 'HOST', CFG.get(torz_name, 'torznab_host%i' % count))
CFG.remove_option(torz_name, 'torznab_host%i' % count)
if CFG.has_option(torz_name, 'torznab_api%i' % count):
CFG.set(torz_name, 'API', CFG.get(torz_name, 'torznab_api%i' % count))
CFG.remove_option(torz_name, 'torznab_api%i' % count)
if CFG.has_option(torz_name, 'nzedb'):
CFG.remove_option(torz_name, 'nzedb')
TORZNAB_PROV.append({"NAME": torz_name,
"ENABLED": check_setting_bool(CFG, torz_name, 'enabled', 0),
"HOST": check_setting_str(CFG, torz_name, 'host', ''),
"API": check_setting_str(CFG, torz_name, 'api', ''),
"GENERALSEARCH": check_setting_str(CFG, torz_name, 'generalsearch', 'search'),
"BOOKSEARCH": check_setting_str(CFG, torz_name, 'booksearch', 'book'),
"MAGSEARCH": check_setting_str(CFG, torz_name, 'magsearch', ''),
"BOOKCAT": check_setting_str(CFG, torz_name, 'bookcat', '8000,8010'),
"MAGCAT": check_setting_str(CFG, torz_name, 'magcat', '8030'),
"EXTENDED": check_setting_str(CFG, torz_name, 'extended', '1'),
"UPDATED": check_setting_str(CFG, torz_name, 'updated', ''),
"MANUAL": check_setting_bool(CFG, torz_name, 'manual', 0)
})
count += 1
# if the last slot is full, add an empty one on the end
add_torz_slot()
count = 0
while CFG.has_section('RSS_%i' % count):
rss_name = 'RSS_%i' % count
# legacy name conversions
if CFG.has_option(rss_name, 'rss%i' % count):
CFG.set(rss_name, 'ENABLED', CFG.getboolean(rss_name, 'rss%i' % count))
CFG.remove_option(rss_name, 'rss%i' % count)
if CFG.has_option(rss_name, 'rss_host%i' % count):
CFG.set(rss_name, 'HOST', CFG.get(rss_name, 'rss_host%i' % count))
CFG.remove_option(rss_name, 'rss_host%i' % count)
if CFG.has_option(rss_name, 'rss_user%i' % count):
# CFG.set(rss_name, 'USER', CFG.get(rss_name, 'rss_user%i' % count))
CFG.remove_option(rss_name, 'rss_user%i' % count)
if CFG.has_option(rss_name, 'rss_pass%i' % count):
# CFG.set(rss_name, 'PASS', CFG.get(rss_name, 'rss_pass%i' % count))
CFG.remove_option(rss_name, 'rss_pass%i' % count)
if CFG.has_option(rss_name, 'PASS'):
CFG.remove_option(rss_name, 'PASS')
if CFG.has_option(rss_name, 'USER'):
CFG.remove_option(rss_name, 'USER')
RSS_PROV.append({"NAME": rss_name,
"ENABLED": check_setting_bool(CFG, rss_name, 'ENABLED', 0),
"HOST": check_setting_str(CFG, rss_name, 'HOST', '')
})
count += 1
# if the last slot is full, add an empty one on the end
add_rss_slot()
TOR_DOWNLOADER_BLACKHOLE = check_setting_bool(CFG, 'TORRENT', 'tor_downloader_blackhole', 0)
TOR_CONVERT_MAGNET = check_setting_bool(CFG, 'TORRENT', 'tor_convert_magnet', 0)
TOR_DOWNLOADER_UTORRENT = check_setting_bool(CFG, 'TORRENT', 'tor_downloader_utorrent', 0)
TOR_DOWNLOADER_RTORRENT = check_setting_bool(CFG, 'TORRENT', 'tor_downloader_rtorrent', 0)
TOR_DOWNLOADER_QBITTORRENT = check_setting_bool(CFG, 'TORRENT', 'tor_downloader_qbittorrent', 0)
TOR_DOWNLOADER_TRANSMISSION = check_setting_bool(CFG, 'TORRENT', 'tor_downloader_transmission', 0)
TOR_DOWNLOADER_SYNOLOGY = check_setting_bool(CFG, 'TORRENT', 'tor_downloader_synology', 0)
TOR_DOWNLOADER_DELUGE = check_setting_bool(CFG, 'TORRENT', 'tor_downloader_deluge', 0)
NUMBEROFSEEDERS = check_setting_int(CFG, 'TORRENT', 'numberofseeders', 10)
KEEP_SEEDING = check_setting_bool(CFG, 'TORRENT', 'keep_seeding', 1)
PREFER_MAGNET = check_setting_bool(CFG, 'TORRENT', 'prefer_magnet', 1)
TORRENT_DIR = check_setting_str(CFG, 'TORRENT', 'torrent_dir', '')
RTORRENT_HOST = check_setting_str(CFG, 'RTORRENT', 'rtorrent_host', '')
RTORRENT_USER = check_setting_str(CFG, 'RTORRENT', 'rtorrent_user', '')
RTORRENT_PASS = check_setting_str(CFG, 'RTORRENT', 'rtorrent_pass', '')
RTORRENT_LABEL = check_setting_str(CFG, 'RTORRENT', 'rtorrent_label', '')
RTORRENT_DIR = check_setting_str(CFG, 'RTORRENT', 'rtorrent_dir', '')
# legacy name conversion, separate out utorrent host/port
if not CFG.has_option('UTORRENT', 'utorrent_port'):
port = 0
host = check_setting_str(CFG, 'UTORRENT', 'utorrent_host', '')
if host.startswith('http'):
hostpart = 2
else:
hostpart = 1
words = host.split(':')
if len(words) > hostpart:
host = ':'.join(words[:hostpart])
port = ':'.join(words[hostpart:])
CFG.set('UTORRENT', 'utorrent_port', port)
CFG.set('UTORRENT', 'utorrent_host', host)
UTORRENT_HOST = check_setting_str(CFG, 'UTORRENT', 'utorrent_host', '')
UTORRENT_PORT = check_setting_int(CFG, 'UTORRENT', 'utorrent_port', 0)
UTORRENT_USER = check_setting_str(CFG, 'UTORRENT', 'utorrent_user', '')
UTORRENT_PASS = check_setting_str(CFG, 'UTORRENT', 'utorrent_pass', '')
UTORRENT_LABEL = check_setting_str(CFG, 'UTORRENT', 'utorrent_label', '')
# legacy name conversion, separate out qbittorrent host/port
if not CFG.has_option('QBITTORRENT', 'qbittorrent_port'):
port = 0
host = check_setting_str(CFG, 'QBITTORRENT', 'qbittorrent_host', '')
if host.startswith('http'):
hostpart = 2
else:
hostpart = 1
words = host.split(':')
if len(words) > hostpart:
host = ':'.join(words[:hostpart])
port = ':'.join(words[hostpart:])
CFG.set('QBITTORRENT', 'qbittorrent_port', port)
CFG.set('QBITTORRENT', 'qbittorrent_host', host)
QBITTORRENT_HOST = check_setting_str(CFG, 'QBITTORRENT', 'qbittorrent_host', '')
QBITTORRENT_PORT = check_setting_int(CFG, 'QBITTORRENT', 'qbittorrent_port', 0)
QBITTORRENT_USER = check_setting_str(CFG, 'QBITTORRENT', 'qbittorrent_user', '')
QBITTORRENT_PASS = check_setting_str(CFG, 'QBITTORRENT', 'qbittorrent_pass', '')
QBITTORRENT_LABEL = check_setting_str(CFG, 'QBITTORRENT', 'qbittorrent_label', '')
# legacy name conversion, separate out transmission host/port
if not CFG.has_option('TRANSMISSION', 'transmission_port'):
port = 0
host = check_setting_str(CFG, 'TRANSMISSION', 'transmission_host', '')
if host.startswith('http'):
hostpart = 2
else:
hostpart = 1
words = host.split(':')
if len(words) > hostpart:
host = ':'.join(words[:hostpart])
port = ':'.join(words[hostpart:])
CFG.set('TRANSMISSION', 'transmission_port', port)
CFG.set('TRANSMISSION', 'transmission_host', host)
TRANSMISSION_HOST = check_setting_str(CFG, 'TRANSMISSION', 'transmission_host', '')
TRANSMISSION_PORT = check_setting_int(CFG, 'TRANSMISSION', 'transmission_port', 0)
TRANSMISSION_USER = check_setting_str(CFG, 'TRANSMISSION', 'transmission_user', '')
TRANSMISSION_PASS = check_setting_str(CFG, 'TRANSMISSION', 'transmission_pass', '')
DELUGE_HOST = check_setting_str(CFG, 'DELUGE', 'deluge_host', '')
DELUGE_PORT = check_setting_int(CFG, 'DELUGE', 'deluge_port', 0)
DELUGE_USER = check_setting_str(CFG, 'DELUGE', 'deluge_user', '')
DELUGE_PASS = check_setting_str(CFG, 'DELUGE', 'deluge_pass', '')
DELUGE_LABEL = check_setting_str(CFG, 'DELUGE', 'deluge_label', '')
SYNOLOGY_HOST = check_setting_str(CFG, 'SYNOLOGY', 'synology_host', '')
SYNOLOGY_PORT = check_setting_int(CFG, 'SYNOLOGY', 'synology_port', 0)
SYNOLOGY_USER = check_setting_str(CFG, 'SYNOLOGY', 'synology_user', '')
SYNOLOGY_PASS = check_setting_str(CFG, 'SYNOLOGY', 'synology_pass', '')
SYNOLOGY_DIR = check_setting_str(CFG, 'SYNOLOGY', 'synology_dir', 'Multimedia/Download')
USE_SYNOLOGY = check_setting_bool(CFG, 'SYNOLOGY', 'use_synology', 0)
KAT = check_setting_bool(CFG, 'KAT', 'kat', 0)
KAT_HOST = check_setting_str(CFG, 'KAT', 'kat_host', 'kickass.cd')
TPB = check_setting_bool(CFG, 'TPB', 'tpb', 0)
TPB_HOST = check_setting_str(CFG, 'TPB', 'tpb_host', 'https://piratebays.co')
ZOO = check_setting_bool(CFG, 'ZOO', 'zoo', 0)
ZOO_HOST = check_setting_str(CFG, 'ZOO', 'zoo_host', 'https://zooqle.com')
EXTRA = check_setting_bool(CFG, 'EXTRA', 'extra', 0)
EXTRA_HOST = check_setting_str(CFG, 'EXTRA', 'extra_host', 'extratorrent.cc')
TDL = check_setting_bool(CFG, 'TDL', 'tdl', 0)
TDL_HOST = check_setting_str(CFG, 'TDL', 'tdl_host', 'torrentdownloads.me')
GEN = check_setting_bool(CFG, 'GEN', 'gen', 0)
GEN_HOST = check_setting_str(CFG, 'GEN', 'gen_host', 'libgen.io')
LIME = check_setting_bool(CFG, 'LIME', 'lime', 0)
LIME_HOST = check_setting_str(CFG, 'LIME', 'lime_host', 'https://www.limetorrents.cc')
NEWZBIN = check_setting_bool(CFG, 'Newzbin', 'newzbin', 0)
NEWZBIN_UID = check_setting_str(CFG, 'Newzbin', 'newzbin_uid', '')
NEWZBIN_PASS = check_setting_str(CFG, 'Newzbin', 'newzbin_pass', '')
EBOOK_TYPE = check_setting_str(CFG, 'General', 'ebook_type', 'epub, mobi, pdf')
EBOOK_TYPE = EBOOK_TYPE.lower() # to make extension matching easier
MAG_TYPE = check_setting_str(CFG, 'General', 'mag_type', 'pdf')
MAG_TYPE = MAG_TYPE.lower() # to make extension matching easier
REJECT_WORDS = check_setting_str(CFG, 'General', 'reject_words', 'audiobook, mp3')
REJECT_WORDS = REJECT_WORDS.lower()
REJECT_MAXSIZE = check_setting_int(CFG, 'General', 'reject_maxsize', 0)
REJECT_MAGSIZE = check_setting_int(CFG, 'General', 'reject_magsize', 0)
MAG_AGE = check_setting_int(CFG, 'General', 'mag_age', 31)
    SEARCH_INTERVAL = check_setting_int(CFG, 'SearchScan', 'search_interval', 360)
    SCAN_INTERVAL = check_setting_int(CFG, 'SearchScan', 'scan_interval', 10)
    SEARCHRSS_INTERVAL = check_setting_int(CFG, 'SearchScan', 'searchrss_interval', 20)
    VERSIONCHECK_INTERVAL = check_setting_int(CFG, 'SearchScan', 'versioncheck_interval', 24)
FULL_SCAN = check_setting_bool(CFG, 'LibraryScan', 'full_scan', 0)
ADD_AUTHOR = check_setting_bool(CFG, 'LibraryScan', 'add_author', 1)
NOTFOUND_STATUS = check_setting_str(CFG, 'LibraryScan', 'notfound_status', 'Skipped')
NEWBOOK_STATUS = check_setting_str(CFG, 'LibraryScan', 'newbook_status', 'Skipped')
NEWAUTHOR_STATUS = check_setting_str(CFG, 'LibraryScan', 'newauthor_status', 'Skipped')
EBOOK_DEST_FOLDER = check_setting_str(CFG, 'PostProcess', 'ebook_dest_folder', '$Author/$Title')
EBOOK_DEST_FILE = check_setting_str(CFG, 'PostProcess', 'ebook_dest_file', '$Title - $Author')
MAG_DEST_FOLDER = check_setting_str(CFG, 'PostProcess', 'mag_dest_folder', '_Magazines/$Title/$IssueDate')
MAG_DEST_FILE = check_setting_str(CFG, 'PostProcess', 'mag_dest_file', '$IssueDate - $Title')
MAG_RELATIVE = check_setting_bool(CFG, 'PostProcess', 'mag_relative', 1)
USE_TWITTER = check_setting_bool(CFG, 'Twitter', 'use_twitter', 0)
TWITTER_NOTIFY_ONSNATCH = check_setting_bool(CFG, 'Twitter', 'twitter_notify_onsnatch', 0)
TWITTER_NOTIFY_ONDOWNLOAD = check_setting_bool(CFG, 'Twitter', 'twitter_notify_ondownload', 0)
TWITTER_USERNAME = check_setting_str(CFG, 'Twitter', 'twitter_username', '')
TWITTER_PASSWORD = check_setting_str(CFG, 'Twitter', 'twitter_password', '')
TWITTER_PREFIX = check_setting_str(CFG, 'Twitter', 'twitter_prefix', 'LazyLibrarian')
USE_BOXCAR = check_setting_bool(CFG, 'Boxcar', 'use_boxcar', 0)
BOXCAR_NOTIFY_ONSNATCH = check_setting_bool(CFG, 'Boxcar', 'boxcar_notify_onsnatch', 0)
BOXCAR_NOTIFY_ONDOWNLOAD = check_setting_bool(CFG, 'Boxcar', 'boxcar_notify_ondownload', 0)
BOXCAR_TOKEN = check_setting_str(CFG, 'Boxcar', 'boxcar_token', '')
USE_PUSHBULLET = check_setting_bool(CFG, 'Pushbullet', 'use_pushbullet', 0)
PUSHBULLET_NOTIFY_ONSNATCH = check_setting_bool(CFG, 'Pushbullet', 'pushbullet_notify_onsnatch', 0)
PUSHBULLET_NOTIFY_ONDOWNLOAD = check_setting_bool(CFG, 'Pushbullet', 'pushbullet_notify_ondownload', 0)
PUSHBULLET_TOKEN = check_setting_str(CFG, 'Pushbullet', 'pushbullet_token', '')
PUSHBULLET_DEVICEID = check_setting_str(CFG, 'Pushbullet', 'pushbullet_deviceid', '')
USE_PUSHOVER = check_setting_bool(CFG, 'Pushover', 'use_pushover', 0)
PUSHOVER_ONSNATCH = check_setting_bool(CFG, 'Pushover', 'pushover_onsnatch', 0)
PUSHOVER_ONDOWNLOAD = check_setting_bool(CFG, 'Pushover', 'pushover_ondownload', 0)
PUSHOVER_KEYS = check_setting_str(CFG, 'Pushover', 'pushover_keys', '')
PUSHOVER_APITOKEN = check_setting_str(CFG, 'Pushover', 'pushover_apitoken', '')
PUSHOVER_PRIORITY = check_setting_int(CFG, 'Pushover', 'pushover_priority', 0)
PUSHOVER_DEVICE = check_setting_str(CFG, 'Pushover', 'pushover_device', '')
USE_ANDROIDPN = check_setting_bool(CFG, 'AndroidPN', 'use_androidpn', 0)
ANDROIDPN_NOTIFY_ONSNATCH = check_setting_bool(CFG, 'AndroidPN', 'androidpn_notify_onsnatch', 0)
ANDROIDPN_NOTIFY_ONDOWNLOAD = check_setting_bool(CFG, 'AndroidPN', 'androidpn_notify_ondownload', 0)
ANDROIDPN_URL = check_setting_str(CFG, 'AndroidPN', 'androidpn_url', '')
ANDROIDPN_USERNAME = check_setting_str(CFG, 'AndroidPN', 'androidpn_username', '')
ANDROIDPN_BROADCAST = check_setting_bool(CFG, 'AndroidPN', 'androidpn_broadcast', 0)
USE_NMA = check_setting_bool(CFG, 'NMA', 'use_nma', 0)
NMA_APIKEY = check_setting_str(CFG, 'NMA', 'nma_apikey', '')
NMA_PRIORITY = check_setting_int(CFG, 'NMA', 'nma_priority', 0)
NMA_ONSNATCH = check_setting_bool(CFG, 'NMA', 'nma_onsnatch', 0)
NMA_ONDOWNLOAD = check_setting_bool(CFG, 'NMA', 'nma_ondownload', 0)
USE_SLACK = check_setting_bool(CFG, 'Slack', 'use_slack', 0)
SLACK_NOTIFY_ONSNATCH = check_setting_bool(CFG, 'Slack', 'slack_notify_onsnatch', 0)
SLACK_NOTIFY_ONDOWNLOAD = check_setting_bool(CFG, 'Slack', 'slack_notify_ondownload', 0)
SLACK_TOKEN = check_setting_str(CFG, 'Slack', 'slack_token', '')
USE_EMAIL = check_setting_bool(CFG, 'Email', 'use_email', 0)
EMAIL_NOTIFY_ONSNATCH = check_setting_bool(CFG, 'Email', 'email_notify_onsnatch', 0)
EMAIL_NOTIFY_ONDOWNLOAD = check_setting_bool(CFG, 'Email', 'email_notify_ondownload', 0)
EMAIL_FROM = check_setting_str(CFG, 'Email', 'email_from', '')
EMAIL_TO = check_setting_str(CFG, 'Email', 'email_to', '')
EMAIL_SSL = check_setting_bool(CFG, 'Email', 'email_ssl', 0)
EMAIL_SMTP_SERVER = check_setting_str(CFG, 'Email', 'email_smtp_server', '')
EMAIL_SMTP_PORT = check_setting_int(CFG, 'Email', 'email_smtp_port', 25)
EMAIL_TLS = check_setting_bool(CFG, 'Email', 'email_tls', 0)
EMAIL_SMTP_USER = check_setting_str(CFG, 'Email', 'email_smtp_user', '')
EMAIL_SMTP_PASSWORD = check_setting_str(CFG, 'Email', 'email_smtp_password', '')
BOOK_API = check_setting_str(CFG, 'API', 'book_api', 'GoodReads')
GR_API = check_setting_str(CFG, 'API', 'gr_api', 'ckvsiSDsuqh7omh74ZZ6Q')
GB_API = check_setting_str(CFG, 'API', 'gb_api', '')
if reloaded:
logger.info('Config file reloaded')
else:
logger.info('Config file loaded')
# noinspection PyUnresolvedReferences,PyTypeChecker
def config_write():
check_section('General')
CFG.set('General', 'http_port', HTTP_PORT)
CFG.set('General', 'http_host', HTTP_HOST)
CFG.set('General', 'http_user', HTTP_USER)
CFG.set('General', 'http_pass', HTTP_PASS)
CFG.set('General', 'http_proxy', HTTP_PROXY)
CFG.set('General', 'http_root', HTTP_ROOT)
CFG.set('General', 'http_look', HTTP_LOOK)
CFG.set('General', 'https_enabled', HTTPS_ENABLED)
CFG.set('General', 'https_cert', HTTPS_CERT)
CFG.set('General', 'https_key', HTTPS_KEY)
CFG.set('General', 'bookstrap_theme', BOOKSTRAP_THEME)
CFG.set('General', 'launch_browser', LAUNCH_BROWSER)
CFG.set('General', 'api_enabled', API_ENABLED)
CFG.set('General', 'api_key', API_KEY)
CFG.set('General', 'proxy_host', PROXY_HOST)
CFG.set('General', 'proxy_type', PROXY_TYPE)
CFG.set('General', 'logdir', LOGDIR.encode(SYS_ENCODING))
CFG.set('General', 'loglimit', LOGLIMIT)
CFG.set('General', 'loglevel', LOGLEVEL)
CFG.set('General', 'logsize', LOGSIZE)
CFG.set('General', 'logfiles', LOGFILES)
CFG.set('General', 'match_ratio', MATCH_RATIO)
CFG.set('General', 'dload_ratio', DLOAD_RATIO)
CFG.set('General', 'imp_onlyisbn', IMP_ONLYISBN)
CFG.set('General', 'imp_singlebook', IMP_SINGLEBOOK)
CFG.set('General', 'imp_preflang', IMP_PREFLANG)
CFG.set('General', 'imp_monthlang', IMP_MONTHLANG)
CFG.set('General', 'imp_autoadd', IMP_AUTOADD)
CFG.set('General', 'imp_autosearch', IMP_AUTOSEARCH)
CFG.set('General', 'imp_calibredb', IMP_CALIBREDB)
CFG.set('General', 'imp_convert', IMP_CONVERT.strip())
CFG.set('General', 'git_program', GIT_PROGRAM.strip())
CFG.set('General', 'ebook_type', EBOOK_TYPE.lower())
CFG.set('General', 'mag_type', MAG_TYPE.lower())
CFG.set('General', 'reject_words', REJECT_WORDS.encode(SYS_ENCODING).lower())
CFG.set('General', 'reject_maxsize', REJECT_MAXSIZE)
CFG.set('General', 'reject_magsize', REJECT_MAGSIZE)
CFG.set('General', 'mag_age', MAG_AGE)
CFG.set('General', 'destination_dir', DESTINATION_DIR.encode(SYS_ENCODING))
CFG.set('General', 'alternate_dir', ALTERNATE_DIR.encode(SYS_ENCODING))
CFG.set('General', 'download_dir', DOWNLOAD_DIR.encode(SYS_ENCODING))
CFG.set('General', 'cache_age', CACHE_AGE)
CFG.set('General', 'task_age', TASK_AGE)
CFG.set('General', 'destination_copy', DESTINATION_COPY)
#
CFG.set('General', 'displaylength', DISPLAYLENGTH)
#
check_section('Git')
CFG.set('Git', 'git_user', GIT_USER)
CFG.set('Git', 'git_repo', GIT_REPO)
CFG.set('Git', 'git_branch', GIT_BRANCH)
CFG.set('Git', 'install_type', INSTALL_TYPE)
CFG.set('Git', 'current_version', CURRENT_VERSION)
CFG.set('Git', 'latest_version', LATEST_VERSION)
CFG.set('Git', 'commits_behind', COMMITS_BEHIND)
#
check_section('USENET')
CFG.set('USENET', 'nzb_downloader_sabnzbd', NZB_DOWNLOADER_SABNZBD)
CFG.set('USENET', 'nzb_downloader_nzbget', NZB_DOWNLOADER_NZBGET)
CFG.set('USENET', 'nzb_downloader_synology', NZB_DOWNLOADER_SYNOLOGY)
CFG.set('USENET', 'nzb_downloader_blackhole', NZB_DOWNLOADER_BLACKHOLE)
CFG.set('USENET', 'nzb_blackholedir', NZB_BLACKHOLEDIR)
CFG.set('USENET', 'usenet_retention', USENET_RETENTION)
#
check_section('SABnzbd')
CFG.set('SABnzbd', 'sab_host', SAB_HOST)
CFG.set('SABnzbd', 'sab_port', SAB_PORT)
CFG.set('SABnzbd', 'sab_subdir', SAB_SUBDIR)
CFG.set('SABnzbd', 'sab_user', SAB_USER)
CFG.set('SABnzbd', 'sab_pass', SAB_PASS)
CFG.set('SABnzbd', 'sab_api', SAB_API)
CFG.set('SABnzbd', 'sab_cat', SAB_CAT)
#
check_section('NZBGet')
CFG.set('NZBGet', 'nzbget_host', NZBGET_HOST)
CFG.set('NZBGet', 'nzbget_port', NZBGET_PORT)
CFG.set('NZBGet', 'nzbget_user', NZBGET_USER)
CFG.set('NZBGet', 'nzbget_pass', NZBGET_PASS)
CFG.set('NZBGet', 'nzbget_cat', NZBGET_CATEGORY)
CFG.set('NZBGet', 'nzbget_priority', NZBGET_PRIORITY)
#
check_section('API')
CFG.set('API', 'book_api', BOOK_API)
CFG.set('API', 'gr_api', GR_API)
CFG.set('API', 'gb_api', GB_API)
#
check_section('NZBMatrix')
CFG.set('NZBMatrix', 'nzbmatrix', NZBMATRIX)
CFG.set('NZBMatrix', 'nzbmatrix_user', NZBMATRIX_USER)
CFG.set('NZBMatrix', 'nzbmatrix_api', NZBMATRIX_API)
#
for provider in NEWZNAB_PROV:
check_section(provider['NAME'])
CFG.set(provider['NAME'], 'ENABLED', provider['ENABLED'])
oldprovider = check_setting_str(CFG, provider['NAME'], 'HOST', '', log=False)
CFG.set(provider['NAME'], 'HOST', provider['HOST'])
CFG.set(provider['NAME'], 'API', provider['API'])
CFG.set(provider['NAME'], 'GENERALSEARCH', provider['GENERALSEARCH'])
CFG.set(provider['NAME'], 'BOOKSEARCH', provider['BOOKSEARCH'])
CFG.set(provider['NAME'], 'MAGSEARCH', provider['MAGSEARCH'])
CFG.set(provider['NAME'], 'BOOKCAT', provider['BOOKCAT'])
CFG.set(provider['NAME'], 'MAGCAT', provider['MAGCAT'])
CFG.set(provider['NAME'], 'EXTENDED', provider['EXTENDED'])
if provider['HOST'] == oldprovider:
CFG.set(provider['NAME'], 'UPDATED', provider['UPDATED'])
CFG.set(provider['NAME'], 'MANUAL', provider['MANUAL'])
else:
logger.debug('Reset %s as provider changed' % provider['NAME'])
CFG.set(provider['NAME'], 'UPDATED', '')
CFG.set(provider['NAME'], 'MANUAL', False)
add_newz_slot()
#
for provider in TORZNAB_PROV:
check_section(provider['NAME'])
CFG.set(provider['NAME'], 'ENABLED', provider['ENABLED'])
oldprovider = check_setting_str(CFG, provider['NAME'], 'HOST', '', log=False)
CFG.set(provider['NAME'], 'HOST', provider['HOST'])
CFG.set(provider['NAME'], 'API', provider['API'])
CFG.set(provider['NAME'], 'GENERALSEARCH', provider['GENERALSEARCH'])
CFG.set(provider['NAME'], 'BOOKSEARCH', provider['BOOKSEARCH'])
CFG.set(provider['NAME'], 'MAGSEARCH', provider['MAGSEARCH'])
CFG.set(provider['NAME'], 'BOOKCAT', provider['BOOKCAT'])
CFG.set(provider['NAME'], 'MAGCAT', provider['MAGCAT'])
CFG.set(provider['NAME'], 'EXTENDED', provider['EXTENDED'])
if provider['HOST'] == oldprovider:
CFG.set(provider['NAME'], 'UPDATED', provider['UPDATED'])
CFG.set(provider['NAME'], 'MANUAL', provider['MANUAL'])
else:
logger.debug('Reset %s as provider changed' % provider['NAME'])
CFG.set(provider['NAME'], 'UPDATED', '')
CFG.set(provider['NAME'], 'MANUAL', False)
add_torz_slot()
#
for provider in RSS_PROV:
check_section(provider['NAME'])
CFG.set(provider['NAME'], 'ENABLED', provider['ENABLED'])
CFG.set(provider['NAME'], 'HOST', provider['HOST'])
# CFG.set(provider['NAME'], 'USER', provider['USER'])
# CFG.set(provider['NAME'], 'PASS', provider['PASS'])
add_rss_slot()
#
check_section('Newzbin')
CFG.set('Newzbin', 'newzbin', NEWZBIN)
CFG.set('Newzbin', 'newzbin_uid', NEWZBIN_UID)
CFG.set('Newzbin', 'newzbin_pass', NEWZBIN_PASS)
#
check_section('TORRENT')
CFG.set('TORRENT', 'tor_downloader_blackhole', TOR_DOWNLOADER_BLACKHOLE)
CFG.set('TORRENT', 'tor_convert_magnet', TOR_CONVERT_MAGNET)
CFG.set('TORRENT', 'tor_downloader_utorrent', TOR_DOWNLOADER_UTORRENT)
CFG.set('TORRENT', 'tor_downloader_rtorrent', TOR_DOWNLOADER_RTORRENT)
CFG.set('TORRENT', 'tor_downloader_qbittorrent', TOR_DOWNLOADER_QBITTORRENT)
CFG.set('TORRENT', 'tor_downloader_transmission', TOR_DOWNLOADER_TRANSMISSION)
CFG.set('TORRENT', 'tor_downloader_synology', TOR_DOWNLOADER_SYNOLOGY)
CFG.set('TORRENT', 'tor_downloader_deluge', TOR_DOWNLOADER_DELUGE)
CFG.set('TORRENT', 'numberofseeders', NUMBEROFSEEDERS)
CFG.set('TORRENT', 'torrent_dir', TORRENT_DIR)
CFG.set('TORRENT', 'keep_seeding', KEEP_SEEDING)
CFG.set('TORRENT', 'prefer_magnet', PREFER_MAGNET)
#
check_section('RTORRENT')
CFG.set('RTORRENT', 'rtorrent_host', RTORRENT_HOST)
CFG.set('RTORRENT', 'rtorrent_user', RTORRENT_USER)
CFG.set('RTORRENT', 'rtorrent_pass', RTORRENT_PASS)
CFG.set('RTORRENT', 'rtorrent_label', RTORRENT_LABEL)
CFG.set('RTORRENT', 'rtorrent_dir', RTORRENT_DIR)
#
check_section('UTORRENT')
CFG.set('UTORRENT', 'utorrent_host', UTORRENT_HOST)
CFG.set('UTORRENT', 'utorrent_port', UTORRENT_PORT)
CFG.set('UTORRENT', 'utorrent_user', UTORRENT_USER)
CFG.set('UTORRENT', 'utorrent_pass', UTORRENT_PASS)
CFG.set('UTORRENT', 'utorrent_label', UTORRENT_LABEL)
#
check_section('SYNOLOGY')
CFG.set('SYNOLOGY', 'synology_host', SYNOLOGY_HOST)
CFG.set('SYNOLOGY', 'synology_port', SYNOLOGY_PORT)
CFG.set('SYNOLOGY', 'synology_user', SYNOLOGY_USER)
CFG.set('SYNOLOGY', 'synology_pass', SYNOLOGY_PASS)
CFG.set('SYNOLOGY', 'synology_dir', SYNOLOGY_DIR)
CFG.set('SYNOLOGY', 'use_synology', USE_SYNOLOGY)
#
check_section('QBITTORRENT')
CFG.set('QBITTORRENT', 'qbittorrent_host', QBITTORRENT_HOST)
CFG.set('QBITTORRENT', 'qbittorrent_port', QBITTORRENT_PORT)
CFG.set('QBITTORRENT', 'qbittorrent_user', QBITTORRENT_USER)
CFG.set('QBITTORRENT', 'qbittorrent_pass', QBITTORRENT_PASS)
CFG.set('QBITTORRENT', 'qbittorrent_label', QBITTORRENT_LABEL)
#
check_section('TRANSMISSION')
CFG.set('TRANSMISSION', 'transmission_host', TRANSMISSION_HOST)
CFG.set('TRANSMISSION', 'transmission_port', TRANSMISSION_PORT)
CFG.set('TRANSMISSION', 'transmission_user', TRANSMISSION_USER)
CFG.set('TRANSMISSION', 'transmission_pass', TRANSMISSION_PASS)
#
check_section('DELUGE')
CFG.set('DELUGE', 'deluge_host', DELUGE_HOST)
CFG.set('DELUGE', 'deluge_port', DELUGE_PORT)
CFG.set('DELUGE', 'deluge_user', DELUGE_USER)
CFG.set('DELUGE', 'deluge_pass', DELUGE_PASS)
CFG.set('DELUGE', 'deluge_label', DELUGE_LABEL)
#
check_section('KAT')
CFG.set('KAT', 'kat', KAT)
CFG.set('KAT', 'kat_host', KAT_HOST)
#
check_section('TPB')
CFG.set('TPB', 'tpb', TPB)
CFG.set('TPB', 'tpb_host', TPB_HOST)
#
check_section('ZOO')
CFG.set('ZOO', 'zoo', ZOO)
CFG.set('ZOO', 'zoo_host', ZOO_HOST)
#
check_section('EXTRA')
CFG.set('EXTRA', 'extra', EXTRA)
CFG.set('EXTRA', 'extra_host', EXTRA_HOST)
#
check_section('LIME')
CFG.set('LIME', 'lime', LIME)
CFG.set('LIME', 'lime_host', LIME_HOST)
#
check_section('GEN')
CFG.set('GEN', 'gen', GEN)
CFG.set('GEN', 'gen_host', GEN_HOST)
#
check_section('TDL')
CFG.set('TDL', 'tdl', TDL)
CFG.set('TDL', 'tdl_host', TDL_HOST)
#
check_section('SearchScan')
CFG.set('SearchScan', 'search_interval', SEARCH_INTERVAL)
CFG.set('SearchScan', 'scan_interval', SCAN_INTERVAL)
CFG.set('SearchScan', 'searchrss_interval', SEARCHRSS_INTERVAL)
CFG.set('SearchScan', 'versioncheck_interval', VERSIONCHECK_INTERVAL)
#
check_section('LibraryScan')
CFG.set('LibraryScan', 'full_scan', FULL_SCAN)
CFG.set('LibraryScan', 'add_author', ADD_AUTHOR)
CFG.set('LibraryScan', 'notfound_status', NOTFOUND_STATUS)
CFG.set('LibraryScan', 'newbook_status', NEWBOOK_STATUS)
CFG.set('LibraryScan', 'newauthor_status', NEWAUTHOR_STATUS)
#
check_section('PostProcess')
CFG.set('PostProcess', 'ebook_dest_folder', EBOOK_DEST_FOLDER.encode(SYS_ENCODING))
CFG.set('PostProcess', 'ebook_dest_file', EBOOK_DEST_FILE.encode(SYS_ENCODING))
CFG.set('PostProcess', 'mag_dest_folder', MAG_DEST_FOLDER.encode(SYS_ENCODING))
CFG.set('PostProcess', 'mag_dest_file', MAG_DEST_FILE.encode(SYS_ENCODING))
CFG.set('PostProcess', 'mag_relative', MAG_RELATIVE)
#
check_section('Twitter')
CFG.set('Twitter', 'use_twitter', USE_TWITTER)
CFG.set('Twitter', 'twitter_notify_onsnatch', TWITTER_NOTIFY_ONSNATCH)
CFG.set('Twitter', 'twitter_notify_ondownload', TWITTER_NOTIFY_ONDOWNLOAD)
CFG.set('Twitter', 'twitter_username', TWITTER_USERNAME)
CFG.set('Twitter', 'twitter_password', TWITTER_PASSWORD)
CFG.set('Twitter', 'twitter_prefix', TWITTER_PREFIX)
#
check_section('Boxcar')
CFG.set('Boxcar', 'use_boxcar', USE_BOXCAR)
CFG.set('Boxcar', 'boxcar_notify_onsnatch', BOXCAR_NOTIFY_ONSNATCH)
CFG.set('Boxcar', 'boxcar_notify_ondownload', BOXCAR_NOTIFY_ONDOWNLOAD)
CFG.set('Boxcar', 'boxcar_token', BOXCAR_TOKEN)
#
check_section('Pushbullet')
CFG.set('Pushbullet', 'use_pushbullet', USE_PUSHBULLET)
CFG.set('Pushbullet', 'pushbullet_notify_onsnatch', PUSHBULLET_NOTIFY_ONSNATCH)
CFG.set('Pushbullet', 'pushbullet_notify_ondownload', PUSHBULLET_NOTIFY_ONDOWNLOAD)
CFG.set('Pushbullet', 'pushbullet_token', PUSHBULLET_TOKEN)
CFG.set('Pushbullet', 'pushbullet_deviceid', PUSHBULLET_DEVICEID)
#
check_section('Pushover')
CFG.set('Pushover', 'use_pushover', USE_PUSHOVER)
CFG.set('Pushover', 'pushover_onsnatch', PUSHOVER_ONSNATCH)
CFG.set('Pushover', 'pushover_ondownload', PUSHOVER_ONDOWNLOAD)
CFG.set('Pushover', 'pushover_priority', PUSHOVER_PRIORITY)
CFG.set('Pushover', 'pushover_keys', PUSHOVER_KEYS)
CFG.set('Pushover', 'pushover_apitoken', PUSHOVER_APITOKEN)
CFG.set('Pushover', 'pushover_device', PUSHOVER_DEVICE)
#
check_section('AndroidPN')
CFG.set('AndroidPN', 'use_androidpn', USE_ANDROIDPN)
CFG.set('AndroidPN', 'androidpn_notify_onsnatch', ANDROIDPN_NOTIFY_ONSNATCH)
CFG.set('AndroidPN', 'androidpn_notify_ondownload', ANDROIDPN_NOTIFY_ONDOWNLOAD)
CFG.set('AndroidPN', 'androidpn_url', ANDROIDPN_URL)
CFG.set('AndroidPN', 'androidpn_username', ANDROIDPN_USERNAME)
CFG.set('AndroidPN', 'androidpn_broadcast', ANDROIDPN_BROADCAST)
#
check_section('NMA')
CFG.set('NMA', 'use_nma', USE_NMA)
CFG.set('NMA', 'nma_apikey', NMA_APIKEY)
CFG.set('NMA', 'nma_priority', NMA_PRIORITY)
CFG.set('NMA', 'nma_onsnatch', NMA_ONSNATCH)
CFG.set('NMA', 'nma_ondownload', NMA_ONDOWNLOAD)
#
check_section('Slack')
CFG.set('Slack', 'use_slack', USE_SLACK)
CFG.set('Slack', 'slack_notify_onsnatch', SLACK_NOTIFY_ONSNATCH)
CFG.set('Slack', 'slack_notify_ondownload', SLACK_NOTIFY_ONDOWNLOAD)
CFG.set('Slack', 'slack_token', SLACK_TOKEN)
#
check_section('Email')
CFG.set('Email', 'use_email', USE_EMAIL)
CFG.set('Email', 'email_notify_onsnatch', EMAIL_NOTIFY_ONSNATCH)
CFG.set('Email', 'email_notify_ondownload', EMAIL_NOTIFY_ONDOWNLOAD)
CFG.set('Email', 'email_from', EMAIL_FROM)
CFG.set('Email', 'email_to', EMAIL_TO)
CFG.set('Email', 'email_ssl', EMAIL_SSL)
CFG.set('Email', 'email_smtp_server', EMAIL_SMTP_SERVER)
CFG.set('Email', 'email_smtp_port', EMAIL_SMTP_PORT)
CFG.set('Email', 'email_tls', EMAIL_TLS)
CFG.set('Email', 'email_smtp_user', EMAIL_SMTP_USER)
CFG.set('Email', 'email_smtp_password', EMAIL_SMTP_PASSWORD)
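# Save sequence: write everything to <CONFIGFILE>.new first, rotate the live file
# to .bak, then move .new into place, so a crash mid-write can't clobber the old config.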
with open(CONFIGFILE + '.new', 'wb') as configfile:
CFG.write(configfile)
try:
os.remove(CONFIGFILE + '.bak')
except OSError as e:
if e.errno != 2:  # ENOENT: doesn't exist is ok
logger.debug('{} {}{} {}'.format('Error deleting backup file:', CONFIGFILE, '.bak', e.strerror))
try:
os.rename(CONFIGFILE, CONFIGFILE + '.bak')
except OSError as e:
if e.errno != 2:  # ENOENT is ok, as it wouldn't exist until first save
logger.debug('{} {} {}'.format('Unable to backup config file:', CONFIGFILE, e.strerror))
try:
os.rename(CONFIGFILE + '.new', CONFIGFILE)
except OSError as e:
logger.debug('{} {} {}'.format('Unable to create new config file:', CONFIGFILE, e.strerror))
def add_newz_slot():
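# Grow-on-demand: append one empty Newznab slot, but only when no slots exist yet
# or the last slot already has a HOST configured.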
count = len(NEWZNAB_PROV)
if count == 0 or len(CFG.get('Newznab%i' % int(count - 1), 'HOST')):
newz_name = 'Newznab%i' % count
check_section(newz_name)
CFG.set(newz_name, 'ENABLED', False)
CFG.set(newz_name, 'HOST', '')
CFG.set(newz_name, 'API', '')
CFG.set(newz_name, 'GENERALSEARCH', 'search')
CFG.set(newz_name, 'BOOKSEARCH', 'book')
CFG.set(newz_name, 'MAGSEARCH', '')
CFG.set(newz_name, 'BOOKCAT', '7000,7020')
CFG.set(newz_name, 'MAGCAT', '7010')
CFG.set(newz_name, 'EXTENDED', '1')
CFG.set(newz_name, 'UPDATED', '')
CFG.set(newz_name, 'MANUAL', False)
NEWZNAB_PROV.append({"NAME": newz_name,
"ENABLED": 0,
"HOST": '',
"API": '',
"GENERALSEARCH": 'search',
"BOOKSEARCH": 'book',
"MAGSEARCH": '',
"BOOKCAT": '7000,7020',
"MAGCAT": '7010',
"EXTENDED": '1',
"UPDATED": '',
"MANUAL": 0
})
def add_torz_slot():
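# Same grow-on-demand rule as add_newz_slot, for Torznab providers.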
count = len(TORZNAB_PROV)
if count == 0 or len(CFG.get('Torznab%i' % int(count - 1), 'HOST')):
torz_name = 'Torznab%i' % count
check_section(torz_name)
CFG.set(torz_name, 'ENABLED', False)
CFG.set(torz_name, 'HOST', '')
CFG.set(torz_name, 'API', '')
CFG.set(torz_name, 'GENERALSEARCH', 'search')
CFG.set(torz_name, 'BOOKSEARCH', 'book')
CFG.set(torz_name, 'MAGSEARCH', '')
CFG.set(torz_name, 'BOOKCAT', '8000,8010')
CFG.set(torz_name, 'MAGCAT', '8030')
CFG.set(torz_name, 'EXTENDED', '1')
CFG.set(torz_name, 'UPDATED', '')
CFG.set(torz_name, 'MANUAL', False)
TORZNAB_PROV.append({"NAME": torz_name,
"ENABLED": 0,
"HOST": '',
"API": '',
"GENERALSEARCH": 'search',
"BOOKSEARCH": 'book',
"MAGSEARCH": '',
"BOOKCAT": '8000,8010',
"MAGCAT": '8030',
"EXTENDED": '1',
"UPDATED": '',
"MANUAL": 0
})
def USE_NZB():
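# True when at least one Newznab or Torznab provider is enabled.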
for provider in NEWZNAB_PROV:
if bool(provider['ENABLED']):
return True
for provider in TORZNAB_PROV:
if bool(provider['ENABLED']):
return True
return False
def DIRECTORY(dirname):
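# Map a logical name ("Destination" or "Download") to its configured directory,
# falling back to the current working directory when the configured path is
# missing or not writable.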
usedir = ''
if dirname == "Destination":
usedir = DESTINATION_DIR
elif dirname == "Download":
usedir = DOWNLOAD_DIR
# elif dirname == "Alternate":
# usedir = ALTERNATE_DIR
else:
return usedir
if not usedir or not os.path.isdir(usedir) or not os.access(usedir, os.W_OK | os.X_OK):
usedir = os.getcwd()
logger.warn("%s dir not usable, using %s" % (dirname, usedir))
# return directory as unicode so we get unicode results from listdir
if isinstance(usedir, str):
usedir = usedir.decode(SYS_ENCODING)
return usedir
def add_rss_slot():
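# Grow-on-demand RSS feed slots; USER/PASS are currently commented out,
# so only ENABLED and HOST are stored per slot.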
count = len(RSS_PROV)
if count == 0 or len(CFG.get('RSS_%i' % int(count - 1), 'HOST')):
rss_name = 'RSS_%i' % count
check_section(rss_name)
CFG.set(rss_name, 'ENABLED', False)
CFG.set(rss_name, 'HOST', '')
# CFG.set(rss_name, 'USER', '')
# CFG.set(rss_name, 'PASS', '')
RSS_PROV.append({"NAME": rss_name,
"ENABLED": 0,
"HOST": ''
})
def USE_RSS():
for provider in RSS_PROV:
if bool(provider['ENABLED']):
return True
return False
def USE_TOR():
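# True when any of the configured torrent-site searches is enabled.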
return bool(KAT or TPB or ZOO or EXTRA or LIME or TDL or GEN)
def build_bookstrap_themes():
themelist = []
if not os.path.isdir(os.path.join(PROG_DIR, 'data/interfaces/bookstrap/')):
return themelist # return empty if bookstrap interface not installed
URL = 'http://bootswatch.com/api/3.json'
result, success = fetchURL(URL, None, False) # use default headers, no retry
if not success:
logger.debug("Error getting bookstrap themes : %s" % result)
return themelist
try:
results = json.loads(result)
for theme in results['themes']:
themelist.append(theme['name'].lower())
except Exception as e:
# error reading results
logger.debug('JSON Error reading bookstrap themes, %s' % str(e))
logger.debug("Bookstrap found %i themes" % len(themelist))
return themelist
def build_monthtable():
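# Build the MONTHNAMES lookup table: MONTHNAMES[0] collects locale codes while
# MONTHNAMES[1..12] collect the matching month names, two entries per locale
# (full name, then abbreviation), which is why index 0 is appended twice per locale.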
if len(getList(IMP_MONTHLANG)) == 0: # any extra languages wanted?
return
try:
current_locale = locale.setlocale(locale.LC_ALL, '') # read current state.
# getdefaultlocale() doesnt seem to work as expected on windows, returns 'None'
except locale.Error as e:
logger.debug("Error getting current locale : %s" % str(e))
return
lang = str(current_locale)
if not lang.startswith('en_'): # en_ is preloaded
MONTHNAMES[0].append(lang)
for f in range(1, 13):
MONTHNAMES[f].append(unaccented(calendar.month_name[f]).lower())
MONTHNAMES[0].append(lang)
for f in range(1, 13):
MONTHNAMES[f].append(unaccented(calendar.month_abbr[f]).lower().strip('.'))
logger.info("Added month names for locale [%s], %s, %s ..." % (
    lang, MONTHNAMES[1][-2], MONTHNAMES[1][-1]))
for lang in getList(IMP_MONTHLANG):
try:
if len(lang) > 1:
locale.setlocale(locale.LC_ALL, lang)
MONTHNAMES[0].append(lang)
for f in range(1, 13):
MONTHNAMES[f].append(unaccented(calendar.month_name[f]).lower())
MONTHNAMES[0].append(lang)
for f in range(1, 13):
MONTHNAMES[f].append(unaccented(calendar.month_abbr[f]).lower().strip('.'))
locale.setlocale(locale.LC_ALL, current_locale) # restore entry state
logger.info("Added month names for locale [%s], %s, %s ..." % (
    lang, MONTHNAMES[1][-2], MONTHNAMES[1][-1]))
except Exception as e:
locale.setlocale(locale.LC_ALL, current_locale) # restore entry state
logger.warn("Unable to load requested locale [%s] %s" % (lang, str(e)))
try:
if '_' in lang:
wanted_lang = lang.split('_')[0]
else:
wanted_lang = lang
params = ['locale', '-a']
all_locales = subprocess.check_output(params).split()
locale_list = []
for a_locale in all_locales:
if a_locale.startswith(wanted_lang):
locale_list.append(a_locale)
if locale_list:
logger.warn("Found these alternatives: " + str(locale_list))
else:
logger.warn("Unable to find an alternative")
except Exception as e:
logger.warn("Unable to get a list of alternatives, %s" % str(e))
logger.info("Set locale back to entry state %s" % current_locale)
def daemonize():
"""
Fork off as a daemon
"""
# Make a non-session-leader child process
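# Classic UNIX double fork: the first fork lets the parent exit, setsid() detaches
# from the controlling terminal, and the second fork ensures the daemon can never
# reacquire one.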
try:
pid = os.fork() # @UndefinedVariable - only available in UNIX
if pid != 0:
sys.exit(0)
except OSError as e:
raise RuntimeError("1st fork failed: %s [%d]" %
(e.strerror, e.errno))
os.setsid() # @UndefinedVariable - only available in UNIX
# Make sure I can read my own files and shut out others
prev = os.umask(0)
os.umask(prev and int('077', 8))
# Make the child a session-leader by detaching from the terminal
try:
pid = os.fork() # @UndefinedVariable - only available in UNIX
if pid != 0:
sys.exit(0)
except OSError as e:
raise RuntimeError("2nd fork failed: %s [%d]" %
(e.strerror, e.errno))
dev_null = open('/dev/null', 'r')
os.dup2(dev_null.fileno(), sys.stdin.fileno())
if PIDFILE:
pid = str(os.getpid())
logger.debug(u"Writing PID " + pid + " to " + str(PIDFILE))
with open(PIDFILE, 'w') as pidfile:
    pidfile.write("%s\n" % pid)
def launch_browser(host, port, root):
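# 0.0.0.0 means "listen on all interfaces", which isn't a routable address,
# so point the browser at localhost instead.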
if host == '0.0.0.0':
host = 'localhost'
try:
webbrowser.open('http://%s:%i%s' % (host, port, root))
except Exception as e:
logger.error('Could not launch browser: %s' % str(e))
def db_needs_upgrade():
"""
Check if database needs upgrading
Return zero if up-to-date
Return current version if needs upgrade
"""
myDB = database.DBConnection()
result = myDB.match('PRAGMA user_version')
# Had a report of "index out of range", can't replicate it.
# Maybe on some versions of sqlite an unset user_version
# or unsupported pragma gives an empty result?
if result:
db_version = result[0]
else:
db_version = 0
# database version history:
# 0 original version or new empty database
# 1 changes up to June 2016
# 2 removed " MB" from nzbsize field in wanted table
# 3 removed SeriesOrder column from books table as redundant
# 4 added duplicates column to stats table
# 5 issue numbers padded to 4 digits with leading zeros
# 6 added Manual field to books table for user editing
# 7 added Source and DownloadID to wanted table for download monitoring
# 8 move image cache from data/images/cache into datadir
# 9 add regex to magazine table
# 10 check for missing columns in pastissues table
db_current_version = 10
if db_version < db_current_version:
return db_current_version
return 0
def start():
global __INITIALIZED__, started
if __INITIALIZED__:
# Crons and scheduled jobs started here
SCHED.start()
restartJobs(start='Start')
started = True
def shutdown(restart=False, update=False):
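# Stop the web server and scheduler, optionally pull an update, then either exit
# cleanly or re-exec ourselves with the original arguments (minus --update,
# plus --nolaunch).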
cherrypy.engine.exit()
SCHED.shutdown(wait=False)
# config_write() is deliberately not called here: don't rewrite the config automatically on exit
if not restart and not update:
logger.info('LazyLibrarian is shutting down...')
if update:
logger.info('LazyLibrarian is updating...')
try:
versioncheck.update()
except Exception as e:
logger.warn('LazyLibrarian failed to update: %s. Restarting.' % str(e))
if PIDFILE:
logger.info('Removing pidfile %s' % PIDFILE)
os.remove(PIDFILE)
if restart:
logger.info('LazyLibrarian is restarting ...')
popen_list = [sys.executable, FULL_PATH]
popen_list += ARGS
if '--update' in popen_list:
popen_list.remove('--update')
if '--nolaunch' not in popen_list:
popen_list += ['--nolaunch']
logger.info('Restarting LazyLibrarian with ' + str(popen_list))
subprocess.Popen(popen_list, cwd=os.getcwd())
os._exit(0)
| gpl-3.0 | 6,376,135,073,458,459,000 | 41.51198 | 114 | 0.625994 | false |
tom-mi/logrotor | test/util/test_syslog_util.py | 1 | 2429 | from datetime import datetime
from freezegun import freeze_time
import pytest
from logrotor.util.syslog import message_to_bytes, bytes_to_message, Message, Facility, Level
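# The conversion tests run under freeze_time: RFC 3164 syslog timestamps carry no
# year, so the parser presumably assumes the current one; pinning "now" keeps the
# expected datetime values deterministic.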
@pytest.mark.parametrize('message,data', [
(Message(Level.CRIT, Facility.KERN, datetime(2018, 7, 28, 1, 2, 3), 'localhost', 'tag', None, 'Test message'),
b'<2>Jul 28 01:02:03 localhost tag: Test message'),
(Message(Level.ERR, Facility.CRON, datetime(2018, 1, 1, 15, 55, 55), 'localhost', 'tag', None, 'Test message'),
b'<75>Jan 1 15:55:55 localhost tag: Test message'),
(Message(Level.ERR, Facility.CRON, datetime(2018, 1, 1, 15, 55, 55), 'localhost', 'tag', None, 'Test message'),
b'<75>Jan 1 15:55:55 localhost tag: Test message'),
])
@freeze_time('2018-01-01')
def test_message_conversion(message, data):
assert message_to_bytes(message) == data
converted_message = bytes_to_message(data)
assert converted_message.level == message.level
assert converted_message.facility == message.facility
assert converted_message.timestamp == message.timestamp
assert converted_message.hostname == message.hostname
assert converted_message.tag == message.tag
assert converted_message.message == message.message
@pytest.mark.parametrize('tag,pid,data,two_way', [
('tag', None, b'<75>Jan 1 15:55:55 localhost tag: Test', True),
('tag', None, b'<75>Jan 1 15:55:55 localhost tag Test', False),
('tag', 42, b'<75>Jan 1 15:55:55 localhost tag[42]: Test', True),
('tag', 42, b'<75>Jan 1 15:55:55 localhost tag[42] Test', False),
])
@freeze_time('2018-01-01')
def test_parse_tags(tag, pid, data, two_way):
message = Message(Level.ERR, Facility.CRON, datetime(2018, 1, 1, 15, 55, 55), 'localhost', tag, pid, 'Test')
if two_way:
assert message_to_bytes(message) == data
converted_message = bytes_to_message(data)
assert converted_message.level == message.level
assert converted_message.facility == message.facility
assert converted_message.timestamp == message.timestamp
assert converted_message.hostname == message.hostname
assert converted_message.tag == message.tag
assert converted_message.pid == message.pid
assert converted_message.message == message.message
def test_syslog_bytes_to_message(benchmark):
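# pytest-benchmark fixture: measures the cost of parsing a single syslog datagram.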
data = b'<75>Jan 1 15:55:55 localhost tag[42]: Test message'
benchmark(bytes_to_message, data)
| mit | -5,929,938,222,142,129,000 | 42.375 | 115 | 0.685467 | false |
stefan-walluhn/RPi.TC | runtests.py | 1 | 217227 | #! /usr/bin/env python
sources = """
eNrsvXtzI0mSJ9Z7ku50uNOtnmemPyTloKwWQDeIevTsni220bM9/dgt2+mHddXs1IrDQyWBJJlD
EInKTBTJmWuZvom+mz6BTJ9A/ozwiIwEWdU9s3ayapspJjLj/fBw93D/+f/5L354/cH45f/6wQcf
LHe3bdG0s7xpirotq+1sW1yX27aod3XRvv6zl//3ow8+GA6Hg6/K7TqjD1fFuszbIive5Jt9m2Om
rC6a/aZtIEHGJWUNfCmuii28bC/qan9+kZ3uy00LKT57/mI2eHFRNllzUe03a8i92+SrIqs2a1f1
bHcLNUD+fb7Z3M4G2IZBebWrsOzbRh/zpnWvd7eDs7q6yjp9yiTBvi03PUlmdeGq1uS/5PZ+pmm+
rOuqHkB1Z9iCGbS5Pavqqxl0tW6b67K9GA9/l7/Jh5P5IIP/HmTPiyK7aNvd/NGj0/15M/vdbXsB
dVX1+aOyafbFk5//9X+gpMviZlc32SIbD39ZVZtvd8NpNvxlueWHX2/z+pYff5Vfna5zfHp29uUN
vfqiXLXDKRXj/xv+qmzaz6srSvF3xbao87aqJcc/lcVmjQ+YIK8LeoRh7pbyfbGr8es3+yv887yl
X5+1bV2e7lvK+Hx/2qzqctdSuvyqSLcFv77Y7zbFcMI9btqrlnv81X67wvH9ojijlmxgVuT5+6Ld
11vqZbEpuEKYj/J8263ls/25fsqG39UwnfjwVUVN/s1FuSl42OgnTBaVn5dNosEv6tsvb1YF9wl+
wOLHVSi1w2qgkmiV+KevYGF1i/rypljRHGyq03yDTzAJ1KTvoJs0zXWRX/JsbGG57d0I4ZpYbqt1
gcPUFO34vIBN1dZjWPTTbAtDPclgAdIT7jxeRGZ43yozzQdnXhdn2bJslpCUyhxjObKo8b+aZiXD
t7PlcoXztVy6FnCtnYKw/PsX5Js/KDZNMX+bdpVNuYU9uV0VlGCKRGKG6Sdv06hkKZh+AiSAmpp9
lZebfV2Mea3ACpZSgFYRrYAB3gPVvMa1lznyUm7PiQQSRXMNWpbbsl0ux02xOZtm0NpNviXauhgO
TePw82yV75sCpxWoUHGzgqxn1XgSpjElQErzC1qPFbrmjJtqX6+gf2c1LIWp0OTlGfRt8VUOYy+1
X1VrKAgHAUhGU0g2rvUNDBaQF/j+RQGE7pmWXdRjKpVTtfWt74hkmdHfMRTOaQoaSh1Zn/yMX3Q6
ffzkJJ44WOiSeix/uWgk275vnfkeD8f+wMAUxXoKR1YL01fAaoC/eQNpj+r9ljbOMNrpMO07JDo4
vcE5icdk0UyxiHW5hsXeUvGzLHu+Pz+HUwgqbOap8lZAnoHK6pGKS7gACgftOy2gCQWcrO4jtAgX
xdER/17AfJfbCZASmm1o9Lg6Oyu2cMCeLzflVud78U211RmGAaJ3sPQzfG1Gn14v4ISdrXCvfoW/
xzgTSxxt+vVkwsOs28etr2S90i4zV9qKcOHikT3DUzanLa9fxoVd4LIIcZxxrfP+0JWB0w5kj76a
jYRtacKdMWt2G1iLo99uR5Mw3fHjk+yjRTbMsk8+yR42w+xhxgVOfcKw3VjI7HdVuR1TAbIBipsW
vg0fwnRLKdrYxydAAJEWL5fB7ndLF/NaTmMUsiVzXQcj00epD/8cP/mr+YmdH3w5GFQ7ZgyWV/kO
Uv6BUuAe/2XZfltnsCr/kxxp8vJlRW//Y/j2M6By8PYvzNtfPb8oz1p8+8kn5vX37vWnn5rXn62p
gI/MK+Ar8NWRefU18Jj47kPz7ovyDb56ZF59tamqWt/bD19XVMtD8+rL1/hmsTCvvqlafvsz+/ZX
3JfgzZf0yqb6O+5a8IZSfWpTfVddUzdsP541+KpsglfQFH6LVMN+2dLrbdhqfov0Bb4MfhgM9sg3
dqZWCsV0D4Pqtm9wAcGn/yN4/2udifCtThm8xbrkSOzQf65xXfwj03t/QrpEeBhmzKm0VXa+KfIr
JGVn+w2cjFDaOVNUJgK4TbNDJydTl+jAVPpFf33Wc+SLy9WSzyDOHzEDD+Ao2mxO89UlnwPXRbau
tqM2u8jfAO3Nml2xKvMNigwbEne4iS5/eZhjCQ/WLx2B53RBslVFjBz0ZonnApxP/tR0G96esP58
Q9lMM9MQzFB2G6+qMLecvY6bSRSF7LJje+Lclv6ZukC6q8fciDCHUCKTcSqN9UR1Ux5m1sIBhA2+
h3E5xkQn9xk+4LoLZNDvMYx29CDL8icePhkMPHen9K8Zg6YIy+JywgNgPFzlvC63a+A26XhhxhXO
Zb9ZtO+82JX1g5EoFkNcFMMuI+yySOLhJ0aw1k1M0wd0j4oyFdLWWuLC9pxt3a2DBpe3ISaY8Wkd
FyPTywXh4jccCpxLuD5QeIAvs9NqHc2hqUK4+LDwb4ilYRoQkpDEAtW1FFKQrWN1vfiPRzdwaVvh
qzbVKt948sBjSjxBjU2nz814kjHJB2qIUiO8IGYBC5mVa8N0JFa15cuTi5nquIMcHF7LD5j6AbWG
lbDHjsG4N7D6kvUEixmGg98Lj4l8K3bUNAZ2e1vvizGli7a6CowyEDG5OEBRotkWzYdMOI6d5Q2L
M5xhfDvD5+DDMiid3oRrgtcCfpgEy7PawcoCrmtZ7XCqf1/uxlRDtWu4BbMVNQr5sSbqN+ULKqY3
qYqlipC6VLtlc3t1WuF8WJ7vuNp5wfvkAD0HnjWj/9FC7I6Dq2DaoXp9/8V9Clvsd8aSTk5kAJY0
MdAM86ouzy9aapRvwp2UP94j73J2TrOoYQuzFu4/Cp2+LMzM/rgDpndsk12FndmzEaUt82SfTlF7
lWrmPxS3p1Ver4nPq/e7tqe5ibzdlN1a7tqKB3fMwNIjEjGXyKXw/iuU+IYysCwHlUltBrP0bCuC
KsMZgGrhdbef4aarRY32dqSNFchC2U7hR2VpG0wqKWqMeovTzJA+IRP1bZ0UarHnxycRRQNKQtcC
Ug+f21G/DhyfTLGkmF52spnlu12xXfcvaB5OKnjBPbxrCW1ZIODRwD4M8SSSf+h3j2A/HA+zj/gA
IvHetnMCX4aT4TvMmWj4ZdJIbjNztstb2EOkDlGJ7pieZmnyLQMaU2h+mZoBLSyah7Dn2goitd0K
DjIlJq+l5wWpwicHmJX7EmB8WIRdvC83cw+i+RabD+9sdO+V2ypmK+7JPVDWWchD0OkQ5udX/QXQ
d6MYTrIAnDS9kuK1r1zApIcNeDseoNOjyeDdj//O2f8uvPEf+bzvnPV2Av9Z1ite+ykTDI9Wotpv
V+Hk4ptwlWGWGb72Vef1+bL/1KDff7BzirmH2ZwK/8GWsufL486hA5/wwKGq4Tk6bKLqp/QitUHg
/aSTU04F2z7H8W7HQOAj1nLbHGu2E1RBuLrikrkzeo5pnsmhpgepk7OMo3FZ3F5X9dqNiPx+11GR
7Cp//2nGRypdGmG4WTyUgrVFqfkKRzUqBvJq3faEOTTS6RKCQ25iOUcacdTJ/4h1GJRxrxHHxMOf
YhkOP5QxfttxCjLeMTyX1z92cLiEew3N5fW6+YmG5t3H5h6Dgx3ib+W2QOUlXv8zPxmX28eNwR4Z
6zncJdVhBWHFcXXc8zuOXqzC1Yel/DEP2g8/DPv9I09Dzz7DoP12+4eHOAT49INl1Xf30Fr3MsSQ
Gk/YXeLu7r7nMWq5xUKKFlxaF2n7wpozoPxOS0iSi3+v2sRhd25N250yTR5+7MR6he47KRelADI9
iLkVZ3gkg4MGLVa2ZRWGEW0xwWzV3rBk+6sqX0/6mxsqc6nsaOAiZpffJbkLrDc6RDv7dyaEIlU2
FZBqQrQvSVuuBEfz/ImYYJK5gkH4CTdtZ7T8xp3x3qWhS2/Yt1C/xXc7P6qY+03kg+zzi2J1qVcD
+BFVTmSYmOOdCi/e/lsCNeWK5mKajZbLdblql8vRNPvDD6l9b1ibn2i1YLOX2uY/5qKJK4rvFcLv
5n4hodIM0nZIwu6eF5j3OVj+2MeGEEa6kVOq2DR1G5iBNJGYTm+SdAuzzvBz704UOxOhOZ2yJ4O7
Fcqm+mjs00p4ohZpa9yfXD+eGnPT3sTAl+dbP/Dww3SJToBw6PlVz9hD7jtOjdlsRmvMmyD1jL5w
x2R+gdxEV+3mjsTxAUKH1kvbamHbNuN3/XlW1WZZnZ01RRvm8+9NM4vrJSeSxsqASkYQdYDYNcyb
TKPW3NWO/vakWpKwInBtOzlIIpN2BJ2VnLAf6BJGuzr+yPogW9Xg9b94+a8++OCD3e0MxMv16//i
5f/1Zx98YKztxbrnebseV6e/K1att+PJrvJLMt3ZHW2KN8Um25GRO44njGKT5W+gZfnpBi0V8XjL
lYfzE8cVoZkPnIdXGdpflnXTZvlqVTTNTGvqsfmJjX30DBRbUWmHzSznp7MZinjsztRdQVFQJbUS
MoWqG5kctgFnW7yUsYb2Wow1YKRhoOewnNABgrhWHm3e2mEVMn1XgwHkgbbgNEwGr//Ll38uc1ZW
Mx2S1//Vy//lA3LZAE5ht28fVfsW/mQXxWaHI4zj+Ppfvvw3klMkjtf/6uWr/zmccTcKNcxq3ZCY
qo8D6TRSFtNlsjSVJDC0r/dFeLpC3d3P2dGnmikrW9ZLZxX8FsPZrGI2STP4uzt2DsjyTnZboT2O
LvKG+CYtihgmbREwTX2yAaee2bTjzjmXLBxWGjTqKlE2rYoXtztdECqIo1WFG6RJdrWHfXBKlmZS
7rCzLlyrtP9+cHmwePN2k7mt7EpMGNRBWZMEJYPXRAhfdz/VxVUORcBiW2SoF8QSOlVAG7r7N5LI
wkx4mZrKUCpBdvXGd4Nl9mn2OHHjnR0tsifd1zBhWiQ0/bg8SduC2W6WCcsBNoOGwhIT/xzI5TOa
BrLID8cGxuy8vVhelNv2ziEynfY7Nt/e4lG2vT2wT+HrmBYCUObIjuoGZQz9No8H82be19cXwNsN
0mK8b9oG75/g30NN22zerWm4Dfubx01Jtdk3rwHyR5SOH3oaqWqa1RVKCvjvA1idkA/2Lhrh4xs4
7FQ3D1vbnz5csOvfFBMv2OoP0suTbNXFY9N9KFgqjAUfyNdvsoB6WMjVMem3S06KHt9Ms9seMw8Z
LtPz8c3x4xPIAP9GN/cde8W3r+dQ+biAjlFXPy7Y5HYC4pM80SKRH3apnBywpvQjlB7BcPDh305r
NmXT+hUb2JZIvt7CNzNcEGNJNznQTEkZFC/rJE4or8O0B1eJDPuxDh0O43hpxhW1iR1Pm43dOUWL
xtfV74stPLLPmXnRs5NI5wGfG2V/olyDByAuN86FdFBs4ZCs0atmkbln04pf5k3h2GRIE/zuaUOc
x6f35RoPyhI7F/zuKZdP3SBlx1NMeWjyhi1cI3CecA75tFhP2cAS2aCazC6R5cZ3uwrEE2St28oV
d13DSszQTYeMCuGEwQToM5J9/h2z5k9nf0m+tawjvoZ65IsXXGH90K2G5aBwJrqjAWwRs9jEcY9c
J5rRYLAETrK4QSfPjjXYNHt+28DhiEVMs6+Lq6q+pcGbhsVPzCzgdRBJEgv3eOAY0STI5nQtjpVd
g49T1Khi6uUS/aXEvVeGgPzMsk+hCx9Ps8fOR6lYZWP298Je00P2N/R+uTCSoKxqIZsihDyAQwCE
c3QquIYhcdNUbsr2lhh1qJq9OE9hbTYgORCTAQ9eiGkr9ODh5gOrV6GDlZweBY5DYz27lA/yinHK
d4pO0JMONeT8/UcG5EUbktPfzdYFrrOx1n+fc6A3s7Z7Ehr/Rwp9ancTaP19qQ3PZ4dBhnfBPQv1
e3xz0OfzRofHZqQhvzMfNm9gHb504qhA+E1/B6ZJKCZcITtHV3kRffBujbRgNQXJF5iHtL002WGJ
mBAl4bcqc6jS8zBZJs3YW5aHeVLl4TY5Q/3JGaTDK6xG1u+mWqVWLyVJr0ryz2Fx3zokBmk4+4LS
zs6WcmcWW6ybFHx7NgiZmA19DZcoZUu2Swqkqj0fu2MLrC11fFgfVBqp3v5sB6c6qup8MeyTHunI
d7PVpgqOflJYxT4cVHPBrunrCohYuYWzpWwXyA9b3YYqqqgLPDUwhX5rC4FbuqvLJXK+jt4REWNS
RdaLJS6GBGnzP+y+sel17+he0tVDLpz82HtGvP3Wb9r6HTa+tHfy43d2eUVb+59/Y/fd7x7a6R3d
WKj8um95ZNWARXZbzAfv+EMyRMg+/FBMREKe6grE4qucNywt0goy3n58Kae1AyiZBWqihm5/RtnI
kp4RvB0ht5SyZuEc/GW2q3ZjSm0GYbsWh+CgSHjdWyRnsUViarv3aYGP8K8phK7/bS76PqE7MCCN
Tbuu9m0gE6SNc0ITkWM8Vm8mXv7mXJFU1lFolds3+aZcWymYLU3U9zm06slbMmzCbR/oEVyt3ZYK
d6A5u7wGdn9G/PAYpiRsr/1G/YvMe3xzQr2ByVegrXRwnC2ZP5GzxR9n+uO+K5T4y54Fuly2db4q
MM9FuWauOxgxOSqhzvShpM6wd56WWIL4zPYcmOrNdYBZlO++nOhY1RO0txBXQNwAHKSnwYDrWB9k
4pWHJ8pXF7RuxyvMDOt1mrWnZpLuHGu5DFVhgkoYmVyo+B2EWwTSmPNTJAmY37vac7AtXLTNNdA1
2TNE9yiU2oactTcx0uwIxyQQDlDCLXMB4w/x2qIxV0OKNXBRyI1Os6crnbM9cC7COxRruSryy1xG
lUobKBFQoBp6e+CAeueLGvhOy6QXVyXhiCuKWH/NdEyWhjy2OpkfSsmTwev/+uW/pIsXvFF+/a9f
/r9/9sEHD7Lv/unF33/7zfKz7//u82+//u5XX774cvntPxDgFSecI2PREnHX4xGIBn5B5okkexbf
GSVruSRRFrVkI9QNj05oOyi8BLrIwBLFL7A65yCNloQPQtdzGUM54YlxepuN5Crv6CrjloxEfr0u
YNzL8/Oiprk9LTbVNfCTMD5DYDW365K0GFACzXy1gc/kxE9Db/lGLtYsYa8UGAtEFzZ0jD7MD2gC
sG7y0ZLFMwgBvaD2s1JryjDvNPt1k58XoltY7mpobL7ebfbnwLqh+nWN98hBKZ6rVfqxHAyinOMJ
jENbZXDO7jeokJIGfMgLdId4Zk1F0rz0ZJLBOXgJct/rwct/L9dvhKaydP7U2+L69b95+erxnw5y
zdddbdY/AnPNdaYDurbsordMfwS0Gm2DDmn/JBs/nWZ/Oc2evsdee4+99h577T322nvstffYa39y
7LVtJfBr/zkBr+lEpeDXEoe3pu8HYNMEPwqGjT50rWzodSQg4ivnfjPsg267L3Lb8Lfb4X2Q28Je
vjV+27AHv234Vvht7wHc3gO4vQdwew/g9h7A7T2A23sAt/cAbn8UALfwAkQZMfz4HpztPTjbe3C2
Px04G1f8/0sgNsJSM5KuxVcLTF7xXQqVjT68R2J7j8T2HontPRLbeyS290hs75HY3iOxvUdie4/E
9h6J7T0S23sktvdIbO+R2N4jsb1HYnuPxPYeie2nRmKL3Q+C7+/B1Q6Bq5FlSFhCYCVCxBd6T3+z
hdi//razHTqFAAckRfx2O4xR1qLEZGMSvjp+8pfzo6cnPxUInBmT94Bv7wHf3gO+/bMDvv3blwPx
Qbi52gC/9fq/efmb/439DtiVhZQa52y9jgY8ZHcPXFm+KX+PvyEbvbto4aFp6/0Kqiyawelttm8w
QUMmNQoHx1hTzWwwGK8m2UW1QeeRy7q4LDZT/Zm32VVRwyzvb7JiP8uePn781+SE4H0QkEbf17tr
P24Shgr+M3vrBm69BiTshg5xSdQLDnYzM2nGnQlg18LIjDrZLm2NiFvy0zhBq4HWN+pe8jXwHvRq
3N7uCuvVdhfAHRJleHM8f3LC3kCje0DWhWoynAEoGrO7BqUK+UdcdGLf4hIKNApwWvlpgw5ord1H
+TnaZLlNt5QXy2VwaEoqg2hDq5GooAhdkuat+0atI15wAbxb3GVoUdOWq8tbtvYMS3dZj0ewPzDF
CJUd+GCKR04G52zMRG7sOypq/+lk6ouyxuWtQMzppGKyprPq4J2ulhf5+RjhiSYWk+YzAT7xOI49
tngJz+oOzOJsv1vnIAeqcqjPrq/fWbvZ74p6DC2dUskTh2U4DtVNVK3MCz1TP1yBtma/JSP4NCtl
6pYrt+ti2y4ei/cGTG9N7nOmmEGHanCnJOtTe7cUKsqfExX8NecSG8rxRnRUWsBk5i0YO7O5R6Nm
sUgOOomsc7eHwgfIHMmCcsbJceHDT0CQgYUn9Dl7uP7UWSpB69ZcOlTrN+8iRYJG7h0IPLK2phks
YzZbHZltPJqzkZz5IK2EL7gK9IPdZvCJWODp4IeJru2/h5MH1zf831L9t5wgLKVvXsgjo26Lq117
Kw4ZbzFTD/D8ZQ9XUv/xUencA6UX+HLsRs/75bpRgeZKT9369ONipS8zxvCaECKO4Rh7Ylz3x67J
o3yan57W03xVV9vbq2m+XqPp6hQGYFO007wu8unp9HRdTU/L8+kpHMqXUw9gMDrdVKvL1/uqLaZo
qTeFkoCctsBnrHJiWaarAu1NpqsSkuCEwD8bWwL8PK+r/Q7eX6Hec7peT9fADKzPttN1WcP/30zX
8LOdFlfwv9NibXOfoRsdgoWdVVv8p76aEquFry6eTC+eTi8+nl78fHrxl9OLv5peFPl6igNtiyin
JbvdlFfnU8JKnaJH6OXperrJT6Elm+Ic18KmnFLvkYwiTpEp4irfTa/y+vW+KKbQhz380+bTK2Cw
SuzttoJh2Vbc+G3FDbT5txW7701lw0CeatfysFQ8jLvpri6mr6fNVJKa7LAIMVdzlW82U1g+2yli
qFwW+KeCljbt7QZ+7E/h/7tpS3iDJntLM9eup+hLQBPenlVVO20v4H84Ym3ZQo62nrbtdD/db6Y3
V7tgEeSwIfEfngQazIt6ilq/dXEzRXO5q2mTQ6Y3ec35JuzdMR5NRxNCkzwJMEqfY4vvfTRFxxKt
8ml2yxgVM4TebMYJnEEEJLuZiQfseIQqmtHRaJIwGLbHLYEWOi6szq87SMi/Q6zWPDutbrL2AnjZ
VY62t9sWt37uODrBvKEU1+Vmk53y7i23q81+DbQCFj8Uu6FuoFaKAXzVNz4xFFByCHGmpxujYvGD
g3BOnUcdTGdowR546zeFcy9rK0GWk354b/1EiyilI76PgUvZ1/6HoakENxQ1nKtZcCGx893qogi5
Mi9P0/XNH37IHtBCWBdtQZ6tCKoh3TEAd5SNm4TGR/QQ1aVNRmNTfQ6ToG1a52accWNdF/Eo9j8Q
+w1/ZfwT6bVEQI70DGlnBpyadQk7vYVxqBEoGxOMGt49jwhFiS8PAsgQ5jhDT89+uZXqB0J2QfKx
H/djKOZk0DUuTEApkDH3/lTYfGJIESawrmJ+uVvfebDptBDjXMVQQAkDSVvOQTDPHmvJJFxdYjCW
S94uXZbYDxOkNTkHidLEQUSc1XGfw9znqCM6U38V9gQgjTQcMpfIRtTlm1w2BaEP5G+qck2zfwEz
T1AQWBQydMQHmpbyNuUXMroh1Xjgd1+IomfeFw0c78VYeSxMZr0TkCqmig5LFtJk8hGAaiqj6BlL
gp2fqGBn93ySEBxLBpyHJ4GVBgKaYhHwtde7ANOYxiGLyW1rHaMZtg3fH2xbZ49BDqEeuqqElhwf
PenssWd4miZ2WVBI4HESjwU2MBwLeCOCgm64FuUvJ7VSZxPyQ0cDYOgkyd8a8WOJXAAq7MZSVUqG
5CWBGursI+CeRxkwBR9GxU4isT9RjG/CRwtL2fsqhJo+edg8bD6F6lB9zQ2cegGTVAMNDVtkbGDn
Sg0NMFmXERENRI8fEq+0mwOFI4DX5FAPHmkHdIAPDcxRemC6RA9LEhlbyv0oNSyx0a/OOWr+QH6o
tj3TnpyJRzIRWnds4ZEmzZ1iPvVD4otyo2N2tOlLamM/cHrOwiQ1SGVtjQSLZCzaN9GtoiaIAZs3
XRM6RYDQLCnDdoF5B1nfNcZVOs26M88m2IePQBV08ebNNHAGggC+lUv4lgQI3gb31EpE7fTGAU2/
FvLpSfYzwiSK1JDqT+7QM0wxHSA4UrPrndeoo641mhGu1NLZLjwuu4jjeZYoB1IVhAzHtxzCX9+9
YF2+6BTlC6CkN+Eoe9gshg+b4cgoZaiYifWClYlKLWYDw+Wmhc5b4K1KEgnF9e4WiPV5yDd2rzSw
GlQsMHWY0W+nCXwH2MfjQ8DsVLrstOObj0ZzGI6PQLxjOY/kI9cglfZOkrXg0UJJeSyRQsCrv4Hj
hlewqwnyWzMgT83c0EaLGIYLxbwxnn0ozU0wWAdGL6nL9ZoRtpWHLRo7tlYR6QWEoHY5P/9YVYtd
j9elqTSYEhE1SAiPiFUvZUa/1Fny49FpTfoVUi+wQgA1Ixc1q0pIsUJqhFGSTR+xXoZUCyOrO5AL
Oh6it2hOnqHWKxOtV3aaqfoiO11X2Wl5DpJBtnII6uuzLYhqGSVItHBUZtA5DrSTXZ6uM1IcZa+z
JkO1R8YKmowUNBkqaDJW0CTLYqUNzhlqxDNVymRtm+0zVKBo92HZTk5+FM2lWx9m7X4EzeW0vciP
vMw6C56U/Wa5qdI/6kVQ8dvtSUXIEBZXmXLO+DY7rKcgzuVUQUsm5/P7RafixA7PQf8DcoR0aY4P
f4F61b8ZTab44xP3duPeferendO7uKS/cN9hEUqm4WjoXu6qppMt0qisLmDtnS1rQu9HlBO5+oaC
/pOe+6Y/aO0O1DfwVBcFm4ryV6i06LmJ4UKOKcmM1J/jxwEpFnh+VXixDi065UB0OZLRRRn4vMQI
B6HWLTze9g5+Wk9iLTjZSD8kM9ijcmmnnfSNGgzcFMvKwFhd/+7lv/3ggw8UpXBbNcXrP395OuBA
XQhrRLiDeCSjHA9MLfBhzCFCUgHMCDATpwi9Z7D5UviFCBtJ+IoE+cPfllAZ/cUYEtB6ZFGXS1ZZ
0xhPKWZR4ITy/LLcvSjoHHbqGQ95iYziGFXZhcJVgxypWUZWYQM0Q9/PO+bpCr+Jx5l9McMDzXNm
WsAk9mgHmjBqkHo2bdkik4OLQAslzUiOzqU0/g0UklmBWZHDnwpsuJd8xxtC/Zv3Gp3IlGKRhCQc
tF1YOx7NGQKQTDp1zjzwKDVBfwOB+VuHgVlfzoAb2yD0YmIuG1imO4xAc+URpcpmuatgFbUgySxx
djClTdPlevGjyJ9T7ZfDL4wG/LxAAx6TIwXNrLYUkBitKbAV5xTYZb9LMei0APneI99gphlB144k
Q0rpZ1qP6aPGP0tHeO2vj/PfUS0mDPqSMIHE3ofF01AFBccrGNMLFm7GoRS5dLZOrQsW6fSmllOk
FnFU58E+PXj58iWUXlIAnk2JxAeYyO3mllvTFnm9rq63jJ3DzUEUU6PbojoaRr2Z8UokINJZvl5T
jAIQo+uxbiNXIg0gL8cJkxxBJkskeOs1fXD0tYJ4Au4aOp/Pj56xFwo2T/9af0D/JOoKcvRUx+fh
xm67MF9A6ZHCL4HbRh50KaRefrrtfB/SrsfIW5D3tPbBVU7UculDBaHezlF2sWn8Ixw6P0GrQpLj
sh4kl+Fk++rCHSnQkHetcDQqaOs8W5HhOkadK9Yola0KhraiDtKfpmCZl7ftI11PUkq1lYtGxyVI
ahEYB2k4TuYNpLMKHhsbJUeuH5xHq5nhaH4OUpfLLV0Ph4kGx/DjnQsilyCY7OasvGkZrFI3JufE
ABXcbEmhUNpDN7HduyOn0vbl4htgI8Q/aOaCTHHmYKM/E7o9aigi466uIOFVg/ENq2tlTyTiF0b9
JMs0WDwXBKudb61CEnjvLd0Jn2+rWiPlelNWvlKaJKMbvv5vXbRXjRH73738f/4923oKRyTd2pSn
c2jBrlw7eHH8sMajptpRZLh9W6J1GR0Zclbt8dYRMu0uzxFBGrnWTf772yNEyy5FotUovMTd8Qqb
ZdmLC5SqyhZyHqEEj1Nf32YCKE7QbGdwJg1wmo8+ZXnzKt9hINsGj6RaED7pqh2jpMJc4eh5GxtT
KsFLOOBxGGaxRf17Nj79B7JFpf5SISB8Pn38+OdHTx8/+Vhw1R0aOMbeeDL7+ezJX48Gin/t8K55
JND8p6n4mh8GmWJbzT50LiUNgZjvytUlh6gxnLxFkx9pVjIjlKJnn23KvBGj5aGmQE9CnGP9MeJ8
sLs0m4z02N+l4v5Y/GEkCUZzreGHqZguwb5sFn8QksHhmOnaEdUZ0I81hi+DReOmVxQZa9TVUSjp
Of6YJgsAKbS8waBf2wp3SBNQHmk9FUOPc34xHYh12O4WyNcoowRL1CUhyzznt2JBJtOFhexu52b6
4Hx40BN+TS9kmVQcPZ09RjoCosPZJm/9upoisUXM1xynIug+3cteFgUIGISzfgarlCbcR+Pj5tGR
mlHj+HT1r2dySPR+ZtD68LPUflFVlwjrVvFNsAvLbeerhv3AxQFRwjnBkrxCAp2XKrTU0xc0ypJ0
rh+nxpbrao0m7sn0M/k410Qm32W52YwMCxbkw4/4PKdUJtdXxIDi8THq5jqjj0jc5yYd5/5BV4+s
97DTsj1GWiInmutr04DPdiXvvlGQ0r+OqitRvYBxCaIan23Lz+W974hLPPefTd3fIQA0KdRHqTzm
c9QIpHj3mmZIl5rj5s32ejWK5wrJKH2ZP3+z/c3nn1dXV7DMvsO6wrz72sx0kBe+YOaerOQ5m6yW
Eeh+hf/GmaC4z/bY3f620vdwiB6orwIQ6R0bDz1CtdCRekHAp8++ezYQ88V1ccdwasWYNLlrWKeW
TD9jx7y5pAnyPadPNls3n6QxuT6nBmfpXNRESmH3GWndD+XgFCaL81h5BtR4lMoSpjBZX2ggmlFf
bT6FyYZA1g07Bo36BsOmCbPW+fXKjktUo0lh8pH5lXqIN6NEviiFybvfdnJHeTspTO5l6Aw3Cmp2
oJ7zKJUtoC4c4ucSzqdRuoA4VU8Jo3jQkiVEuR2q24HcQTKbvQtHPkoXkEgYb3aNusVh0/I1R61p
0F4wiF84kl/3IZ6SNLXbXbjiUSK9/2hySBjn9ShVg/toySwZm0f7RzPIR5MchJIuEdHk+NGmDQ/q
KG14PjduZaSaES4IF+o5NSr+o8kRhGseRTnCjyZXEFM4zhV+tOuN4hf3DKh8tISBwhL2JJeP4WYg
mS85v+6jzeBCbY0SGfxHu+rQ83DUMxf80Vbg43COuhWYj0GjKLxnzz6Qjza9BOFM91o/hhkOVCAf
bXoXczM1Sv5jlEWjao5SWdzHKJM9PDqZ4nMjODHiDClyr4FNR32TRx8tRyHaiGQG99E2qXcmOtNg
5yBIacbfE1e6Ej9i03UKgqWhXUdldTfbpGxtlSKk6/3uLGKbXPoZep9g8BJNZBkMaOezb1MskMkn
iSy9wYGI88XZNJHlnr74nD+ODuTziSx/1667WeOcJlEy61dfjO7OComCAaqvUGH/G7Spq0dh5lY+
ksFdPY/SBqdKUy6J2CVaH5Vi0oZ82VITLq/LNTHyPSUk0tqTKEfpe1ePUnOnH+cuVbyI2W4DuYOr
It/iBfMjcuHy8gAvafhwjzVN9bKzdWpZY8nR4gyy0Hc7W/l5nDxIj98tM5FfH0yO301y786XTm7c
/Qy9ouvuUW8d8j0STDdVJBc/yOAVBY0AaSsbo45ivV8BtzOiuQC5aYug5fh7hTHzVuj68abMFb7O
WDT2TwRUkZoFlOWv83o7SqSf4Qdo1twlsnK5NDKZEStzCUJOSTszSmayCQK+BE3+PC+Zysffo/Pq
YKbzRCaSrFPLxnUrFr1ffPHtr1+M+jNIgjDLl99/fzgLJrBZbhtaNv1ZOIFfaj9MBq//+5d/Lopw
0gWg42G1ff0/vPxBAi4aqIGqmdqQh7tbtfYhwCBYe+KnuS7QjHZZbUHUQFaEAXFG9KNBxzK0cMO/
pJeUiM29PtLYrNhgCN8xsM2Fz4eGx7FnM10fYBQZjWRSrCWsouY608ARb5dtXbXJnMY2hsYT+UOy
wzUANqPZKIiR7GB06vPulS28jPN24ApxdGdoBwrP/U2hwEkEMmgrxzl5h/5rv5JN7x2FTv3MPmPv
3q4gOySEvWkM/zZtdb/COGmY/WxLBlf3K0ATh0U4i+tDZYg3F5Uz8UbaYUlLim2aO/vFwLtTTeWn
anPY7+BZchCkOGqF3pLFnjK9GESS+LwDuZCE/T9k6hzYun/MiBuw2kdpmxTX/sCi5M7m3tns44/n
J5Nktnt2weNyNZ0g4FRrvyFoFBo+WfRwWyFe4orJa5a/ycuNs05/WBtQAr+W7hc4QSMKeGUeXdZO
4O0S1uCq2m/b7NPsSc98nEG9VUtdVEv97D/KNPWPUmBj3RnxpHuA1Ib4/VIR1EO/qO4fV61M8thd
RX75zbdffvNimtkXL7549r1988tfP/+nhOnUg4y+ZGcFdIQIN9orAIW5BX6sRpuCaSIPhnMs2ya7
xKjGFeyHAu9r8GKWrReg/q+//OLZr79O5JUreb4/xBs0pACsrO+ktm41bKOZOKN7R19zXl73DzIk
A+qDBig9Dk73XgjGK4VIgsQz+JGNw6H6kQ209gOKO4QKR4rZu+4EXSWaIOmQIUQ1XNepvEED2nUU
aY3mC9mLhiJ/E5/jwKLUWleZLhgnfbQ81Lp841gourvvOcjE16MecyLFe2mhl1QEYb7wE94FfXzZ
wwNEhsfuxN/lyDmesXFB1uftgXz46S3eMI1HmnU0OX7MriuefUCDkR3s9tuxvpua6GJTl3ImYswk
YA/7Giuf3qGtktM3VYsyLZVXYUPlZaKdu31dHBpZ/E79FG9mhumkJt+nxbZ432z71rbdvg87YL8k
eqFcbdR48qVrBIFA252NGedBTHiQl53cqy+oZHNd4LigruXwM2wwxhpNrgpswp14TMZljPzSyUys
AkL9He1N7sxqX5PnMb3DjWVMQvE/trje3oodPvXdg3vbupJd3xbXbtkvRuqnFbfZssL52seVgyz1
qNOjfE36A/WGy0jFyS4AZMODKXReutsihTlHYec/TsGQIJke7VHi+/UoyZXcSKC7nqPoisHf8I9D
KkFEOmMNeqa+obAKtuOrIN5kHxg0OpLm1lDsjKOrRz2YrTZofBuNL4fklRAV9eLJnQNMvnvVmQT5
pTH2Y4t2/4RtwLhO+GUAs07+XIKLAiVdVW/gyHBZC+Ydinx1QaV2pogsfyPuFTFYjKc9jcCo/vUo
6XEpidXX6rfW/rbLt4Vz0C20J26ZmYs4znHvpPRMDBrZpajPGLge2LcgUK0nWqNYju+3ZHem4+oH
ZRKMZty309Hd68vxjszHr8lvB5nOpl3PuNYZtnianb39GsS14Hy1ECY06jJ+Z1t2ojZoAUXJOotE
XnvxeZLCCPR88LNv/vGzX4051zQRKjzfkoSA1VPNZH2Vo92jp52waoH7hbruhAyt+TiULsZOtb5V
L7/48h/nfNcC4gLCgNRV0xytizflCl18t5cJQIBqd9sp2dRMQ3wPkDnrVayIRpiyaBxfR+eEzEWS
wSDkZ1UOWM8xWjtpmCesly2fcx5qwvRwmjeyYuVTsSwQa9R28TdwIlX71p9CU8vmKgY4lcqqImEm
rxltFfZ+VKCMuioE54PufgW+dvHEyTA5vUinehxKOvQyG1/ltyxg4RlKrerK8LDGuA6tBF4MErC+
l0FLOuuD1xj2G9M9kOewT/9U7cnUli1zbzNyG0OQVxXbYfpQoYl2FFOZmxzZ/dTgsA6W5prHaSqt
nJCtKP3IdnDskx0y7ahg+BKUPxaMLq/R0VK1sdk8e/JDkttQoWIs+JNOFwWLr09fJkjj4eJUtsks
KOViHp3tN4Stwp6QGCwbvRQ31enRSMoamQXGi0vhzNH3MF+1iWX2oTIMUihKaSAY63Wr/veLKBly
ZuLL7coOMhw3xeuTKINLybL16zDDzzhHnMED47z2LX/mNht3j1HSGtnVR00hoVYxFXOhOHqsBHdz
3qAF7pqWmRZElavZep6NPhxhMqBLqAGAtGwVG8fwMMNehg1bVwWGxTUwbt32aWLbMPKg4pGAdpxj
39pgNRzkgL/65mtG+NJofRPBm0xqfuE5ujdIUefri3JFOjpsBWQBIv3GS4FK+fzRVBkn2pFUMZod
2nzG40PSI7QtYpSoSmCSPGe9YnD4sJ4L+ZNW89SqQgAjYmvRRvkHaeUtcixtrWlCrCKfiJCs3U9E
XEL8EfYRL3YxBoXL9pFP468qBQQqKm5xz6RPg5pthxhc2enOJ7EYAhJBizZuWNXwutx+/JQiZjrn
mIZ80BjJidxjWO0ca52htKphBf8WSluh3kZqn9h7h5h4pzLpqE96OV4p+RhDlJkM85MwXLwks/Xb
1IN3Klnxbof28qIBwX6pd2kiEsdSPCXyrAGLuiLeYgwnPXKZz+tXpXBBXoT9wxAyDefEfPxgtvYp
yqTcfmnWOnR3Pri7c7u3kT9SDj8YtTGH/SZ6iG4zvv0TfLVmX/TVBbmvdPlEksVNO2lxrdWNSMgB
5o8Delyoow6iM6LRRuGkRcyeJi8dLtlgj6+Lpu1dbrBIV/s65oZInaS4hnQDPF4XTTcqMPYB087Z
NZeaTYoPzA/8LWy6dXXdHFjriXKx1qe2BUzI8U0sjW7W7PxIA9OXTIaCikwAHkkt9HlGFx1jJTYY
IffJPUCLsIzHYcBnhOMhAoBjewIcyLaLP7VJz4zBndp0R4cFE+0zNNLhSyUGmBO/A+aQn5m7roYD
8AfkU02WskndwXdyEHMeZLvrKt1yxFZVai6OxfZXEKl7KENdVe3RWVnDIlI9DFszraDvCJAqW74s
wjW82+wbT+5YZk1vTFUBLkICQ+sD33jiC+QJGM3ohg3BDBhbtQMf4EuWp5nqLjvLn0txCe8VsVwX
oeTpRKuR8Y2aO5PXXTfLjZFdmaKk7yIixSqnZR5M7kYcgif7MQYsGZ27NfulRpAApqT0VMlYB1ff
Fi759+VO7+hxmU2k7fpz0iHAN8i49Ou2CKgiMdk3g1RCo27I1+veqxwzfNvi2jKGPG4jyjBCU37H
6juG+16KZnqjvz6yN0UWiuZqd58mor29mCuMj0C2fTzNPnoyuQNgTZWQVzId0ARmpHk65OfkHage
Fanc5DQzfSO8Q7lS9/OOtZhOb9rePh8MPCgtzj4Je/COZJsaD2X51qdhk4F6LvgmBGicPJHndLEw
15jT7PRsIdpnnK0kPb0loHl2WIWGN+KbbK9AcFNF+tuNU/egtDxGKd/JlLDt1BQbkS+Y5DjB1Yqn
jmsifTgxJAU3CBsveMGiHCGyMPbSPzQgMYoYgTR6vbpnUwXTHAThutqQ9zVOQGieRRjTWB6cryuS
uFPNDFvA80KN2PqIR4IHT3UiOAklWhMKCbYIS4MjgXCDwyMBsqLmwci0TTY+vdVmTGkmDcYDumEz
NkZnbE7PcHZIOUgTsMrJ45dOlHV7wWdrU+Q1st4g+uL9B9cbFKO4zmuokjPNsi/43dwG13Q8IhKO
oGJ6AyutLgXkGSg2XYF49TL5+KZpv7uPUnw/mGraGLoncB/wDphg5M2URpwWHVBvTxEwuYFtrotG
igis+vBN5+xQYAb8SEGWGHhqlBQcG0ZRpaLvwa66DIYyNEDMHdd1kGarIphPX/LdbvQgwVLQibrV
MydvmMT1SnxK+xaLgGDxypbJmPeZgh6YpD74JsrStDFSE1KjhdEpwe9JTwlUX7cEkVRWQTHwexLK
7RbxiUoyUysXg6veghkfiDbsPLR9SwCD+mzwb2guy72FfyOof6YyCxnN8GOwoRcw0OHnatfSDlzw
RiT1EYd1gZ0lwEbZzTy7CTBZt2mb3s5pqfuaTXxnuKdQsuic8aYbqfNxYCm6Ey794ECZjb/Qow6N
j3esPEXiIM3oMz3aBfIMjcFYzg46eSrcLqvxbjI56bDTnTFOBguAkrEd2M550qhr5yy6aHApR49t
E5Or3SCdXbsvXZ4ktQszOcS1d/oOutitM65Py/hn6bQQGLdT5weszaMbDOUDJfCqKrb7cBijpe0z
2nLsoKi+/awE6Yv98VETAQTy6EkXhly3hDLkd6CWi7qUFVIPOKuS4AcZ4pw1b7a/SKl5VJtZNvlp
M+4OS7eDeLHwUaY94Megs/4+HG++5ebIGVGziZFWNBi8/h9f/jsDD8mWja//p5f/evRnBBD5HeMS
EFLPumxWiPJ7OxU8NqKse75wxQQKbNTwyeQ9GORJ0A0s5EyALBlASRL0oELbfA3PXxSriq4+FPfG
+K5rOnUO+77Y1Q62UgocrPZI33Bub2cex2Gs9S2XeFQvlxMj8g9wE+I1gcbNY8Cnb7598fzLF/BO
glrg2SrcKhcfgzQxzpRBAxj7aBMPspubGwSRhIUuPWyyol25u84cOAAiBeui1ts11mvomUeIVMPl
8rpG/QLIsTbuLsfiwegXJsEg4ogYEYysfZZ5M0pm16/0STvC/VUz56B7lE5jVRhmQdIcPzlBSPF2
QqE3LOqXpnCjVl7h0hrjP9Ky4DDTbPAZZxH+SAfvkPF6bZWwoPKKSnoHeZGyOWLI6F0KOYYL+QBh
hA22o/1Z51dNItxsvm+rverfpkx4V+2N/i7XDcmcMWGlYpFK4d9OJCGoiggePkQhD7k6DFbLT+Fn
rR6+62PE+6yxZPi3l5AryYhYSyYUQBY5xl8ilb/FM4E+0/b9PBECo9bIFcFO4uORocNpoSGvzvaY
aGJMZbTuWRrELdZBmjVAbpq8hiXTmCZgMXQIcDrVoimIpjeaTjKuUi3rxFMLlBOIWOZaT3aWs3Dm
Gs8T8OKCJGTNJ3cLdJc9JkUWvIT/XYu9CrUN70sauZw5K0WLZ2rSGhhTzVrLxkmovk2Owr5EHnMY
VaQ0YykXLfXL7RsgzZQnvxDhFg6aLYnyMAfNnP4MxF4REbBIY8AwXlQPFkVyuMGxhXe6Csby4P1q
KNBRjd0IjjIqzAVGw1K1WwTJhbIYeZwPxEuMDXxwOfHNGF3a2NFw0etVL+JtDCXJwC47Z5FYbn9H
/qYylXP0SKPlNGchkbY4Mm5cL1lAuTobUfNOM2Iqz7qmZG5Jov0eqQiALvPWQXw3HmV8EsDVoW0H
L+I56lEURtGp/v0ikpYZjYZa8SQCVugCaHpXExlaQFHyndZFpyAOCF22trWyy+aq6ph2yiedENSC
Ia/YzIMhaaGG7sUkA20ir4yLd0YmJrSBs7EJrjVhg5KcQ2OXq7JNaBBkN5BunfE10fhIGmJbabsD
VGLuRltuZZHwkp6G3GKaXbVdixqMdVOW1CtWIHy5ZWxAY5rBoN7rGXYKDnssl5LU1Zty7Q2vKa+u
VHFSR/sTGGngOwnf+9aa4WL90gfVYQgaN0Fa0mJmWU/OJ5WHaNL1VBJXO4t8KciiQuNs3D21cEmd
x+MD+8GdxlrvRJo3cKKFtjAB3um4Hi2EYiLDitvDsp8EIoycw6Ta4l+TwZ0NDxkGbSIfHSX6o9Px
Q6fz8k91CFFtR31HEZUy/vLld19+/+zrL7958dmvJvZ0avNLMVAjtZenqEBo57vbOeHrvRKqLnW8
QhcvYfiQb2gS5xO2j5Wor15RA1+9sqpTfM29evWKw42gPzrhjkJGWe6+WNjlzwsmWxdtu5s/eiQt
qurzR8X2EZ5yTfuIKtIsBOCAtOSKtM4I0v6f5wYwfCdF0bT7IRSQ32XpHnQZC1UO2oJouSO5xdbD
2tnVeOLz0U7KOd4BE0WrBsFj6ZxZjMuayB2dCNfsG6FBqhgHe6ymTxoeQYJ+BaL+uugPi6U3iyhD
qSS0rlCXNhRYwIeNqn7pWKfirhG3lME6lYcYC0NH23VGjquDUFMBIu81DP1kGpAWNwJoZR46UFUr
BUkWaO98veYjHmlU49TZFDqDpQh4iYPDwTSGfBhshKGmlzNfxuhI6QRBFsAvnjeL/6OiD62dxRBN
CArynBuygRK8uqiutZihu/cRAhaXg5BAnMW6+kruqepXT2/RHOEcDS9BYIYlCjRc+iBdRNi2bTke
GqYS6kYlwWKIPTANOT7xreDq9bh2kTdVOGGzKOLI5UaerEgrMkJM1s9LhHQXcQMG0UXQYkwgrssP
Z7tbHO4Pl6L0sSFZuIFkHcyw3GQPLhqjhgiYVQoJErPTDR1spIaE6hmn8RB57+G00xrexkW3dhbt
7le5Y+n7q2/fsvqAFxUQb9sYu3EEOXd5lZfbMWOm+vAC/HvGu2Jm17OR5M1bLSCmtI/DzaqcGM1z
g97ceZ8OhWkkMiCaauZEO2Hh6/L3xX3UKuZagI47pr7odMZ1zC16OFdlahh/KJKYmivKT+d4Z8eU
RgEpejCgMpo8/2QIsaRoS0NpgNkZQ1szVEBCHcFJUL5mMudwFHk04c7/wF/ADdsSBUL0SCYWHMXG
M+Lo21AwxNGZ+SqEX0YO2RSG/H/jrCSVXkgOWDHaVo9VzqbfyCOQEG7KQvnFFUGcZ3NnYa6D9GFm
i/tSPCuyv7WDN4JCngBVOX4yfXoyya6r/WadbZC9QqnjuqJxdNKcKU5YlWIdDjGLqmrt+mTxhFkf
wVn375/OTFkog3UZMuHGTGsTzBgVb4oS/5FGip+89crq0TY8mWbm19NpNpvNYJUR78wSZs7SHy4i
0x4j6uosrd25McsO9tyyoA9MuzLtm91UIuCTDfRYfsjGcsFWpICrfJufE2clLN7X/MJluzuQkAM2
VzgVUuo1M2+PsVAdzhmc1v69ZWAM/pm0bDTXsTHYTYEYBCmC3wFiGTYBvXb4KUBOZnj0AHNNsLfn
GT/Ayfo5nkvwgv5OEW+bxUB4pY8WsE+WPHz9yq1+DxwKr91zAEkL8rbjl7C/8FMmlp2NfoDhj4Q+
1d5MzBzwogah7vXe23ujnOf1Z7LwA6M3j6m/YgBt8otFBZCHT3AOd1T0jEsJz6jdLSnsyV0zCgfG
n8JAPMhCB8lmCEeJcfvsVTfqab2C1pczs/FSCaHFfVmWjRg4ac4Y4MeWOv7QZuWT6YCzrsxS2BZ9
26lEkv7hh87FLel7tkGzZYJJnlQ63r1A1GKP4R8M6KB10+8DvfxQM8YHL8f2IeMWNt9jYUiGTGAC
BBBL506wsYbAcQ47EFwSVEhJDKpOypau4RLOF+J5RWPB+YQy42UR8HwjyxWnzHtKwZ/Qe1H40XO5
3bXiTdr/RLYQJQVDWMS9OkcesboEhv7mdhwaVGrEG/w+c3tDBxojKjGrTdkWdswXMvTRturJGs2U
VPt1IsGB8nDpxbvVhCTiW2Z/xcnhpHu27cCZ7veEjnZEpBn03A/5mkgJYu4145hHLqJUHBhLKnmA
4TXYRyUrMaxfnl2VN6VaBdIezDSOUIYXsogSYI2biZvXSinXlq3AQH5KoeLQEYH25i4TIfjuQc69
4t4NKckw6YtAn9ytPq0GH2hr4AykfOG4teMGEesT3gRH9lM+whJe8urhT2ocHPsuAheHTDvXYyua
IdMBd7Id7ER626klN2pMg+HbOllv7JfixMW58s2640Y5AeBFORDA62yTA5X+i+zjp7BoXImhmjMt
KBEkFdkHmGumi/xNgRp2hAbBiszqlOWIcZfwtBVrXFdlczC88neo4/ocoSVu2hAYSST3RawGGzIt
kPW22jSJJHZFugutbjLldYaToDlf444a25YZtRu8tAcv3Z4mVHF3YVwwjE0nPPr9EOdcZrZ8UJUf
bkxqXq/fl6mOGt4UaqbKuGoJa0Kphr531p+O5Blp5rA41cwN91ugPptblBJESyiuqtw61/gAVEE6
kHYCstB9QQBCdlcI4yFDYlhAxlcQ6BX84kWlWlkCHJLD55DPkHSADKacF6FcVokDNPaTCwrNXlcX
6B0tE4SUgF6YKaLfCQ8acjBBh6GTEOyRVLVbztaxzzMXOphwmh2I84kNL7cRlKKYi5GutwO4lqqA
N2OieDGsIa2uQ7Yk1qq7PLVWAoI8+vgkxQ7Z+esFqQtm9BBSI4yt+jp1oSv7WasDGflTdx5xDoez
ITvsUaJuEGUHZDScHeMd9vEw2hTObql3ZwQpZmTeZMOgV3WLHHicn63/Vjl5WPwisqnSkgYJC3U+
hTlw1xJPGzTklEAynfCtFHExDCYWMMuNeqib+G5sD8a/lssTZ/0WL0jOHK6w1TCxHl0t/ADr7Em4
zpy9GFad7NkhCaph1tQVkjQ2k2NNuyvXNEqnfJquURqXxPZoHVueoO6p1uCPtM+V9xj7482Fqvzc
xD31gB4hF2YwXkPoWdY8O0vcUMjpKra7BsUxjC8X2O8oz7iLzqcwYl9/XDtV+/+Tt5JHuLv3vvnl
jNj5N1W5zmrgY6srPeQ49M6uKC7V8gvxF0G6WeqtsClnjJu4qstzhovCRcKQ6nLvlsMa+O6WlHZ4
DqIZBN5P/WJibOBXfNgEZ6yy2lzxaAoS/yQ8jdAAGTmw0oUGowVdezLk44VHw0VVKimVcmZalyGf
BYXTNrqGTfdQhEysz4Yiu4K4Y7ElTRoeOUISxXp7YmOnzk1t6jHmP+kDKWY5k4bGSaleAEg1R4TO
NHzwwdaksEeaKQkjPZoEbt0x/DlJuKcTaCO6p5vza8MeRZfF7UKcPbBLc47CbE6dyfH86ckh52A3
Gn4HW/k8QAkhw03SQihJlyFVOpiydzS5+hUGETae0MWFb9PCNWwRnrChTGc7gW/Djji5JrqmF04q
OHFhT0A7umlJvpnYZCihc2IOJIuPWJQ6Rhuj4nzbnBX1Uq4ExtLCKWaaSuuMeelV5nBYksp1A1bn
FH14yl7NvCjuWA83HlTbpHPRBhm/lkffLlPI1NLvw6YYUP7C9GgRd+wcAe2bVvRg9liwqyS8oTTG
1hhpGjYLl3oSaFc2Tb8tBWV0XvYb9CAfGHyBc4R6MrPNb8YISUE5U62f7RhKkNNO3Vgu3HWqy/VV
aBTc1eR8FYZ+NnTEXYWS4qVJOUFq5lBTEoL0dDmnB+iV7vS90NXc2YVijF0UsyIV8lPzOfSyXa+X
UtBy1xT7daVrdQlpFQRVOgLr6ipyaz0Tr148yOg++FCfieLvT0V0GT5sjh82J4jDxL3XcmbletLj
FRWM10LKCsbtTmMjrWWhD1wxEijdPncVoaFhFn8wTZ4r9g4StzuG1ehVzdgGNMJ5krDVb2A6zPQI
AZvwBtFbDPF1LIPbKWXVEFpnbNCJNmZiT+ZuMMblDCRweetvVHOOpy6A6wWb916jldK2FZtvukdi
ra/Y4+HVNlrfYAjygu6uNVyzQA6i0C83i18UZNy6X7FVn5jkRri+LG5dc1RujHoOxd8K/0dDk2/O
gY9rL65o30FCoC/oYvM5FPYcpuapD6TsbL5XuVS4AnG1VZheu4XEMRot19kIRJqT3trHj0/c1U9H
0oynkIwPEAtOfFzfFD1jLw2X+fXzJeAruvYahQgNLQrUev2pM0MVhhDfqfeIeWV7byxGlinqwXc1
UOyy3K6Lm2lAAlz4yXE0SpGAITdb3jKDdDS6obS3ac7T9wvO2FasfcauxOOTiR4YWngH60USK6Kg
G5UEycLX0CnkF7R5S5cePhA6vqtc3zohrkvMXOZjyXWinjzNsWYPGUpPaLbAjheNzWhmYpAa4eS4
RuPJjGI0Ji5T5EgUvZ/td2uc7k4dk24ON62rTZHXqsh8AHvlHHFXsc1teVauynxjyMSocSbtTHjQ
tj2gPbrJy6tCiYXQH4RZJJ2rJTuwX4gaScAAtismD8jZoLM+af3Q5pJ1It3urMwHQoFoT+fiv6Go
ExflOVq5wM+tIecFiZimBLK5voTGYIuuC7YAQg0Z0yq0FsZrejh+CGrcbczMmAUj4Cl0+IgbAIJi
2cyy32BT9k1GxpbM8eariyKifDhgFnPiAWtPSYPrcXCoYA8QodSku6xjM9tvoms1zvmzhbFNjnxx
OR9G5qO0S3xhz09jqJywzk0LgSoWcbMX6hPDgHNe+DO1fB1xwn54yOeFvh4RbIYXgviAwZJlIsas
jL5OKHO5ly5rx9CY7KT8biZdM9nZPGXuwk9gl04a4m2pxvGBQly6k0P6O79oFpnZXOOQkUGc+anb
TIdYK5xkPgmXTNKUZSL+6FDO7sI7mNxt6KnixfNFxr2Hzr8/6Vt5aVlGJ/uucfeG7fB24Jxm+4cn
MvWJjHQovXNfheQyWxg2vddx1ZtO0p8lQqijURX9tEMTu6Y608OFKyNMEJQnlxjudzdpxJrEr3h0
7pbRHUeNDCWxZM0FWTQCr3JBDozZSm6KkMaR9Z4YjAs2L76SMuSDlAD8znnh8db07ovS+APlotqs
2Wp2HLUpefPJ4jQ1YyF5Z/7d2zgxdxRe4a2ULzSp61KDXwmTJEkTQc7grY76IenVl+FTy9JUkxVW
s39VouGd0cUbO7bPAyrrDccL1gJHsAX3uyxl+f6quKrK3xdr0hCM8Cp3JCqUJYMPUOvlPLhLYX1I
CzQjm3b2pSolFkb3nmsPB/lYjRAZSlfrssq0uGldzbk3gsGnWjgL7CVZu6wKZBnkKOtflPA9C3SH
sCS57jFj0ja39JrdiOJF+vx22+Y3iRUK30UJpvYZzij0GWlDE07iDLbOY8G+4lIK2XZwPIDbTUGO
IXU7nKRBcKQXz6gPX5cN2Y6lWiiXbZB3KQra6DKsr1VdPywBsSAvjCupcv7b7bAvJUyQaEAf1nid
yNRLr/eyXLd+uowse9gkPzicXzyrvHte471EqYUq8pNVKPXrLev5+2ffvJhL+BVo9e6WGE9o+KMM
bx45YBdu20ewldmhL1HKflvCYeZDWuG+v632tWmpXIR2M2cPs2LWscv0C+IBhY02w238YIZEqMMd
LVpPJvC6nTG2brnGE4h3IJL3eDtjSR5KDOWNLsWAl0un8L5B6zafkO+XhvDi17vApscBBZncSf73
vuW7RIcrSLM5D+CInds7bFi1q8upBFWp+AuPXqTYxJOpFY9REGKqphTfc5i9KaljvA5IDZnZpykO
xPkARcbrgpw3AoRFaTyb1nSMNfyVHK4XjJZlu4xBs1K2RT6FMSa4G2AuyBjEsInCbsq9Ygsy8xfV
9Vas0wWN3c4PZk3O+8Ey11CmNONAof/Mcz37U0w2dDU9xxRXCPiC2sHLoXn+9q2m25ZxFg0Hc87r
tUszdk8ThzzDpqH35ohUcxnwQD2cChuqoADgu5Ti22bNZbkbCwDLw2b2kHWlTnjIriuMcYHQ204x
HbutWji8wGZleigRUv04yaRDYY/TtzTebJGvDobjyTC8bzm5F2FeiWlvkm6OTJpRgmxy5t6dFNbg
t6p7CzUIhNFoalNP3roYhVS6ZzmxhZQs5kQlXbLCo5GmKneNSKoO9y4YDfd28lYFmHHoKaG7L83u
j0fHCTK63BJ79YAcYmzIvK1mFyEcb7v/RIKH82zqFz22xbWTJFONOGhsayUv6pZXURCsgLcqpkdD
7tiE/0wkcxLcBbgczpyvrBOvM0f32AyHdzoFCy1qumBjVDyuBqEsrDtjJxScw6Il/cloSSHM6jfF
mmcztvKzIxMlDbFQjaxuFkevpWp3Gdk5Sizvg1PUPcqSyzSN9cUnUN+JwhRGYxYLM0Q5RpH/ktAS
Tep5lih1Ct61W4dO4H1r6ab35SyrWloRngpiZa2VJ06DKPcdx0KYOsF2hAweVR/06G35uoOELyB5
jqrt6v0W3SpWxWnugu6JUN5hNYKdglqWicPYsTKW+M9jxC4qOdZsrQujNPgcLwfSHBnbORnLVNK3
Qx7+QI8pu1Wx/+EuQR7VMbh34XWFTeqeZ6t9a328TD0L89y9xTClmeISKmVb7ban3qSx3L2ruKsa
GBc0I6faGJVzktLwzHqKEGtUu5ooWCQ6CKOOe3cbLqdpJsqdTbU9H4aLS6sq6rqj5Yw8jhN2qxgg
UQqgewLUuXYXXhRLIMjRc5YGh5oeqJ1eav+iwGHYV/o3tFF3WePRqfYwnHWMHCkXX/xR5USQ98kY
ll/uG2ARGHVfQ772xJN+m5Z3N3R7qt2RM997jUUDlWahei2E6Yj2nlSk6iSLWA+ug69P9wqPtmkq
e4O7fVNdyl3wIyWgeCu6q3b7TV7rnYq1JS63bDl8emtDsQwZmGWIFxZ4V2ujsmCos1zg5yY9TBs1
gTCrZhhzMJdJNlorQggIRmvmUXzF8V/LWJaNZzKWTz7+eVhrzIAcOOAjY+KutTFyS+WUwwR4YxAl
y50AMN74a5qJm7Qa8xF4Fpq43SQjajlsLXxImZZ3QwJqxAvIv3X5ec08rNEUzYsw1KLJZJC0du67
WlbbtuOHa7Rsy8r7RMXSPKOHzYhypdyIDltZd3BTsa8etY9RKGTpbY9El0pF+m53YK7uttB20Y9o
2CJrRneN2CwY+EXt7Gi6Dtk226mXOAsB89g11KYZGxMQilxiJZliSmd3ECM3trJHey7IINPx45Op
RYFeoh6akRknSfRwzpNoAn54Mj+5G388sJUItoe2Jy5KUceiDSX3x6re8UOJP2OecaSKnEBkJ6a+
bCNEX8j9MwXMnmm+pD8ve3cocK4HtYhhgFH0wu94LUKQO87ojd20e+SvyNGXCLQgIjDOryXad1yY
2vthBGXJN8xlcgQzgeRhUEI2iBGvd3vV5gJvFb9rLmGU5CY4y561eMHRuLtKStkBjXhABPxaDHbI
mgjqua4IivG0pKsY3ErX1MxZaHjuOn238XnS5txCRJGU6V7Q+giFCD/CkSl7eZ/p8CAz3/OLcddk
Tk0YlsGysVuYpA5kX8LF8rsmtD8HGdhgdkTHlRLXs7IHcMPkdtTQ9Sf80h0cU68pJwmumO7sYLAE
eseBBUOIelYLq6lr6A2esOmI4MLc8W+cCfQxioSSHDq5Fl+TcVzEFVgg8k4WxKKjmNSLzPesJ83S
MQU2PJNNQ/UcKsoaa1JzxHKLgYAb8rk5N+a3FEm0qfb1KjZdUdPvRKfEMpO/GD7ViS6JwV9hcj99
wVwY68nG202KrWUwKUHK0CbTzEOQSvsRpNOXQUrpVpBQ3gXpQotYmzr4EuZxS8dFf9J3YTqzXsLF
kUy1tGxk+DaRXldPZ0XF5+rKYt5TBBukFVUr7uzF2htOhbyKQPzYPeGBf4KZvAMMf7jeI7uCZqDM
tuFN2SR04acK0m6dB4PrccMiEiZn5T8UtwmTCA1ME4wjGia4bXgPPrXrSh4ImXY6KNXfKoCCj4K6
7rHnGR6JC/lVvuPY4aJlYKk3WG12EIHQEKqNh3lAbElrEPcGzQVbxKidOmcQb/idsD00JtpdK/gp
FKdxM+M6kvASybWHq24QmVMaHZv0YqJoUfC7W7IxLo8Ny7utsLtac+jveJ1QDw+vDC60uNpBM2kp
OkS8SO6w86bih/UYCkiZEiJP2uw0o4mX97FjIQXnlBoQTRSJs+l9emBWbibpc1Tb1SXhyFk7bXDf
gN01Bophflc51kG0fxsnqg5JZ0guDarZofE6tH6WJtiGvxLSzfE52RkgA++lBoR1LcTe2jkLfO2c
q4IboZDpFHBmvLVGW3xi6SkeIsXGYGjLM1P7MMRl8OZXIfZ3l1DZQlJ3TnBI5Y0vjsbx1SuDLNm8
eqWu+UdPZx+H7egLHGjzOztLdRhNj2o//+iRPAMPU3EuJQdSJq7iQZqKXCO4gwvJFTGiapgUGSK6
zSO+mAEmk09g+upNqEkze8DGeNl1f4pzJv0nuTfqR/wW/WB3qATDvOToOkbPwEyvDdzt0FljK22H
PouQF2woHgP+T0OXjXh6cBF+tl5TQGQbrkO8Pgw8UYicyxE5MAnSI3+dfWYQcfWm9rx8U2xdq0Ek
/i50ekPKI5evxi9mva+1CtHhYtLdBYaip+gct9Xe7V7WgaKIvm0QdSjFyqObIoqMkLZlaF8ZMYra
ib6CFRoniWm4uhtxyehkNPOGEByARKVGkNAJO/OoQZUtKd8kgMe6AIKKDxIEkGBq1duxz11B5tbB
/T4iFZcMLeWU4ptEi3jo5xTFx4VtdzOyxgguV0DimgyR06uzFiemAxxJEXNJKS666y4Esngc8kwo
ZHHsTEEiNiLc+4Pdt4WiBruAJyWi9AaFfpMqrcmuMWq5K89sgexq37QhzvI3RwyTHHJn1xT4nOg9
fT4qNhwiwsMm5xJ4m9TaIH9j8BVcgG+SXYynQReWC03DlTmfx040l3zrR6vchEzaKVu44dA1oXMH
5gwb50qxIYhi1YQ7KcuWHWzF7IHWpt9BfI1gdmH3gpo3I8fdoGUjW4f83fLWbtvU/nn7iDd+rpvo
tvZdI9/4kDfxoN8Z/sY1Ju6XhHNCUckt+7JlWqBBUcg5zMWY1tBKs6ARL1xC2CpOc0GxF1wELYpw
oPkdhl9U0LOWfcjx2itDUPuaoqBsb3WdHLm1wQzQWiPYkcbRKslpsIncwpyvb1GFvpJGyokgMMMI
B0jBvATbPef4ZebYGQQ3cOsS5mOfb0KNjCd4RdtooDOJfkiemfAhiDr7AAdVQfDD6F3XeJphXhp0
WUfkZLfbbW5DZx/RyLaeh0SWyOoV00ohrgBDM/ql2oHhKZVohZdlBqw+Ilcl2V27qwlOOA2jaSau
wqCV4kkUpJT8BOV9p39e7HKHGUntibhg2g8H6t/TAlGCOvnMuG+XhJbUm/FYHlxjT7ioYKLC5rEH
mmtnDF+WmKHQbdzAi5kpTGQbHLia8hzaofsp4zR3fIOhZsvdeOIFT8cwNTtg9sfD6XCCVbmUHT/E
TbF1NVNQ3CfznkGSdTnGdcQ1ynJ0n0/i3nneYtBfnpcfodgPwwadDLpewGl3BXEt9v7Bg4RbPrvO
z0hTMY48gnVC9BzuGK+rEx+6RSFRJDne8avVjgzTC6Ibv+hD7+5H6Rb1n3jUd6SUHlip1G0uXRxt
HbSLaADvt2ED8Yk1lH6ERDWEYT1lkDhwAqthhki5VQBuhqGegokmTi2GasLbQJnmJPXqdGv0UK+m
/elIPCa8hwJHd/YvrG3qWjLpzv86GmsNZIre9nUyTogl8QmJLYa+saIdfDoO1fqTkxTcuYHQYH3f
/Y4Bo7rBHouqEHd5sMtSpNS12IBZuDuKgzlmTi96UCPaHJtundzzQLEnAoJpBCMDh8JB5WoPXKlT
uPZ2yqGS+k5OeiR1TeJF8XzNLqKh/lDUH+V6Ycgft8a8iWTusbe1mpIDnOF7JiSR5ySTE3JMRxgP
QiR4zv6AyGy59D1eS39TtUUYBVOFC+0l4YGLs6gpWTi8qPhdXSJp3Hr5HScChxrYN2SvKALmXgyg
PIgHtOSfQI4XekyCJLkDahvsng4Efomq7QLc+Ng8vqdR3FzHoTsVpWcwZSkSOCJ6JNW3Er4Lw32S
LNxhg53HZE8t5Xru2XaF+TtnxAkcFcQ8Yf3Hbl/DiaOCL3QzhIAkPSXGOkavqG32qly/IpFR5ZJM
bHvKdTd4Z9woWmaosfBSg4nnekoSYNVIyFJBXomDMIY01Qf5bS9ANjlnMwiMw2hk1VevYp2p1Zsa
2uYunTVSIjLMJuq409njRIWWzSZnj2uJvfVPaqmt3dXbHd/G1HSsJye67mIptAgIYsBgQs3U6Oue
EUeMlOGuCxJBCDtcBJAlGkfi4VCcK9bDxM1D320B3b/iCWNoo8lOt7NtHV9ElGt/OnZO4UNXnJCR
eJysc7uD5SBpoqoOXaYHt8hy4ZO66klRfQc5qIFSDvEKYlAVWUIIM/2WJzwujSWNZgxkDrX6ssKC
kteFXWGEhMXx2abKW0JGRjvqepqdVtWGbXvQWnKSYDekUc7er/XjcOzadTL5CD9olyf3cYpMFIyS
l11X9pbLXepKvkkQ9pLT8nSlgutx9aS2WbooVugFwR75KNdpsCxbrH0/1suOpa2AYgDKOul+cHnS
sbdE67b03lW0Yvc1cON8HUAYBRugrRsfXMWF1UEYqjCgqRhKmO+IUMdfEmZHpCPzGejKp6Tbvbax
Royu6OF8Hl+tH3NzZ6doFVlsJMpP3UL+yUn2EdWBxopmYrk4B+Su1QsiQ1PsptnwkcK6t9c8EmU1
e0Fq63zzm7pknwESYor6FD2N9VoJ2UjaWeOhfNKSBJ41bRzHcHguiGiwkw0A09lV59ZoPugCIzWx
hR2+80hxPAAdDHOb35xfsUSTrDsJ9xIgNVlMI6sQ2VB4WmjqRo7wsU8540tUnmLTXh0n3ZxjPBxM
NjSShQ4e4tmjSg46A5sudhcbNPu+FbkJ8O/U/sN3iWCa3U7E+w53p+dmQ/G88SC5EowWaZOHvLLD
j6oCrSEwEQqr+JneGybJucTMMODqQyVmKRT/9npGAQ0nqS+wy5Ckwk5zAQRV7UtU8mGDSgbp26Rb
RDw00XUnmn7IxvxkkT22OGZAGAh7bTm8G6xIy/g0e5zmiVigHT5ssqMjabMbfp2Q+/BWXI5kHcQj
aFJNs/O6KLYRzM077CGOCt3dBfB+uSTVS6BygdddPpaC9CFQe7VS/eBvt4eWwpABWz6inKpCPDQ+
QcaHDeI9YG1yUeOWNI47dH2a2IjQORksBTPzY+T0UjI4wkzxmSzOpLzftp3j0GA4UOQ6Z0HM3XG+
ftZ2H90LNPATpZ0lglagFIFBZjZtNQ7a5RoSf7YcAyzF+UMEPhifbTXMxEdPoPM+UJRsWY5HKfYr
PDj8avylGJk7b7Vp9qFG0BUtu7d/kbODLr9ytlg/hRG6fBTizkptf9spm6nfdi0ceZ6JZ5eYAGIB
FKb1Gr6KHPkCRRixu9lx9AS8LX31qgfDCeQ+8RXg/CDe8j2RxF9+OvtLuug+rd5gQHoQ8K9y1gCE
ALpoJKOhW+lqSQ7v+dzLAp9++ilrEmUs//eirr4o35R46JOgYSZzNpvhnyePHnP+bwlYiG6YVLOQ
e18humRj54QcBOOj0+JI9CLiHxu1oq8BUwcjAhX7vfdJMGjYtk+5vCrRKlQ8n5ZtjRoK10CNo8sa
kLg5ZIg0vpnMdaU+eXRjR+KebT+bZnc0+t7l3Czu0/3PcBHUa4SJadTyqiTTFEaMElLEENTikrG+
/1wMz8aPJ8O7e6RaiuXS+a5elGsEszJWikA6OvsLG/sZ7VGtNAJncxhbiAYGHQoTm7TCL2eIfdXs
z25EUEw3uH2aB6amg9MjzPk/9eywDfCfc3TI97Dj4BiqcibjiAiGn7MpUfcLDIRpUwNq/qfjXPB0
Ets8yYFw7e4oFHtDUVjop5SOtrderNmnCbVauzevHKB27EG7SnZ0svOZ3uE8+7/G3uetilh+2qZK
z8SZuJn65OLy27T4g9ME9tOhqp+F4JDU3Nk+altwQd/FllJ1OtblHdC38IWQYpGbzfJiYFilh3sy
26wYfCpm6kz2WwyjxuYPQhTFS/KzoF3lxK5vhNlE4kqSOy6i0WidNcfsAy4HgO0PwqcfC0rcHZYI
1w+/VS8+fLbS4hRMnDzOQ3oOG+PORsj5uK6/is4R3hymZqSEsFB2mFRXC3FQvQkG9rGYWxEiwuNQ
rTBxt/T/MWa313NRs0wfzJTggvomqJRRDoos833ujUWHGgisxA9IqfMgPyL/9noxk1bJBcA/OFbm
NcvnDa3X/VHiurwhHJewD+4uE1w4E2X3B88m6vnnUfNAbKOdRdAfA1/BaANd9t/AizP2FIlHvu9A
FjYXgYGNmHLjcQehtih8uW1f7a51jZg038fjHSVnQnBBOtjtdP89n8r3nmE/3gPOtLxbrvwubjq7
uEk4m+NNKGqPtr42xD7A5SGXYZtkXdxLs/F8Iq7L0pm92udw9VJjNs1cJzuNccYIymJxGnz04Gg/
hpGUHwXLy3Z1CCG7nSznOflMiMPDRxuYe6DzpQwteNgIwBpmV5Vha9O9FNNC23zUkj8HBLPBnIzj
7BychULSve0UcBfhmu6rcj5L7jvuNpe46/U+fOOgDtFDhIji37795V8ZdajZLCnQnt54+PkxvUwW
qBRDusExXEnaIDuxo4sa9PdiBcUggFyglc2ChueCXXTgwagKnfgeDH0BReXMY7ALbhs8WPdk96Tp
y5u9G/pFACyhBmL2CW4AcRm0MfSbD+owwdxjC1wDvtjz4uZiVow0Bj3XHQouAEwRmrSeNxzlol4O
2vMr86PZGJMxFM9N0vNx8vZHep8C9PugZ6go5FhZzGezalERuIKYx+UMWUgO5JpaXeLKzSoOe6Ej
aAGfkerJAIhdwt4Ah2oqWGajlP1D6bEu6PGSHMVJcowfCgEFW1LWE/QeQdQf6MlqQuJkGPFYgbTn
yfFdgVlleXQw4L7tG4bv1rv0tlo8OXqHEJmpOz9GQaV5cLgSHqiae6ZKiWooJ3Beq/r1er0cPX4s
R6ReXT1Gm3yzfqyucX69vpmnghj7qYtPW2eWEoWCohEQ8CHGCDF9HtHp50moiegNsQaLymkEbF3y
YyGyYmzLWI9tb5DVzMslA78xiOq0uoQbKCMjEnXBgSoGyYzygcNg6VqTccE0dl9vBAWZrvRtgVmB
60SMe+F88uTN37MDNl1AWadZot7dS7dUTdcDWvox/VsjOMhVk6tFvuPmKtr4gQzVSOYBXnCCPOY8
6ZxOz1CUAee6ooHxF7LJaWo1yRSfGsOFIR8PCh1biUBsHgPCDTTXk1vLFQhfZ2poRglU0+SaWe/O
nahFc1S+YDh9cn/rdlgLtN0HGlXiRsgJgI377g+Wa5jynqU04Ot7gxAABFUc6lqAVslnQKegQ9lF
t21LMDzii7qetyaqwB+5MveayYou6sVfy1VNa6masMJ9iwbWppq1NBqm9+kDt9O3OGfEnt4EdspW
jT2Jv1InUNIHVr1PBWnlYJi76dZM93aODKlzdng+TN7cL9bFXVs2eZbnzjBG5WFiV8zry0vMkPgo
+Yrc2f+cDs9jtTWHbfUzgveGtgCGjF+muxgaeSRKw6DuTM7Y1xMNYTlBR8DBQz3S0ZGHKmwRi0FK
wyFqi28dXOIHKyQPK5ZQfXd2B7MP//Wg6SzyoGn/8YF96mW0CDaMuhxxdMaEPcPWBmAeuAgSgeHa
liT9gH9EZyS51g0IqrKONeVgvre+YcdsfhT7I+vG8ld/i+bQpQL1xV/aL8NMZXvFavlEZXawnaV3
MCq5reC/VEOBRWZmrUj8wup4B6IdXke2DletZtxTlKuTu2OLoCjRQD6Uiy2/4N5r+bqzRevOkyNC
IOnGrzwfLhs0+aJoSkZwbVEyS4AuD31aTK/L2YT4Ev9MDBNufRa3ILQbMOxlxHcg5RcuzeIAQKEW
HGPthbu5KBMY4RRljkK7duLrGMTXRy6SIQjI/qAvGOUrE2ZK3v83p68xK1ba2Zhz/0nhzXk08REC
waRBbFDN1zV5i4efs/T0yuEKZ237zEXbF8s9hUG6nk8+WnFPu45gzXjfclOvnEzhfS7bHyY+YVOc
wjDB4i4gaAurITeJhTg6Yk5Vt3FkC4tFEQuixJ8saxQmgMAj0fc1rmLIweKdpmbNVyxX0Vy02m6+
fX1VWX2ojD+G3suAOOIAe72eH/tEKYR6keh+YmkpGtHI6l6SYOYmpcEfVWIif1tYeQREhGTWscup
Sht9lhgN3KQqnysmygFJ1EqagS7nbabI90SxiIIx897SOytLRDZEijKhb4CpxOXgRXDEluU9PUQE
xs+frYWKtGCV4XbaenGX2tDbdv2IPTlvuo6DovoysqL2MdbF+Mxv8MCzgNsLiagtQqVnL16evv71
BYNvOu14GxM7bZgMQeKFHQ9TJbLgX/+Ic7iXbEAeWrOrDtJ61F8UTckZfxSIZA0MxGahhPRmM0WY
0J7nbiRnJyzYDyAwtjnN+sbSCA4iRSmoVwq4wEWDIjZmMqoTRGiWzUjKBWZfadhg5lvYc5CD1FmR
h5dJB/lr8ukJphOArypwRWPACzy6oJS9VwPKQSP+oaiKsJA8zA85SEPWrjrHy5Lxs/jZV0+K2hb9
WyvFVgVaY19QO+kN/HMGHATm2/cahk3anYHRwWaBZ8QOR2jbDjvjZn2Sb1P3tlaFDccEXLW/28e2
jTwZR44wWv9TyGBA5rTIz8Ao2+hfZL/bw7G2rZJ2gaDmuq71jtdqO1/tbUzHsWgnzb0tjVp/9vY4
Fx+Zu3z3eMTAuUJ52ugfPw2h1vKKge9BQFbDT6MAEtEgeMUkKTBg1MKnWhUfhYuIAVHoZsQBAVp4
S/969ezNm9RbB1KUemuhyMNjduCzF2JrnIHMdo2cuf+bfXvWqzyKN7ynYqko8a/ypVBuFp5rhRe3
Z5r2nSoP/BA/HqMT5Pf7kxenDFiNaRkEnpoErLKUHApp2IiyBziJFS4wEI8OS5qFVVyZxMaWUCHM
YqO2wZnb1kthMxOItHdT0UV0IksYbBS7FTceColu6220LV73T2kLd/Hyx1hjTYWWl7a2htqDQrwl
0eWjrYPmTbQDPjSfOvPjzxqsQv0QfzA9WHGCb2wNrHGfIos8p+oJ8nOk8ZhyrzWrRTuMnFwndhk4
3pRhwupJZermSxbeX+ta4SeTJ9QjQpYzYow+LWvJrhaLPOSwetHG8rCSB4MlagAze17bjjV3ebfL
XDS91RO665jR3SdPSWxcY+dVa590+lYddwJugJlngUpP2KRouINpKNEytthknAXccpdlAcXPbcsC
Cj02C+hDmdkLGIM5+8cuID50iPzBkdp0eNwVsPWEvnbQEdnUs64KMZNO1W2ZSZn19Y8mfDnmsBIU
P8POz3uuxcXTLO9Zrhj6O44ef/rUpEoYJoOU2ty/qRqMJCbHHle/gn+hzGXlawY2bompnXkVMFvL
OMXhpVn7JHnU0Mid8GgD1bCjZZVJ3/XEswa1XTOizs1AaLTKYm+WjdTNH8UbNeCW0IlxzL/rABCT
dhSO12pNXkvoFYhE/QmsCpVmR0FjarDxtMg30ULSogpcdu4+ylSSfiafhJmK77ZCPlQj5KLIRbMh
mqB0/yYmwxDrLS/WNt7KoEw5PJCLL9Vrw2HaM/yTAKjrij775LJOIYuTGgwn5oOGWk29v+KAmht0
HKV3LnkQNkAuZ3I2PBbK85O/ZFeMcfxcuSFs+JOno9Az0f7YqT1w9SJH3O0Xm5swBlBIFb58kd4c
6u2AnbyRzUjSVkgNTQPxw9lvY5AnDhV884eTV8nZgxlmfBxp3Ipo44OOucDyf/iNTlhMtILT0X34
7dvf//aLL8hdYTK53CDm72SSCJVihy9CrbDolnyUfPJD4K7flwvMu2eS03F05P0SUwbzl7+Q6w76
8PWmy/uJUCpyGKdPvZ6msVJjotCxeHAFeujwL6/un/84efni5z9Nnr05RSsH/nfy48/PfuoJ4T2h
ghbt1SUSlaSCGlRA5yqlMq3JgNM5Wh4YBfR8wzhGKu0mqvvJv0Xl9SNSfLkqrm6QQvWsN7GpEJYV
HkQKjKfUNms3O6JaDsKIoDQSB22eFA/JCTrqLkY6eCV9eFIItwZ/W/aJvhqSUrUaYs+gCm5p/jIo
i7cIx0QwQkUQFCPkCH+JaHLh28hAQ5MiFhzK9mTd7eBnXoUwvXLQ4oC8PPG2NOtsS8NndzkvAYOM
G2dSLHS+Q2c7W0SD6QjuyTs/pa0iRTSsOMlhCAzdYqdxVJc62z88V1mUuu2icRCMYCIRQIy4Ssme
Opw6vluyfuq4jmdlFtlhDCDe0qJqYaC/zCIcqECEW2Wsy1h+0FeRMovu6pUjQxhzLW/4O7gjuAeK
GkG5b5V1xtaSbVH3Ghsc+wlYE4RVR0ZIzxJZnTZvaQ4fWayzLfM+g4Ln3eDMUAJOHAgXEiczYD/H
w6wl13JyAjvJLj7IKi8IW2peASEuhE7oJLrQapq1u0vhjKkmTJlHQaeNP9ZOsNYEOJpWDzHtYB8e
GqsHARzjPuCYWe0sylt5S8bqJcrCH3MHIo+Xl5obQWtBnnJdzQrGoHvtzcFKfQxV1Jsmbx17Fa44
sbGzkKjhnpcFvYQX82LxXt6dVXlTfyxnud2+DREkk8eHbqgXzkxW4/bAs/IdFVTI5/6kW8gUV3XQ
CLhNbOup/BxpE35uJX1Ydd9usX2z7J06w7i53TbL36klsByUgllODWcSGPf7KikvfhKa2E/6yUMH
0MbJZj2tlyZTc7C5zjz73E2fE01TR31JCz1TKaEVA+NvreQ60EvCf5uFodbMz/RnttsF0M+wQhhS
QNZuShC9zucdV4eHJX8+StR/eXgebW6/SPKWiIfrP3ob0F+YB8ZNG1U/+/byt5KPPL4bn0hQzmRh
H+22rO0Lg+QO5NbyxiyPEkdYrMdwEB/N2Vonod+6DQFYQX//At4eJ5UGRaLQglAXm5sLOGCM2Kbc
sA+yHeiQ8rfRna6KxVU5CMadxZx/7DsdXQVu7DOWYmC9FBySJIsBYh6vRHID8sFNMffpnywdJpMB
kdNZQfIg1weJxxNfImVHP8CUEILx8p0h2FnEg9h5mKUKWlLg1NMqWE9xgTEqzi4Ea4ZSmt4E95HQ
bI6qb5Ze83nCqJH7sO+85VzM8NFTbfC94yZyZ7onl/K1ciHERV0kVxu0QBbqhnJcixTE8+hq/TF9
PLpCwTov9s0TmifJm81FU37Y4CbyDVUwQxjA67Kt9S0Z/ILubisgIuitWC8xPTjGcsLvN7Dw/iA4
kAqO5OaGXT0uWGjAbSMjO7PG0UPCvpE3xf0FJ/shc6ToBFaMjomO+3NMTrSYwQnWDXoqI6ZlTVKu
p/ly+f1n0TF+bp0DkIuVhBcl24WyNxT7hvK+HCEte3g+SdYhoogZVMnxUDEDkGoE08pvVgjbi0bb
ew93zdYLWCnN44sdCBlK6ULBer5EEehBNIEgZ/gJu7Hii8zBa8ndSClkCKOmaFDf2EfXin4niLrf
nMQONhuQJZwYZTVdr77aXu1LqHcw6+zXr5e1L9SemhhXeoTitDLO4wQ9GcGdkl3pHxFYkWg3nVG7
VLYV+olcXC05A1ahVTwxzv5RIUUvjiRcABpf3KikdGRJ4FMQBglczoursdEUKiPEaoI/hMVn8AZN
qgWIo9V6DNw/CEeLy5XqK361pMkZK9mYQOcqgyv7GTRuZlI6bQsyPQAlBkJWXVbkdKdrMDWdwVWf
F/cBPZSTRQnuUYdXCVml38jzYpXQqDvfTd0Z6sicPLTqlyynA0Z/jw9C30i9Ou1u6qYIh+UQFAz+
PTjMEJ1YD9hVfi+G8jiPTQv55UTsSBM1wKHzK1dxGkJNFroOffdgto+VoTRls2Hh2lVsRgBBJqR5
nUz8oiGUnLemEdxctdpjHtSjpE9K86d9tMHChJMY7lwLBm+krVViN6fXJ9awpkvRW4etAR1pJ1VT
woA3anKhPuZe8pUc0t0LyJEd/eRBqQWRW2ceSsseXDnM7b4qG7w7TARVqhTippGy41vFaADqzfaz
AUSUFKPyLud1CjS98As/ztFQLM8enj7UxXEh/9zPep0VBkQ0bikYQsiIzpVGM8K0hmhYyfDY6ovp
4RHf0uJZS0vqWB09xRsbqaRGOjaTjBTSV1F/jhTCYGcugp8inDcV61az4YFJ/tWYQ1q1h9M61IyS
jHiHEXiuAjwmHSr7pJ/o4+AccUmexNCJkYtc3j/pN8q0lGsNMO7KIGPvdbY8qqSJkXZQb1Eky3L5
5OAIdXY1As1PJpRA6raUiGNmozsaWfNjI2dmnxKsNLXtDX2J2O1VJC2mAgwxNq9BOlneY3vS3GTZ
lJtZPeHuI5E5EiGnFoLLNRf3uBBn6oye+/m4vdq4+FLzDMth+RDi5ya31sgnutISbi55C+TsM2CP
YHDIjs7aGyl+hcy56u319pLl5mJeTZNnr04Scp2abkycdwMlehZTMgnoX4QvoaPdjF3FQBtX4nEh
lqlPGSaNfCy0HDa8gHt2a/MghrthFI6KxGuW/TiZLTupCLNRYGa9hpH7bfZsVaHfhacXYnsinHR6
wE2fTpfFAkPGphu4YB8pa+uqLNYun9S4pkbkKciUatBcqSU7ZFcWG8vlQCgcvwG8pLBhm/lMq2kE
oQRuFL4LUOH7wFVmx6escTdGYqw/ib9pjIzIPSnkBaytHWRaOje8qP7kuHng4e2JbueyYUKN1mLr
/Ly2Vbk6YWwysK3ymd5XPmHC0PZEqWAfAjZsgGA9K9lZBDZXqYYwP/r+YeZsr8NKeN5Bz+FfNFrb
RUmj6nTYsS7RZWHgkSSXNmOzwOX2YiGPfjVdIeBcIoNxYtRwvr0wL+7lgo6wx2EvcEAufdTP7P7h
NljkKGrtZGiauAT5tNH+AlkMs/bkZQtYrY6Rcd9oPSHaLgc6WndLP5HGyOITxFnKusNKZVYp5ydZ
Q9c9hEkx4uugOONTX2+Go/CA8a9C9vUQvR2PLZQskG0Rh6td3llSqxaafSQCmQm/ALKRcfWQr98m
xZrY8D39tpnIeS+wyJjlUnpdPZ3AEQO+s64cwokWt1Pn4rkruF41K4xtD65VsFgnRr/nHSvdSPou
/d3m6upeMefCeaHet6nggU82y6sVWeuGirRgqhju8J2QkPAwcftsbrZXR9FZ+VmvBK+T1uA46jcP
NcxVoI52dZYWaEDa0PJuCbd/XVwEaRZUiblsecCchjdTc+tzyfiV7ZO2O+TQXM+HkNnWLR14cx07
sHwailz03NKT5Jq1HGMYho2hXSeasQmykm1ZOEYeE+BzRwhl+yhqlGI1+n2muyt2XouPIuYqy0Wx
Vr5QUssWdwen0mSC1RSmitM4ThUoMvz/oGG0x0mGLqLm84SCLkQVnctwywl9g9E/3E+odaHmkqf6
EDQRLwTaf+A/jheUZ6laYMZ7+A59krvaUzubteDci+vWGdfZTw7P20+4FZugDznb5OGbsxEfuPO8
ahyoo45uz0QMx8Q6P8j9DRPCyQw07plJemdvTUUQtGz44i0pI6vNXOUPJJMpiqGS1CA7ybegvoxU
0mYE3uImGVR5mVtfi24i22kGzVl17tDbgU9wjadjfooffBwb/arvJc9mzJuL4QbYt2qGM2xKGOBx
fkXKS0J0UsBKqJ9g7KjcIQDKG4mH6B6gc4caq+/1KzXRprQJGclUM8gXoIM+Ob2NjIvlRYVgwcqg
wH9NVtXV9VpmJS88u7yihHwgQ71b68df81tMTyzAUC4ywV7Zm46/2D80igQ1Kjx+JKSgKREWUPw1
1pZwCo8TvFITKoFKOmu0A6unUDWHHkx23adM670FiXbjeH+Qj582ier6Z3aNc6XYoTxKJGY5XQeZ
1lp7s13iuUkY9w6dWmayu/WC5YF4rV47/+9scLT2o0O718AKpS3CHdiGmAxXtW11Y2Ef6t/gbKu/
BmpeQZi1KhFxDFLWZlVXn8ShNdWIP9EYbe3GGo3fZbEdEsclbTGnnR+UJGWzxC2PNB0dNO27OliO
B0n5XgMFS20reTxTh7GBy+U3C4SLE3estgIiVJ9YOmXyMEzSS3z7G/k7n/Cfw0TM+vC9Cn6X8vr7
g0yfAwIu3VzoaFB0yKc8wiDr4X8v6tk9/pdtw4iBmKT1CvmplEawKOZUxOwj9mgF3nPf0kUk5p6K
tyavcR0aGbIlQgc4HERoMZbKQodfFWbEjajF6EruYRoK9lyfX1iFeSTUZIunhMgtno8JfkEnw+PC
5duQDTe47mRgtRQqLfpdJIVHw28z357BzTxig0g7pVED0WEBCigeIzqod22tHSaHB0dfZfgu4Qc6
Z8/enPq0x0AFdxCCcDHr+ax9MaOKTZZHvXvq99L1JNuXVtZBNCQOBse0Xq3wmUYtEVQfSYl9XLt9
ZZwhXpAMHthXo8vgtTLOQU0yqyVAjpThTQn1Ed3DooBYC32H0c8I3lZURTF0vPGSQwOOoW0WSgbN
msQnSxDPeo435siHe3ecOcQfMxDgoMsBDpzqCDF1QnH20iz6PclpaaCqCsoR3ZnfQ9FtXqCh5Gck
Q+VexLs5jNHpTzl3jqPY3+ck9vc4iLGxCbhX867zowtEGDt1ZoR6Bjw0l+g36qqiWZm8voaDM6+v
qimeIERURCeiGWLwshbjKP+KHtCLcl7fSsXDnLRTrCpdi5+S/MGdG7YWtS/1Uj1+yqSAV0MBwtAT
tZYYFtfvD3mC/UPXAV7vRzyRmn1+2UfZs7CSz1eNtgAj4KD3FVm3Nwiti29kORtiODsuIflfY5Jg
/+XqN5tlyVkIqHfy2EVrnPeViiHSX+/iVD+v3qNJocCnlYz0aaBASXkpUuW0r3tMqTnn+3jSECcV
KTwH2m4crOKIt+SRZbDeSz5squl7IF/wL/IzQwJWahO19sErReXhONnu+WcBZOcBXwOxdyOCMl+x
FF0YUYWGapgmC0acMzOMmUX7BOZ7dwfSdt8pqHWU/XeLRCzHqn7Whknx54REd9fGGPEXcCeiews0
7APLgj1MXsLTfQnnsAtW39kpa5hH1hUsF7P4BdRBwCT6OHcLM2+btyF75Ed62CvGH2Ke83IQdeHc
dt3rio1xnmk+/LZxHxjhRGBjeRBMzMT9nQ7Tg1miE40AqcCo9N6H0dv/yQ6yxH99+E9v/5/qiy9M
cKQVSkkESlS/ikI9fzn5r89eP3v905uhfP7D8Z/++PL1D296PdGWTyQzKEVM9/Df8+qC1c5anw5L
wg0iguASUS36UrCvQa2VRciNdmRf/5Wa7CvWJdsmRU3CIwGKou8O0ac0uA4XoAT22s0oAJ/iQnFV
uuqi/UKQR4LlwyM1bMemWHltVfGrmB/2PaHcxqq2qjg97BDexSdQ27/w/K0pXkAvuoAA6OXLvHy7
aJrWQ9gxYs2Og6L6KjwN/rCbWJTbmlBxNCbWrAWzG81PHcFHBRVQPthLoJFryWlvm8WR90MTfYEg
XSahDzGoFG2K9chiv8DsExvCqy/v0AEXGsMTkOWOv96SD5plPlRrYh8Zm0qzuzP7ksDxYzaG/FPs
GCkV/SpefpmPrWccp7GN62KFRhp8eDn/QFmtZswbefUkNQJKrbMCy8D1PzNOGoxcTJ/PYYxl872r
d1GOWd78/E1dtm0icilipercSrm6bxzHemX7xkbUruIItHelncXd3iR2PxPSqqiluG7rMtAo6v01
B2FZH9WUAw9wrGTFx/29E7Mubb/RUQULy+/b9Z53bI3MAXNTxVr74DuLorbHGYU1PTsEBQ1ycnU/
FquuUJNCiQ4qhwBU3ZCIQMkSor4oAUMKffSRj0dDFznEQ3+16gFrc1uy1VC6oqD6wtNrCWS/HgIG
P4AkIm6JLeuIGiQ+VNqhCIZ8JdiauBCWCHNr3QGjWVpdkfspE1K8Guob+0bDqD2thar2SNcjh5Lk
X63H/NNq6Cffh8TjwjAEWsizkWrpXL3pP5JrjP+o/9F90wv1qrNjjQIzuKSM2XNOKnSp8M5ZyFci
ooGsb5RsJ5j25YwyTbQxBuxOE4ErYHcKx8PYeWb5cVajscra47MKyhysNumL2HNJJdwfYk4ztnN0
1kYU3ZA3744ZeZseCu3SDVfc8xvXg8gNlY07f8fJQJpGQ9T8JvMgSC9XlhI9I43APkyYY3v4EA5e
401N7T4i+c/KvnqbeGndyFKs3ecMO5JHJsFcOPKRtknThKYXmSwrMwTiW4hIqbMHxV8Ncx6c85Fj
LKad0FF9L7D/NNMAzJqxpmYqj4I+g0Nd316/u3I62WEBodjnLN5/6lq8f95SWEIGT7B7PRJrQQju
V/hIm0AFL1G/KS5hNQYYFKf9G6HSotzn4FQ27VKDROX78t5G520uSlXn2DKNQ0ZgR31NdO6GyACl
Ffvl+4w3lkxYeHLxrW0SgbYkYFx2ScK4X/3s7frAVovpfDMrg0c1iaVA/IRHdUUowWH+UWjCyhPO
0hmtAnSe7Zb+HdrV8FpQdegeyTP46jyLetWFoGq2PVHr2Cp+nuopnJ7Eyo7wqrm3fNs1tCyORz2h
pyrjy/ECRh+DA8KWSH+9JpxH5J1UnUS/gfhSkk1EoQy1S8zUkP82TtQPzI7Qx5bXU/2cry/sF7SN
fScWIR4mpl6TH3UZMw5sXZxiW1pelXMCE+tIyaSHx/tNL2vg1GgtFaWebA1q+87dK4nQSVXKG6t5
9nu0hvDo8POe8i3M/2a1osgZ/eA7Iedao+VPXj/McVCIbc/17oI2ydeI5e65mkcIZmwJHcLHd7Vr
tyMsmH6D1JAHuiUVSMD/TqUkUEvgWcvVvLgnJnVF+iL7VSFve6Bjsf16bX7lp5VhTKwNohR1wl4U
6GjNcKN4dRG51GYskHoSLvAtCmc4/EKycKLauVxhDWGDfZZOU4MoLxcGwOrxZXnMffHO4lEnzgqI
1cgs/1Ac9ieU1s5F77Lhzp0sqC3QQr6ayqwtr2A5o0B2eZ4lH+2dD37qLsg4uYueHVXAOXKaSsTF
9D1U2v8Xlm2Y07Z0dyKu7B8m8PAUeE0tmVx6vinuBh2UCV0bXIgHMwyyrpPSpSOUVR8/c/VyT469
NWH0AtxaLXTTeHUFuB713Pj9elVaDN+eyp1pfMtolcnBwNZNOeqUXAkaCvgv4l3wWVTMCYhvlWEc
L33GBTWuAM6P7K3AS9Hq1MEtRMlXCDvV3ozrkaC6bUuBrnyEFJKIf1KzrW4P5MvaBYRh0X1tHt45
RVSkw7HyM9g9hNSApciletRpC99xiTmqDFHCzM7HH8IAakUfRvs5UdYvi7e/rmazctGh1lP4DfYr
LuoRyogKrG2YQdM7zE09/yj6ahObr/XTNZwH1u3p8LBNU+4G89D+qJ71g1H1z3s7Mc1tvHnQk0h1
3V3txKO3wc5FoQvikUj6icQLHRxdoGhkvs0jA6Dq/e+//97IiWK68WmFow4PhqFM/ZG3OnBV+Ucl
35Q+Uxh9wKilSfIjatAfrID7xdcCk0VKykhghTFC3WF8ORUr+8Zal2TRLhb46yN6SL1Msoz6Dhpf
PM3NSXGfncPm0iwQvQaWEx5eUv2DtkGWi2mxbDaUjqggAzQI9sl1dXWNNpTyY2m8XQpKKMS3UjWD
hLUqvejIYxG3XKmiTXBbX3iUBH+tinn115JfV/afkWCctTeD3Nf8KQPp+mKY9EHUQl9UP2UbiW8D
IE8R5ox9qlDc7SS3JqiDNpVlXGwxWhKbG+O/cxmRdyibde7reWECnaAtsUpQxcIi2SjkJrzhYxNc
vKjlD4sjk2/gyqAGRFdo07+YA6C4XbZ3E6SBdiSqrJPoEvGLezzkHxn7d3GPrhcX1UKhLiV2dB1r
0W3e0QcuY+QnfmTW4lgv8En7F4jDICy1OTowMBMJ2vNQS1CfzCHGsIJr5cTFnai5RVcAeJo8iN9g
6WfNajYEkW6SAeE6MvsKQ94s6B0r2ax6Ua0bNlrNSg+OjExK5OArWc5Ib6v408d6ellcbWnjDPiO
KcSa3VmkKfi9NaZSORJSbK7tmm1qjWlDs8A+RXjb1pGza7sZB/h3cr6Yr+t+PEgWq6iiORW0G89a
+pdTZnVtgCTGOtQ4WtWWipz6cakH27P+zNodawz9vjN6o6hBYnfMWdZAkqvbQLcvaHC5XdnJChBB
qkWX8wEBsQ4OMn8S0osDSjOAL4PBk6JQ0+Jqvi6VTvtyMTYwWrind7nmO7NPIUjk7wfvKIIJUFYC
Baja89kZvPI6cpdvH7K/RkWplL1Du6sTO9u6BJ5Q10OiWRLfb/PFpEDFW0v52qUAyNIFTM49X6Ja
sjaKeD1uARYlIKcCd8b0i+dJdMxt5oSWQXAUVsCT07I6bQ0wx5NZcNTUfCyzLouAOa2yj4pTylwh
f7oqmms6yh3yAwawkPKDB2ARbVbi0uYgWG7PWyojqBN9NvnGW6mhFVpHDqoakDFwV2WZjZkaQqqs
zqPpN0m9a9au9X67SmgbAnP/8NxWyZHNpl5QdFDHotGRonAneRWIAD12th2Pzqo0bbphtqvqCt9f
xEm9RL0OcKCrCv5mvlMck3VdtgesrDNrr62X9UUv99Bo+hFilKJI/UmJY87M8ZMgMzNyY2U5Y/f9
23r1XqzwEbfgRpymb8p1QcnsipsbAgbklUL8GAq4JYcfUeAsKz9hJ10SFe3auJ57dArJ3x/eqOvH
ZHhKyg8b4FrXLjDhHjon4cCRmqBHckTFwucm0LJXs0HwC3qmyDrGII5UAIxyoZJkQ2rbCoyDgT5d
RlIlC48lgPGP+TwC8uXOIIzTqTGo/jL0ILAPB3m7YUn0cx61pc2+zJV18TIXI/KEVr1df4OmD5k+
TVIGMTkcw4dPr3Y0ViPNOpOw07V0j5Q21NmbauzdMV1eT83PCuXpt84I+YvWcfejc+1/T8CUsJTG
nVVjKp4gtlnUr7Xx8M/0vdASpNllxMXgJDHFx+qq0Ay1R6B1Eg+S/AWkq99vFRo3Sy2ysH7bl1f2
khe/y9kOrDxGxcCwqj7Cjf6SsGFpyuhKtfLA/dmD3FmLfXIpVDVuinumALMKJ0wmEN1RbjNQMNa4
2hZ/8KHkQlA31+DhwwxBG2eH58PkmQqniHmee3ZUFfOn6vZvmqsgz27HGOIH3+qg0Y13t0epXeUP
O3CoLyGg/ZY7xvylc1Lc+Y+SvmcFVlmAx7jq7m/I0Ipqhwg5ytFt5WxFF/6NyArwlaXC5J8012Ls
S4ZNcsT0rbZZ5wxiKrf1BXAqYzbQJgNnaCYNgBlC5vjSTNEzUl2i1T05+XX5WrisFalw2SzlcqDE
l/ZVg31R4paN0uEyS+w2hly3D5PGljh3M4fszUnOlOSGbLUhAnkAPGpKaBjGQnWqFQKDpo4cHCji
6YxJaGBGAdu+KIFXwuiuOFfsgDZOWP6f6PFM9PoH584ybSjyx/EtXnCL/Ir/0Tg8PS+MA3chsCFQ
Deuaedcoctmsrs70ybJrBED2BKpoDnf79IFy8wLw3J3D9yMnu3Q30uhe1qFS6U2FIp95U+L9ZjAo
GHx936DWhTm6heWSRRrGIzQdCsb2co7CIxwR+4w0OsQTwbiB9Lei11p0WiiLR6bpVig1hxQhahzP
BXMWOa2PEp0P/Hw71rGgSm1bKGdrLPLhZEyXXbybbrcEuZd+YYT0mmhSTKr0OlKkGbuzzhfGs8AF
J9HSp6BC7NRVVLRPUz3Xhm/Ld9b6uHKr5b9nqygjLLwXFNDCs8tcX8MU0PTwswDdOTh3aGiQsbvy
tOUHiCkTmdNTIerN+n5ejlPMCJm6rF9xQehlquD6gqX2MZNp1IKgD1rXm4DPe0bOcR5xVVpf12ie
BzmEeagj+qw8WdCv2hP2vVBzmNAowQn9O+3fvy/qf0eV8Ufr3nMp9zWQ+Y1Q2VEqgwXGKKI4HFji
8ICuVxsfigeowBQRjOtLWdkxd23WT+l8ZHLYTnW1qFdt+XsqaQm59j431o9eWTwcLk+O32hu4gVV
HbShl7bf9EE7a0W3v/1nvood1VWfZwfnRn8ZVsgyef7ca5xZa395g+8PvwfrcnbMXNzAOu/mozr0
9O/4mZf/WqdefbBOvvoQRrjcrHNi1ko1DMLK32Z/jVMWUvW4MnJABphm8Iht+oHuAkOXqmWRlB6r
Acqp9vjk1fv/zRhlPDJOFXvh73YggwrRYZ2O9i8TpzLlG/SjQmKkm/mTAbG1QudJy+QbMzmafV7f
Tm6KFSdiSJ+mOh+49e1xe5zGFoqsTyRT3V2JMNvJDZEZW/14hZi2eASRt1dRrLHu1wv+kO5RbSMf
3QIyePQy4U/ez8q1iHSAzmPtZJRp/EyUsCHV1QZ98zdrCTrn6CMLQrgjXNUBWHF9Pa0Y6f3D7B/v
VxD1AXEHhJepzRW7q/NwAG2DsC7WgX/TvoL7yKuAoEL0QXtZZB5HRdfYcckLPKuVuxDfeNeDT+9/
FvdfixvVvTB1K3XJtgRvEx2MoGMOoh7ZXtiCF1ExseIP/Okq71N1G4LAtUjqVXHwZ+YPPrCOuFo4
rKLl8O/nxLFbt138uUERxNR0VPtZlrWxEEgOn2FfAi5qMZITZ69ttFbGJRvvMwuq1WzDZKve4FIR
cQH1BpI5SzZLDelMeA9Rudlaxy1uj8ZpzQveQu+eMCephrk76Hmwj/LLdz6SnPXbIxuDJ2uzI1lF
QjzMShmUuclI5MWqvKzutB+V9QI9QuekJN0h5UZ1mUSE/1awkKDrNAk6Ep8pKfJIuRB6NrAImKWj
8psqDWILghhT+7FhCNVT4a246UtwxJSn6ESxGdKXzibI/xnSISzm4rudhdixCxUtp66Fqy3zm/3K
tOjfg9hRVvNL4f8eJh4o6B4hCHNUnqPfstkbeBStN0j1IcNqEW+jSY+ZQ7LX2+ZrHsngzg5H51FA
XOl71L79VgeSXjJmLpXVi1iU7PF5KxXfLSLgVhzFUKTKCA235M94bo5z30LHqNfA/dIiiI1KhLxL
/ITpOL6Enf3PaVg3bxA8JAueBoXkh2XCFeDHYizdnFGSiNhVZ3X0GLGJJJqwmUz67QC1Jp+LXQE6
+k799TSSRiNOafjcnlJ4gHFympJPQ8EZ81iLplJutrRA+m9EEhd+fSimV4VjJXgrOT5isGItrcyq
5mpTEZ9NVOZjuUK3sgUxlKiryOPyKghsAgPjPameStbpDbcdKb1UzuDl+PZAeS0FSr+WEXf8M2DQ
VUmrPqum6yzbZVMf7B8e4Gl90KDDqnAKapAtc+naXG09oXA01fy7d2R4oObbWtXAHO0/i4JiSSZr
+Y/SgcKgy+JmrKRHJHC3q8pJc+uxNz/z5fcTshFh0BLexHifCJPn8jWOKBJ5slidWZFyB4k1Oaus
LzxtlRvY4TPUni/XP53FCfd3z3DtQXSFie4ZWv5JBxgP9BdSj7d36Qj/+xQC1N6PiQDqYP2MHIup
zFN61lKKaAMR2s9VjT+qNdeKNanjHZrlSlj2MNkdv0wtHJRVk9EPtNjgbaoW/hRr0ti5VGzW2mIr
+NWmf3dxOyH+fnywN2XTFFfk7E6u7EgReD/CrEJxGmBaUFeB7bXMb2hrKJC9NPUQiUk3wMf/pnhP
/peWosh7Dat5Ce+cUOEWVbh9uFAhLmPzFopogDT0iXvr1LW2V5gK6d/z1fC1tERdJFGDOixDq+mh
PVnZ8RibPPo7uN2vQtY2GJsdUcD/Zg7Wkqn1thkX8hCMzFYG7aAPWYQAUVl+gRa5ck59hYdDVCQv
33QHwsQ8kgmPfJmpTMvrFTefxSLe6bwulr1Ysy1viSsVOOE2xvavL4u2WPr0TZsnvOUzbVgafHav
tPfXdhCMBPlp4cAx3L3eLDARW8xTJGjR9O55I2YO5rhd0tEaK3uLQ9zhLK8KESudIXNUF9PF0FfQ
uDR4rpni2THgkchTlNnkKkhipOi5o/iOcguKT6BtDlxCK8N+6eYCEi2LOfaWN+akYxYm/fLLL+Hq
Kt89DFiYrslrAamuCCD/qtOqZumW5CoWMTCuKDKFoelZW1H0Q+qzU7b1I3YBsJB9gCNLq6hW5lQi
8491azyzUG+rYczpeWjaNIFRFPNRzLHqaJvdxjhxWHaTTtxuFAS/89MyUjN5uSBDR3+zvtz/TT/U
ae5kpfEwRNa3TE+rOlcT+yNxyQPla+ihG67rtZQbrC0AN2z2plgOVL81SwzwcgXjTCUP69zE0+/u
yfSgMYb/Yp08OLgzGBM6WoD8XpX3ODbrjaoti5bR8xrwzhq9UYIUUB36dj9fVizBtJ4OSSz7JTti
PFiRO1ZUzcxnLzyu9mnMRq3AJm2H2jML+dfO+Tssqi6b/uzZj0oxntGJ02MrZjP5RUbHQqbGl2/K
5TjdTwOTlbSmFdlhNdsIYZ1T8VS77Zxt28FWiha3J425o0blvb238MMSOl7y7DL/VxKGpUFndw1Z
jewsy4AlskDpJGk3HKpXJEqVY6tgXljrr54HEmGeFf25y164804sazxqbqJ1hiqW19sfZpglyhXY
SLaLq7Wxs7imQm8ZzlXWOVRMqK1L/Rz1MguKNQ9CPtrOlkEkkfUcRc+JWyZ+XMSFw/munR6sL/zM
V84GpuxLknYdlDMtEUjXqs3z9nMjDumGlFnVdyNkLQNXwr/8vOvZgwUmldAgDVODZ8FAY0NkFmqH
K2lJdMoCyH8pv5OIVNcySZPvwdkay+Ju/xkW1HZ/80ekNR4OpdfUAwt8CFhdgv/ddcktFbovQvt9
t90a35ZgFicm281U7kE7qcQxGqjabzdTy4t6PhNnCmhmDP/zsgO1EQN+3YMp27vSNnP5uesx2jbt
3ae8+3TtKcQMH3s29dR3YpikrFJt6Tckok4X26imdVRGn9Z+ywHbxswrrg3RHPh/pMTGRDoB+diK
BuNNdvfyMniHPjnart0oqe0eanRhHslRWjil+GgBvfM8UulHo/mTT7tSiD2dkFonUIRO6s16KTjC
mNkx8dwB9wRTsFhYJdn/G/6FeC9JOavQdQtkAeid8LoNXnlzpeQ0NVh92HACzRWhV9NOu3HNaLPb
P3SPH7cG/z4bVbagJYcSgfGakWhT9So7uCBDrO2+QGKb2G1vO9/IT3ohdyE4NhlxT6VyF9lt1Fbu
XuMGxg8aPWSf+MKEZJaNB5cF+d2lUXOq+Kvr6YaNkBISxwQb6ZnJWsx9So/YZGjJVolMcRz4xVHy
FFcQAbtuq5mvA/WcTKhWe8ygvRPcQbvBT9YB5vIJttrdhmHaxzRj8ArANCPddHflD9RroHskWxbC
fiDgH3j4lOla3A0FtnF6rS3cA5P7id9IiaW0Xa7IrXSjMxsR+XKDLRLdz7oW4O9+o780vkzx3EZR
2H1dzlHFWFOyQ3K92I++G5+ro0stBYhuxYs6iYEQt5QlIDH5wvtJTVZWbLTTHKTwpwxeqnSPWm2b
MZTKN0qH9HkngmO/djoUtMGCB7/rodhp/a2l9ON/+uf5slbRX7Gt6F4qp2W1M6pJlVrNyrxOEYNT
7UIVZFCzQuQtt13l12EWI6a3g2qoA6w1HNyWdC+cIV5FstLg+tXNBDvrswtoZ1EsR73tVHjnkmoG
flmOyptSNPJi5oDbcEU3P04/dB50i0M/0Bb0k+n2xJ1C0iDlVUNv+cB1hlX/3PHIrb3NVZNmk8Um
GHlH1GDuto7c2+W7yLb3eh++e/u/fvHFF5PlPQIk5Jwu4mY5L9flh/Hb/+t//uKLHhwdAmVKLorm
el9+RbOIuNkyjCdBvli1JU3xGqqWs56gMJifn44P8q/zb6gNCf57kh89fpI/SQZwN8pVIsr9hlzS
ekD3dbKumwJz7XEoLPkcTZ69/un5y19e/Xx8egwc7secoA4JtQJaQaRxioHtkf8IZpPh/LqYjxJ3
rCrmjHXR69kT0AWbzVIyX3GQYv51Mijm8HlzxXlGOMtdo5Oe9ArKNrkAeg9t/qjBbVb39voOaOGA
mTHOHlNarWRWYYLM+T18BU2WPeqDAGXq5JnA4byiyjm1wY4Q2NfpNaeiaVQntAyE9Xtfb2A2wNxj
KqFb6hFBWmuOM1amrGLeM+jqtDiXFctHaOVOasrvA8NAn8kUxokc3OM0KS6xK/riu9Nnv3ua9nDv
y0JnVVMYOapOko6IMa2Xa16GvJjNJqrrwXPyHssphg+tD1ALNcIYl9F/2N8m82W5nv7YXY1e7yVC
DQwRZ2UuE9PT5KeIyjVqU65qndEAX7RZrTAU7OHS5jQYoQ2/3OtEp7Ch0jS6YjeJGVXPxkXA9Orz
eZ4MTi7dC9RI9kG5RENJOqShkkrTDCeMIvbotoKDghUvCIV3hsfizavj4x9+fdUb8z90TAheBe6R
3WUzxehiOsfeZe8NHpfr6WP8dmK+zWeP+VLsW63kzXWS9TYI1FCoOyNQMDg+jf7GaHHol869KrwX
8llcVsu89yc4sugUBavIHuwW8UH8NjrXUJjuH28MJhbg5tY6qUAP5dxXfzr9/csXNqWYvPxDr2EA
I7oBwUT2Ccplvyya+33ZhX1pXNGGnrXXcnltSsWqeE7mZSWH4njFUnzaMAdY0xM3faaj8cH2Tl68
OX3288+Pfzj+3a8//XTy4ie1oeaf3qmdSwFXQ4gXIjAvCGfDOfuCpkPpGDbLfHlvtqHHcwU5bh9P
O3vdaWV724pisvViJRiASzXrG4zllrNgLQq0rAZr3TJaSYEz1MRtZPKhTpxbwuy8R1uHHhXEykOh
oW7J5C+bZi2E1umQYRNUmhfMIzojzYdHXgUCy9BPk52i7xAJ4OtOyIVxcaXTFhOW0oqyvhW3xT2u
NKodp9dIlp23qLzYXNERp/MxoJwZCO8C8i8DQDBlmsJkakRGM5RM2cDuOHX6s9fPJ3R+xofQ22oj
LPEnnf5/kSyjcJCMla6cXtfJv2CeNqJA9OeBCkLnJ5s0InjYLkpkThFCapgcikcTzh3uNHrHNEm1
ptk1+jbBmYItwNPDMACktuKh2ydwfOjMUI2wl6h8wkzV19cr4GcKg/Kb9KmFkxfHfZVCFe4Og8FC
v5irRDtSM5RwhG4CpaXUGVZ6TPlYS6JMrKoRyuGzDrssmjXqIpvnatt4an38Qe6EPnFUpRWdh3mH
Gq1NflgTaWas34ERtf6y1bjMjiqNJwWfYOqf97eergmmS37iQvpqxMdbyJ2xiHXjBV/XDfuhiUGY
O8CYh1Gy1wtDY/D1lzzcqqZwEQMugPogq80u6FanxQMrTbx+WBw3D9ykCzIMuP6xwISo1MASm0Np
ir/3vg1ECZnq/uE55Zk0Yx4Jbj5lR6Lg3DtJVXmLCQsWNT3IAeMj41MeivjnQAcL9fOHfc9JXP02
Tmw2qr2VLIuAmMLpLbkUVArFJTWrqoFVHtxFRSAcwOO+5xrNighO/b5UfJaMeEAAULgGw4Tzpr/n
OwHPVRakEhbuRDQbd2dm10fngV+NxeT0ePggQlQrqI84+X2bvCgRa8+5/SzJCLuG38LTeqHyEBqh
If9mxDkMHuMPj1nR3mtzZxodoT/T4GiYfGNfYQKsqtaDwywU55Vmy2LFzDNgC/YnVNCT6nXT+9K2
99CNI1TKdkFqkW5Gbm5CGVexWdd+yZ65rV3tUUqJ+ADJTv7h6dv/w5JojQptVZLR+sP3b3+efvEF
JVahL4x2KHn25pTTcNezzRRuXYXQwqQLEvtGYxN4zCghH6HQolZ/oBvYuq7nmvzDf9RHYPOaaxCy
zMMgn0DMU+/GerWZriOvCLp9N7r35T2/KcFE1TnYrEFu7vX2UD2BJRhpEDGYcNprwvWeolsYEHz4
RJB1k7xn6RigoWGSosfzutBe/8D2Hb85nZw++wkjxG6Wufw+QCqc7vPPqbWZVoMwGWhweb+8n9iH
3fZZghZRiUmF+NEo53I/lvNijbeGnFX+Unws0rAaR0qoijb1VyWmS6vIRzo3/uUL55k+aPYfNPAv
mR7aL7HBIbZwdnDO/1VB5rNyjn8Pqc9e79Wfnk+O355iM8Bgo1Z/MJkQRzeZcLL5aUqu/LAQVPj0
2cnPVBrLWuPAP6ipXu/18R9fnwDH/eL4j8i1vInMgkjIl2OiId+yssovghTmyTA5ynrP3jw/OZmc
vAHO6cdnv/58Ojl+8fzlDyBixBpmV0vFvGi9KN8nINy/r+v3AXLEq+NXTw6OhPYk11BEeDK5l43c
w6YjB2fo20Z5PH1XZrZIwh2el40LhirpwMqrqoFLOkFCPFm+v5qsJDqmsSlag9Ej3IOKz+a//GFc
ooqbXVbp95yD33KKpK0GKZ84UmbYNn1vCvLJhmZezCY8DcfjLwYRr/1DpLmdc9VgBR27IyMwEXqY
moBHpuY0kV3Cn9yC7Gk/SK1hK8bwktw0yezvuvVxMt0x/TdfccAsHHuQClwfd8H+45Sxto/7kgzt
vu+9QjSPxzvvJW9Q86ClqVdK//hkqGsWyeQFdraEKb3C+IhMYiO9lqYFKTrgxYAX/6ZR0RmwWHBj
1qwOoHQBsSgd/CESeCGGTwZuB1p3GAmboPpYAi6jM3MD5b5DVMjlDDG/hwjNNhVqbh86tfISgxKL
DomyEl3nTcHJzrpzPFzO8um8bkovfHeNDmM42rOjAAYaf+M5vPrTBDm2k5+Pfxh16dD5feObP8HH
dEKvYNqiSKfkK9h+UGNwufiU0F1q6HJxNrJPsn7qYB5f6nm8efnr6+fHYTN7yQ+Uuf39AgRZ0oNJ
cGS+0y6EY6MxKdadvHKW6F3BFxPx/0i9gZczg7XHpx7fMks8WAATIefajTOy12YvOWkkzwMhMhMP
930Y2wT0BoNmqjXFJl0uApjqP5aMAMuqN444BtkJ1SEELnZR0s0UXqhh9VIlUNLTYuFDV6+qqytU
jSXT+6kfsNz11LRfLTRsFGvtEcFvRDu8slo+KHiD9lK8/h0wxw7FpQpIa9SKqrx1cLdbooTDGJXt
x7btOl9WpCCLgPCED1zHCdwypcFt0Vj6dFQw3MzId2bE022dp7cMGDiPYA8fNtA2tCRP1Rw4kobl
Sutskn1JOBRg7HMOuq/sbHWk30JPKVJyFXMrCwuKrzVm7kHwvGJmpawc6kbt6Pw9xQOx8lHXMSy7
IPTg+ScdoKTUs8tYralXuGYFa6HNXcDvUxs3rPxE7eOq1GOuYccuKsRTt/Fd9/BVU3nFlEwh89Ni
BTuT8dBJhQsiBixanRQf62rWc27c9P19gruN7XIaaJj3LSuLKXMEKYRrtD0y3BglBl6sR7iB9rAK
OirQFb3c89viHumL0QV/LObVjOf80mS8RC5prVbA3oJ1fVNB0Vcv35y87TfyN8yPmBBotSRyggAL
94ZQ8GaOmX4BwzyrF+sJfTm5AAHNSezCshbroBySa6iAKLjgebKEM9eJjhrf4ZGHHm7eoypGd/tp
cZ5lAJQNgk7OEm8MYZqiAPHX/Pj47cmb0zaEg+OKrBG4ydYctUryHnYSCNTsHgYIfFKTDNpxLGBT
bmqGicT8NLCv6B4Ez897tiYVlNYCF3xB6SpPFh2gGHPETlpRY3gc+/O5oEnicUFpmXcVj1O0Ba2f
CF52uoRnsjYvXh6/OB0m+q/TH05en7et1cuFuZ34xuLZBjJMKbAL5qMKs3JDImPz+5bG1NsIc1sJ
XfhrtSRyF5+ROtturIk3NbPrz54/P37Tgh5u0/gV2YdRf6xHLqQ8CS5C9lkDa33JHEw3PnkiYpie
L+AA4JfApj05F9kbJXPLw2fqX2B9xYZWq/bT86Jeo1KJzG4luujgRTb7gEuy7y7JMDnp3yRXtZ35
dA/5HrLPsBOGRQLlhUKIfDECozr+ppjneW6pu2HgE+wMj7EhO0NswSEy07Z0v+5GKtLfyneoHvnu
4NMxoCaGZsEDIWjagXq/l7wCkau4QLucnexT2RFJJfYpAkmEmpJxCYPxJ/ptpeXyB06rhs4D2a7c
DZm+1KY5+jjaz+gq2koNRiWizBTUeXd6KnyLQz2CxVxKK9qXmzshZzT//J6AyNxcw38YbIiMp2RW
RXmFOhJnthnx4cPkYrMG8k5+HBOy/a3rnk2KyLamDG1EmbU4fvRoqHPCiceKBWw0rVfsJWM3d584
YzAnAOYkz5i/hDjwRXmrFsidcPCgQqlcTwfX30aa9oSKF+Jto2ahnTXgD5xdHmmZzwS1be+rKYDz
K1dUgHbZeXmMy2F5V04nA9xYroZARpNJ1gmkiWrLYIU6wDjVYQuq9Oy0tZNlMX1fXLUcvTBBardK
wt2ebUoIJ1frjtqHiOYhqnVQoPtG6/CHnybwhh8/P335+k+S1J40pDclHOiZ0Wh2qR+nc8++qj8f
L5oNuSjQaia6ioINooOhUyXA9Pik5Mkvxb1xTzEXYgPnBPjjhAzuQ3FeMhnhCUR7VsLKYqxtvluO
X2WasKfkG/k4k2mxsO+DIHxj7vrLEpNbSUvsviaQw+5Rt7sIM4HsdjB6FungLAZsS6HXgzPWiDSB
mow15zHXXipw7tflzdAT4JIf2NMNnqeP1YwnYCX1MgvpzkCfCbnflOlvigGObjmvdfTDpzOleVL1
Ok11gu2JedmV9fK0nF4vUFZF4nSLro6oS1IKFf4viqrIEsmjhEFBRSP1B88zptJDzNJH/AxsERpt
pk2qnCzY+ZIoPLxtQJqFNkkbvMN58vv6tiRjCUoDZR9F8PWa8O0xZSeciI+cDA2lypPkGg6oNKC8
RUQHRKD48FG0Pt5jhGpYkaZv8mTwplStsE/WDbnxEbigfis5/V7OZOCGekJIRAWLScua0/cKXdu5
D5dINGC0iwFxB+ntRepAIZy89M4lHPwdJC2Hj2DbpGK+aKfIV36dYKgUVRxzpA2NAb5w9CCw8yLZ
YCJY9LNFCoM+hEozIfoF8n10NB4rdlW4wYqSXHWIeQRuKtL27M/KRYUui7Zkd1GSdsCLWEvKtSVI
BRTbW1MJPFZ2R1K52ODmugRbUXMkl4P0u3k65C20ioopNp9tbpb0Ul4uleHb02dBo/aTYCXZ6PVe
v0BL3bsVBjMK0EhKQCOw2/xT5IfetK7fV8hXUjixigsYrNI/nyXv1u8uzx/u5Q+hDizY2Wh8jl+e
Pzzbf3ebnz+C+r97+cvk19Mff4OJat7dlZfv7i4u4H+XfaEGcT7b2OJOMZtjTdIG3Suli3p4uXgo
X+gMRFZkQR6NItA4eZdoYy9mIA94R/2Y/Snwjnln3mfI4W1utUTaoY5USEEBKt9iWmVJNvhKuV0k
f6zQU3tNCqGZK0TNSnifV+Ld/EqnoEH3NVIeoQ6Kge3xmKMap3G8gU1LUKapZnxleGwESpwnb4qZ
ZgAvSqCsFVxY7VnODjcuS6zOAikyimbNLt7a/kQOMex3SLmL4U9Gr+XhFbZmco4p3/YPgd49o8Sd
zZodlLVfm6TaKe/QIbRaywolfDqbPEtO7ZHRO7z6KI6lTHvUnOgZYZkAHgsEmTEjV1SZFsbRdYqT
rHR7gagVZEtGMuEOJk9+RTje9WZBuX+G7GdvaTpZCblZspPhYnNzUWK+gdPrDSsV1ePGUjMcWWBH
P0rOzcU9uWVazSEAanxL1QEghmVRW4eu0evoyCR8yEAWweQKwI2vSkZEuSNEFBQGmk2Z7B1989s8
+ROwaigAKhHIMznucWpczjOLXqO5hfU1OzQA7pGoWyhwFCkw5JqPbIMrppQmNSaXtaLhFd0JkzJr
gpaz9UKAqg9G2Px5ZqfQ26WeGhRWPzLV22NzhMilBO2IJCvNOu2RDMXJW9cobhMjIpLiCq69xRUJ
bYwI87qvGIBJVBUb/dLQ0JybG6RFM62qGBow09NfGZbpByrdorXl8/Jzie5I9LQTUjoiXAvj9Y8x
8ZDM6K5GT6ks1THWkTwm+TEer6P8WzYVoDwA1xmjSziRK97TJ/mhYa332DN7hTeMkl0+XFR3wGDe
Mj6jejp8p5jIGyULvSqBuiMc34th8gIdl16E79p6VZYUg6kImQ2HLjthaZ7s5+nnUuyxy3r5/7H3
9ktuJEme2Pwjk1mZnXR3Mum/k+WAx0WCRIFVbM5Mb22jd9ls9jQ13SSPZM9wVFOHRgGJqtwCkCAS
YFXNx72MzPQqeh69gfwrIjw+MoFiz86uZGrbHRaACI8vDw8PD/efo0winjKSI7kIvnlobHRWavgk
bRAK18oc+vKElGP/E3opWeLMIYNlUDVARDG+JnTQctDZZ5Av0MjOb0VzsWiqg6GUsEzU0kHzNxmA
o6cuzhMoKiybi17IzqO3bco6N8OjCy5i5G2PwTK75s1EY+47c+IIKupTm83PGftCbQr1SGXMI8EA
Xz+A8g8GKhhVu7J5UCPX5bLjATr8DrZUdV1nF3DybCi113xbY9ItuUQDdbxgsCMk7SXULOxVjh+u
FL215EOrKzbcyavmUmIYaJMHzwDpi6VMQy/hZge9ljzp+MB2zSNA80K9XRTmEa2s7Svb1zhrgZ6p
D2Ls3Fgu3BKnxTEX7J8vxoSp0KNADdnc+FzrbttQdMQ2Tvzfh+JchwgdVY1XiVU51feIctY2dCEW
HDEVXuPFLZ0LyCwZpVzM7eYF2FFA9zjeSLeilLsHVugt6gsgQFDL8PlrwBaMN8y/Ae8hq22JUdAr
QkA4xcXCT/zbem9d77q3hrspEKKhATW6SPt3aHuwjDdjvFOs+E7xeWT0a75UNB1k97Jn9skEH5Px
URiTD2/ptX6Kvm7mCu0lEabUItifHjrmfI518NPpyZMz46mj7qJZtT5IQN3jZRRD4e11lEg8OTnr
SQoDvp7uHgXJb3N3RXNFPlv1Qg8ahTiLTEuew4NnwBXv4K/ImeYdyO5uzfcJ4Dngs4F+LWrqiROb
u27NvAPCA2pRTR3/v3HuEHIyZDaFFN260I5tGDZwOUVH9MF6uySK0JjNJGAxOs0XDS7TA/SVNtZL
m6wPpMhybHLvjhJf34O5/jCW7c1WL8bh5BAhMkqqKHCZrygCvWPMlx0vpNyY8lGooXFLEC11btfz
qppXq1z1CURtWY8strCBXsrJ65h+YY9j8jfm/6HPPX6/1JTIn7vXkRYpJIgAKzHAYQ29WRESWw03
b0JLXKGxEjGGRm7EtNOoFJfAdfxjucpdxh6ggqkIfTpt7wPTamncLNbKuNyMGaoqeUo67JRpZPx3
LlHwMzLFQI85/WjAMXdAPiqPozstz9y0BB/whnPmP7cyrcbHCbPlTJif/QbnDTgfrsfrW5BEKMP/
RL+j+vqy2pxkHaR4v5aMpPj1C/JRhV/+m/f1D2+35/Dlof/l0+kUvnwIXx785eDgvFxWq6idr8rN
qzWU+rOqCN+9R+/fzn/1v3y6RHp/p7787u1lOcPufPGF+vaN+fbLL9W30hv1jXRaffM9cBR89UB9
9XX5Eb55pL75Zl5Va/laf/99hQ3cvw+CGi6K9WS8YjdmgR1QaVdslecfoMZwqIjAvNOXP9dffkdD
9L54jt/oMr+mAXtfYJkvdZnX1TWOTg/vRQ3flN4S17z2zFDe2uO3S7+z9CUHsNEqHxgv+LnBSEP3
GYd+Nqnmo2o2gxJOjr+Fqw+WykwdL4+SgHB/LOa3VpizQCtvdhGXLdLhAh2UJeTNM3LB9P5moV+b
wNaQkmtiX2quBl2izAcvSG5yWc7ppQhnFQ+nEX0zQgI1DTI4gGnwVCY5+gNbpnGC/OMFruANsRlw
UPJST4vfgoqwoXPCIYxvTZyDO5llVb/BGFyCUEkdzQ+g/APPkEzPLEHUJ+WdgkP8vJrenkRP5WSB
o2tUNWhDhYR755K7gBdL489o3Hwlc7mOPOe3HLxXMkjFeOlZ56oJ72i+RYxGsy00R8mImaTrDOiL
45oxXHES6WMSNckc53Cy/9Pq1gIfddLgho5WpzFmrcOk8MfReN3paTh/NIiM7DDCUw7u516gq90O
Rz7wP6ymUbLi9UEdOGoIp0upmSW9hZIcvFmtYwOhpeVXMbnFSYRv1qkUEVOCU3WFB3XK7bAjcVJf
v3r5biSGINrV0xDm0vcDFv5Az5RpWaOeNU1ZJtrMZQcp/DXMwssxy9ABzPt5nLDmRWsXO7KRd10e
6PRusvmF+xvQasXxEWZpXoB8zb7E1EHrRLepjAwbU6c5nk/ZTC3DMGlfiDZ7kiPfwfiPg+BUu3u4
3/kpcf6ZEWrDWLYNj5Kuf+T2hnVJ0PLOPNPePsTGp9CNE/h/8fPBDujbYDWfI16WiTs70KKeegp0
HNHrSzRC0G/xyUAq6VScpnoNGImzsghOBvomeSgYpEy36FS2T0FDDYEBy+I6zqLnbfO+O51cbg8i
3BJr4HdDDirSwmjiWmrKNgN1v6bATZ7pQWt5GISXcOcjHlTcakssQzsgoqErAeFMrLV085jfvtsx
YMtBezRXC4QZn+zMJdDRXtrJNo+5wXSpUd66UCHWvcTeB7JnzYEZDrdoMsZiYmxqJBbtlnDkmyzd
TWyyKZeeN1/Mj04vIbf/c3Fe9PWSXxf4sIt8bwp5Wse97AdyK5pcjtfjCbkTid3HwdGUmLoJY0rW
ZFClOAXC2Lj0xL64B9OJzlNg7IdLTHTBrCp9GE0qQssNvTfN73akvjebUeDQk9BlfYBFuljahC+r
dTABaA5+gN8/wInAaC09AdJrr/U8jEC1upzpluxtbJeENUZCClYrn9PVGu11Z9KhaASJOt+hoUrn
mYDDdjUf3zYO7Blqm77xxmCVOd71xqp8XwcIHgPKbsdU7UgTrn0poeNqH8ToJtSLsRS2QWR8eHqN
A1fIXNvBa43NnwSr+sHWlxoWazEXSmgWIgDYaAoTk43dzJEaHw+cZxpxPnWmMTV4UUnTLplkjLPu
H1LURPI+wMIPvKFLieT4nfK7cwxuBjTBftbCQ8paNVrBDl+kuckEja0ZV+Du+1a3Y4nVp/bPM8rt
oXD+LeI52d1c+2hQq1Vw+2pbXxpbnjgFJOPc0x1IhLbDXp5cmW3cUlVNIigsqS6wKW4UTKUJ/xrq
5nyVx+UVgd8SeP2NY3G1vAhvkwVYbgg5fO6RMgN/kOcod2mA5fKelzYZx4SOz8KVX8OfWL2WXWJq
0q0C6vpgNwup9VW5fMUmVpqMvjEPoUeOaqOXPCm4wN05DtEC2B3v7jIZ2/w0mSyQgCNWuUQ4+Kop
bPxvrdOQ1SB8F1IxYtIjFHTGExZiP1AaAqs+VkFQ/bZMIWcgdSUcF9mWI8Zy0mBgDpo1JyexSgWO
ccQarAFSpw8j39wkXw4W9UX8bMIOqyZObWxQYgaiceFHhhgoN8nnlFOhftZ0IvvqPP2IwZIjUGhK
DKnNmzSMRMWQ5/hOx0g1A/ouoRlQOquQkuPOxI8pmaadTn3UHfPAe75dIurdTFmW3IxtQETZGNG+
FvtWtyGONWuFp65u8hmsD7Ro+mxWyZ1gcFU0lNzY3IYuLkxjZApHGzuIBDGaohzwOriXevVilhuy
fWqfULn1I6ceY0cmCwWJ+sVNUIHeihtz/KKYVMV6qYQNCbFvqCgxuLCi3mhU8TNYh9MuaD/mWBfw
wbMbtIHiZiKVSKExhGBqoCNnsT6jj5wERs2XwzAfIMWvjKSNN/gBsyGG5OK7ZFu9sDYupk1LSfU8
VpwTOibGVayBjTK3Yc8JZnAjuGcElYqEB9HBaqv4ffS2fssJ0Xx3NYYKi1lgSfoWhQmNYZipI8mW
7Ks1x+77K53IKxNvDyKv5+yb8sb36vRMl/Vm4VAWHLVQ81CPGFjDSnubOsR8Ds3q4dXCNREeJjTq
pEoNV3A+hcQBDO8ybJ7m+Z5xxPKY82LZpdT2u3vNL8w9QhW9qoXIBJ9A6SrsrL7VpNY7iwZiLgGd
uSQb4g3WuMmWmoo8cBppQxfPcook+HUpR03kFGuoB85pJY/y/h0tPaxOmAKQ3GFEJ4MzSyTwqzUK
4FPuXV+aOPMEqeHVF7PnN6scyfQzTzWgdqwN2g4meTNvVDaC+yWzhHSUmYJf673ESPXo43jdckEn
tROV1UAVoj2FOiyuVnKDwWaKrpaWGlwuebM1CGJ2FGAnHaWq8QAG1YprvdJqqzlDUQEcRjvFblg4
Wke6TOK4JfN1LYCf0iSr6L4ZfS9t47W4iFHyIwpuQ8xZyqJ2OCnXky0JXHydKoqpDhgVc+lH31Tq
dyd6JSkTODA44nK5JH0rYZolVA0KR8RHRQoCX60xSGBeVStS0FgJOC/m1XUaYSV9WQANAyn3VQ9Y
xzBuKztoofOVrRnL7faJF+4Wrw9PSfvol9LHZfKOI5skuuawPh+8p8ylSy1ajlaQwiB1tPYoMQnd
gilTZFvE447VwEexHOn34gzVlCYdeT7mHuKNoefp4lmoR02u31KxQV/FXxPZJZq5tF2TZT5TLLar
PjQi8sLnrrgYyoqDHRzqlaHVUvpwSnnlXdzpZ0og0uxsF+wjFSjiLdzkNdd86faYWKRt20kyj04S
s4p8lJCfj5I/K8IswauC9QA6pb9AWLv8zm5dRRKOaI+aD/FeNSTo97arueYzU6mvW+mlL+6m3/ez
XPeiHx+kZIWRcxSdjzxQ3MV5hT23Xkmn9FfD2OfFbCOmHPNnMGyujT+qXmO8jVSzfyfr0a+NN7n8
fp3R//XIMd32oC/D0NR3zThPihqPGbYism6YeX2NDObaqoiMmK9mGnESMAFFn9JQJGYAyw/wt56G
/aWCgUkCSUmWuuBbRhmPv0dXeXxmNUVCKDZJR5OJsTqRg67hRIKCvsQyfbPXOOBfr4Adkynhr5ZY
J6+r9dT2Rj7v1yMpPEhkxnMzpIWwVLAV4ffUCRn1W5VH4/TQGBh8p0TsP85+PKNmXVKD8Krt6Efn
QXPLPNqT9Cw0NixY9buaTbQrBDv369zsUsvt/awL/8e+spaammTsV6hFuE1j+KqvVrG/69Fa5tfW
aLWQSvNeGW+b7mUktafEFn0U3Hz8YfknSd74h+VfaGIM+X7m/goll5M5jl5kh7V3FDHFwmffDhu8
K2OBwWRz407UtOs8jc63axPtA2c2If8j+ich0KidYBumhbEbgnFpwqr8KJe8of2kpVmRjuutyYCX
JeRjLNmwUm7QqrfNx0V0TogRQI4K+BTlwW29JNDBRdQbzmEkGR7DrEg5S1dwc+Z7G8U6Wh9zIkOO
5koJQJwHgz7sLhsfvbUiYh85cFI1qwzeeB1pJmNuKzvplLiYvmO86zRP7YD87ekv9i4fY1ZeX2qF
Vv/bRfCNuNnjl2byz8IbL2oP+FA1EoUMu11uQvF7IytHf8UrpwlEt0LsgvWAEErBRep24alzaUXO
jFEfhG8JZX6RuBlK6ln6v4TqpYYS142aSFweZVutAwuZY3IYBd4fzYjP9jKG6iux4rbT8uzM7uR1
0JP0vkqsWeAnk4yKCQIgOGEVmhk/wtXLtzLSXUhOP+/aFcaOdHZm7aYpfLddodMPrLB/b7pDZbfN
P5mERLZ8Ym0b2JI8AgTp2+717MsQ7JvPmsDm+XRJr89ucK1vF5y92pbVgOH7vqE2XXzFoPHhn97/
p4aUGxaj4sPT9//3f/+zn8VZLRrzWHhBWV+xsTpIg7vlAC/jgT3CDNc3m9j7nUrnSRrNebVDd6Bk
9UGykveAHCmv99CRiWPjHw9+iY/nmE0RGYEqD4ePGTBfHv2O+ll+07/tNdPgOHqCK6IcQoxZ5REl
HPmAgknZiSwh4Dz4JEGNZRtk30EKl57pfZmCpd9UlODGZB7f7aDpV9Cs2QiXwBCF9JC5zaV+EtC2
IRYsopHczZ0vTs/X1VWByVHoZWJ0hkcH6lJHN9MvO8lKcKhIh9xx1c/Kqe1mr22jzlQG5W/W9JZ0
Ww9GsIwz+nQciPn07FgwHiaE+dH5u5QBUUq3IvG34laksBUQo8ScY/kseF8zaZ/Tz5AmxHg5LW5a
cC28ViOsiv1cgl2vUWMw8BuIYYF+EL0BZaHKk4wFi0QYB4Lh27IIyb7dEyR6hMBDCO9qMtmus+l2
zfHdVmKKcwBl90uhFePLgYWeNA8ImwpYd71dftmJ9i73qnUzqNZtlPqsL4BuZHZNLF56nhXVzhcM
DIQ85rJ/qrCwYDvZLAB4wYqkpyUuVqLctIRGw4QnAEgqzHFzJJI8Et/szEmAmh3rrSgBVXY2RtUc
zekd/CdcIkZfNuqQZDT+TGdAajjfgJiDcDKpouwX4zrsQHp0Q0kShuEfeSO2BKVL6vVa+7Msrvfu
jxqd+tZnIerzh68owTTsmZHANdgj88Oz9/nPKBNXdlleXB5y3Eq9PT80wA6ET4XbAl3IPnz9/n8W
SmU1YDaCo47C2j48f396RKQODr5lZ12TKJfTMhuMPAIQ4Zoc8UXYGYMDztyokm71VW4uUFFgRa8a
UiGhmwmrME4VsWlJCeaJoLkODmB2P3s8Qhv3hALybbzPpJpXIOLHRmKk8l1RZUl4lcr6ZmjszPPW
BNrJffJ+SXTYCy7bhbJJSOgSPw6nwrSEI4EcBk06VztLsCJV3Z9Nlpt5X/KfqQc5+n5QVpPNPD/u
S+nBuxevnv36dy9evv3f+50/HB0ddR4ISMRlgcbv/nU5pWwkPvhC1rmE/zqSDLaXYZYsHT4plTOq
LZ2nDGfCNSP6wQzAm0yvKsJdB2M+8M8Qq7XCWhcqVaQD2Q1Beu9l3zz97ruvnj77jQtKkrbwpT7M
XPjs1Xc/fP/ybbeffX7US2O1vH//nnS+a4M0o3tMIOLnRXZeXWzpMon3vnq8LGfo63hebrxsBNSR
L7InRycBD3EHPz/Ssyyz608qlErNNKaww35uqWFSWkc2Ihs2uTyNU7AEdHxEqlbOOw/KcSrgIb/Z
CmQUnWLww3xbXw5pD/ZsrjqCmolwx+lb3v/1ZgrHPy8/NALf4j+DtdYYEBG/nlg0B2W4pB6Rcq2u
Fox1TL9F9T1J8HMrCazwGpQIJXGba9AE7lbeFUv1afcPN8fnp/frRRd0VNALOU654jiwenLWy7KH
sW6OVOKvmdbRotszWIwv375g8cMuvx2ECdxYuCOZ8qB3mAD0D8vuQTjaSOS0DBOqHcsIAhQHPq0j
TDaeZpr8/PTGZTTF74HYDU6vBmSINRuh7IdpQnXyt0J//Bxn5ST75tWb579+8+qHl1+Pfvfti3fP
+4lAqiXBgycV1Pyz437Po/Lm+df9ZDjWupg2kHgckPj1m+fPX6Y6crEuimUDkc9SRP4cdexedlvM
Uw4qTOVJQOWr735ITAkCuM+3RQONXyRoxB1BqLjtejVvovLLHVRkkkC3vh03zcmvAhqNK3x9GQIH
OiJ/vy8R2k1JIn8JA4kMI5L4J0ETNuBZJpCZffYmOn8e6movXr57Dhv83e9twbfvvh69+uHd6x/e
jb59+vLr755Dy4fHx97vz9+8efVG//zYg18XEeukqd8NgSUdZr8uNm83Uw44yEO6bfu0mYLXc/XU
NZ+SCKu5zjM4/qp58QJBmJlWb3BtX3jqg3DCclf/77Kjm6OZilp+a8m9A8nnnomYrrx1BVHGaGdB
OYl2ls8e/+qXnwemAhSIjHOMpU5PqExgUVaH0ynT8DIF4vetVPcfgR18SsmIqNqDFk/foBx9lwvK
kVw7p9VoMV5fbVc5FnEHdajuUNa/7169edsla1f3uBthlNkTYY/qR3F1J/aNsigwTpwprMtHVLcX
H1rZH/RM489h8++ev/m+SwBj3el2cd6Na6Ai0XjB6+IFT1oG0uKAD8SWm27P2EPfiWb1O4YC8TK0
jtQphvFM+fkc1OXhZ0f4zDAdwoHE58QQzhUR9kM4HdL2FRTjQ5D6IouHILxJoA5B/rJUHIIUTdf9
itp9Au2+gXafQLu/pnafQLu/53affNZYF9p9Au2+5nafQLvPsN0n0O7vqN0nTe2iNBweY2gYKPFD
aOwc1Jar4S/QsQ1xkoa/EtZlbXRawBUXkwIIIlBZoQlzuzCJChIGZaWHmjqsfPYtFG0irWtaHzU2
HaHTlHvPdG1oFFiOqMVt/OJVvo8dx9UDST0QiV1tNwdeZHoKrox2q+Ncn7K/czqMcsKlg59IJHTC
wHHpFnEy5iSX0nYezR/egRdQ5s3ZSW1X/MbcqJMNmx8HT+HK8a76HaqtPGKc9GK8CAI4DUr00AFG
g/Z5IeH/Ij/MT3Bjs1DxHpmRtO6hj3N6ye183nqJ8krDRLBIxStNUsoG7WImWDiTCDfGsbfcwoS9
7Y3L5197j/EaPkkdVX+VC4u7n0SpWuhtyPReRqv7/uDB1XV0Kzv1X8hNgMzVdbQZ6SeGz5JpMzI1
AUWNl/zst+iAwY9Tne0S8W6X0i9BruX4ndhn+OqaU+0ksvXVEy/+1/aBK6RjWuxK8jzwDQlX0suG
vbJZsFcIqADlyo2VaJYB5XMwlyjILIsmpZnmYJ+lg1ybFxUCPtVephIKD8IkIJiqBcGJHDKxqr1d
TiXhoA3WY9xiVLcolWj2xTCL291lj1NwtZjNLeM3LYF7xq2D4mK7WCpEXHqnw5Sby2wc0EA0Cx7P
diOpNuiBxgAgw2FUzm4fLYvtZj2el39kPP6ASE55NDkHHOEoM3DCeLJh2jTA8EmxrsTIc05JFdAM
xKD5DIpRLFabW6pu0yDAQbDabhrW8dDD/5kxu7Sl5YY+X49h5h5nD7PHD3BRQBbNEYeS9GGs3rBC
MvuDYoB/ufrCqr0HL/cmYv6LCOg62aGmdpg9biBCtfLmar3s0aMs95vyVwVziPwkAjiFtKXox+yB
QJ1rfKfINQbryPYmtWXedtdy69YyYQ0rFcwqjEXTSHXUjSNvrNfzAKQFKa7cbCXxq91T66pasNsw
AmjCbrLUBUeOvKj7PjXKhT3ZzqEU73aQJXXJggWq8aO6JUT5hDujrBOAn0NjJb1sUIZ5synYHo5T
KR3A7CASKYjjDx1HWGY91IO3dkifyU/iuXxoJ9PZLv1jH4tRGyLO3WEgOXYYLLC+SIn7KGA+duKk
mjm9jvARnIIH4jdE8jnxwox1SKzTZ1DTCI9mokI/q/hnOf5d5/fQhTUR+F/fzk3QxiAPc6ep9V0N
NXc0q3KSDrvdaO6ovkxw2DemTTlMcFvmdS9VDS2rqsF1oZpUy3lykHpxbVDRYoVlMl5iJczsIjCF
h3K0cMIdo4J2kn3UbNU0OmLv9ADX3WYV1e4MpaqqKbPToCZgWs5mj+mkHgbkDhU5NV2uwpfZUQLl
hPvZgX3/wJW1F3G6LohJRS7j/t1ceSX9G9lq2kCWyp+65z68s2HuJxro7AJzjqjROY4wMNYdfXMU
lReLqquWcp+5uibsmy6aDmAf64ekJL0my2ob7XXxCaTfPP86jSBnewzb+O5k0VjeTpdMRHcnTFb3
dspscvpE0n/eOTdNrkKGYsgyR7+KV+1uht6WQyNxzNn2T+7crLLOGvFjrSaeCTBhsqIOcv7SZiNV
k4UjENCKFhRSn9JqBSVcOInQLwzxk1S2CPzHJNvzSmMeC84KEplUVEc4yYPrDlujA0Aukzr0gB69
/ddKLuWLcrRSReL+IO14QV4k8vApv6JLDv0utkfzLGozwKBuaBLFTLiNg13PNNETjXqj8fb702e/
oUEPmemP6IkOQYHIlhIV/+F5posfo6KLRhnzOkzIkjWZhwdhbdqouvbjhtokY6LqsK0zr/EnDdXX
Jso/fBvTlX8VlrDC2pT43CdfcvoMuAnMStMAOmq0ziRM5MWaU4ETEZrauKo/q8fJqom5VTTCuX3c
TkPNsCISzvCTdiLrxDSE8/yro7BEOM+fJxsJZ5uZ+ttXb96haZaznUxGhPLAfkQk9p69evXm61x+
fkteQdu1FmQCx4uugKd59z0cNkSzweE/7/7eljhTzbz9/ul3340wD/T+bX1XzDY7m3tXrXaWeYM3
1p2lvqo2m2qR7P2zVy/fvvru+ejtM+SZ0Vc/fPPN8zewLN+82n800+u35R9Rv6AZb+zF9PrZdl1X
69dVTba4nRWUgtftW8k4+F1bnXrNwhEHaxempUvfj2/KxXbBlbxhnDGbjbTm6tgN7XoU5rteFvPP
Hg90qbgeOq8al7pTO5CvcSRnidKYlQdK4LFpyrLgtkeVp05fQV+iE8snyWVk46QViOaxNVRoI5Ye
MA8iWMqzVjqJqfjq1avv3NpIrbcTFGJfbWezYo23BKjgXlSb16yh9i7qrcPbGS4k3Xn9CqXfm7x5
C/bOdnakaX4UoyTuTkrP4rlqEQNOgWrph9U+ZWznt+tiliPxGI8Av1VX9aSn6CfdHWUs6SHr3O/b
8xpTCGLKuoKVMHaKnGJGxArthddo/aCXAxJZoKmOVxpLHu+7YrBDJfgPGPW/rcl/TdnNKb8Fx/MO
UrMwYMk5+H3f+/iecI8OrIZMNwa4K/BV4US9Spuc6HXWgZ86lMd2Rum/Mrn7uxKkvbgEs5x5eF7i
mzYn5rUm0wFsR87pfl2gyysOo1hynt8t5yL9x9gdlkKUVrePvegkccMm+1CVuedksZsu48vDPZNP
fltvKb3fdbXmHG7StMFzrLO8nPkZhUu9RJPxikHVEG2tp/1OxFpj711+mtXny0SaVTO4X2Ls7gIP
XbK4UN/OC1r7TEZmcpayF7j/st9wnUnG23gdNbccTaOXjFZpG4UybJtR1Bt8L+CEsdFs0hhlHiMi
5O29l0OBdfOJXkXDXC8zWHHU/k4kBxPloDHrblHd6wDgUs1PRwofcv1OrzHHboIPdFc/fPP+30n0
wQh20Bj354dfv//Vz2xMI0UJTEtJOofZVAeErJdTHbzaj0bQfEmJVpHit+//vYqahOnbFOfl8sOL
9//+gKMi6u35QrL3ItxRtWYMvJphd73ESgSiakhkmKa7nBQDipuw3etT2KXYALbrOa/NOSayHmad
y81mdfLo0TkRGSyNB+/NYr5e4Ts4lXuYdR7xN4/4Z9ib1/pH/PxIcsYZ+PPptKI4vJyT+IrAQoV+
RfgE+CU6QdE3ecfYjREnsaJk4LJK9PtA0eseHpohg5aGq/JxvB52Fpj+2Z26YwoDGXYpfBrKTaFT
w46piJkY2cjHD9dOR7isYArr4WmX08oi8AdwX/esrxz+56thp8ZHGi7zZ2RPOnVgPdxMxgvT6XlT
BJemWXkBa5wDc0FXKJEpxg7yD2G0xGJlHUC88gPM3YumH+tEzvUHPF+WxegBG2p0dFZgKjgyRcTR
xLQ1eGcQS8ls1L1+qOzvZCkVAqv59qJcLsZLEMi0qPxF3jWryotaaPt9NZ8yJDC0tx6MNtcDP+M2
TtSmKEbem0gQnKvpeEWCbOPxKM1TC4YB9vSgXE9oIqR9b922S7dy/lI5ZyP+HpjHa7Xba5/7QV0U
V/mRNlbRpp9XF26y/RqUJ7XXTtUk5HRTO08W1Hlzb27LKXsV0Ye8h4Fir7Fw3uEcGp2+6p7GKGGC
IGr4hVlel+HzAOVE31BXo7QeweYFRW0dauEQmjghBBqkpZr46/EjToos/4iw3Uej0y51p3tmA4Jk
Lk50VmcvMO8LL+zQWvdYes7Lc7Od34JIKNavkVzKe9bVgcUrEXu/oZ6okeprmmiu2+O9X3vca33E
6u1iAVs7D+fEjS78ZdAgWDA+hUVhJ7KY2qUJiZlGjKAl55I1gWHUOmwk6AKrwuiW1BligrS3onsH
xyJxKk1L6Vl/cVThOKbF+TZUxdLN0otqhycXj1L2nFAz3vcg/XFNiGO8jeQ5lkEDNPCwPZoG9hGW
+entoSyaeN7VAP2hMIIdc96vKVE6KlT0CbhpXRY1plugz7jzQWVJaZH2At6gQcrT4Zrj1VlhuQSB
RPMEtAPX9Gvliur5JFvH2DjOGAeDAA9cPt8ET/Z49YYdawjgjBG+UaBmCqgCeUz4vzhRp9YrKet6
YZTjXcWcHDCale7X2eHhl8JG9FqrZRtoi//b+/9F9M8J56k0cbtw7H34zfv/67lWRHUca7nENBsb
DlHFOx6s8tpIEdK+CJ2E8pED5YtiaaNZuSmXDq8NjKOfSpF8cIC3gs0laG4Xl8VNANdBgY9GIf2G
mabxKSpIRCFwJZO+wU4mzC36e3MukXrwM8ni0P1VJWwzrf+2LK791zBQnfFLVm2pkNiZX8yyZ5TB
HvVzrg1XSizbx/vSMnuW3/SyCeUcqAnOF/c9087GZMZmWXhp8t1nN3DDxmSEoKrCHTezRLEIV5e7
1zMUEQRnNp6zHkqJLbMHpisPsNozF2Y/RVx5cmXaEsg8hkpiYzbRFci/bW1yCaA3E6qwH3Hg3Auy
ecT9yf3RP0PDgpkGnm2KRODhJSjdyGQ6iy5V4Hc4HuuMDP7iaMitosfkeLupULpPyBiASg9exW/Y
nvIKbTWMO8q+XuR/ZXD1x6oxoASlkGOLqUksRY3gZhAe1HNYwg3eTYusl1k+YgeHAGAMSCt7OGOe
QiAwwrzPo5HriMwC0lJzDkxaTHjD1YmlHI2wLJAhP93a2J4OGNgV6wEPcaGr4hbK8axCn7+6NZed
Plt4qCGgrBova0tsUeFtk3P0TPz1zq4vQZN0XcG4B+Nc562y7JhlBfUnl45IWfMCm46M1/Arxf0X
U0maQlM4XpuICs1MBa7AN3BqFjfjxQrtjM9yOjGgJpovyjX0fF5VbChyvMqEqP8UR2y6P8zywWDA
EO39DP6USJolsBih82fTqqgxp8uMMuFKDlnbQiV+TzHFcrlhgn2zTsuMfuDh9OFvM0cFHDq3nAcX
T1A9l8+QfUB04bQZf0NYNrxlsllSltXsqjmi6sIWwETLPMNJ9kIMEvT1XUPBJbLXeEnWTuBXOjJk
trSkovNkQ7tuFiw2Ah1kFQxiXZIiplhwIDZ9cYeWVYu8HF1uNSOCxa6PhCbjyWXhcl7pmdaeXiME
wRzl66raUNdopgWYie+F0zo4R9AliIY3iGqH3gKygalC+JOtRAXsJ53JyrqXuanJe83QDFLWEeUv
7GycAoWzEI7hN8VtQklrIMW8YLYHJddKRQjo+ZWgFXswB1id97SYjeQ4Bz1nlyVIaNjxtzRNLIHx
6NBU1gXtL/hxu7L+5rRMXXxdNudk2F0TU6PXy/jeqFG4+U/6d3jL7eZNU4BDaHLpSPRxfftW0njp
AVCEsBY2KGs30/4KSaIzUxDGsVhX/orEFlWTHc1jBJUdDX8/SQAwYp1nVJBUCtdr8j3jus8GZo+d
he5ITOSkwYArP4dTenqmpi9gPDuDemu6Tg05YxsmYnDf5vG+81O4hWPb5STpBhxDNJj/XBkUPKpG
L7Qcur2uOCXkkdAFV+o2Y9l6w/WD8AhKkOsz3luSipTQWGspn364KJzTixXmyEYTI8prqZsii29M
efcfuyZ6yHSkD/La23iMptaw67oIurzuYdRXYnVVZnW9PXsC0xJwx2RutiClCcIfKG4F/oGariCK
4BM/pdD23Okpy5AuUwpY45ZSO6uKB+5bKG0hCV9Y6CgMkSRt2V43nvJtDI6BgvNMm2CCGRzdHJ8g
+rFNqeeDy5Nfrp3qsh75eT4JRq7XENXskmrCVaqhAr/HHKL9QB7gOGTNpQLkRL7NB5oCSH1uK+X+
YoblBxbjbJh1v8DufdlNHW0sqncVnlR0CZWbrurFM/jm13Sz2lRrzuaDMhi/zntpCUrzM6AJm1S9
8Bz2br4JZ/qgfMrdXu7EvFl8dcN2IpC1IYxnAl0aH5K467gXc0VOBw1sl3fkAgv6dwcm+J60vZxD
Bhlcd7HJT/WKnvV2sQTC3bUuMrey/wLLut4Uk9HfZGHtpC9BZGrzSYOUTBhaomSvPStyCJHakzsm
5N8dZAa52pMe/HqZZgILcdi9T3mROYdY3jOGCk7kBV+gKGejC5Jv5AkZGc88tL2By1yu9he31/sr
r0VSAELr3Ps9h/7/7qG2HxHeWCW02S413mfd19405LGAVsnTgrxu/4YnqOUwrLeIbf3SjqrHneNi
ycgqu6VCUm1ym7PBixVAMMX32MxSdK+RiDz2VRPTn7WfSi85SLKAGJsbp99+bIPgq1Wsd5u3Ue5P
lw+fbi/lmEJOvJMtG/BQe8PMt3AnVOmHU+EzdPH0GEgfc+lYj3O4VF5FZ83jaGro2/hQepycm4a1
9eNt44AUmmpXox/N7yCZ4NXbs50RocOjBXyESPDYoPqKgluxeUt6T/DllP4jaLE7XR3tf0HfhjRi
pvxpVGg4Q54bo82kwHubhUQsKJqExS4NTe0q6pAFJF9O99m8UGzfjWtYoI6RKqgDZheSSpZUw2Le
TuhbTZyte6ByIa1buNPjILXqBzt3sCqc2MH+7k1suW7ezR5mXTq2BGdEdx/jqLu9rlmqV+t9VurV
+v9fqH+RRYJpaVujg3to4KCMc/q1Zzg8uCqK1XhefpTU5GT+r40lGP4yGedgvv8kTzOUFjEj3MYu
cp0SKvhHt2/LvSAkKCyX/7egVE+K/WWA4WTkt6yiKqinT9eI1JXiqpiz2IbguUCF/KWHM3R/9vZg
nsThvpODEovlGh1IdF3eTU/e3f5rZ8y7HUyuj592rOAf+nD62x8qB2znFbY2m9dxVE92w1dlYjvs
x/9Pp1Ph/zzUGR5GZ2xPbYi32/OmioetFb/fzpsqPmit+HX5sanio/YWq8Yx3m+t+Lq6LtYNXW3u
a1oO8Br9qwgC6nBSEPgZtkzZRkFAw0xTClJf2tJ3ESpqx+7csEmxM+dYOh5wsxjZm95aQupkJIre
v6ZcIqWZ1umnK808sn9b8k3tFGfKwnyK32yXk71uwFLWt3ZU1W6zjnoRUlMlHkZIodf9qcaLu52K
YS+G+i77r2wGEV+qhDAghy2vXFIMNOvGH8eE9vEnvRlny+4J0+Lh/yWxfl7xvOsniLWKdpxZhDJq
uwyTbJD+DWdITeiyJsMrsJtv8bOuhvQT9mWPsBhHLUJXGUfzOw58GhvlKwzSmMnVrNyfoo0Onwtx
iv0a+M2pVDujAaS1fpvftm7IGyTr8XBoOwG6e7+bMnVEN5Nxs9huyDZjG+ver4f36z4ZIaWPfdOD
3l6NM4WAQIPcN6gUiMQ7ijnKfp3eIfbnXrrWHZcV63VbF9NRTiyqmsMHeAlrXrbkrFEd1fXUAprp
mjbM13THhE0bZmz6qVOGzkDtUzbde84+adKo0nTHtKXthzY7sZ+ak+Ssthy6jMXeVdpfFU6ABX0S
OOl+aI0Nsnydnhwenx0kpqHtbNxlPQR92hdI/9IvqWJmojlTbyHMPmiD0LZ70h1Spvu1HUz8nrpD
2e0GSZVJ6mCG+Tgfo0Xv+bU4OO2hA0nRv80rQPIAptIsTfnUhe60P4/tZJK9Lud/kzf4aC1lpHls
vvcG7+FTshel9ZlDx5uidj7ERh/pswcyqn8G/gRD/T2YJbMAedc8sARz1cc3AY5oEtjrbkIRlXfN
cBVNzWgtW57ycBgjMwS7nEYtjmOD77baf93lDvvqpY8hO6f6/V9JApCh501xGGSgqxhd2blgkO3H
y4K617sDldzHB4TikJPCAn/peeWSwuJeVpeINjC7zboSkkN3Dkx3uC7k7yF6Snf1GuRM0M2JlwOB
asFsmtqcDyEI5wrquyMPJz9ERQyLU/Sa/9Xp8S9ODh+fqZEx/DMFXbAYG9eZHeUXqqryWvGlHrWx
27HH0EQdIuzWQetTimpgn2gyloTpHLx/c3OB5WpMir0fV0PJfbj6px+BO99MUqsITI7/wCKG50bK
5+oQ+m+Ya2Oc8Mc0Rpt8QgLdvAlwl3LJRk1+QZG9XgnVBQH1tbhpQRNnfvk2x6w9nLKAQsonK3Gs
aAetf2WVQBjy67KejNd7ve9K0X+7LBnxoYyRln2PAWK5fUZHzrZQtu31k36PZgC+7EXFBtiSjJ9d
gjmKdiQBqblpOxgtNTuInO9cKj73ZfJNF+9jGBdzknV4E3v717dYBNUERcPi4Jiw9/s1hhSbMPcc
mX2i3J8LE/DomyVozAR1jBgINBMarNM81ibSG655z4YZp6MCfoJqxWvCqOTBjI13kRklqF6OKZ0F
kZRR3w2YyybticSPzp7oyYOkYbF9r4cZNv3YU/o+iiA9VdfdiKsS65w8dpXhMrxeR8AHLuqWw74d
6EEDtyLP3/v0/0C3fPr6RfYoe75EiKdVBUpMDV9+OkHixkRCaX6z0kmlNaYvB4BFLCCMJTS6KPu7
PcUT91jr6lzApDMJ2JL8x0Fs3JU+SFYjhigjln4Hf/ZO9md7jxUldk1JoZ/CYyaaKWSzO7F2lCfY
IgCo09VgAtAUAlNNnOhTqxS5IeedPGTSPsUjU8ryEv6lnNGksKCk70T6XYdaZM8+jHItpiVm1CLZ
hpHrm2xacvooJI8wZNuLC7z1VkuQjwl6GN6Ol2iROCow4byALhRGWcIf0XUdDvPDw2W1GF+Uk14n
tY9lrBxaIdgfi/oCRNKE0OxOEtwlvyXyvPIPiqGe0ypBBwmazhBljhYmlcQX0O7mnLACNue6QBt3
3rPYcmYTMuy+OSNp+c/pwKeGB5YXTo15z9n9NmtoeWCvmJToyia8veEcxMFWh/KJ3e6gHeIQrBvK
jdYLGDrDbSlpB1yycR5cgQyyPGTcB86alAEZ0UtlAm7s2v1EVQBnQQ5f1UnOiTPMvIsp3G43korB
K4c5wx/C0Z9lX3xhHEDNed5r0BOQDNtwiURPZx8WXSGtJ4TmZEn86F2b/Qvdidkf3TijMOWSPP7l
iZcxm9NqWUXvb6x3tB8XqZPiX1Bkh2oBgYSb1aB0jBgMWC5Ho65BbZRQaAd7McvjgI9f2F8vEr9+
Zn+9zG8SMXVLDCznexjrhh1oI3uAtLBPvxC5J7+RtM178Zf5THz+sR4Iz6OgzIzJXdi6JczWE12i
xN8j2pTnCfPVwI9H/k9KMDx++NnDJ5iyuxpvkABzICxbh0SPX+/GjMuVEqaW0QFfVNWq7ko1LgGH
Vz9DqLvjfvY4/Qt3Xje1GN/kp0gRxn1GY3ji96V7iZkLuqf4O7HApddq92J7xe+xlzQL8NuH73xg
QPqnWH/4/v3/OmJgwHxa1sjmBAkhof89jEigI34mQIEoD2HfkK0O1838zRBYNaMDanhAbskDj6lq
8xG338R+WhfmLwQTZcCY2ZICXY2mIR/5NzMY5H6HYEVHcz97/v7Fu9Gr31jYGXmHsRg1pJb4dCZ4
mMvv31bV1ZtiPr49sFJ+tNrCWTmhnVfncxdj+WpJoCG0MehHtrQj9hBaILK5TV8zzubFmHAMKG1c
jS3auEsT0aySiBNe5g0KdwTBGnXODn4SCuLqdl6ee8CHHu7hsjrcVNW8PoSjD/EkfT9LgTzsEOQh
XT06BvcQNgDWq5ZYS4EfciJSTYTQDfMOsNoEQR0yqoeB+q+fvvtWo0BcVDhLooySpF4Q7tNAut+E
degh5t3L3hH8AzmXnTMODfC3DT0mzNpBRuu3GV+xJkc4H4wVweFHhj9GaA7HEcYSP1lCBD6axALs
hLA4Qj/UjO45Pq8JylNY0mB8mmxanWrSUdaxBKHwK+t+1vnPOItockFgM5AAAV7kbLucjNcXo5FQ
yNfFhy0a3k80d76m356uL+zPxo5jf2lEW/IJKkMSfU1hp/SXs/JcYGhydbUuYKNMCxPbg19pCGdV
AojwzpUK+NtgtFr0kqVZYxnxR+A3KY7/i3gJdS/ZUdw0M4QLK/8oLVh68HVZXzqCEe6FLm0nDrfp
FF3LmnGqWGmnIDk1cqNUkUjq+rF0IZAIv7MNtqvpGDNlMamGQpQcB7+KEmKJGXbphXU2xd9PHVqB
EJ5Uq9sAKXJ6Kt2PrqmdL9zMgA6eP3hwf937EqHMXF9A1lgG1AvfOJMedmPIiSPvV5Sg+rNfFmVJ
4FvGNMzy1gwr43KdBrzmeJl5LTLzKEXFlupnOYJWmFtgYPCxxbBf9oMfGGK+Rk6xRU5ScHa2pJd+
1g7PD46mFeDZx/VKpBQaqT1K8C+e8V2y4PYFjQDbQ7cNO/Kec31OReZBVXdKpiPxanFxUJ00G4tH
toATAFeVj82cO9ILw/TtGBSdPPEi6ybKrsQZiTjZ/HEFH1Z0XVyUqKHlpkaYbhlhooZOT9E8sloM
EyT7ATJhMStvhh2R/h2doiiQYuG+ZmRJmYiYMfjaGS2V8dMKxlnWZqTF1I21wek5qLtd7pol1asJ
qF5rD9UpWHBJh8fLrmzBLPIE78xcvA6UDxjMOPCq/ePq2mpALsm5QuDBgvC3YbBw25OnGnqVwr+G
p7FrAVuTZxdQxWf4I7h6BOJ6Rjs3ap6D/E0PWvuu39WvCzJr4bXXItGbBK0mZTLMLqpRtyudkrRA
xYcUvOt1RSa51XZjk9pDm9vax+UnSK1HBmkZT4jxbc0oXnRNLD+Ces1UdTOgZ9IjvzJ1QFdzc8MX
hMrcqNo68oHzYffq+znNWa8+afSj9lmKjwDjdtfqfO0OslyaW/dtzLsSMXgPu5/6QY5E8wvn3fb0
JOqMUhdCqCh3mNCvoUnMcHotp3sttqZeMlE5lTiJJLiA4QQ7r1FiWyEfQ9+0hE8nfXVTmUTnc2pE
0AQsYhjnQI8o6JPACQ2EuvIWYh6HWk4YoF2pBZHgw29FeUrPHpE2rISl46V3q23SYclqC+KuF+1j
ba6X5ZTMRIHnEWbCcnuFCThALKZnf0c6zHlOoITGtcFsFOid15cI9y7EYkbrsy2DUvtREdI6j3oR
U5WI52jmGGGT8FYt79U026flyVlaLVDTPmS2TnMZGwPyzsun3z///um7Z992jGbgL0VAnkF6cBR9
NUd9aVZ03l6zODHNPvv2+bPfPH9jWiZ3LyLbg4va4Zedtm40Z5n0BvaqvY3WJtjPAp9UtkV6KtD0
jBCWD1Vu9vZd7HcunvcOWkySvUoPWMxQA7STwt6n3C6060vGE2FeE0O+Yr+e0geA/7A5JUPvsqWI
VdsZtdcmFxoZFC9JmtnPmgDy8Pfo9eA0nIwJ36bItcDNRu/MrzkobjYoik47mTwD4CPFTc+ZplyX
zhQ6v7cMf1h29PtCLzqv0lM9N9dGe6r5DqEKbXvOmHR9s6YJsYmYdGk7B/YKFYO0oeM7+BUBT/IG
Kwn+/j3aIkn5bCAgBdIU8NO0DGtvFiv5ATlssXoXlPJeSWzZg4N1cYMpnNDTl64ZTufJ/zB92Mvy
P1w/REOw3NS2yzcKbjFprIFbJhCkteujyiV/TbccChybcdiEEzgBGRJo45I/Ay8hoYwngfzpF5CU
R0M9o7mh1YvKApGgrCEblDUDwYAM+VMJA9TYZD7r1AWIXlBLhKXEDA1wd/FGG291UNBBiE5rykSA
ZeKdbpqjabSriUal6TjKXK5Im3rpc2Dq4G0j12hU6bYLlFp0w28nRMROoegZv8vmUDV9Ygh7To1l
xrHxT7cNej8/I2uv+9WkXnDYqsH18XbjUUMRI/uRcf07UqSjbz+YBG1dLDDX2HTL2cyQbRhPerHC
rcr3UqUx1QX8kugYl+d0cetBsay3MFnFglIU0QypluXmaUhgR+kVOLqD0tlDzDheXoA2dnS0jwOu
dH1oejtYXKGgkVAjlPdlOvsXSHm6gQ2eP3//4u27mHJSW0jAdNge8B/+j/KilDKxgfKJpm1Zk7jA
5BIH0kPA3/k13h/piz2MufSL+bg/wmjnC8fhcLZ+2bFQozywvm9cIeopmbJytydviP4MKw1cDPxY
Jl95loQADWtUzafQkW4KIEx+M7OmGB9UD2BOec/GWziICbrrnxcFXkbH02IqUSX+AipzHtvy6GKB
PRZaxpoXy0kokMjvtmRnFRrSAvOGd83LSLdPz/9JwQjV0Ggw8zz6a4PjzH3uNSjopHa5Didu3Pw0
H7xNVOf/7F+8zUIQYHiXxUC4DPAbo3+LmGiuj0a+5tr4a6gm+fXd20a31/fwxil3y4Tsmjgs+4KC
AwoKDYIXFy1bB+GTTVN9ziQTUvOHYJGfsZy6/xKjBvsHmNh0hRc24mexr919Z+DszqeB4RC5z1zA
bzZ9sQNGOcSIz83Nmn+1vK97lojzNU5SOrsJ5qq52ZBq3WPdOs6aM17lcRXuYA/Bjpro+dIazgQ5
fzxx2XwI2dEqq2RuyPSzCGCR9UUvMN2JDYlJWzLFQP4F60zTQFcETKyTw3iG8P+9vSfTGAnfUpmc
/bB8TzR9sCEY7dAFt0pGyM3s8PMOnjgPM5xMH/jaVZO/zHNuL1V8Jd66UhYvw+fB6hBa+qY5Tp7n
dhVvpM2BY+MUF6uMBgEje3jhtmrM+j59lGf0mMxtCBOkqWL51S2RNdWGUsEnCorjvvSIWhc4skQQ
8eomJgjyCn6dzC6aSFp2s00LiaBNTD2FeUFJz3uxLFk3hWP5tMuisXvmD0PGys3edeK7g9Vtt3Xq
Nzebn0Qf6jc3IOqJ7HR5WoUvtIeeMCr5JSQ5VTwW1F4+aNJxnEjB4xu/6vXalEJjSlWl3eRcmeMj
Mj3ouRDp4pRijwRlZU0TcQyjqrrNzTo/3DTkAsSeGCEzmSzq7Gg8NH9ptkXvQ+mByQcJq+TdoILa
xjsmciA4ObHeAzhjSMYbjp971vv5hkZL5AczWoHBOTD7upizH0uUPJJ8LMQWIhVJO8vlw7DWoRs8
J7VqRDIsjSbVHLM95Kc3Z31MX0UnhXj960QEzc3yO2fYLkrCcoMuydt6KF5kvViS1p4IwbGq9SCW
C5NxGrYg04LzWKKyd1izXRPukfv/8Jxz43YDzOljkvvNj9DDQa32kYxTMpcEjzpch9CBpXoq5Ymx
k5qJst2SSr3UKDBQTgeyYcGm0+cePrzKJQuKLkHhNirYeE4embWnayrbKZMNDqt75ONGVhC2zXBs
9HVBuXHEQY5aw8fdGl93V+vqYzktJOMbdsELxSYyKsTdUxfN924WeAzDuCp2mX/N40nj72lGFfbW
Ek1iI/iRR+nNIZx14jIYnHfeeW7O4GCW5kZj1yRAVztdnSXPCteT/MG8pYfH+jTWjlyMTkNN+kg0
VnGlmwM+kfU+bSTpIcidLzUI3wO9dvc1e/Wtc+M9AnXo33l1wT91kuZ6qUZG+8/6lvI99M3Zrh4h
9z3awGRPq+tl4hKIhdE3PpzeQBY8aFFwyiWlg6trGqY/Md3DQ5FvmKKte5Zcx4Y2qH0ckZpN3ZK/
qHZIGkjOKy9ZsEj5EkNYqFpxW0psmaum+spAvNOoTiLXh8h79ibIDh64PlmtSiRMYAGRqr2mdrCD
MsNiL+9Tp1OPt3SRlOZ42wfXeJmVQKNM2QyNa7pQk9nIo6uo8iZXnOHmX/7txdsH/leffrJP4gcR
bytM+O0qZArLSb7NQykN7TLkNHyow+/9s81+HTtoUV0vkOfwkMzDxWLV3Ym9HL/NktSSie84UkN5
U1R6Nvw/6S9d11wvjDN0jvqelcyqV/7vgxFMISYskGnj3aRty4GL07TCZIyYeAZORUkjSCcl5X+c
ViaDJFxmLsfz2SFJrSzozD2blZUyM25rpIwCOkO1H8MPYePUmcryPsUTFjSsOSVIM2kgmdYEE8Xm
lD/xGj1cJuMVcAm6xvOzbr3Brp1TltfyI0bhufyVPR/gla4dXnb5XmMOhVSic8T54e87vaTJP06N
Pq1S2ewPdtaTvtaX2w0eBcZpr912Hw8wEriyVKlNZaAX4n3VqLznD3ypLkOBQasp3qPj8/HifDo+
SdDxFmvnaKzKR8OwahCqi3gHHXbs83MnYaDUWmMdqRH0wiT6cEIZpnSmxWJgPBlMm43J/LB0KBbh
ct+5L814Hltsgj/5wxKD1bl1yWmdsNybpp0eyBeDaJ5qf6J6XvweHFNqTrgDk2reZPuRgqKGnDKB
M79NRcRfIF5H5KUhBpGhkEHLgBfATmiemOD8T+3W2JMssGZKh401070Da+OL0h/RQOSdtaY7CfO4
qmaMGRg6eq8TcL3dQTvvwai029nohXiwZjnoom1MHv5910RhURZu5y9HJ/7o/JZmyTj60iJF1pvg
8selBqNFsajM7Tfhx8MVBu2ePHbfUmHP8agwKh9cOOScIq8H+rdYr9lyp6O+lx85agj+KNfAAUFo
BXx92n39+3ffvnqJoVXdMxdjVBcrtpnDyqHrNCPwnAbIJ+sciqMHzvUUZA96GX7Ej7km2s+63Z5G
67m6Pu1CQWoN/nUHmQApwHedvveLM56SvW97Llrw4DVNipmOfaGNedaG/uQNvTlMYcg8kHaazKJ0
JzJl1ENQKwX5JqWTyU9KRz1OPWp0eBxqP60eN5WDAepy4luHF2fQFjqKsTqT7RpqDjv9wKyn85we
027F4MwBMyfaUY/RV/CaFhB+A7JDfPf4XDU7e5yq93hnvcg/AXWwIcXnDfB/gmcY2jBWjHiMYjhg
dmxXf/Y4Zp/JvKqL0WwKIpdszPPxBg1c5Gx/XS4/e9yJAj/oioBtDa7HvmM0nuTzEE3qeEBthFkA
H0dft8z1+hPnen2nuWbnKugw5r3Pe+z4TV5MQefZs2r2uLXgT5wN3m/T7WI1Ysq8jy2WUUtJ2uIO
8yY6FKzPW25828itra/Z7BAYT29wRZ73uTjBzVYayCCCElP+YVQh6bBq3iFlrwqKBpymw9kqQhj7
AZ+WpsVzem9MpOImIuL5ircYwQusMD+XidioZpYryENltur3fHG4uj23afvqCWgMG1alQp0Yg0tF
tvEyoAkc6+KXuavZY/TiOilVSagaSnrSfWJBZ+I4OU8jEuUZSIhDlx+b3AtTEqNv1zXi3sHtq1pK
O/BxVtAtCyaQ8tFX6zXe6eSw9TDx7PVxMl5id3CmO7l6wMJQlC0hSPSZfC94HuYFKihKplzCjaem
RqUvcNssBheDbGy9a4FgCl+/oc0oIriPz9TL6sO45d5umqqvMOhYRoaYO6Cg0y04jhanC71bpoCv
NpeVz1h9jEBDuwBDt/hPhvxLoHJqZsPHPi6URy/jid3GTcq7ei2P7ujkxt+zPOs1cmk4o7KMPsuq
LgWuKSby7dN4FMfdcfAJ/xC/id5f9/7BuGjDwo3obdIaVHptqd5rhxenB55YvNHEPrUs4J4+9W7I
9E3T+sA8S4nGCW7g3M7hpOMaDDvl3AoeJEz8nm5DV5XRcrs4xzDDET7LmvhHS+uw46sJmD1kyLox
pgtHlUm7jYV2N20pE6j/VT+LBOC9BgPcPeMmjEbo5QymAJ2SzIPsTS8oGdve7vmLe4/veHXtf6M6
65oZDrr9qKPKlHoTeP2bn/ywMkFZOvM2cspI6/qw6lrr6unR2a7Dgg+njoimjll25ZSwGl8vRx5n
MIITYRbhDRAPedQOj48GR3+1DerLylgwojj7SPjJoo1nK+7OIXWYH/E6Pd95uFiEbk7sTdCRuqp8
ufxYXVFuzkziLdAFDEbeTx3Odv50VBtoz5I/1GNigwDFLfRtz8y8piUmjQoV8mja/Y/hyrkb8K71
kjngXU5zz/KxWvMamDkCCfLZ4KiTdoxFJJru6nZ1O0IH/hIkHKIvdQXC/JdPaPOJYDL3A5Ajk0uE
S4tYQHEA0jz85ZPsvGRoIIHQKaZ+R7x7xxBmHy4wcPx3kpRvOKLGDHxaFZwS+7paX6HyUoLAJAWG
ifzjz5vb0tb9zmxdFOf1tNP7aa1aMu7dsbpAVTZ9W8XVHtDFlm8uvoMbzDD5ckqzmp+E6lD+3cu6
ykX5ohE0MhD+YpQ1xXCRqRVLWyAguHKBEj7Fq0nbLUDqIgAr17Bugg3K/ddFg3Jv3MxfvHz3/M3L
p9/hIhzite6QCfNpia/nkzHwmexOMpimUwGL2CqrQT0GTReDAXEwfYulETsBw+z0GqM3AkQWDVDg
OfSq48OBbBiMlpZSDg0hSG7fGkimhZLnZGxqBYpqawBgIzFTKyC2IyQbI71NiFrJmNsK7cQcqnVG
cFcMhVRQ9Psyo8ME6THY1r6j5TA6c17eU24k5ebW4nuZd/aEp7s3lGHj0797VSZrqfEHiGby9GYg
FXztIuiu5zQJc8F1zOsxllmBOBt2lPb2SR3sE8Jo+MiulmlM8yMkri9BmoBA3OB2M5BieNZTH2Hd
9MokYsaBjB6wmWDuecgrjRE8cgEmaFQ8tYDMgD6gGQsnsWO/RlWwmKZDHYz/hXG9IL0Rec44aBxm
82LTBYXvYsl4p1WdjgROBv6IkHtqspEkBFykrJrHBzMyE/sBPYJTG782cR84Vvy8ZxSSwdYxjJNR
hLn5QPF8K8J1LKeCgNA5OUk9eNpofagQOSfMUwjvHjqBH5Srdp7b8ffXmJXO25XZeJPBsv4cpHrU
odwOo++HFkig7JfZ8a5e8Yvbgtd4DEfXstBM7/Ws8QFOT7btUTbvNYTmKjEjIJe+nOneaRt3E3IG
99quPddjl/rVgCGH434VU2kHXScbwsAMZTuObrqLjjiK/qYgU96xQXQbKJir8Evu3t4y5sAHS070
MGv26jqJwxCaZUs58/dtB/dpx+zboZFRDZgMRDS5ydiUIY3LlCRsPvzDbgoChR4HndH3yfrm1Yr6
2DdN9aWKfnmEKWxaYMOfuD+jyHqPM/QhyI/l8pN1N8Z+DI9sT/BP7svwyGtxPDd9xr9tv/GDZaNE
++FTveHNofgyGbJRQcuwrqR8FRU17buS/I3aLgzhFExjrPAwUMLJGW8JW1uhRaVINEJLJZJ0tYL+
5VaLNhgCDXpz2ANW20uB8gbl/F1xs3nxSmNW8VyNDAJM/DTxOFBeZHIpdJMLsKMvE4D5ztm/dX7b
s68Vx4PU7WNsFVXo1XrMMft0Ygx0i/sjZnwcWx8LM3Jncumlp2ZgLjkaKcYvURfFlf6VhwQtAWFz
pHvhbjFeAwNWIKgNw83KNMsE69WVCo0XoyxEA5DNtRS4naX2aTeRqYkAYwul4eoH5udWTvAtdfxz
CgFKOGQYepGYkbfT8ys1keY/jEPKwAd8sFciLKOCpvU6jNagEVaLprHahqzxy1+9xke6x/EjnpbH
yWc8VK8ISn8IZaGCdBSNoQTX0I43xJeF6UlmEHmoTu+vjIRFHQxRsEC2r7YbQhsjWAmt78g0j2cb
G9Y8WwbDEbgbM4EB3E0DBgZTIRGP/6r5ksfPZcOkaQWL8e7Lh8cnZwefPGhpqYHDmliLNulldY2u
yyYhYFBcmCWhrVdFv1rBIw7KPfB/1kSpxJMI/V3Z00EsYEQbhhcS7/zdbTZC9RKdt/KjcGWpjc8/
hU33i+RP4XErxyr9qqFONrtV6aJrbMRYECgW6Md4KWOFyUg1P9uQSNRCw+HCBqba+WOTCnsidPb6
oyyuRs8py3d1fblba2rT1sAa4iuFVvdj27b/SL7YLZc4HmEQCXSpCxD9baAtDAblD2rkVESRaPS4
altqlh80ELzmjSRCYYvwN7ASVIyWLsUSlm9/5fUK0T+Tjx7pPm/PKQugrphWbbYpAHpKIQkOAUKX
HP6gBRPE3UY2e3iUS7Bvp4BGLuprikYaJ/rjft1fEM4r/t8+uLm1DWE+wVoXa7+1y/dfuhc1ECPO
YC/rtrHWEbbYG+mwqXcfLwrn2mF922G2L3YNPpCgTy3IWaTTnapXAWtBu8f2aWacZ1exJnCWbgpk
vMC01xe1mRYiHu6y5O0IpopcGzqCWj1RZY6LPgfLDHmsvKFIg7la2wcjI8Su00NhpQevEjYgMaq8
MhXSMywNV/YSBSXIPcoONmoDDbsOy4VqEMEZ4ekVWKnShxCpxy/rrTJ4sp48NTsJYra09U8voKyA
ZdpliAAD6V/EHD++cpqVkbgd9IxEChNghXjiMR2nhtHA4gc6SGos0TkQnAAsKwaBZaVlGi2JAHhd
G6WFNQ40gM8QpoHYqOuF9R6ye8gfIP8FWdtzehP7wNEDaLSr8/n02L0SjmyBHjPcgQdlZEyRl5cI
RopLUpNhv/PBBnVSx7o025GfzTjrBwF3P5VXZU4AWOlKVn4t7MKClQUxcoOUl+soU9bC3GoRToEl
6PO3ToY+CcABWnxe7vxiaR/FfC8dHjIj9m5ubkifRTGAPle4zMB+vr79lce+aL1qRnA596x7wBGx
55l8Uy5dAeaPbc0vdLkJNGnSaGMI/rfsJL7Gi6zwd9YNRHIPi+4X+mq+A4giNfysLDv8bP027ICl
IxfzpLfRPqswqyCwILU2mNTiuiO9oYWg1lMiZTbAUEIp0p5BploaDGTZ+aIbTQwXzp/aR9Da+Igb
VxY0RFqqYc17AYh0t56p9NCKr8F85jE+MryxBArGFEqrZgUHkH3mgEYys8GxVJJFAyoyd64KqELt
3a/DRhGj21xV4KNt7nAYuQTXsFSJjVM4Aj9GHwRH/GoXddsdVzYxbVg52+M3FInSDMtqf/mQXHd5
+5A8sq5UP0K0ULXGyAX2E5BxFA08bnvj6wB04MwKXNkm4J0zmcA9l52b+92JKhozdYfFjz8dgD5t
Nx/mOpXOmBP7LAUWmPaW8grat7lUFTFMYNyK2AsxGPS+LAmbC94pQP2wXzw+S5FGTlQh0cRr9x/5
zbrpvADrSqSoogYGmt0cZIVYYAr2tMJSyHDIPxAdrt6sEocUiwABO3cetlGMS6AINdVHowaxWcFm
c0Fmnq4zmZA6j4QMPTYfNz2GHVnfftVp5wxQdau2R3QK3vE58E/i9nsWH9PBQTL2TSl6l2QQypxx
2OiWVEgXKaXhAQJvjkFxOoYkZmV27s3mN4yBDriyGIWMG4V/juCNhSAwokhb/ZbKsfI2Fl27GyfC
6UP2PQ5NibSWLItakgdLrvv7PMBNEt/928vze+H04zkIOeAtObNwSvZS3JTeND4Mw+tciGFAFcFu
3RIPHmlXkpGj64N6vh5LggyMvbd8IHqZUS3QReb+3AaSo8tgC16qqnSgAKd8frwYqMsOviJoaGDH
WiWIPOTjo1icz6l6duJhF3RlL/X9h0QDj4uz/upWlqt50IsnCGixEJgNK/AfyxWqHLSTfc/zYHbh
q7xWQDd+i80e1As1usr8VZEOAgL5CkASzH/hpFcs91jUqbZSmEgzDmpL0kFkqrekdCm56Txqr7SN
Nj/aqu/eToXf0mZD/MWfAPgqIS373aDeLq0GqnmQyEEClTjLUmyjRlos6t1qHuL5h0s3elgKkmyQ
jC4sQlaK/Kt+R/2ZoFih7YHTBZen0P9NIlhaKVVFN0Key/GP1sQQYTKdc8rACLN28HgWy6afOE+r
xIpikU3/jAEgbd+0HOlpkUPFzsxxZEszw4DGESdk0mk1Fnp/VQsBBQ756hasPPc3AxSfUmJskPLP
sijcGQ5auAtB3QXhyIO3GJaHIHURqLpsjuRFA+DLGC++NYgV0qXISi3iMD6T6fJaELXpHsbBRviN
tBHR7gZJ/6xAHmSMbgtyT942ifPAfBuJAZip4+Pu6J/ratW/mq77RlMfUgkpJV1kAj1UkEcugXZ0
Cfw4kyUvFPJXIe8hVgv7QXpoCbj5JnVP4jtCnO8e4AeCo73Rr6s53QcuS0a8pHOqqdO5JGQD3Y6X
t/ikZh8CwDBvLe0Z0hzZQpbpAtdwFLqfI23Zs6vlv/NNOb3sHJrbHTfRMaODH+GqtEdY8vWTblVA
aPHlwLesGRXYXC5nl30zguNHwZzCTttsLQOJOeK61cqof9UcF+39jez52O4vyhDW8NloUNv+olqO
yS5c3BwXN1wvGGdMxQdxnps5HouJHJX8YXG+oKctBBkPNZz2RQw19wPdq20lxxKJx0eNIvemk68d
b7ro6NrPtIauYk2iVdKiaH1g5qRuWmktz9M+I0MeTDMJRqfvJoanYp/7UT5KdazjKLXSlvejiZ3V
UmfsxI7S2DNbh3FIvDx+zpC4UFofIOD31GXqXHBQbPWJHlxxo8HXq22Fqwnqzo+sfwM/eD2kRt/i
82SnoDX+fWJOGGZJQJQqnp4b1fYrDIP1yEdYhJ3WDd3Yp+d4Her3Rr0BYE4jgaw2hWnqgRthDBmF
N2DLb29Uou0FgUhNtz3xyCPbKZUDdt0gL8gCjMCgrMvqsiwEe6u4hkB6eGsF/1JTc5DX7K4Z4kmO
EOuSjEyUaTrb7tDREouuQL5f15tL9twKDac7s26gxc1uva45ouUcCYLgFN2AXSCMhArpqqivhO6V
Ys8yO4kkFkXntsR8ivv6fo1iPoRpJoA5MK0FI2GfScKHmzYm39ZmZMneFvN23ilqrk/vrLJjB04f
n8HbB3Ti9be/njw9efPsybtXb/4hLi1cyGY3QVf7ptuDswMaLPlN+rNwo5nvQqYlt8F8tUE5R1vL
Ah04xXYH7xVkaUEJQ9uCOG8BTQVI7j0bKemuYFf21I60Qwxtj5TyyxabQDwEmgxfDbkuS61z7L0g
LS39LN3u7ItT9wujG9OzU6QeqUcUUsG5HDN0501gAOWDUknhwSHOQKm+O6SoLg4wngW4FQt1uzrE
KUiXks2au8pB5odFPNjIYOVLDWvpMj+79RaoNENrzY81GX19cdm6cZY7mTSwEBVeY5uTLRUQKccY
/IwJB3saWJCpCEAHvi5Ma012owtT2+98zce6x8WXKaOxPKMEDQfemehwpWgp/TB88DTwIravLgyj
jsmSF/QoglQ9i3uIZnJl0E/48LoplxXK8Oj4LOCVlCkMLEVyoZf6j1tcuClynNf3iSNzCI+1dEcT
zZErIVoDUiZJoMoqUzeCXKvcC1YwyEnDU1zvHhPIxbRhj+x59PQS3Ub22UhMXeyMPR638KmFY6bt
KWpFnt6cDd1CSNym032wOPa+3gKOntMZqQNPG2aeqW0gQrc/6JJHAL4QWo/B3HLQdSIcVglHgAzP
AAbgEf7gBgSqyLDASSfN/ofFAIDwfizX73Po0J/ujuZIj6AxkCLWBJbHNWHpKCm7nKrz83Zl3AEo
1fqSES5TvBOYAVvDIGTv5/9Pu3FB6BKH2iR9M4GbByphkN99KYvbXZZFp/Pp3z/8qYK/35mq4cOn
//jw/M8IAn8OHGlGM7sltDfGf6vBK3p+tKzOEb61Kxm77KWMT3CkMLYTmNGTullYq2ZBocCMmuCD
zjMaMeeU5IKv0hE2t4m0A3BDnB/CSQMBTuC8w0Hz0H5JOnpn/vdk2pS/EvVWfmEgT7qcUKxST37r
WVArmzq6IfFuw9vWeT2/xUAPFFMQ9fzZiJyoDTFwqthEGmk7dG0Y5xx0DkP8dBGYYeinZzXw4MgS
ceoz5+tGtzY34FVuJlQl782v0nLPO27saI0sEIyXWHYw/MLlrmpxdudzA8PMKSANVpcLi2DSF/4i
4ZTknTXmUj0vTRM27ABW7zbOC3pek9kMbuBmSUoLsqW1R1UTWAHufzNeWqBKeLBJAVD+dlwn6E0+
6cUsIf7Sa5iOQcXcInC0X6759v0ax7SXCMWjPC1gRRhh5J6PHeOVXwv88NT8kK3I5mypywyRF5wk
eUJ/TL1qxCFTWrrvFactrGaEd+zFFNZoZTVumZbgzgjY3uXGRVJ72/Y7/FEN3JXAd4iNjiOseM0T
iMfAWyGZhYPXQGelClVCbSijJkJtMlovbZjeyN+88N9NCFaH4ihWc2BDgNB1qW/MCqiuJvBDD2x/
of+2J9L7nB+js4Y8MilFQBotWCO+NPFtk8mRcZZKpRf6qYK9xewuqXHomR+hRdHiFq2B87Yan3ap
k6e7vQafrblRwqrp0i6kbqoiOZnsmQTXPKn6l2Pzgz02RtzgtoeQaIhVL30zvEjsKAev4Oc+1MPE
4ThHDJCxjHTOPTPw/h17QBhm9PsW/CL0WfDXoV/OsEhkC6IOdOoetm1CgRR55ypIPNJJ+7YpLUgR
3cmm/LSDaU0WzD+CRFouWSo1Wpzl4Dja+iN5DutSkFr3ys0euM7BEuBeSeZ8/KMR6LwUxPFteh3H
gtwzUhYAkIt1ba5L8Kbu8YqZdQQIeAVvlAI3ij2gz8tlfa1uQrYCtTLcl3DUur96XoPyblL7YMlT
hbDG9u52nfKmTtrRszTkS3fQqDARjNVUsOcPUs3wilH31+7LV++eHRcnK+EuMOeTo4N7IxFWjBXd
zV5VuuYesV5Ob8nTl+Lwjv9p5aGX+yAsKDDAi6/fZfeI5QAvcNAzYtjJ8qr3VXY3B8OizUu4Hccy
h5WSbC/D+L9fXa5AmckMXstgbbxxNeNED+5qfbYPxWHA/+me+jwH/ggep8bkQCR+xpOKvfV7E7fN
Ts8GYnuzS1TH5z9TPJxOhiWlBdWnJEu47LnI514k4f+lUF0qBHbGRSI3ZVv4hfAzQ25OnSGggCS5
iYla6Tu1BEPKrIr7x0E6MO52D+gDevqCO5N0JNeTm7Y1dqeuOIrIt7sZOEhlZsDvAFWxs3kbyoux
S5Dan9dkwdmTDZyY73ISspLWrvr0yZgU+T4cij3g62Kj9nydgyIsJRjRj0oRlx9QdW9GCC4/Wkw+
Lutzob1UF9qe73aRCa5MxUS6OM/9xMoJVmKCfodQ1kGKlZR+DjVaX7nCUDERYyHpt+MZFuwmSRn6
ZWUP93m5pIICV8kDY7LcOruX1o5SI+Th4w99iuifPVC99C2mh9ZPd+Pxmx/eh05tKWQWHHFgQM9/
S2eBJlE3Bd57+EsbizPiG2kAw1XOLu12m1gC0GaCzZ4ISXBgPCII0BnSrrldNMGoCfbIh7+RXk/9
eZ6J+GEq999ON00YgutHrVMNeQ8NdYHQWm8Lqlv+vVnVCv1or5SXJ6Uco9dE37Z3cGg7+1KEGk4Z
usHhynGwHMIJcnMjZecdE/XMqM9ZdTSj3P/slg2C7RGtahLRuBD3C5cDOOBbYyzybQg6xtj/h26u
uVlXY6MQWlkzT1jXHZ/tD/W6JI7fxZS8rJCfFuwRFleiccl9QVSROORMJ0Jf7oINXRH9UMINXXpW
LE07RYt9NOh8+s/d6H8BTC3ksQ==
"""
import sys
import base64
import zlib
class DictImporter(object):
def __init__(self, sources):
self.sources = sources
def find_module(self, fullname, path=None):
if fullname == "argparse" and sys.version_info >= (2,7):
# we were generated with <python2.7 (which pulls in argparse)
# but we are running now on a stdlib which has it, so use that.
return None
if fullname in self.sources:
return self
if fullname + '.__init__' in self.sources:
return self
return None
def load_module(self, fullname):
# print "load_module:", fullname
from types import ModuleType
try:
s = self.sources[fullname]
is_pkg = False
except KeyError:
s = self.sources[fullname + '.__init__']
is_pkg = True
co = compile(s, fullname, 'exec')
module = sys.modules.setdefault(fullname, ModuleType(fullname))
module.__file__ = "%s/%s" % (__file__, fullname)
module.__loader__ = self
if is_pkg:
module.__path__ = [fullname]
do_exec(co, module.__dict__) # noqa
return sys.modules[fullname]
def get_source(self, name):
res = self.sources.get(name)
if res is None:
res = self.sources.get(name + '.__init__')
return res
if __name__ == "__main__":
if sys.version_info >= (3, 0):
exec("def do_exec(co, loc): exec(co, loc)\n")
import pickle
sources = sources.encode("ascii") # ensure bytes
sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
else:
import cPickle as pickle
exec("def do_exec(co, loc): exec co in loc\n")
sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))
importer = DictImporter(sources)
sys.meta_path.insert(0, importer)
entry = "import py; raise SystemExit(py.test.cmdline.main())"
do_exec(entry, locals()) # noqa
| gpl-3.0 | 6,475,151,297,539,713,000 | 74.926949 | 77 | 0.951958 | false |
Poorchop/hexchat-scripts | old/twitch-title.py | 1 | 3987 | #!/usr/bin/env python3
import hexchat
import requests
import sys
import threading
__module_name__ = "Twitch Title"
__module_author__ = "Poorchop"
__module_version__ = "1.0"
__module_description__ = "Display stream status and description for TwitchTV streams"
t = None
twitch_chans = {}
def set_topic(channel, display_name, status, game, title):
global twitch_chans
channel = "#" + channel
msg = "\00318{0}\00399 - {1} | Now playing: \00318{2}\00399 | {3}".format(display_name, status, game, title)
stripped_msg = hexchat.strip(msg, -1, 3)
if twitch_chans[channel] != stripped_msg:
twitch_chans[channel] = stripped_msg
# try to print stream status in current channel - doesn't seem to work without Do At plugin
current_chan = hexchat.get_info("channel")
hexchat.find_context(channel=current_chan).prnt(msg)
# get the proper context for the topic event
context = hexchat.find_context(channel=channel)
if sys.platform == "win32":
# HexChat on Windows has poor support for colors in topic bar
context.command("RECV :{0}[email protected] TOPIC {0} :{1}".format(channel, stripped_msg))
else:
context.command("RECV :{0}[email protected] TOPIC {0} :{1}".format(channel, msg))
def get_stream_info(channel):
url = "https://api.twitch.tv/kraken/streams?"
params = {"channel": channel}
r = requests.get(url, params=params)
data = r.json()
display_name = channel
game = ""
title = "\035Stream is offline\017"
if not data["streams"]:
status = "\00320\002OFFLINE\002\00399"
else:
status = "\00319\002LIVE\002\00399"
display_name = data["streams"][0]["channel"]["display_name"]
game = data["streams"][0]["channel"]["game"]
title = data["streams"][0]["channel"]["status"]
set_topic(channel, display_name, status, game, title)
def update_status():
global twitch_chans
if twitch_chans:
for chan in twitch_chans:
channel = chan[1:]
get_stream_info(channel)
def get_twitch_chans():
global twitch_chans
for chan in hexchat.get_list("channels"):
if chan.type == 2 and chan.context.get_info("server") == "tmi.twitch.tv" and chan.channel not in twitch_chans:
twitch_chans[chan.channel] = ""
def channel_check():
"""
Check to see if there are any open Twitch channels; if so, then start/continue the threaded process
"""
for chan in hexchat.get_list("channels"):
if chan.type == 2 and chan.context.get_info("server") == "tmi.twitch.tv":
return True
return False
def get_current_status():
"""
Update the stream status every 10 minutes
"""
global t
if channel_check():
get_twitch_chans()
update_status()
t = threading.Timer(600, get_current_status)
t.daemon = True
t.start()
else:
if t:
t.cancel()
t.join()
t = None
def is_twitch():
server = hexchat.get_info("server")
if server and "twitch.tv" in server:
return True
else:
return False
def join_cb(word, word_eol, userdata):
"""
Restart the threaded process if necessary, then immediately get the stream status
"""
global t
global twitch_chans
if is_twitch():
if not t:
get_current_status()
channel = hexchat.get_info("channel")
        # TODO: make safer and don't modify the same object that is modified by get_stream_info
twitch_chans[channel] = ""
channel = channel[1:]
get_stream_info(channel)
def unload_cb(userdata):
"""
Prevent HexChat from crashing while a thread is active
"""
global t
if t:
t.cancel()
t.join()
hexchat.hook_unload(unload_cb)
hexchat.hook_print("Open Context", join_cb)
get_current_status()
print(__module_name__ + " version " + __module_version__ + " loaded")
| mit | -6,793,062,347,922,079,000 | 28.533333 | 118 | 0.614748 | false |
kusamau/cedaMarkup | ceda_markup/opensearch/os_request.py | 1 | 11997 | '''
BSD Licence
Copyright (c) 2012, Science & Technology Facilities Council (STFC)
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the Science & Technology Facilities Council (STFC)
nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Created on 5 May 2012
@author: Maurizio Nagni
'''
from osquery import URL_REL_DEFAULT, URL_INDEX_OFFSET_DEFAULT, \
URL_PAGE_OFFSET_DEFAULT
from ceda_markup.markup import createMarkup, createSimpleMarkup
from ceda_markup import get_mimetype
from ceda_markup.opensearch import create_template_query
MAX_OS_SHORT_NAME_LEN = 16
MAX_OS_LONG_NAME_LEN = 48
MAX_OS_TAGS_LEN = 256
MAX_OS_DESCRIPTION_LEN = 1024
MAX_OS_DEVELOPER_LEN = 64
MAX_OS_ATTRIBUTION_LEN = 256
SYNDACATION_OPEN = 'open'
SYNDACATION_LIMITED = 'limited'
SYNDACATION_PRIVATE = 'private'
SYNDACATION_CLOSED = 'closed'
OS_SYNDACATION_RIGHT = [SYNDACATION_OPEN, SYNDACATION_LIMITED, SYNDACATION_PRIVATE, SYNDACATION_CLOSED]
OS_SYNDACATION_RIGHT_DEFAULT = SYNDACATION_OPEN
OS_ADULT_CONTENT_DEFAULT = False
OS_INPUT_ENCODING_DEFAULT = 'UTF-8'
OS_OUTPUT_ENCODING_DEFAULT = 'UTF-8'
OS_NAMESPACE = 'http://a9.com/-/spec/opensearch/1.1/'
OS_PREFIX = 'os'
OS_ROOT_TAG = 'OpenSearchDescription'
def create_url(query, response_type, ospath, root = None, \
tagName = OS_ROOT_TAG, ns = OS_NAMESPACE):
markup = createMarkup('Url', OS_PREFIX, ns, root)
markup.set("type", get_mimetype(response_type))
template_query = create_template_query(root, query)
query_template = ("%s%s?%s") % (ospath, response_type, template_query[:-1])
markup.set("template", query_template)
if query.rel is not None and query.rel != URL_REL_DEFAULT:
markup.set("rel", query.rel)
if query.indexOffset is not None \
and query.indexOffset != URL_INDEX_OFFSET_DEFAULT:
markup.set("indexOffset", str(query.indexOffset))
if query.pageOffset is not None \
and query.pageOffset != URL_PAGE_OFFSET_DEFAULT:
markup.set("pageOffset", str(query.pageOffset))
return markup
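# Illustration only: assuming get_mimetype('atom') yields 'application/atom+xml',
# an ospath of 'http://example.com/search/' and a query whose template expands
# to 'q={searchTerms}&', the element built above would look roughly like
#   <os:Url type="application/atom+xml"
#           template="http://example.com/search/atom?q={searchTerms}"/>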
def create_short_name(short_name, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(short_name, root, 'ShortName', ns, OS_PREFIX)
def create_description(description, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(description, root, 'Description', ns, OS_PREFIX)
def create_tags(tags, root = None, tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(tags, root, 'Tags', ns, OS_PREFIX)
def create_contact(contact, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(contact, root, 'Contact', ns, OS_PREFIX)
def create_long_name(long_name, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(long_name, root, 'LongName', ns, OS_PREFIX)
def create_developer(developer, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(developer, root, 'Developer', ns, OS_PREFIX)
def create_attribution(attribution, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(attribution, root, 'Attribution', ns, OS_PREFIX)
def create_syndacation_right(syndacation_right, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(syndacation_right, root, 'SyndacationRight', \
ns, OS_PREFIX)
def create_adult_content(adult_content, root = None, \
tagName = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(adult_content, root, 'AdultContent', \
ns, OS_PREFIX)
def create_language(language, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(language, root, 'Language', ns, OS_PREFIX)
def create_input_encoding(input_encoding, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(input_encoding, root, 'InputEncoding', \
ns, OS_PREFIX)
def create_output_encoding(output_encoding, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
return createSimpleMarkup(output_encoding, root, 'OutputEncoding', \
ns, OS_PREFIX)
def create_image(url, height = None, width = None, root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
markup = createSimpleMarkup(url, root, 'Image', ns, OS_PREFIX)
if height is not None and isinstance(height, (int, long)):
markup.set("height", height)
if width is not None and isinstance(width, (int, long)):
markup.set("width", width)
return markup
def create_osdescription(os_responses, os_description, query, ospath, \
root = None, \
tag_name = OS_ROOT_TAG, ns = OS_NAMESPACE):
"""
    @param os_responses: a list of OSResponse instances
@param os_description: an OpenSearchDescription instance
@param query: an OSQuery instance
"""
markup = createMarkup(OS_ROOT_TAG, OS_PREFIX, ns, root)
markup.append(create_short_name(os_description.os_short_name, \
root = markup))
markup.append(create_description(os_description.os_description, \
root = markup))
if hasattr(os_description, 'os_tags'):
markup.append(create_tags(os_description.os_tags, root = markup))
if hasattr(os_description, 'os_contact'):
markup.append(create_contact(os_description.os_contact, \
root = markup))
if hasattr(os_description, 'os_long_name'):
markup.append(create_long_name(os_description.os_long_name, \
root = markup))
if hasattr(os_description, 'os_developer'):
markup.append(create_developer(os_description.os_developer, \
root = markup))
if hasattr(os_description, 'os_attribution'):
markup.append(create_attribution(os_description.os_attribution, \
root = markup))
if hasattr(os_description, 'os_image') \
and isinstance(os_description.os_image, list):
for img in os_description.os_image:
markup.append(create_image(img.url, img.height, img.width, \
root = markup))
if hasattr(os_description, 'os_syndacation_right') \
and os_description.os_syndacation_right != OS_SYNDACATION_RIGHT_DEFAULT:
markup.append(create_syndacation_right(os_description.os_syndacation_right, root = markup))
if hasattr(os_description, 'os_adult_content'):
markup.append(create_adult_content(os_description.os_adult_content, \
root = markup))
if os_description.os_language \
and isinstance(os_description.os_language, list):
for item in os_description.os_language:
markup.append(create_language(item, root = markup))
if os_description.os_input_encoding \
and isinstance(os_description.os_input_encoding, list):
for item in os_description.os_input_encoding:
markup.append(create_input_encoding(item, root = markup))
if os_description.os_output_encoding \
and isinstance(os_description.os_output_encoding, list):
for item in os_description.os_output_encoding:
markup.append(create_output_encoding(item, root = markup))
for item in os_responses:
url = create_url(query, item.extension, ospath, root = markup)
markup.append(url)
return markup
class OpenSearchDescription(object):
'''
classdocs
'''
def __init__(self, os_short_name, os_description, \
os_contact = None, os_tags = None, os_long_name = None, \
os_image = [], os_developer = None, os_attribution = None, \
os_syndacation_right = None, os_adult_content = None, \
os_language = ['*'], \
os_input_encoding = [OS_INPUT_ENCODING_DEFAULT], \
os_output_encoding = [OS_OUTPUT_ENCODING_DEFAULT]):
"""
@param os_image: a list of osImage instances
"""
self.os_syndacation_right = None
# should be set to True but because of
# http://code.google.com/p/gdata-python-client/issues/detail?id=611
# we cannot (for now)
self.os_adult_content = '1'
if os_description is not None:
self.os_description = os_description[:MAX_OS_DESCRIPTION_LEN]
if os_short_name is not None:
self.os_short_name = os_short_name[:MAX_OS_SHORT_NAME_LEN]
#Should check that is an email format
if os_contact is not None:
self.os_contact = os_contact
if os_tags is not None:
self.os_tags = os_tags[:MAX_OS_TAGS_LEN]
if os_long_name is not None:
self.os_long_name = os_long_name[:MAX_OS_LONG_NAME_LEN]
if os_developer is not None:
self.os_developer = os_developer[:MAX_OS_DEVELOPER_LEN]
if os_attribution is not None:
self.os_attribution = os_attribution[:MAX_OS_ATTRIBUTION_LEN]
if os_syndacation_right and os_syndacation_right in OS_SYNDACATION_RIGHT:
self.os_syndacation_right = os_syndacation_right
if os_adult_content is not None and os_adult_content in ['false', 'FALSE', '0', 'no', 'NO']:
# should be set to False but because of
# http://code.google.com/p/gdata-python-client/issues/detail?id=611
# we cannot (for now)
self.os_adult_content = '0'
self.os_image = os_image
self.os_language = os_language
self.os_input_encoding = os_input_encoding
self.os_output_encoding = os_output_encoding | bsd-3-clause | -6,705,487,842,680,196,000 | 42.788321 | 107 | 0.616404 | false |
google/brain-tokyo-workshop | AttentionAgent/protobuf/roll_out_service_pb2.py | 1 | 10105 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: protobuf/roll_out_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='protobuf/roll_out_service.proto',
package='evolution_algorithms',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x1fprotobuf/roll_out_service.proto\x12\x14\x65volution_algorithms\"#\n\rCMAParameters\x12\x12\n\nparameters\x18\x01 \x03(\x01\"\x89\x01\n\x0eRollOutRequest\x12\x16\n\x0eroll_out_index\x18\x01 \x01(\x05\x12\x10\n\x08\x65nv_seed\x18\x02 \x01(\x05\x12\x10\n\x08\x65valuate\x18\x03 \x01(\x08\x12;\n\x0e\x63ma_parameters\x18\x04 \x01(\x0b\x32#.evolution_algorithms.CMAParameters\":\n\x0fRollOutResponse\x12\x16\n\x0eroll_out_index\x18\x01 \x01(\x05\x12\x0f\n\x07\x66itness\x18\x02 \x01(\x01\"\x12\n\x10ParamSyncRequest\"\'\n\x11ParamSyncResponse\x12\x12\n\nparameters\x18\x01 \x03(\x01\x32q\n\x0eRollOutService\x12_\n\x0eperformRollOut\x12$.evolution_algorithms.RollOutRequest\x1a%.evolution_algorithms.RollOutResponse\"\x00\x32z\n\x14ParameterSyncService\x12\x62\n\rsyncParameter\x12&.evolution_algorithms.ParamSyncRequest\x1a\'.evolution_algorithms.ParamSyncResponse\"\x00\x62\x06proto3')
)
_CMAPARAMETERS = _descriptor.Descriptor(
name='CMAParameters',
full_name='evolution_algorithms.CMAParameters',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parameters', full_name='evolution_algorithms.CMAParameters.parameters', index=0,
number=1, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=57,
serialized_end=92,
)
_ROLLOUTREQUEST = _descriptor.Descriptor(
name='RollOutRequest',
full_name='evolution_algorithms.RollOutRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='roll_out_index', full_name='evolution_algorithms.RollOutRequest.roll_out_index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='env_seed', full_name='evolution_algorithms.RollOutRequest.env_seed', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='evaluate', full_name='evolution_algorithms.RollOutRequest.evaluate', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cma_parameters', full_name='evolution_algorithms.RollOutRequest.cma_parameters', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=95,
serialized_end=232,
)
_ROLLOUTRESPONSE = _descriptor.Descriptor(
name='RollOutResponse',
full_name='evolution_algorithms.RollOutResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='roll_out_index', full_name='evolution_algorithms.RollOutResponse.roll_out_index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fitness', full_name='evolution_algorithms.RollOutResponse.fitness', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=234,
serialized_end=292,
)
_PARAMSYNCREQUEST = _descriptor.Descriptor(
name='ParamSyncRequest',
full_name='evolution_algorithms.ParamSyncRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=294,
serialized_end=312,
)
_PARAMSYNCRESPONSE = _descriptor.Descriptor(
name='ParamSyncResponse',
full_name='evolution_algorithms.ParamSyncResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parameters', full_name='evolution_algorithms.ParamSyncResponse.parameters', index=0,
number=1, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=314,
serialized_end=353,
)
_ROLLOUTREQUEST.fields_by_name['cma_parameters'].message_type = _CMAPARAMETERS
DESCRIPTOR.message_types_by_name['CMAParameters'] = _CMAPARAMETERS
DESCRIPTOR.message_types_by_name['RollOutRequest'] = _ROLLOUTREQUEST
DESCRIPTOR.message_types_by_name['RollOutResponse'] = _ROLLOUTRESPONSE
DESCRIPTOR.message_types_by_name['ParamSyncRequest'] = _PARAMSYNCREQUEST
DESCRIPTOR.message_types_by_name['ParamSyncResponse'] = _PARAMSYNCRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CMAParameters = _reflection.GeneratedProtocolMessageType('CMAParameters', (_message.Message,), dict(
DESCRIPTOR = _CMAPARAMETERS,
__module__ = 'protobuf.roll_out_service_pb2'
# @@protoc_insertion_point(class_scope:evolution_algorithms.CMAParameters)
))
_sym_db.RegisterMessage(CMAParameters)
RollOutRequest = _reflection.GeneratedProtocolMessageType('RollOutRequest', (_message.Message,), dict(
DESCRIPTOR = _ROLLOUTREQUEST,
__module__ = 'protobuf.roll_out_service_pb2'
# @@protoc_insertion_point(class_scope:evolution_algorithms.RollOutRequest)
))
_sym_db.RegisterMessage(RollOutRequest)
RollOutResponse = _reflection.GeneratedProtocolMessageType('RollOutResponse', (_message.Message,), dict(
DESCRIPTOR = _ROLLOUTRESPONSE,
__module__ = 'protobuf.roll_out_service_pb2'
# @@protoc_insertion_point(class_scope:evolution_algorithms.RollOutResponse)
))
_sym_db.RegisterMessage(RollOutResponse)
ParamSyncRequest = _reflection.GeneratedProtocolMessageType('ParamSyncRequest', (_message.Message,), dict(
DESCRIPTOR = _PARAMSYNCREQUEST,
__module__ = 'protobuf.roll_out_service_pb2'
# @@protoc_insertion_point(class_scope:evolution_algorithms.ParamSyncRequest)
))
_sym_db.RegisterMessage(ParamSyncRequest)
ParamSyncResponse = _reflection.GeneratedProtocolMessageType('ParamSyncResponse', (_message.Message,), dict(
DESCRIPTOR = _PARAMSYNCRESPONSE,
__module__ = 'protobuf.roll_out_service_pb2'
# @@protoc_insertion_point(class_scope:evolution_algorithms.ParamSyncResponse)
))
_sym_db.RegisterMessage(ParamSyncResponse)
_ROLLOUTSERVICE = _descriptor.ServiceDescriptor(
name='RollOutService',
full_name='evolution_algorithms.RollOutService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=355,
serialized_end=468,
methods=[
_descriptor.MethodDescriptor(
name='performRollOut',
full_name='evolution_algorithms.RollOutService.performRollOut',
index=0,
containing_service=None,
input_type=_ROLLOUTREQUEST,
output_type=_ROLLOUTRESPONSE,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_ROLLOUTSERVICE)
DESCRIPTOR.services_by_name['RollOutService'] = _ROLLOUTSERVICE
_PARAMETERSYNCSERVICE = _descriptor.ServiceDescriptor(
name='ParameterSyncService',
full_name='evolution_algorithms.ParameterSyncService',
file=DESCRIPTOR,
index=1,
serialized_options=None,
serialized_start=470,
serialized_end=592,
methods=[
_descriptor.MethodDescriptor(
name='syncParameter',
full_name='evolution_algorithms.ParameterSyncService.syncParameter',
index=0,
containing_service=None,
input_type=_PARAMSYNCREQUEST,
output_type=_PARAMSYNCRESPONSE,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_PARAMETERSYNCSERVICE)
DESCRIPTOR.services_by_name['ParameterSyncService'] = _PARAMETERSYNCSERVICE
# @@protoc_insertion_point(module_scope)
| apache-2.0 | -6,697,575,156,520,963,000 | 33.138514 | 908 | 0.734092 | false |
tonimichel/djpl-users | setup.py | 1 | 1092 | #! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ''
setup(
name='djpl-users',
version='0.1',
description='User functionality beyond admin users',
long_description=read('README.rst'),
license='The MIT License',
keywords='django, django-productline, users, signup, confirmation email',
author='Toni Michel',
author_email='[email protected]',
url="https://github.com/tonimichel/djpl-users",
packages=find_packages(),
package_dir={'users': 'users'},
include_package_data=True,
scripts=[],
zip_safe=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
],
install_requires=[
'django-productline',
'django-q==0.9.2'
]
)
| mit | -3,572,329,187,022,619,600 | 26.3 | 77 | 0.621795 | false |
joedeller/pymine | circle.py | 1 | 3410 | #! /usr/bin/python
# Joe Deller 2014
# Drawing circle shapes in Minecraft
# Level : Intermediate
# Uses : Libraries, variables, operators, loops, logic
# This program uses more functions from the maths library
# The mathematics of circles is something that you probably won't
# be taught until secondary school, but don't worry.
# All we really need to understand is where, how big , what color
# This program uses two different types of numbers
# Floating point (numbers with a decimal place ) and integers (whole numbers)
# First the libraries we will need
import mcpi.minecraft as minecraft
import mcpi.block as block
import math
# Make a connection to minecraft
mc = minecraft.Minecraft.create()
# Find out where we are in the world and store this for later
playerPos = mc.player.getPos()
pX = playerPos.x
pY = playerPos.y
pZ = playerPos.z
# Clean up the world and any previous circles nearby
# We will clear and area 20 both the left, right, front and back of where we are standing
# and 64 blocks up into the air
mc.setBlocks(pX - 20, pY, pZ - 20, pX + 20, pY + 64, pZ + 20, block.AIR.id)
# Setup a grass floor, the same size as the area we cleared, but not so high
mc.setBlocks(pX - 20, pY - 1, pZ - 20, pX + 20, pY - 1, pZ + 20, block.GRASS.id)
# Many computer languages, including python, use a measurement called radians to measure angles
# rather than the degrees you might be used to.
# If you haven't heard of Pi (the number rather than the computer),
# Pi is the number you get if you divide the distance all the way around
# a circle, called the circumference, by the width of the circle.
# It is approximately 3.14, or, to be slightly more accurate, 3.1415926;
# in fact the numbers after the decimal point go on for ever.
# A circle can be divided up into 360 degrees, or 2 * Pi in radians
# A semi circle is 180 degrees, or Pi in radians
# Pi is such an important number that many computer
# languages include it as part of their keywords or
# as part of a library
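# As a quick illustration (an aside, not needed for the drawing itself):
# the math library can convert between the two units for us, e.g.
#   math.degrees(math.pi) gives 180.0
#   math.radians(180)     gives 3.14159...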
# For now we won't worry too much about the Mathematics, just the color
# and shape
# How wide in blocks do we want our circle?
# Including the decimal place tells Python that we want a float variable to
# store the width, rather than whole numbers
diameter = 10.0
# Normally we would use a For loop, but in Python "range" only
# works with whole numbers and we need numbers with decimal places
# One way (and there are others) is to use a while loop
# You might wonder why we don't start from zero, try changing i to be 0.0
# and see :-)
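# (Here is a sketch of one of those other ways: step a whole number and
#  scale it, for example
#      for k in range(16):
#          i = 0.01 + k * 0.4
#  visits exactly the same values as the while loop below.)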
i = 0.01
while (i < math.pi * 2):
# When we draw our blocks, we need to convert the floating point numbers back to integers
# Our circle won't be super smooth as minecraft blocks are quite large
# For now don't worry about the sin and cos functions, they work out where to place our blocks
# to make a circle shape
dx = int(diameter * math.cos(i))
dy = int(diameter * math.sin(i))
# We need to add our player X co-ordinate to the circle X coordinate so it is
# drawn close to where we are standing
    # We also will draw our circle back a bit from where we are standing, 10
    # blocks should be enough
mc.setBlock(dx + pX, pY + dy + diameter, pZ + 10, block.WOOL.id, 1)
# try changing the number we add to different values, for example 0.5
# the more you add the faster the loop finishes, so the less blocks get
# drawn
i = i + 0.4
| mit | -7,876,583,890,132,956,000 | 40.585366 | 98 | 0.7261 | false |
niwtr/map-walker | src/server/log.py | 1 | 2782 | #! /usr/bin/python
# -*- coding: utf-8 -*-
'''
Log module.
Maintains the mechanism for writing logs.
Design: Heranort, L.Laddie
'''
import os, time, shutil
'''
Log mode.
If the mode is 'testing', we must record the entire environment.
If the mode is 'distributed', we should record at least all the
information we need.
'''
#mode=[]
class Log_file():
def __init__(self):
path = os.getcwd()
pparent_path = os.path.dirname(os.path.dirname(path)) #get the root dir
self.file_path = os.path.join(pparent_path, 'data')
self.path_simple_log = os.path.join(self.file_path, 'log.txt') #get the log path
self.path_test_log = os.path.join(self.file_path, 'log_test.txt')
self.open_log()
'''
Write the essential information: no less and no more.
'''
def make_simple_log(self, env):
pass
'''
Used only during development of the program.
'''
def make_full_log(self, env):
pass
'''
Analyze the log file to check where the bug is.
'''
def analyzer(self):
pass
'''
Open the log file
'''
def open_log(self):
line_num = 0
if(os.path.isfile(self.path_simple_log)):
temp = open(self.path_simple_log, 'r')
lines = temp.readlines()
temp.close()
line_num = len(lines)
        self.log_simple = open(self.path_simple_log, 'a') # open the log file in append mode
self.log_test = open(self.path_test_log, 'a')
if(line_num >= 1000): #check the log size
self.roll_log()
'''
Preserve the old log
'''
def roll_log(self):
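        # The history accumulates as log_pre_0.log, log_pre_1.log, ...; the
        # first index not already in use is chosen by the loop below.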
for i in range(1000):
file_name = os.path.join(self.file_path, 'log_pre_%d.log' % i)
if(os.path.isfile(file_name)):
continue
self.log_simple.close()
shutil.move(self.path_simple_log, file_name)
self.open_log()
self.info('log roll to %s', file_name)
return
'''
Write log to the actual disk.
'''
def write_log(self, mode, fmt, *msg):
str = '%s - [%s] %s\n' % (time.ctime()[4:], mode, fmt % msg)
self.log_simple.write(str)
try:
self.log_simple.flush()
except:
pass
'''
Three different types of log
'''
def debug(self, fmt, *msg):
self.write_log('DEBUG', fmt, *msg)
def info(self, fmt, *msg):
self.write_log('INFO', fmt, *msg)
def warn(self, fmt, *msg):
self.write_log('WARN', fmt, *msg)
log_file = Log_file()
if(__name__ == '__main__'):
log_file.debug('test')
log_file.debug('%d*%s', 272, 'test')
log_file.info('%d*%s', 1954, 'test')
for i in range(1500):
log_file.warn('%d*%s', i, 'test') | mit | -1,747,165,687,453,168,400 | 25.009346 | 99 | 0.542416 | false |
mesosphere/dcos-commons | frameworks/cassandra/tests/test_zzzrecovery.py | 1 | 4677 | # NOTE: THIS FILE IS INTENTIONALLY NAMED TO BE RUN LAST. SEE test_shutdown_host().
import logging
import pytest
import re
from typing import Iterator
import sdk_agents
import sdk_cmd
import sdk_install
import sdk_marathon
import sdk_plan
import sdk_tasks
from tests import config
RECOVERY_TIMEOUT_SECONDS = 20 * 60
log = logging.getLogger(__name__)
@pytest.fixture(scope="module", autouse=True)
def configure_package(configure_security: None) -> Iterator[None]:
try:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
sdk_install.install(config.PACKAGE_NAME, config.SERVICE_NAME, config.DEFAULT_TASK_COUNT)
yield # let the test session execute
finally:
sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
@pytest.mark.sanity
@pytest.mark.dcos_min_version("1.9", reason="dcos task exec not supported < 1.9")
def test_node_replace_replaces_seed_node() -> None:
pod_to_replace = "node-0"
# start replace and wait for it to finish
sdk_cmd.svc_cli(
config.PACKAGE_NAME, config.SERVICE_NAME, "pod replace {}".format(pod_to_replace)
)
sdk_plan.wait_for_kicked_off_recovery(config.SERVICE_NAME)
sdk_plan.wait_for_completed_recovery(
config.SERVICE_NAME, timeout_seconds=RECOVERY_TIMEOUT_SECONDS
)
@pytest.mark.sanity
@pytest.mark.dcos_min_version("1.9", reason="dcos task exec not supported < 1.9")
def test_node_replace_replaces_node() -> None:
replace_task = [task for task in sdk_tasks.get_summary() if task.name == "node-2-server"][0]
log.info("avoid host for task {}".format(replace_task))
replace_pod_name = replace_task.name[: -len("-server")]
# Update the placement constraints so the new node doesn't end up on the same host
marathon_config = sdk_marathon.get_config(config.SERVICE_NAME)
original_constraint = marathon_config["env"]["PLACEMENT_CONSTRAINT"]
try:
marathon_config["env"]["PLACEMENT_CONSTRAINT"] = '[["hostname", "UNLIKE", "{}"]]'.format(
replace_task.host
)
sdk_marathon.update_app(marathon_config)
sdk_plan.wait_for_completed_deployment(config.SERVICE_NAME)
# start replace and wait for it to finish
sdk_cmd.svc_cli(
config.PACKAGE_NAME, config.SERVICE_NAME, "pod replace {}".format(replace_pod_name)
)
sdk_plan.wait_for_kicked_off_recovery(config.SERVICE_NAME)
sdk_plan.wait_for_completed_recovery(
config.SERVICE_NAME, timeout_seconds=RECOVERY_TIMEOUT_SECONDS
)
finally:
# revert to prior placement setting before proceeding with tests: avoid getting stuck.
marathon_config["env"]["PLACEMENT_CONSTRAINT"] = original_constraint
sdk_marathon.update_app(marathon_config)
sdk_plan.wait_for_completed_deployment(config.SERVICE_NAME)
# @@@@@@@
# WARNING: THIS MUST BE THE LAST TEST IN THIS FILE. ANY TEST THAT FOLLOWS WILL BE FLAKY.
# @@@@@@@
@pytest.mark.sanity
def test_shutdown_host() -> None:
candidate_tasks = sdk_tasks.get_tasks_avoiding_scheduler(
config.SERVICE_NAME, re.compile("^node-[0-9]+-server$")
)
assert len(candidate_tasks) != 0, "Could not find a node to shut down"
# Cassandra nodes should never share a machine
assert len(candidate_tasks) == len(
set([task.host for task in candidate_tasks])
), "Expected candidate tasks to all be on different hosts: {}".format(candidate_tasks)
# Just pick the first one from the list
replace_task = candidate_tasks[0]
replace_pod_name = replace_task.name[: -len("-server")]
# Instead of partitioning or reconnecting, we shut down the host permanently
sdk_agents.shutdown_agent(replace_task.host)
sdk_cmd.svc_cli(
config.PACKAGE_NAME, config.SERVICE_NAME, "pod replace {}".format(replace_pod_name)
)
sdk_plan.wait_for_kicked_off_recovery(config.SERVICE_NAME)
# Print another dump of current cluster tasks, now that repair has started.
sdk_tasks.get_summary()
sdk_plan.wait_for_completed_recovery(config.SERVICE_NAME)
sdk_tasks.check_running(config.SERVICE_NAME, config.DEFAULT_TASK_COUNT)
# Find the new version of the task. Note that the old one may still be present/'running' as
# Mesos might not have acknowledged the agent's death.
new_task = [
task
for task in sdk_tasks.get_summary()
if task.name == replace_task.name and task.id != replace_task.id
][0]
log.info(
"Checking that the original pod has moved to a new agent:\n"
"old={}\nnew={}".format(replace_task, new_task)
)
assert replace_task.agent_id != new_task.agent_id
| apache-2.0 | -8,963,423,582,632,244,000 | 36.119048 | 97 | 0.686337 | false |
acabey/acabey.github.io | projects/demos/engineering.purdue.edu/scriptingwithobjects/swocode/chap13/PassArgsToCallback.py | 1 | 2006 | #!/usr/bin/env python
### PassArgsToCallbacks.py
from Tkinter import *
mw = Tk()
b1 = Button( mw, text = "b1" ) #(A)
b2 = Button( mw, text = "b2" ) #(B)
b3 = Button( mw, text = "b3" ) #(C)
b4 = Button( mw, text = "b4" ) #(D)
b1.grid(row = 0, column = 0) #(E)
b2.grid(row = 0, column = 1) #(F)
b3.grid(row = 1, column = 0) #(G)
b4.grid(row = 1, column = 1) #(H)
# For buttons b1 and b2, the callbacks need no arguments:
def says_b1(): print "b1 says hello to me\n" #(I)
def says_b2( e ): print "b2 says hello to me\n" #(J)
b1.configure( command = says_b1 ) #(K)
b2.bind( '<Button-1>', says_b2 ) #(L)
# The callbacks for b3 and b4 need the following two as arguments:
arg1 = "hello" #(M)
arg2 = "to me" #(N)
# Register the callbacks:
b3.configure( command = lambda: saysomething_1( b3, arg1, arg2 ) ) #(O)
b4.bind( '<Button-1>', lambda e: saysomething_2( e, arg1, arg2 ) ) #(P)
def saysomething_1( wdg, p, q ): #(Q)
butt_lbl = wdg.cget( 'text' ) #(R)
print "%s says %s %s\n" % (butt_lbl, p, q) #(S)
def saysomething_2( evt, p, q ): #(T)
butt_lbl = evt.widget.cget( 'text' ) #(U)
print "%s says %s %s\n" % (butt_lbl, p, q) #(V)
mainloop() #(W)
| gpl-3.0 | -2,207,782,935,895,623,200 | 47.926829 | 73 | 0.339482 | false |
Bouke/django-two-factor-auth | tests/test_views_qrcode.py | 1 | 3279 | from unittest import mock
import qrcode.image.svg
from django.test import RequestFactory, TestCase
from django.urls import reverse
from two_factor.utils import get_otpauth_url
from two_factor.views.core import QRGeneratorView
from .utils import UserMixin
class CustomQRView(QRGeneratorView):
def get_issuer(self):
return "Custom Test Issuer"
class QRTest(UserMixin, TestCase):
test_secret = 'This is a test secret for an OTP Token'
test_img = 'This is a test string that represents a QRCode'
def setUp(self):
super().setUp()
self.user = self.create_user(username='ⓑỚ𝓾⒦ȩ')
self.login_user()
def test_without_secret(self):
response = self.client.get(reverse('two_factor:qr'))
self.assertEqual(response.status_code, 404)
@mock.patch('qrcode.make')
def test_with_secret(self, mockqrcode):
# Setup the mock data
def side_effect(resp):
resp.write(self.test_img)
mockimg = mock.Mock()
mockimg.save.side_effect = side_effect
mockqrcode.return_value = mockimg
# Setup the session
session = self.client.session
session['django_two_factor-qr_secret_key'] = self.test_secret
session.save()
# Get default image factory
default_factory = qrcode.image.svg.SvgPathImage
# Get the QR code
response = self.client.get(reverse('two_factor:qr'))
# Check things went as expected
mockqrcode.assert_called_with(
get_otpauth_url(accountname=self.user.get_username(),
secret=self.test_secret, issuer="testserver"),
image_factory=default_factory)
mockimg.save.assert_called_with(mock.ANY)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf-8'), self.test_img)
self.assertEqual(response['Content-Type'], 'image/svg+xml; charset=utf-8')
@mock.patch('qrcode.make')
def test_custom_issuer(self, mockqrcode):
# Setup the mock data
def side_effect(resp):
resp.write(self.test_img)
mockimg = mock.Mock()
mockimg.save.side_effect = side_effect
mockqrcode.return_value = mockimg
# Setup the session
session = self.client.session
session['django_two_factor-qr_secret_key'] = self.test_secret
session.save()
# Get default image factory
default_factory = qrcode.image.svg.SvgPathImage
# Get the QR code
factory = RequestFactory()
request = factory.get(reverse('two_factor:qr'))
request.user = self.user
request.session = session
response = CustomQRView.as_view()(request)
# Check things went as expected
mockqrcode.assert_called_with(
get_otpauth_url(accountname=self.user.get_username(),
secret=self.test_secret, issuer="Custom Test Issuer"),
image_factory=default_factory)
mockimg.save.assert_called_with(mock.ANY)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf-8'), self.test_img)
self.assertEqual(response['Content-Type'], 'image/svg+xml; charset=utf-8')
| mit | 2,489,858,012,400,676,400 | 34.150538 | 82 | 0.643316 | false |
X0Leon/XQuant | xquant/engine/backtest.py | 1 | 6837 | # -*- coding: utf-8 -*-
"""
Main interfaces for backtesting.
@author: Leon Zhang
@version: 0.4
"""
import datetime
import time
import pandas as pd
try:
import queue
except ImportError:
import Queue as queue
from ..utils.logger import setup_logger
from .event import SignalEvent
logger = setup_logger()
class Backtest(object):
"""
    Encapsulates the backtest settings and component interfaces.
"""
def __init__(self, csv_dir, symbol_list, initial_capital,
heartbeat, start_date, end_date, data_handler,
execution_handler, portfolio, strategy,
commission_type='zero', slippage_type='zero',
**params):
"""
        Initialize the backtest.
        csv_dir: directory containing the CSV data files
        symbol_list: list of ticker symbol strings, e.g. '600008'
        initial_capital: starting capital, e.g. 10000.0
        heartbeat: bar period in seconds (e.g. 60 for minute bars), used in simulated trading
        start_date: start datetime of the strategy backtest
        end_date: end datetime of the strategy backtest
        data_handler: (Class) handles market data
        execution_handler: (Class) handles orders/fills
        portfolio: (Class) virtual account that tracks positions and holdings
        strategy: (Class) generates signals from market data
        commission_type: commission model
        slippage_type: slippage model
        params: dict of strategy parameters
"""
self.csv_dir = csv_dir
self.symbol_list = symbol_list
self.initial_capital = initial_capital
self.heartbeat = heartbeat
self.start_date = start_date
self.end_date = end_date
self.data_handler_cls = data_handler
self.execution_handler_cls = execution_handler
self.portfolio_cls = portfolio
self.strategy_cls = strategy
self.commission_type = commission_type
self.slippage_type = slippage_type
self.events = queue.Queue()
self.params = params
self.signals = 0
self.orders = 0
self.fills = 0
self._generate_trading_instances()
def _generate_trading_instances(self):
"""
        Instantiate the classes to obtain the data_handler (bars), strategy,
        portfolio (port) and execution_handler (broker) objects.
"""
self.data_handler = self.data_handler_cls(self.events, self.csv_dir, self.symbol_list,
self.start_date, self.end_date)
self.strategy = self.strategy_cls(self.data_handler, self.events, **self.params)
self.portfolio = self.portfolio_cls(self.data_handler, self.events, self.start_date,
self.initial_capital)
self.execution_handler = self.execution_handler_cls(self.data_handler, self.events,
slippage_type=self.slippage_type,
commission_type=self.commission_type)
def _run_backtest(self):
"""
        Run the backtest.
"""
while True:
            # Update the bars
bars = self.data_handler
if bars.continue_backtest:
bars.update_bars()
else:
break
            # Process the events
while True:
try:
event = self.events.get(False)
except queue.Empty:
break
else:
if event is not None:
if event.type == 'BAR': # or event.type == 'TICK'
logger.debug(' '.join([event.bar[0], event.bar[1].strftime("%Y-%m-%d %H:%M:%S"),
str(event.bar[5])]))
self.strategy.calculate_signals(event)
self.portfolio.update_timeindex()
elif event.type == 'SIGNAL':
logger.info(' '.join(['Create Signal:', event.datetime.strftime("%Y-%m-%d %H:%M:%S"),
event.symbol, event.signal_type]))
self.signals += 1
self.portfolio.update_signal(event)
elif event.type == 'ORDER':
self.orders += 1
self.execution_handler.execute_order(event)
elif event.type == 'FILL':
self.fills += 1
self.portfolio.update_fill(event)
# time.sleep(self.heartbeat)
def _force_clear(self):
"""
        At the end of the backtest, make sure all open positions are force-closed.
"""
for s in self.symbol_list:
self.portfolio.update_signal(SignalEvent(s, self.portfolio.current_datetime, 'EXIT'))
event = self.events.get()
if event is not None:
assert event.type == 'ORDER'
self.execution_handler.execute_order(event)
event = self.events.get()
assert event.type == 'FILL'
self.portfolio.update_fill(event)
self.portfolio.update_timeindex()
logger.info(' '.join(['Force Clear:', self.portfolio.current_datetime.strftime("%Y-%m-%d %H:%M:%S"),
s, 'EXIT']))
def _output_performance(self):
"""
        Output the strategy's backtest performance.
"""
pass
def trade_record(self):
"""
        Trade records.
"""
trades = pd.DataFrame(self.portfolio.all_trades, columns=['datetime', 'exchange', 'symbol', 'direction',
'fill_price', 'quantity', 'commission'])
return trades.set_index('datetime')
def simulate_trading(self):
"""
        Run the simulated backtest and output the results; returns DataFrames
        of positions and holdings (the equity curve).
"""
start = time.time()
logger.info('Start backtest...')
self._run_backtest()
logger.info('Summary: Signals (%s), Orders (%s), Fills (%s)' % (self.signals, self.orders, self.fills))
self._force_clear()
end = time.time()
timing = round(end-start, 2)
logger.info('Backtest took %s seconds!' % timing)
self._output_performance()
positions = pd.DataFrame(self.portfolio.all_positions).drop_duplicates(subset='datetime', keep='last'
).set_index('datetime')
holdings = pd.DataFrame(self.portfolio.all_holdings).drop_duplicates(subset='datetime', keep='last'
).set_index('datetime')
return positions, holdings
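
# A minimal usage sketch (illustrative only; CSVDataHandler,
# SimulatedExecutionHandler, BasicPortfolio and MyStrategy are assumed names
# that the caller must supply):
#
#   backtest = Backtest('data', ['600008'], 100000.0, 60,
#                       datetime.datetime(2016, 1, 1),
#                       datetime.datetime(2016, 6, 30),
#                       CSVDataHandler, SimulatedExecutionHandler,
#                       BasicPortfolio, MyStrategy, window=20)
#   positions, holdings = backtest.simulate_trading()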
| mit | 7,205,165,296,745,565,000 | 35.494318 | 116 | 0.50397 | false |
kevinharvey/django-tourney | tourney/players/models.py | 1 | 3456 | from django.db import models
class Player(models.Model):
name = models.CharField(max_length=100)
email = models.EmailField()
def __str__(self):
return '{} ({})'.format(self.name, self.email)
class Pool(models.Model):
tournament = models.ForeignKey('matches.Tournament')
players = models.ManyToManyField('players.Player')
def __str__(self):
return '{} - Pool {}'.format(self.tournament.name, self.id)
def _generate_matches(self):
"""
Create a match for each set of 2 players in the pool, and rounds to hold
them
"""
from matches.models import Match, Round
rounds = {}
players = [player for player in self.players.all()]
if len(players) % 2 != 0: players.append(None)
iterator = list(range(len(players)))
for x in iterator:
if x == 0: continue
round = Round(pool=self, number=x)
round.save()
rounds[round] = []
for x in iterator:
if not players[x]: continue
others_iterator = iterator.copy()
others_iterator.remove(x)
for y in others_iterator:
if not players[y]: continue
match_exists = Match.objects.filter(player_1_init=players[x], player_2_init=players[y]).exists()
inverse_match_exists = Match.objects.filter(player_1_init=players[y], player_2_init=players[x]).exists()
if match_exists or inverse_match_exists:
continue
for scheduled_round, players_in_round in rounds.items():
if (players[x] not in players_in_round) and (players[y] not in players_in_round):
break
match = Match(
player_1_init=players[x],
player_2_init=players[y],
round=scheduled_round,
round_index=0
)
match.save()
rounds[scheduled_round] += [players[x], players[y]]
def get_player_standings(self):
"""
Return a list of dictionaries describing the standings (player name and
win/loss record)
"""
records = []
rounds = self.round_set.all()
for round_object in rounds:
for match in round_object.match_set.all():
if not any(d['name'] == match.player_1.name for d in records):
records.append({'name': match.player_1.name, 'wins': 0, 'losses': 0})
if not any(d['name'] == match.player_2.name for d in records):
records.append({'name': match.player_2.name, 'wins': 0, 'losses': 0})
player_1_record = next((record for record in records if record['name'] == match.player_1.name), None)
player_2_record = next((record for record in records if record['name'] == match.player_2.name), None)
if match.winner() == match.player_1:
player_1_record['wins'] += 1
player_2_record['losses'] += 1
if match.winner() == match.player_2:
player_2_record['wins'] += 1
player_1_record['losses'] += 1
        # Stable two-key sort: order by losses (ascending) first, then by wins
        # (descending), so players tied on wins rank by fewer losses.
        records_by_losses = sorted(records, key=lambda k: k['losses'])
        records_by_wins = sorted(records_by_losses, key=lambda k: k['wins'], reverse=True)
return records_by_wins
| gpl-3.0 | 8,356,018,102,655,234,000 | 35.378947 | 120 | 0.540799 | false |
davidak/PyZufall | demo.py | 1 | 1801 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function, unicode_literals)
from builtins import *
from pyzufall.version import __version__
from pyzufall.generator import adjektiv, band, bandart, baum, beilage, beruf_m, beruf_w, color, datum, essen, farbe, firma, geburtsdatum, gegenstand, interesse, koerperteil, nachname, objekt, objekt_m, objekt_w, ort, person, person_m, person_objekt_m, person_objekt_w, person_w, pflanze, sprichwort, stadt, stadt_bl, tier, trinken, verbd, verbi, verbi2, verbn, verbt, verbt2, vorname, vorname_m, vorname_w, wort, zahl
from pyzufall.satz import satz
from pyzufall.person import Person
titel = "Demoscript für PyZufall " + __version__
print("\n" + titel + "\n" + '~' * len(titel) + "\n")
print("Person: " + person())
print("Berufsbezeichnung M: " + beruf_m())
print("Berufsbezeichnung W: " + beruf_w())
print("Essen: " + essen())
print("Beilage: " + beilage())
print("Trinken: " + trinken())
print("Stadt: " + stadt())
print("Ort: " + ort())
print("Band: " + band())
print("Bandart: " + bandart())
print("Wort: " + wort())
print("Zahl: " + zahl())
print("Farbe: " + farbe())
print("Datum: " + datum())
print("Sprichwort: " + sprichwort())
anzahl = 10
print("\n" + str(anzahl) + " zufällige Sätze:\n")
for i in range(1, anzahl+1):
print(str(i) + ". " + satz())
print("\n") # Leerzeile
print("Zufällige Personen generieren:\n")
p1 = Person()
print(p1)
p2 = Person()
print(p2)
print("{} und {} sitzen auf einer Bank im Park.\n".format(p1.vorname, p2.vorname))
del p1, p2
s = "Heute Abend gibt es {} mit {} und dazu ein Glas {}.".format(essen(), beilage(), trinken())
print(s)
s = "Meine {} heißt '{}' und besteht aus {}, {} und mir.".format(bandart(), band(), vorname(), vorname())
print(s)
| gpl-3.0 | -8,526,882,945,034,902,000 | 34.215686 | 417 | 0.655902 | false |
bushvin/ansible-plugins | vars_plugins/user_dir_vars.py | 1 | 3579 | # (c) 2015, William Leemans <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
from ansible import utils
from ansible import errors
import ansible.constants as C
class VarsModule(object):
_base_host_vars = "~/.ansible/host_vars"
_base_group_vars = "~/.ansible/group_vars"
_host_allowed_facts = [ 'ansible_ssh_user', 'ansible_ssh_pass', 'ansible_sudo', 'ansible_sudo_pass', 'ansible_ssh_private_key_file', 'ansible_become', 'ansible_become_user', 'ansible_become_pass' ]
_group_allowed_facts = [ 'ansible_ssh_user', 'ansible_ssh_pass', 'ansible_sudo', 'ansible_sudo_pass', 'ansible_ssh_private_key_file', 'ansible_become', 'ansible_become_user', 'ansible_become_pass' ]
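    # Only these connection and privilege-escalation facts are read from the
    # per-user var files; anything else defined there is silently ignored.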
def __init__(self, inventory):
self.inventory = inventory
self.inventory_basedir = inventory.basedir()
self._base_host_vars = os.path.expanduser(self._base_host_vars)
self._base_group_vars = os.path.expanduser(self._base_group_vars)
def run(self, host, vault_password=None):
""" For backwards compatibility, when only vars per host were retrieved
This method should return both host specific vars as well as vars
calculated from groups it is a member of """
        result = {}
        for g in host.groups:
            data = self.get_group_vars(g, vault_password)
            if C.DEFAULT_HASH_BEHAVIOUR == "merge":
                result = utils.merge_hash(result, data)
            else:
                result.update(data)
        data = self.get_host_vars(host, vault_password)
        if C.DEFAULT_HASH_BEHAVIOUR == "merge":
            result = utils.merge_hash(result, data)
        else:
            result.update(data)
        return result
def get_host_vars(self, host, vault_password=None):
result = {}
filename = os.path.join(self._base_host_vars, "%s.yml" % host.name)
if os.path.isfile( filename ):
res = utils.parse_yaml_from_file(filename, vault_password=vault_password)
if type(res) != dict:
raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename)
data = dict()
for el in res:
if len(self._host_allowed_facts) == 0 or el in self._host_allowed_facts:
data.update( { el: res[el] } )
result.update(data)
return result
def get_group_vars(self, group, vault_password=None):
result = {}
filename = os.path.join(self._base_group_vars, "%s.yml" % group.name)
if os.path.isfile( filename ):
res = utils.parse_yaml_from_file(filename, vault_password=vault_password)
if type(res) != dict:
raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename)
data = dict()
for el in res:
if len(self._group_allowed_facts) == 0 or el in self._group_allowed_facts:
data.update( { el: res[el] } )
result.update(data)
return result
| gpl-3.0 | -8,529,523,066,040,105,000 | 41.105882 | 202 | 0.62727 | false |
cgarrard/osgeopy-code | Chapter13/listing13_2.py | 1 | 1369 | # Plot countries as multipolygons.
import matplotlib.pyplot as plt
from osgeo import ogr
def plot_polygon(poly, symbol='k-', **kwargs):
"""Plots a polygon using the given symbol."""
for i in range(poly.GetGeometryCount()):
subgeom = poly.GetGeometryRef(i)
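        # GetPoints() yields (x, y) tuples; zip(*...) unzips them into
        # separate x and y sequences for plotting.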
x, y = zip(*subgeom.GetPoints())
plt.plot(x, y, symbol, **kwargs)
def plot_layer(filename, symbol, layer_index=0, **kwargs):
"""Plots an OGR polygon layer using the given symbol."""
ds = ogr.Open(filename)
# Loop through all of the features in the layer.
for row in ds.GetLayer(layer_index):
geom = row.geometry()
geom_type = geom.GetGeometryType()
# If the geometry is a single polygon.
if geom_type == ogr.wkbPolygon:
plot_polygon(geom, symbol, **kwargs)
# Else if the geometry is a multipolygon, send each
# part to plot_polygon individually.
elif geom_type == ogr.wkbMultiPolygon:
for i in range(geom.GetGeometryCount()):
subgeom = geom.GetGeometryRef(i)
plot_polygon(subgeom, symbol, **kwargs)
# Plot countries.
plot_layer(r'D:\osgeopy-data\global\ne_110m_admin_0_countries.shp', 'k-')
plt.axis('equal')
# Get rid of the tick marks on the side of the plot.
plt.gca().get_xaxis().set_ticks([])
plt.gca().get_yaxis().set_ticks([])
plt.show()
| mit | 390,147,310,366,246,600 | 33.225 | 73 | 0.639153 | false |
pankeshang/PPrintJsonEncoder | pp_json_encoder.py | 1 | 10703 | # -*- coding: utf-8 -*-
__author__ = '[email protected]'
"""
PPrintJsonEncoder
This is a wraper of the existing JSONEncoder from python's default ``json`` module.
What has been newly added in is just the ``depth`` attribute
"""
import json
import re
try:
from _json import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from _json import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
#ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
INFINITY = float('inf')
FLOAT_REPR = repr
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
return '"' + ESCAPE.sub(replace, s) + '"'
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
return '\\u{0:04x}'.format(n)
#return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
#return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
class PPJSONEncoder(json.JSONEncoder):
def __init__(self, depth=None, **kwargs):
self.depth = depth
super(PPJSONEncoder, self).__init__(**kwargs)
def pp_iterencode(self, o):
""" No we do not care about ont_shot hahaha bite me!
"""
_one_shot = None
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
if isinstance(o, str):
o = o.decode(_encoding)
return _orig_encoder(o)
def floatstr(o, allow_nan=self.allow_nan,
_repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
# Check for specials. Note that this type of test is processor
# and/or platform-specific, so do tests which don't depend on the
# internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
_iterencode = _pp_make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot, self.depth
)
return _iterencode(o, 0)
def iterencode(self, o, _one_shot=False):
if self.depth:
return self.pp_iterencode(o)
return super(PPJSONEncoder, self).iterencode(o, _one_shot)
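# Minimal usage sketch (assumed to mirror json.JSONEncoder's interface):
# pretty-print only the first two nesting levels; anything deeper is
# collapsed onto a single line.
#
#   PPJSONEncoder(indent=4, depth=2).encode({'a': {'b': {'c': [1, 2]}}})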
def _pp_make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
depth,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
basestring=basestring,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
long=long,
str=str,
tuple=tuple):
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
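            # Past the requested depth, stop emitting newline indents so any
            # deeper structure is rendered on a single line.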
if _current_indent_level > depth:
newline_indent = None
separator = _item_separator
else:
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, basestring):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, (int, long)):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
if _current_indent_level > depth:
newline_indent = None
item_separator = _item_separator
else:
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.iteritems()
for key, value in items:
if isinstance(key, basestring):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, basestring):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, (int, long)):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, basestring):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, (int, long)):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
| mit | 2,360,319,245,648,625,000 | 31.433333 | 83 | 0.502476 | false |
jimporter/bfg9000 | bfg9000/builtins/find.py | 1 | 5986 | import re
from enum import Enum
from functools import reduce
from . import builtin
from ..glob import NameGlob, PathGlob
from ..iterutils import iterate, listify
from ..backends.make import writer as make
from ..backends.ninja import writer as ninja
from ..backends.make.syntax import Writer, Syntax
from ..build_inputs import build_input
from ..path import Path, Root, walk, uniquetrees
from ..platforms import known_platforms
build_input('find_dirs')(lambda build_inputs, env: set())
depfile_name = '.bfg_find_deps'
@builtin.default()
class FindResult(Enum):
include = 0
not_now = 1
exclude = 2
exclude_recursive = 3
def __bool__(self):
return self == self.include
def __and__(self, rhs):
return type(self)(max(self.value, rhs.value))
def __or__(self, rhs):
return type(self)(min(self.value, rhs.value))
class FileFilter:
def __init__(self, include, type=None, extra=None, exclude=None,
filter_fn=None):
self.include = [PathGlob(i, type) for i in iterate(include)]
if not self.include:
raise ValueError('at least one pattern required')
self.extra = [NameGlob(i, type) for i in iterate(extra)]
self.exclude = [NameGlob(i, type) for i in iterate(exclude)]
self.filter_fn = filter_fn
def bases(self):
return uniquetrees([i.base for i in self.include])
def _match_globs(self, path):
if any(i.match(path) for i in self.exclude):
return FindResult.exclude_recursive
skip_base = len(self.include) == 1
result = reduce(lambda a, b: a | b,
(i.match(path, skip_base) for i in self.include))
if result:
return FindResult.include
if any(i.match(path) for i in self.extra):
return FindResult.not_now
if result == PathGlob.Result.never:
return FindResult.exclude_recursive
return FindResult.exclude
def match(self, path):
result = self._match_globs(path)
if self.filter_fn:
return result & self.filter_fn(path)
return result
def write_depfile(env, path, output, seen_dirs, makeify=False):
with open(path.string(env.base_dirs), 'w') as f:
# Since this file is in the build dir, we can use relative dirs for
# deps also in the build dir.
roots = env.base_dirs.copy()
roots[Root.builddir] = None
out = Writer(f, None)
out.write(output.string(roots), Syntax.target)
out.write_literal(':')
for i in seen_dirs:
out.write_literal(' ')
out.write(i.string(roots), Syntax.dependency)
out.write_literal('\n')
if makeify:
for i in seen_dirs:
out.write(i.string(roots), Syntax.target)
out.write_literal(':\n')
def _path_type(path):
return 'd' if path.directory else 'f'
@builtin.function()
def filter_by_platform(context, path):
env = context.env
my_plat = {env.target_platform.genus, env.target_platform.family}
sub = '|'.join(re.escape(i) for i in known_platforms if i not in my_plat)
ex = r'(^|/|_)(' + sub + r')(\.[^\.]+$|$|/)'
return (FindResult.not_now if re.search(ex, path.suffix)
else FindResult.include)
def _find_files(env, filter, seen_dirs=None):
paths = filter.bases()
for p in paths:
yield p, filter.match(p)
for p in paths:
for base, dirs, files in walk(p, env.base_dirs):
if seen_dirs is not None:
seen_dirs.append(base)
to_remove = []
for i, p in enumerate(dirs):
m = filter.match(p)
if m == FindResult.exclude_recursive:
to_remove.append(i)
yield p, m
for p in files:
yield p, filter.match(p)
for i in reversed(to_remove):
del dirs[i]
def find(env, pattern, type=None, extra=None, exclude=None):
pattern = [Path.ensure(i, Root.srcdir) for i in iterate(pattern)]
file_filter = FileFilter(pattern, type, extra, exclude)
results = []
for path, matched in _find_files(env, file_filter):
if matched == FindResult.include:
results.append(path)
return results
@builtin.function()
def find_files(context, pattern, *, type=None, extra=None, exclude=None,
filter=None, file_type=None, dir_type=None, dist=True,
cache=True):
types = {'f': file_type or context['auto_file'],
'd': dir_type or context['directory']}
extra_types = {'f': context['generic_file'], 'd': context['directory']}
pattern = [context['relpath'](i) for i in iterate(pattern)]
exclude = context.build['project']['find_exclude'] + listify(exclude)
file_filter = FileFilter(pattern, type, extra, exclude, filter)
found, seen_dirs = [], []
for path, matched in _find_files(context.env, file_filter, seen_dirs):
if matched == FindResult.include:
found.append(types[_path_type(path)](path, dist=dist))
elif matched == FindResult.not_now and dist:
extra_types[_path_type(path)](path, dist=dist)
if cache:
context.build['find_dirs'].update(seen_dirs)
context.build['regenerate'].depfile = depfile_name
return found
@builtin.function()
def find_paths(context, *args, **kwargs):
return [i.path for i in context['find_files'](*args, **kwargs)]
@make.post_rule
def make_find_dirs(build_inputs, buildfile, env):
if build_inputs['find_dirs']:
write_depfile(env, Path(depfile_name), make.filepath,
build_inputs['find_dirs'], makeify=True)
buildfile.include(depfile_name)
@ninja.post_rule
def ninja_find_dirs(build_inputs, buildfile, env):
if build_inputs['find_dirs']:
write_depfile(env, Path(depfile_name), ninja.filepath,
build_inputs['find_dirs'])
| bsd-3-clause | -1,055,999,983,932,976,400 | 31.532609 | 77 | 0.607417 | false |
CompassionCH/l10n-switzerland | l10n_ch_payment_slip/tests/test_payment_slip.py | 1 | 12317 | # -*- coding: utf-8 -*-
# © 2014-2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import time
import re
import odoo.tests.common as test_common
from odoo.report import render_report
class TestPaymentSlip(test_common.TransactionCase):
_compile_get_ref = re.compile(r'[^0-9]')
def make_bank(self):
company = self.env.ref('base.main_company')
self.assertTrue(company)
partner = self.env.ref('base.main_partner')
self.assertTrue(partner)
bank = self.env['res.bank'].create(
{
'name': 'BCV',
'ccp': '01-1234-1',
'bic': '23452345',
'clearing': '234234',
}
)
bank_account = self.env['res.partner.bank'].create(
{
'partner_id': partner.id,
'bank_id': bank.id,
'bank_bic': bank.bic,
'acc_number': '01-1234-1',
'bvr_adherent_num': '1234567',
'print_bank': True,
'print_account': True,
'print_partner': True,
}
)
bank_account.onchange_acc_number_set_swiss_bank()
self.assertEqual(bank_account.ccp, '01-1234-1')
return bank_account
def make_invoice(self):
if not hasattr(self, 'bank_account'):
self.bank_account = self.make_bank()
account_model = self.env['account.account']
account_debtor = account_model.search([('code', '=', '1100')])
if not account_debtor:
account_debtor = account_model.create({
'code': 1100,
'name': 'Debitors',
'user_type_id':
self.env.ref('account.data_account_type_receivable').id,
'reconcile': True,
})
account_sale = account_model.search([('code', '=', '3200')])
if not account_sale:
account_sale = account_model.create({
'code': 3200,
'name': 'Goods sales',
'user_type_id':
self.env.ref('account.data_account_type_revenue').id,
'reconcile': False,
})
invoice = self.env['account.invoice'].create({
'partner_id': self.env.ref('base.res_partner_12').id,
'reference_type': 'none',
'name': 'A customer invoice',
'account_id': account_debtor.id,
'type': 'out_invoice',
'partner_bank_id': self.bank_account.id
})
self.env['account.invoice.line'].create({
'account_id': account_sale.id,
'product_id': False,
'quantity': 1,
'price_unit': 862.50,
'invoice_id': invoice.id,
'name': 'product that cost 862.50 all tax included',
})
invoice.action_invoice_open()
# waiting for the cache to refresh
attempt = 0
while not invoice.move_id:
invoice.refresh()
time.sleep(0.1)
attempt += 1
if attempt > 20:
break
return invoice
def test_invoice_confirmation(self):
"""Test that confirming an invoice generate slips correctly"""
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
for line in invoice.move_id.line_ids:
if line.account_id.user_type_id.type in ('payable', 'receivable'):
self.assertTrue(line.transaction_ref)
else:
self.assertFalse(line.transaction_ref)
for line in invoice.move_id.line_ids:
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
if line.account_id.user_type_id.type in ('payable', 'receivable'):
self.assertTrue(slip)
self.assertEqual(slip.amount_total, 862.50)
self.assertEqual(slip.invoice_id.id, invoice.id)
else:
self.assertFalse(slip)
def test_slip_validity(self):
"""Test that confirming slip are valid"""
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
for line in invoice.move_id.line_ids:
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
if line.account_id.user_type_id.type in ('payable', 'receivable'):
self.assertTrue(slip.reference)
self.assertTrue(slip.scan_line)
self.assertTrue(slip.slip_image)
self.assertTrue(slip.a4_pdf)
inv_num = line.invoice_id.number
line_ident = self._compile_get_ref.sub(
'', "%s%s" % (inv_num, line.id)
)
self.assertIn(line_ident, slip.reference.replace(' ', ''))
def test_print_report(self):
invoice = self.make_invoice()
data, format = render_report(
self.env.cr,
self.env.uid,
[invoice.id],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice',
{},
context={'force_pdf': True},
)
self.assertTrue(data)
self.assertEqual(format, 'pdf')
def test_print_multi_report_merge_in_memory(self):
# default value as in memory
self.assertEqual(self.env.user.company_id.merge_mode, 'in_memory')
invoice1 = self.make_invoice()
invoice2 = self.make_invoice()
data, format = render_report(
self.env.cr,
self.env.uid,
[invoice1.id, invoice2.id],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice',
{},
context={'force_pdf': True},
)
self.assertTrue(data)
self.assertEqual(format, 'pdf')
def test_print_multi_report_merge_on_disk(self):
self.env.user.company_id.merge_mode = 'on_disk'
invoice1 = self.make_invoice()
invoice2 = self.make_invoice()
data, format = render_report(
self.env.cr,
self.env.uid,
[invoice1.id, invoice2.id],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice',
{},
context={'force_pdf': True},
)
self.assertTrue(data)
self.assertEqual(format, 'pdf')
def test_address_format(self):
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
address_lines = slip._get_address_lines(com_partner.id)
self.assertEqual(
address_lines,
[u'93, Press Avenue', u'', u'73377 Le Bourget du Lac']
)
def test_address_format_user_demo(self):
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
demo_user = self.env.ref('base.user_demo')
address_lines = slip.sudo(demo_user)._get_address_lines(com_partner.id)
self.assertEqual(
address_lines,
[u'93, Press Avenue', u'', u'73377 Le Bourget du Lac']
)
def test_address_format_no_country(self):
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
com_partner.country_id = False
address_lines = slip._get_address_lines(com_partner.id)
self.assertEqual(
address_lines,
[u'93, Press Avenue', u'', u'73377 Le Bourget du Lac']
)
def test_address_format_special_format(self):
""" Test special formating without street2 """
ICP = self.env['ir.config_parameter']
ICP.set_param(
'bvr.address.format',
"%(street)s\n%(zip)s %(city)s"
)
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
com_partner.country_id = False
address_lines = slip._get_address_lines(com_partner.id)
self.assertEqual(
address_lines,
[u'93, Press Avenue', u'73377 Le Bourget du Lac']
)
def test_address_length(self):
invoice = self.make_invoice()
self.assertTrue(invoice.move_id)
line = invoice.move_id.line_ids[0]
slip = self.env['l10n_ch.payment_slip'].search(
[('move_line_id', '=', line.id)]
)
com_partner = slip.get_comm_partner()
address_lines = slip._get_address_lines(com_partner.id)
f_size = 11
len_tests = [
(15, (11, None)),
(23, (11, None)),
(26, (10, None)),
(27, (10, None)),
(30, (9, None)),
(32, (8, 34)),
(34, (8, 34)),
(40, (8, 34))]
for text_len, result in len_tests:
com_partner.name = 'x' * text_len
res = slip._get_address_font_size(
f_size, address_lines, com_partner)
self.assertEqual(res, result, "Wrong result for len %s" % text_len)
def test_print_bvr(self):
invoice = self.make_invoice()
bvr = invoice.print_bvr()
self.assertEqual(bvr['report_name'],
'l10n_ch_payment_slip.one_slip_per_page_from_invoice')
self.assertEqual(bvr['report_file'],
'l10n_ch_payment_slip.one_slip_per_page')
def test_reload_from_attachment(self):
def _find_invoice_attachment(self, invoice):
return self.env['ir.attachment'].search([
('res_model', '=', invoice._name),
('res_id', '=', invoice.id)
])
invoice = self.make_invoice()
report_name = 'l10n_ch_payment_slip.one_slip_per_page_from_invoice'
report_payment_slip = self.env['report']._get_report_from_name(
report_name)
bvr_action = invoice.print_bvr()
# Print the report a first time
pdf = self.env['report'].with_context(bvr_action['context']).get_pdf(
invoice.ids, report_name)
# Ensure no attachment was stored
attachment = _find_invoice_attachment(self, invoice)
self.assertEqual(len(attachment), 0)
# Set the report to store and reload from attachment
report_payment_slip.write({
'attachment_use': True,
'attachment':
"('ESR'+(object.number or '').replace('/','')+'.pdf')"
})
# Print the report again
pdf1 = self.env['report'].with_context(bvr_action['context']).get_pdf(
invoice.ids, report_name)
# Ensure pdf is the same
self.assertEqual(pdf, pdf1)
# Ensure attachment was stored
attachment1 = _find_invoice_attachment(self, invoice)
self.assertEqual(len(attachment1), 1)
# Print the report another time
pdf2 = self.env['report'].with_context(bvr_action['context']).get_pdf(
invoice.ids, report_name)
# Ensure pdf and attachment are the same as before
attachment2 = _find_invoice_attachment(self, invoice)
self.assertEqual(len(attachment2), 1)
self.assertEqual(pdf1, pdf2)
self.assertEqual(attachment1, attachment2)
# Allow cancelling entries on the journal
invoice.journal_id.update_posted = True
# Cancel the invoice and set back to draft
invoice.action_invoice_cancel()
invoice.action_invoice_draft()
# Ensure attachment was unlinked
attachment = _find_invoice_attachment(self, invoice)
self.assertEqual(len(attachment), 0)
| agpl-3.0 | -716,294,360,628,454,700 | 36.779141 | 79 | 0.540435 | false |
jorgebaier/iic1103-s4-2016 | clase0922/ejemplo.py | 1 | 1231 | import bigramas_ord
# bigramas_ord.cargar_archivo(): loads the data file
# bigramas_ord.palabra_comun_seguida(palabra):
#   returns a word that frequently occurs after palabra
def leer_prohibidas():
    # returns a string containing the forbidden letters
    print('which letters do you want to omit? (enter 0 to finish)')
s = ''
c = ''
while c != '0':
s = s + c
c = input()
return s
def legal(prohibidas,palabra):
    # returns True if the word contains no character
    # from prohibidas, False otherwise
for c in prohibidas:
if c in palabra:
return False
return True
bigramas_ord.cargar_archivo()
cuantas_palabras = int(input('how many words do you want? '))
palabra_inicial = input('initial word? ')
prohibidas = leer_prohibidas()
print("Forbidden letters:", prohibidas)
contador = 0
palabra = palabra_inicial
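# Build a word chain: repeatedly pick a frequent follower of the current
# word, re-rolling whenever the candidate contains a forbidden letter.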
while contador < cuantas_palabras:
print(palabra, end=' ')
palabra_original = palabra
palabra = bigramas_ord.palabra_comun_seguida(palabra_original)
while not legal(prohibidas,palabra):
palabra = bigramas_ord.palabra_comun_seguida(palabra_original)
contador += 1 # contador = contador + 1
| unlicense | -6,015,322,268,898,109,000 | 25.170213 | 74 | 0.681301 | false |
melmothx/jsonbot | jsb/plugs/core/userstate.py | 1 | 1812 | # jsb/plugs/userstate.py
#
#
""" userstate is stored in jsondata/state/users/<username>. """
## jsb imports
from jsb.lib.commands import cmnds
from jsb.lib.examples import examples
from jsb.lib.persiststate import UserState
from jsb.lib.errors import NoSuchUser
## set command
def handle_set(bot, ievent):
""" let the user manage its own state. """
try: (item, value) = ievent.args
except ValueError: ievent.missing("<item> <value>") ; return
ievent.user.state.data[item.lower()] = value
ievent.user.state.save()
ievent.reply("%s set to %s" % (item.lower(), value))
cmnds.add('set', handle_set, ['OPER', 'USER', 'GUEST'])
examples.add('set', 'set userstate', 'set place heerhugowaard')
## get command
def handle_get(bot, ievent):
""" get state of a user. """
target = ievent.rest
if target: target = target.lower()
userstate = ievent.user.state
result = []
for i, j in userstate.data.iteritems():
if target == i or not target: result.append("%s=%s" % (i, j))
if result: ievent.reply("state: ", result)
else: ievent.reply('no userstate of %s known' % ievent.userhost)
cmnds.add('get', handle_get, ['OPER', 'USER', 'GUEST'])
examples.add('get', 'get your userstate', 'get')
## unset command
def handle_unset(bot, ievent):
""" remove value from user state of the user giving the command. """
try:
item = ievent.args[0].lower()
except (IndexError, TypeError):
ievent.missing('<item>')
return
try: del ievent.user.state.data[item]
except KeyError:
ievent.reply('no such item')
return
ievent.user.state.save()
ievent.reply('item %s deleted' % item)
cmnds.add('unset', handle_unset, ['USER', 'GUEST'])
examples.add('unset', 'delete variable from your state', 'unset TZ')
| mit | 4,534,513,255,737,570,000 | 29.2 | 72 | 0.646799 | false |
jbernardis/repraptoolbox | src/Printer/heaters.py | 1 | 7394 | import wx
BUTTONDIM = (48, 48)
class HeaterInfo:
def __init__(self, name, tool, info):
self.name = name
self.tool = tool
self.mintemp = info[0]
self.maxtemp = info[1]
self.lowpreset = info[2]
self.highpreset = info[3]
self.setcmd = info[4]
self.setwaitcmd = info[5]
class Heaters(wx.Window):
def __init__(self, parent, reprap, prtName):
self.parent = parent
self.images = parent.images
self.settings = self.parent.settings
self.reprap = reprap
self.prtName = prtName
wx.Window.__init__(self, parent, wx.ID_ANY, size=(-1, -1), style=wx.SIMPLE_BORDER)
szHeaters = wx.BoxSizer(wx.VERTICAL)
self.bedInfo = HeaterInfo("Bed", None, self.settings.bedinfo)
self.hBed = Heater(self, self.bedInfo, self.reprap)
szHeaters.AddSpacer(5)
szHeaters.Add(self.hBed)
self.hHEs = []
self.hHEInfo = []
for i in range(self.settings.nextruders):
if self.settings.nextruders == 1:
tool = None
title = "HE"
else:
tool = i
title = "HE%d" % tool
hi = HeaterInfo(title, tool, self.settings.heinfo)
h = Heater(self, hi, self.reprap)
szHeaters.AddSpacer(5)
szHeaters.Add(h)
self.hHEs.append(h)
self.hHEInfo.append(hi)
szHeaters.AddSpacer(5)
self.SetSizer(szHeaters)
self.Layout()
self.Fit()
def registerGCodeTemps(self, hes, bed):
for i in range(self.settings.nextruders):
self.hHEs[i].enableExtract(hes[i])
self.hBed.enableExtract(bed)
def tempHandler(self, actualOrTarget, hName, tool, value):
if hName == "Bed":
self.hBed.setTemperature(actualOrTarget, value)
elif hName == "HE":
if tool is None:
ix = 0
else:
ix = tool
self.hHEs[ix].setTemperature(actualOrTarget, value)
def getBedInfo(self):
return self.bedInfo
def getHEInfo(self, tx):
if tx >= self.settings.nextruders:
return None
else:
return self.hHEInfo[tx]
class Heater(wx.Window):
def __init__(self, parent, hi, reprap):
self.parent = parent
self.images = parent.images
self.settings = self.parent.settings
self.reprap = reprap
self.htrInfo = hi
self.GCodeTemp = None
self.setting = None
self.actual = None
self.lowpreset = hi.lowpreset
self.highpreset = hi.highpreset
self.mintemp = hi.mintemp
self.maxtemp = hi.maxtemp
self.heaterOn = False
wx.Window.__init__(self, parent, wx.ID_ANY, size=(-1, -1), style=wx.NO_BORDER)
szHeater = wx.BoxSizer(wx.HORIZONTAL)
self.font12bold = wx.Font(12, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD)
self.font20bold = wx.Font(20, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD)
t = wx.StaticText(self, wx.ID_ANY, "%s:" % hi.name, size=(50, -1), style=wx.ALIGN_RIGHT)
t.SetFont(self.font12bold)
szHeater.AddSpacer(10)
szHeater.Add(t, 0, wx.ALIGN_CENTER_VERTICAL, 1)
szHeater.AddSpacer(10)
self.sbIndicator = wx.StaticBitmap(self, wx.ID_ANY, self.images.pngLedoff)
szHeater.Add(self.sbIndicator, 0, wx.ALIGN_CENTER_VERTICAL, 1)
self.bPower = wx.BitmapButton(self, wx.ID_ANY, self.images.pngHeatoff, size=BUTTONDIM, style = wx.NO_BORDER)
self.bPower.SetToolTip("Turn heater on/off")
self.Bind(wx.EVT_BUTTON, self.onBPower, self.bPower)
szHeater.Add(self.bPower)
self.tcActual = wx.TextCtrl(self, wx.ID_ANY, "", size=(70, -1), style=wx.TE_READONLY | wx.TE_RIGHT)
self.tcActual.SetFont(self.font12bold)
szHeater.Add(self.tcActual, 0, wx.ALIGN_CENTER_VERTICAL, 1)
t = wx.StaticText(self, wx.ID_ANY, " / ")
t.SetFont(self.font20bold)
szHeater.Add(t, 0, wx.ALIGN_CENTER_VERTICAL, 1)
self.tcSetting = wx.TextCtrl(self, wx.ID_ANY, "", size=(50, -1), style=wx.TE_READONLY | wx.TE_RIGHT)
self.tcSetting.SetFont(self.font12bold)
szHeater.Add(self.tcSetting, 0, wx.ALIGN_CENTER_VERTICAL, 1)
self.slThermostat = wx.Slider(self, wx.ID_ANY, value=self.lowpreset, size=(180, -1),
minValue=self.mintemp, maxValue=self.maxtemp,
style=wx.SL_HORIZONTAL | wx.SL_VALUE_LABEL)
self.slThermostat.SetToolTip("Choose temperature setting for heater")
szHeater.Add(self.slThermostat, 0, wx.ALIGN_CENTER_VERTICAL, 1)
self.Bind(wx.EVT_SCROLL, self.doThermostat, self.slThermostat)
szHeater.AddSpacer(10)
self.bLowPreset = wx.Button(self, wx.ID_ANY, "%d" % self.lowpreset, size=(40, 22))
self.bLowPreset.SetToolTip("Set heater to low preset value")
self.Bind(wx.EVT_BUTTON, self.doLowPreset, self.bLowPreset)
self.bHighPreset = wx.Button(self, wx.ID_ANY, "%d" % self.highpreset, size=(40, 22))
self.bHighPreset.SetToolTip("Set heater to high preset value")
self.Bind(wx.EVT_BUTTON, self.doHighPreset, self.bHighPreset)
sz = wx.BoxSizer(wx.VERTICAL)
sz.AddSpacer(3)
sz.Add(self.bHighPreset)
sz.Add(self.bLowPreset)
szHeater.Add(sz)
szHeater.AddSpacer(10)
self.bExtract = wx.BitmapButton(self, wx.ID_ANY, self.images.pngFileopen, size=BUTTONDIM)
self.bExtract.SetToolTip("Extract temperature setting from G Code")
self.Bind(wx.EVT_BUTTON, self.onBExtract, self.bExtract)
szHeater.Add(self.bExtract)
self.bExtract.Enable(False)
szHeater.AddSpacer(10)
self.SetSizer(szHeater)
self.Layout()
self.Fit()
def onBPower(self, evt):
if self.heaterOn and self.setting == self.slThermostat.GetValue():
self.heaterOn = False
self.updateSetting(0)
cmd = self.htrInfo.setcmd + " S0"
self.bPower.SetBitmap(self.images.pngHeatoff)
else:
self.heaterOn = True
self.updateSetting(self.slThermostat.GetValue())
cmd = self.htrInfo.setcmd + " S%d" % self.setting
self.bPower.SetBitmap(self.images.pngHeaton)
if self.htrInfo.tool is not None:
cmd += " T%d" % self.htrInfo.tool
self.reprap.sendNow(cmd)
def onBExtract(self, evt):
if self.GCodeTemp is not None:
self.slThermostat.SetValue(self.GCodeTemp)
def enableExtract(self, temp, flag=True):
self.bExtract.Enable(flag)
self.GCodeTemp = temp
def updateSetting(self, newSetting):
self.setting = newSetting
if self.setting is None:
self.tcSetting.SetValue("")
else:
self.tcSetting.SetValue("%d" % self.setting)
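        # The LED mirrors heating state: lit while the target is above the
        # measured temperature, dark once it has been reached.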
if self.setting is None:
self.sbIndicator.SetBitmap(self.images.pngLedoff)
elif self.actual is None:
self.sbIndicator.SetBitmap(self.images.pngLedon)
elif self.setting > self.actual:
self.sbIndicator.SetBitmap(self.images.pngLedon)
else:
self.sbIndicator.SetBitmap(self.images.pngLedoff)
def updateActual(self, newActual):
self.actual = newActual
if self.actual == None:
self.tcActual.SetValue("")
else:
self.tcActual.SetValue("%.1f" % self.actual)
if self.setting is None:
self.sbIndicator.SetBitmap(self.images.pngLedoff)
elif self.actual is None:
self.sbIndicator.SetBitmap(self.images.pngLedon)
elif self.setting > self.actual:
self.sbIndicator.SetBitmap(self.images.pngLedon)
else:
self.sbIndicator.SetBitmap(self.images.pngLedoff)
def doLowPreset(self, evt):
self.slThermostat.SetValue(self.lowpreset)
def doHighPreset(self, evt):
self.slThermostat.SetValue(self.highpreset)
def doThermostat(self, evt):
pass
def setTemperature(self, actualOrTarget, value):
if actualOrTarget == "target":
self.updateSetting(value)
if value == 0:
self.heaterOn = False
self.bPower.SetBitmap(self.images.pngHeatoff)
else:
self.heaterOn = True
self.bPower.SetBitmap(self.images.pngHeaton)
elif actualOrTarget == "actual":
self.updateActual(value)
| gpl-3.0 | -8,565,963,561,155,118,000 | 29.553719 | 110 | 0.70449 | false |
nanophotonics/nplab | nplab/experiment/scanning_experiment/continuous_linear_scanner.py | 1 | 11973 | from __future__ import print_function
from builtins import str
__author__ = 'alansanders'
from nplab.experiment.scanning_experiment import ScanningExperiment, TimedScan
from threading import Thread
import time
from nplab.utils.gui import *
from nplab.ui.ui_tools import UiTools
from nplab import inherit_docstring
from functools import partial
import numpy as np
class ContinuousLinearScan(ScanningExperiment, TimedScan):
@inherit_docstring(TimedScan)
@inherit_docstring(ScanningExperiment)
def __init__(self):
super(ContinuousLinearScan, self).__init__()
self.step = None
self.direction = 1
# Repeat capabilities
self._num_measurements = 0 # the number of measurements made and incremented to num_repeats
self.num_repeats = 1 # user sets this in subclass
self.hold = False # setting this to true prevents movement commands
self._last_step = 0. # this is useful when incrementing a displacement array
# Feedback attributes
self.engage_feedback = False
self.feedback_on = 'Force'
self.set_point = 0
self.feedback_gain = 1
self.feedback_min = -1
self.feedback_max = 1
@inherit_docstring(ScanningExperiment.run)
def run(self, new=True):
if isinstance(self.acquisition_thread, Thread) and self.acquisition_thread.is_alive():
print('scan already running')
return
self.init_scan()
self.acquisition_thread = Thread(target=self.scan, args=(new,))
self.acquisition_thread.start()
def set_parameter(self, value):
"""Vary the independent parameter."""
raise NotImplementedError
@inherit_docstring(ScanningExperiment.scan_function)
def scan_function(self, index):
raise NotImplementedError
def update_parameter(self, value):
"""Vary the independent parameter."""
raise NotImplementedError
@inherit_docstring(ScanningExperiment.run)
def scan(self, new=True):
self.abort_requested = False
self.open_scan()
self.status = 'acquiring data'
self.acquiring.set()
scan_start_time = time.time()
index = 0 if new else 1
while not self.abort_requested:
if self.hold or self._num_measurements < self.num_repeats:
self._last_step = 0. # used to prevent the incrementing of the displacement
else:
self.set_parameter(self.direction*self.step)
self._num_measurements = 0 # reset the number of measurements made after move
self._last_step = self.direction*self.step
self._num_measurements += 1
self.scan_function(index)
index += 1
if self.engage_feedback:
feedback_input = self.calculate_feedback_input()
direction, step = self.feedback_loop(feedback_input, self.set_point)
self.update_from_feedback(direction, step)
try:
self.update_parameter(self.direction*self.step)
except NotImplementedError:
pass
self.print_scan_time(time.time() - scan_start_time)
self.acquiring.clear()
# finish the scan
self.analyse_scan()
self.close_scan()
self.status = 'scan complete'
def calculate_feedback_input(self):
"""
Return the input to the feedback loop.
:return value: the value of the variable to feed back on
"""
raise NotImplementedError
def feedback_loop(self, feedback_input, set_point):
"""
Returns the direction and step size that should be used in the next loop iteration.
:param feedback_input: the current value of the target variable
:param set_point: the target value that should held
:returns direction, step_size:
:rtype : object
"""
e = feedback_input - set_point
output = -self.feedback_gain*e # if e>0 i.e. input > set_point for d=1 then d goes to -1
output = np.clip(output, self.feedback_min, self.feedback_max)
step_size = abs(output)
direction = np.sign(output)
return direction, step_size
def update_from_feedback(self, direction, step):
"""This function is created simply to be subclass GUI updates."""
self.direction = direction
self.step = step
@inherit_docstring(ContinuousLinearScan)
class ContinuousLinearScanQt(ContinuousLinearScan, QtCore.QObject):
direction_updated = QtCore.Signal(int)
step_updated = QtCore.Signal(float)
@inherit_docstring(ContinuousLinearScan.__init__)
def __init__(self):
ContinuousLinearScan.__init__(self)
QtCore.QObject.__init__(self)
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.update)
@inherit_docstring(ContinuousLinearScan.run)
def run(self, rate=0.1):
super(ContinuousLinearScanQt, self).run()
self.acquiring.wait()
self.timer.start(1000.*rate)
def get_qt_ui(self):
return ContinuousLinearScanUI(self)
@staticmethod
def get_qt_ui_cls():
return ContinuousLinearScanUI
@inherit_docstring(ContinuousLinearScan.update)
def update(self, force=False):
if not self.acquisition_thread.is_alive():
self.timer.stop()
@inherit_docstring(ContinuousLinearScan.update_from_feedback)
def update_from_feedback(self, direction, step):
super(ContinuousLinearScanQt, self).update_from_feedback(direction, step)
self.direction_updated.emit(self.direction)
self.step_updated.emit(self.step)
class ContinuousLinearScanUI(QtWidgets.QWidget, UiTools):
def __init__(self, cont_linear_scan):
assert isinstance(cont_linear_scan, ContinuousLinearScanQt), 'An instance of ContinuousLinearScanQt must be supplied'
super(ContinuousLinearScanUI, self).__init__()
self.linear_scan = cont_linear_scan
uic.loadUi(os.path.join(os.path.dirname(__file__), 'continuous_linear_scanner.ui'), self)
self.rate = 1./30.
self.setWindowTitle(self.linear_scan.__class__.__name__)
self.step.setValidator(QtGui.QDoubleValidator())
self.step.textChanged.connect(self.check_state)
self.step.textChanged.connect(self.on_text_change)
self.start_button.clicked.connect(self.on_click)
self.abort_button.clicked.connect(self.linear_scan.abort)
self.change_direction_button.clicked.connect(self.on_click)
self.step_up.clicked.connect(self.on_click)
self.step_down.clicked.connect(self.on_click)
self.step.setText(str(self.linear_scan.step))
self.direction.setText(str(self.linear_scan.direction))
self.num_repeats.setValidator(QtGui.QDoubleValidator())
self.num_repeats.textChanged.connect(self.check_state)
self.num_repeats.textChanged.connect(self.on_text_change)
self.hold.stateChanged.connect(self.on_state_change)
self.set_point.setValidator(QtGui.QDoubleValidator())
self.set_point.textChanged.connect(self.check_state)
self.set_point.textChanged.connect(self.on_text_change)
self.engage_feedback.stateChanged.connect(self.on_state_change)
self.linear_scan.direction_updated.connect(partial(self.update_param, 'direction'))
self.linear_scan.step_updated.connect(partial(self.update_param, 'step'))
def on_click(self):
sender = self.sender()
if sender == self.start_button:
self.linear_scan.run(self.rate)
elif sender == self.change_direction_button:
self.linear_scan.direction *= -1
self.direction.setText(str(self.linear_scan.direction))
elif sender == self.step_up:
self.step.blockSignals(True)
self.linear_scan.step *= 2
self.step.setText(str(self.linear_scan.step))
self.step.blockSignals(False)
elif sender == self.step_down:
self.step.blockSignals(True)
self.linear_scan.step /= 2
self.step.setText(str(self.linear_scan.step))
self.step.blockSignals(False)
def on_text_change(self, value):
sender = self.sender()
if sender.validator() is not None:
state = sender.validator().validate(value, 0)[0]
if state != QtGui.QValidator.Acceptable:
return
if sender == self.step:
self.linear_scan.step = float(value)
elif sender == self.num_repeats:
self.linear_scan.num_repeats = int(value)
elif sender == self.set_point:
self.linear_scan.set_point = float(value)
def on_state_change(self, state):
sender = self.sender()
if sender == self.hold:
if state == QtCore.Qt.Checked:
self.linear_scan.hold = True
elif state == QtCore.Qt.Unchecked:
self.linear_scan.hold = False
elif sender == self.engage_feedback:
if state == QtCore.Qt.Checked:
self.linear_scan.engage_feedback = True
elif state == QtCore.Qt.Unchecked:
self.linear_scan.engage_feedback = False
def update_param(self, param, value):
if param == 'direction':
self.direction.setText(str(value))
elif param == 'step':
self.step.setText(str(value))
if __name__ == '__main__':
import matplotlib
matplotlib.use('Qt4Agg')
from nplab.ui.mpl_gui import FigureCanvasWithDeferredDraw as FigureCanvas
from matplotlib.figure import Figure
import numpy as np
class DummyLinearScan(ContinuousLinearScanQt):
def __init__(self):
super(DummyLinearScan, self).__init__()
self.step = 1.
self.direction = 1.
self.fig = Figure()
self.p = None
self.x = None
self.y = None
def open_scan(self):
self.fig.clear()
self.p = 0
self.d = []
self.x = []
self.y = []
self.ax = self.fig.add_subplot(111)
def set_parameter(self, value):
self.p += value
#def update_parameter(self, value):
# self.p += value
def scan_function(self, index):
time.sleep(0.01)
self.d.append(index)
self.x.append(self.p)
self.y.append(np.sin(2*np.pi*0.01*self.p))
self.check_for_data_request(self.d, self.x, self.y)
def update(self, force=False):
super(DummyLinearScan, self).update(force)
if self.y == [] or self.fig.canvas is None:
return
if force:
data = (self.d, self.x, self.y)
else:
data = self.request_data()
if data is not False:
d, x, y = data
if not np.any(np.isfinite(y)):
return
if not self.ax.lines:
self.ax.plot(d, y)
else:
l, = self.ax.lines
l.set_data(d, y)
self.ax.relim()
self.ax.autoscale_view()
self.fig.canvas.draw()
def get_qt_ui(self):
return DummyLinearScanUI(self)
def calculate_feedback_input(self):
return self.y[-1]
class DummyLinearScanUI(ContinuousLinearScanUI):
def __init__(self, linear_scan):
super(DummyLinearScanUI, self).__init__(linear_scan)
self.canvas = FigureCanvas(self.linear_scan.fig)
self.canvas.setMaximumSize(300,300)
self.layout.addWidget(self.canvas)
self.resize(self.sizeHint())
ls = DummyLinearScan()
app = get_qt_app()
gui = ls.get_qt_ui()
gui.rate = 1./30.
gui.show()
sys.exit(app.exec_())
| gpl-3.0 | -8,910,935,309,353,352,000 | 36.889241 | 125 | 0.612211 | false |
ernw/knxmap | knxmap/bus/monitor.py | 1 | 6078 | import logging
import codecs
from knxmap.bus.tunnel import KnxTunnelConnection
from knxmap.data.constants import *
from knxmap.messages import parse_message, KnxConnectRequest, KnxConnectResponse, \
KnxTunnellingRequest, KnxTunnellingAck, KnxConnectionStateResponse, \
KnxDisconnectRequest, KnxDisconnectResponse
LOGGER = logging.getLogger(__name__)
class KnxBusMonitor(KnxTunnelConnection):
"""Implementation of bus_monitor_mode and group_monitor_mode."""
def __init__(self, future, loop=None, group_monitor=True):
super(KnxBusMonitor, self).__init__(future, loop=loop)
self.group_monitor = group_monitor
def connection_made(self, transport):
self.transport = transport
self.peername = self.transport.get_extra_info('peername')
self.sockname = self.transport.get_extra_info('sockname')
if self.group_monitor:
# Create a TUNNEL_LINKLAYER layer request (default)
connect_request = KnxConnectRequest(sockname=self.sockname)
else:
# Create a TUNNEL_BUSMONITOR layer request
connect_request = KnxConnectRequest(sockname=self.sockname,
layer_type='TUNNEL_BUSMONITOR')
LOGGER.trace_outgoing(connect_request)
self.transport.sendto(connect_request.get_message())
# Send CONNECTIONSTATE_REQUEST to keep the connection alive
self.loop.call_later(50, self.knx_keep_alive)
def datagram_received(self, data, addr):
knx_message = parse_message(data)
if not knx_message:
LOGGER.error('Invalid KNX message: {}'.format(data))
self.knx_tunnel_disconnect()
self.transport.close()
self.future.set_result(None)
return
knx_message.set_peer(addr)
LOGGER.trace_incoming(knx_message)
if isinstance(knx_message, KnxConnectResponse):
if not knx_message.ERROR:
if not self.tunnel_established:
self.tunnel_established = True
self.communication_channel = knx_message.communication_channel
else:
if not self.group_monitor and knx_message.ERROR_CODE == 0x23:
LOGGER.error('Device does not support BUSMONITOR, try --group-monitor instead')
else:
LOGGER.error('Connection setup error: {}'.format(knx_message.ERROR))
self.transport.close()
self.future.set_result(None)
elif isinstance(knx_message, KnxTunnellingRequest):
self.print_message(knx_message)
if CEMI_PRIMITIVES[knx_message.cemi.message_code] == 'L_Data.con' or \
CEMI_PRIMITIVES[knx_message.cemi.message_code] == 'L_Data.ind' or \
CEMI_PRIMITIVES[knx_message.cemi.message_code] == 'L_Busmon.ind':
tunnelling_ack = KnxTunnellingAck(
communication_channel=knx_message.communication_channel,
sequence_count=knx_message.sequence_counter)
LOGGER.trace_outgoing(tunnelling_ack)
self.transport.sendto(tunnelling_ack.get_message())
elif isinstance(knx_message, KnxTunnellingAck):
self.print_message(knx_message)
elif isinstance(knx_message, KnxConnectionStateResponse):
# After receiving a CONNECTIONSTATE_RESPONSE schedule the next one
self.loop.call_later(50, self.knx_keep_alive)
elif isinstance(knx_message, KnxDisconnectRequest):
connect_response = KnxDisconnectResponse(communication_channel=self.communication_channel)
self.transport.sendto(connect_response.get_message())
self.transport.close()
self.future.set_result(None)
elif isinstance(knx_message, KnxDisconnectResponse):
self.transport.close()
self.future.set_result(None)
def print_message(self, message):
"""A generic message printing function. It defines
a format for the monitoring modes."""
assert isinstance(message, KnxTunnellingRequest)
        cemi = tpci = apci = {}
if message.cemi:
cemi = message.cemi
if cemi.tpci:
tpci = cemi.tpci
if cemi.apci:
apci = cemi.apci
        dst_addr = None
        if cemi.knx_destination and cemi.extended_control_field and \
                cemi.extended_control_field.get('address_type'):
            dst_addr = message.parse_knx_group_address(cemi.knx_destination)
        elif cemi.knx_destination:
            dst_addr = message.parse_knx_address(cemi.knx_destination)
if self.group_monitor:
format = ('[ chan_id: {chan_id}, seq_no: {seq_no}, message_code: {msg_code}, '
'source_addr: {src_addr}, dest_addr: {dst_addr}, tpci_type: {tpci_type}, '
'tpci_seq: {tpci_seq}, apci_type: {apci_type}, apci_data: {apci_data} ]').format(
chan_id=message.communication_channel,
seq_no=message.sequence_counter,
msg_code=CEMI_PRIMITIVES.get(cemi.message_code),
src_addr=message.parse_knx_address(cemi.knx_source),
dst_addr=dst_addr,
tpci_type=_CEMI_TPCI_TYPES.get(tpci.tpci_type),
tpci_seq=tpci.sequence,
apci_type=_CEMI_APCI_TYPES.get(apci.apci_type),
apci_data=apci.apci_data)
else:
format = ('[ chan_id: {chan_id}, seq_no: {seq_no}, message_code: {msg_code}, '
'timestamp: {timestamp}, raw_frame: {raw_frame} ]').format(
chan_id=message.communication_channel,
seq_no=message.sequence_counter,
msg_code=CEMI_PRIMITIVES.get(cemi.message_code),
timestamp=codecs.encode(cemi.additional_information.get('timestamp'), 'hex'),
raw_frame=codecs.encode(cemi.raw_frame, 'hex'))
LOGGER.info(format)
| gpl-3.0 | 4,277,655,511,188,420,000 | 50.508475 | 103 | 0.608753 | false |
sungpil/bigshow | com/sundaytoz/bigshow/chart.py | 1 | 4332 | import ast
import time
from com.sundaytoz.bigshow import models
from com.sundaytoz.bigshow.resources import Resource
from com.sundaytoz.cache import Cache
from com.sundaytoz.logger import Logger
class Chart:
TTL_LAST_JOB = 3600
TTL_LAST_RESULT = 2592000
__data_adapters = {}
@staticmethod
def query(chart_id, resource, query_type, query, query_params):
Logger.debug('chart_id={0}, resource={1}, query_type={2}, query={3}, query_params={4}'
.format(chart_id, resource, query_type, query, query_params))
adapter = Resource.get(resource_id=resource)
if not adapter:
return None
else:
job_id = Chart.get_job_id(chart_id)
adapter.query(job_id=job_id, query_type=query_type, query=query, query_params=query_params)
return job_id
@staticmethod
def query_sync(chart_id, resource, query_type, query, query_params):
job_id = Chart.query(chart_id=chart_id, resource=resource,
query_type=query_type, query=query, query_params=query_params)
if not job_id:
return None, {'message': 'fail to initialize job'}
adapter = Resource.get(resource_id=resource)
if not adapter:
return None, {'message': 'fail to initialize resources'}
        retry_count = 100
        while retry_count > 0:
            status, results, error = adapter.get_result(job_id)
            if 'DONE' == status:
                return results, error
            retry_count -= 1  # decrement so the wait actually times out
            time.sleep(10)
        return None, {'message': 'timed out waiting for job {0}'.format(job_id)}
@staticmethod
def get_result(chart_id, from_cache=True):
Logger().debug("get_result: chart_id={chart_id}, from_cache={from_cache}"
.format(chart_id=chart_id, from_cache=from_cache))
last_job_key = Chart.get_job_key(chart_id=chart_id)
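        # Two cache layers: the "last job" pointer lives for an hour
        # (TTL_LAST_JOB); a finished job's result lives for thirty days
        # (TTL_LAST_RESULT).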
if from_cache is True:
last_job = Cache().get(last_job_key)
else:
last_job = None
if not last_job:
chart = models.Chart.get(chart_id, ['resource,query_type,query,query_params'])
new_job = {'id': Chart.get_job_id(chart_id), 'resource': chart['resource']}
adapter = Resource.get(resource_id=chart['resource'])
adapter.query(job_id=new_job['id'], query_type=chart['query_type'],
query=chart['query'], query_params=chart['query_params'])
Cache().set(last_job_key, new_job, Chart.TTL_LAST_JOB)
return 'RUNNING', None, None
else:
last_job = ast.literal_eval(last_job)
last_job_id = last_job['id']
last_job_result = Cache().get(last_job_id)
if last_job_result:
last_job_result = ast.literal_eval(last_job_result)
return 'DONE', last_job_result['result'], last_job_result['error']
else:
adapter = Resource.get(resource_id=last_job['resource'])
if not adapter.exists(job_id=last_job_id):
chart = models.Chart.get(chart_id, ['resource,query_type,query,query_params'])
adapter.query_async(job_id=last_job['id'], query_type=chart['query_type'],
query=chart['query'], query_params=chart['query_params'])
Cache().set(last_job_key, last_job, Chart.TTL_LAST_JOB)
return 'RUNNING', None, None
else:
status, results, error = adapter.get_result(last_job_id)
if 'DONE' == status:
Cache().set(last_job_id, {'result': results, 'error': error}, Chart.TTL_LAST_RESULT)
return status, results, error
@staticmethod
def del_cache(chart_id):
Cache().delete(Chart.get_job_key(chart_id=chart_id))
@staticmethod
def get_cached_result(last_job_key):
last_job_id = Cache().get(last_job_key)
if last_job_id:
return last_job_id, Cache().get(last_job_id)
else:
return None, None
@staticmethod
def get_job_id(chart_id):
return "chart-{chart_id}-{time}".format(chart_id=chart_id, time=int(time.time()))
@staticmethod
def get_job_key(chart_id):
return "last_job:{chart_id}".format(chart_id=chart_id)
| mit | -5,101,964,833,741,713,000 | 41.058252 | 108 | 0.572253 | false |
ktbyers/netmiko | netmiko/extreme/extreme_exos.py | 1 | 2550 | """Extreme support."""
import time
import re
from netmiko.no_config import NoConfig
from netmiko.cisco_base_connection import CiscoSSHConnection
class ExtremeExosBase(NoConfig, CiscoSSHConnection):
"""Extreme Exos support.
Designed for EXOS >= 15.0
"""
def session_preparation(self):
self._test_channel_read()
self.set_base_prompt()
self.disable_paging(command="disable clipaging")
self.send_command_timing("disable cli prompting")
# Clear the read buffer
time.sleep(0.3 * self.global_delay_factor)
self.clear_buffer()
def set_base_prompt(self, *args, **kwargs):
"""
Extreme attaches an id to the prompt. The id increases with every command.
        It needs to be stripped off to match the prompt, e.g.
testhost.1 #
testhost.2 #
testhost.3 #
If new config is loaded and not saved yet, a '* ' prefix appears before the
        prompt, e.g.
* testhost.4 #
* testhost.5 #
"""
cur_base_prompt = super().set_base_prompt(*args, **kwargs)
# Strip off any leading * or whitespace chars; strip off trailing period and digits
match = re.search(r"[\*\s]*(.*)\.\d+", cur_base_prompt)
if match:
self.base_prompt = match.group(1)
return self.base_prompt
else:
return self.base_prompt
def send_command(self, *args, **kwargs):
"""Extreme needs special handler here due to the prompt changes."""
# Change send_command behavior to use self.base_prompt
kwargs.setdefault("auto_find_prompt", False)
# refresh self.base_prompt
self.set_base_prompt()
return super().send_command(*args, **kwargs)
def check_config_mode(self, check_string="#"):
"""Checks whether in configuration mode. Returns a boolean."""
return super().check_config_mode(check_string=check_string)
def save_config(
self, cmd="save configuration primary", confirm=False, confirm_response=""
):
"""Saves configuration."""
return super().save_config(
cmd=cmd, confirm=confirm, confirm_response=confirm_response
)
class ExtremeExosSSH(ExtremeExosBase):
pass
class ExtremeExosTelnet(ExtremeExosBase):
def __init__(self, *args, **kwargs):
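        # Telnet needs an explicit CR+LF line terminator; honour a caller
        # override but default to "\r\n".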
default_enter = kwargs.get("default_enter")
kwargs["default_enter"] = "\r\n" if default_enter is None else default_enter
super().__init__(*args, **kwargs)
| mit | 5,777,244,633,024,680,000 | 31.692308 | 91 | 0.618824 | false |
bond-anton/ScientificProjects | BDProjects/EntityManagers/LogManager.py | 1 | 3830 | from __future__ import division, print_function
from sqlalchemy import func
from BDProjects.Entities import LogCategory, Log
from BDProjects.Entities import Project
from BDProjects.Entities import Session
from BDProjects.Entities import User
from .EntityManager import EntityManager
default_log_categories = {'Information': 'Informational messages',
'Warning': 'Warning messages',
'Error': 'Error messages'}
class LogManager(EntityManager):
def __init__(self, session_manager, echo=True):
self.echo = echo
super(LogManager, self).__init__(session_manager)
def create_log_category(self, category, description=None):
log_category, category_exists = self._check_category_name(category, description)
if log_category and not category_exists:
if self.session_manager.session_data is not None:
log_category.session_id = self.session_manager.session_data.id
self.session.add(log_category)
self.session.commit()
if log_category.category not in default_log_categories:
record = 'Log category %s successfully created' % log_category.category
self.log_record(record=record, category='Information')
return log_category
else:
self.session.rollback()
if log_category.category not in default_log_categories:
record = 'Log category %s is already registered' % log_category.category
self.log_record(record=record, category='Warning')
return self.session.query(LogCategory).filter(LogCategory.category == log_category.category).one()
def log_record(self, record, category=None):
log_category, category_exists = self._check_category_name(category)
category_id, project_id, session_id = None, None, None
if not category_exists:
record = 'Create log category first'
self.log_record(record=record, category='Warning')
else:
category_id = log_category.id
if self.session_manager.project is not None:
if not isinstance(self.session_manager.project, Project):
raise ValueError('provide a Project instance or None')
project_id = self.session_manager.project.id
if self.session_manager.session_data is not None:
if not isinstance(self.session_manager.session_data, Session):
raise ValueError('provide a valid Session or None')
session_id = self.session_manager.session_data.id
log = Log(record=record, category_id=category_id, project_id=project_id, session_id=session_id)
self.session.add(log)
self.session.commit()
if self.echo:
login_length = self._get_max_login_length()
user_login = self.session_manager.user.login
user_login = '@' + user_login + ' ' * (login_length - len(user_login))
print('[%s] %s: %s' % (log_category.category.upper()[:4], user_login, record))
def _get_max_login_length(self):
return self.session.query(func.max(func.length(User.login))).one()[0]
def _check_category_name(self, category, description=None):
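        # Returns (log_category, category_exists): the LogCategory object
        # (an existing row when found, a new unsaved one otherwise, or None
        # if `category` is not a string) and whether it already exists.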
category_exists = False
if isinstance(category, str):
log_category = LogCategory(category=category, description=description)
existing_category = self.session.query(LogCategory).filter(
LogCategory.category == log_category.category).all()
if existing_category:
log_category = existing_category[0]
category_exists = True
else:
log_category = None
return log_category, category_exists
| apache-2.0 | 7,475,034,101,805,334,000 | 46.875 | 110 | 0.629243 | false |
kyokyos/bioinform | GlycemiaIndex.py | 1 | 2898 | #糖尿病血糖指数分析
#glycemia index
#age_analysis,
# input values
glycemia_index=7
diabetes_type="type1" #type1,type2,none
age=25 # in years; mapped to "child"/"adult" by age_analysis below
test_time="before_meal" #before_meal, twoHour_after_meal
#other values
age_index="child"
result="normal"
def age_analysis(age):
if age<18:
return "child"
else:
return "adult"
#age_index
age_index=age_analysis(age)
#print "age_index:",age_index
#analysis if the glycemia index from you is norm
def glycemia_target(age_index,diabetes_type,glycemia_index,test_time):
#none diabetes
if diabetes_type=="none":
#before meal
if test_time=="before_meal":
if 3.5<=glycemia_index<=5.5:
return "normal"
elif glycemia_index<3.5:
return "hypoglycemia"
else:
return "hyperglycemia"
#after meal
if test_time=="twoHour_after_meal":
if glycemia_index<=8:
return "normal"
else:
return "hyperglycemia"
# type2
if diabetes_type=="type2":
#before meal
if test_time=="before_meal":
if 4<=glycemia_index<=7:
return "normal"
            elif glycemia_index<4:  # below the 4-7 normal range for type 2
                return "hypoglycemia"
else:
return "hyperglycemia"
#after meal
if test_time=="twoHour_after_meal":
if glycemia_index<=8.5:
return "normal"
else:
return "hyperglycemia"
#type1
if diabetes_type=="type1":
#child
if age_index=="child":
#before meal
if test_time=="before_meal":
if 3.5<=glycemia_index<=5.5:
return "normal"
elif glycemia_index<3.5:
return "hypoglycemia"
else:
return "hyperglycemia"
#after meal
if test_time=="twoHour_after_meal":
if glycemia_index<=8:
return "normal"
else:
return "hyperglycemia"
#adult
if age_index=="adult":
#before meal
if test_time=="before_meal":
if 4<=glycemia_index<=7:
return "normal"
elif glycemia_index<4:
return "hypoglycemia"
else:
return "hyperglycemia"
#after meal
if test_time=="twoHour_after_meal":
if glycemia_index<=9:
return "normal"
else:
return "hyperglycemia"
result=glycemia_target(age_index,diabetes_type,glycemia_index,test_time)
if result!="normal":
print "warning:"
print "result:",result
| unlicense | -4,958,989,030,931,958,000 | 26.169811 | 72 | 0.495833 | false |
memespring/open-notices | open_notices/apps/notices/tests.py | 1 | 6463 | from django.test import TestCase
from django.test import Client
from rest_framework.test import APIClient
from notices import models
from django.contrib.auth import get_user_model
from rest_framework.authtoken.models import Token
from django.core.exceptions import ValidationError
from datetime import datetime
class NoticeModelTestCase(TestCase):
def setUp(self):
UserModel = get_user_model()
self.user = UserModel(email='[email protected]')
self.user.set_password('notasecret')
self.user.save()
def test_invalid_date_range(self):
with self.assertRaises(ValidationError):
notice = models.Notice()
notice.title = 'test title'
notice.details = 'test details'
notice.location = 'SRID=3857;POINT (-284821.3533571999869309 6865433.3731604004278779)'
notice.starts_at = datetime(2016, 1, 1)
notice.ends_at = datetime(2012, 1, 1)
notice.timezone = "Europe/London"
notice.user = self.user
notice.save()
class NoticeAPIGeojsonTestCase(TestCase):
def get_valid_data(self):
return {'title': 'test title', 'location': {"type":"Point","coordinates":[-0.09430885313565737,51.43326585306407]}, 'tags': [],"starts_at":"2016-01-01T11:00:00","ends_at":"2016-01-02T12:00:00", "timezone": "Europe/London"}
def setUp(self):
self.client = APIClient()
UserModel = get_user_model()
self.user = UserModel(email='[email protected]')
self.user.set_password('notasecret')
self.user.save()
def test_list(self):
response = self.client.get('/notices.geojson')
self.assertEqual(response.status_code, 200)
def test_create_method_not_allowed(self):
data = self.get_valid_data()
token = Token.objects.get_or_create(user=self.user)[0]
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post('/notices/new.geojson', data, format='json')
self.assertEqual(response.status_code, 405)
class NoticeAPITestCase(TestCase):
def setUp(self):
self.client = APIClient()
UserModel = get_user_model()
self.user = UserModel(email='[email protected]')
self.user.set_password('notasecret')
self.user.save()
Token.objects.create(user=self.user)
def get_valid_data(self):
return {'title': 'test title', 'location': {"type":"Point","coordinates":[-0.09430885313565737,51.43326585306407]}, 'tags': [],"starts_at":"2016-01-01T11:00:00","ends_at":"2016-01-02T12:00:00", "timezone": "Europe/London"}
    def test_create_get_method_not_allowed(self):
response = self.client.get('/notices/new.json')
self.assertEqual(response.status_code, 405)
def test_create_unauthorised(self):
response = self.client.post('/notices/new.json')
self.assertEqual(response.status_code, 401)
def test_create_authorised_empty(self):
token = Token.objects.get_or_create(user=self.user)[0]
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post('/notices/new.json')
self.assertEqual(response.status_code, 400)
def test_create_authorised_valid(self):
data = self.get_valid_data()
token = Token.objects.get_or_create(user=self.user)[0]
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post('/notices/new.json', data, format='json')
self.assertEqual(response.status_code, 201)
def test_create_non_json_denied(self):
data = self.get_valid_data()
token = Token.objects.get_or_create(user=self.user)[0]
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post('/notices/new.geojson', data, format='json')
self.assertEqual(response.status_code, 405)
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
response = self.client.post('/notices/new.csv', data, format='json')
self.assertEqual(response.status_code, 405)
class NoticeTestCase(TestCase):
def setUp(self):
# Every test needs a client.
self.client = Client()
#create a user for use later
UserModel = get_user_model()
self.user = UserModel(email='[email protected]')
self.user.set_password('notasecret')
self.user.save()
def test_list(self):
response = self.client.get('/notices/')
self.assertEqual(response.status_code, 200)
def test_view_notice(self):
notice = models.Notice()
notice.title = 'test title'
notice.details = 'test details'
notice.location = 'SRID=3857;POINT (-284821.3533571999869309 6865433.3731604004278779)'
notice.starts_at = datetime(2016, 1, 1)
notice.ends_at = datetime(2016, 1, 1)
notice.timezone = "Europe/London"
notice.user = self.user
notice.save()
response = self.client.get('/notices/%s/' % notice.pk)
self.assertContains(response, 'test title', 2, 200)
self.assertEqual(response.status_code, 200)
def test_create_unauthorised(self):
response = self.client.post('/notices/new', follow=True)
self.assertRedirects(response, '/signin/?next=/notices/new')
def test_create_empty(self):
self.client.login(email='[email protected]', password='notasecret')
response = self.client.post('/notices/new')
self.assertContains(response, "This field is required", 1, 200)
def test_create_valid(self):
self.client.login(email='[email protected]', password='notasecret')
#information
data = {'title': 'Test notice', 'details': 'It is a test'}
response = self.client.post('/notices/new', data, follow=True)
self.assertRedirects(response, '/notices/new/location')
#location
data = {'location': 'SRID=3857;POINT (-284821.3533571999869309 6865433.3731604004278779)'}
response = self.client.post('/notices/new/location', data, follow=True)
self.assertRedirects(response, '/notices/new/datetime')
#datetime
data = {'starts_at': '2016-01-01', 'ends_at': '2016-01-02', 'timezone': 'Europe/London'}
response = self.client.post('/notices/new/datetime', data)
self.assertEqual(response.status_code, 302)
| agpl-3.0 | 1,189,363,066,844,634,600 | 39.647799 | 230 | 0.652793 | false |
dincamihai/liwrapper | tests/test_api.py | 1 | 2167 | import mock
import json
import pytest
import responses
from apiwrapper.handler import JMXHandler
from apiwrapper import exceptions
@pytest.fixture(scope='function')
def create_scenario_response_400(request):
return dict(
method=responses.POST,
url='https://api.loadimpact.com/v2/user-scenarios',
status=400,
content_type='application/json',
body=json.dumps(
{u'message': u'JSON parse error - No JSON object could be decoded'}
)
)
@pytest.fixture(scope='function')
def create_scenario_response_401(request):
return dict(
method=responses.POST,
url='https://api.loadimpact.com/v2/user-scenarios',
status=401,
content_type='application/json',
body=json.dumps({u'message': u'Invalid credentials provided'})
)
def test_create_scenario_400(wrapper, create_scenario_response_400):
with pytest.raises(exceptions.BadRequestException) as exc:
with responses.RequestsMock() as rsps:
rsps.add(**create_scenario_response_400)
wrapper.create_scenario()
assert exc.value.message == "Could not create scenario. Bad payload."
def test_create_scenario_401(wrapper, create_scenario_response_401):
with pytest.raises(exceptions.MissingAPITokenException) as exc:
with responses.RequestsMock() as rsps:
rsps.add(**create_scenario_response_401)
wrapper.create_scenario()
assert exc.value.message == (
"Could not create scenario. Missing or invalid API token."
"Make sure LOAD_IMPACT_TOKEN env var is set."
)
def test_create_scenario(wrapper, create_scenario_response):
with responses.RequestsMock() as rsps:
rsps.add(**create_scenario_response)
scenario_id = wrapper.create_scenario()
assert scenario_id == 82431
def test_create_config(wrapper, create_config_response):
with responses.RequestsMock() as rsps:
rsps.add(**create_config_response)
config_id = wrapper.create_test_config(1)
assert config_id == 3204290
def test_wrapper_data():
wrapper = JMXHandler('tests/sample.jmx')
assert wrapper.data
| gpl-2.0 | 5,730,927,933,193,715,000 | 30.867647 | 79 | 0.681126 | false |
Garcia1008/tournament | changectx/changectx.py | 1 | 2307 | import discord
from discord.ext import commands
from .utils import checks
import time
from random import randint
class ChangeCTX:
def __init__(self, bot):
self.bot = bot
self.context = None
self.impersonate = None
@checks.is_owner()
@commands.command(name="setcontext", pass_context=True)
async def set_context(self, ctx, channel_id: str):
channel = self.bot.get_channel(channel_id)
if channel is None:
return await self.bot.say("Channel not found")
if channel.type != discord.ChannelType.text:
return await self.bot.say("Try again with a text channel")
self.context = channel
await self.bot.say("Context set to channel {0.name}".format(channel))
@checks.is_owner()
@commands.command(name="setauthor", pass_context=True)
async def set_impersonate(self, ctx, user_id: str=None):
self.impersonate = user_id
await self.bot.say("Impersonate ID set")
@checks.is_owner()
@commands.command(name="runincontext", pass_context=True)
async def run_in_context(self, ctx, *, com: str):
if self.context is None and self.impersonate is None:
return await \
self.bot.say("Hint: `{0.prefix}setcontext`"
"and/or `{0.prefix}setauthor`".format(ctx))
chan = ctx.message.channel if self.context is None \
else self.context
try:
server = chan.server
prefix = self.bot.settings.get_prefixes(server)[0]
except AttributeError:
return await self.bot.say("Are you sure I can see that channel?")
author_id = ctx.message.author.id if self.impersonate is None \
else self.impersonate
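        # Build a minimal fake message payload; `nonce` and `id` are random
        # stand-ins for Discord snowflakes so dispatch treats it as genuine.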
data = \
{'timestamp': time.strftime("%Y-%m-%dT%H:%M:%S%z", time.gmtime()),
'content': prefix + com,
'channel': chan,
'channel_id': chan.id,
'author': {'id': author_id},
'nonce': randint(-2**32, (2**32) - 1),
'id': randint(10**(17), (10**18) - 1),
'reactions': []
}
message = discord.Message(**data)
self.bot.dispatch('message', message)
def setup(bot):
n = ChangeCTX(bot)
bot.add_cog(n)
| mit | 6,471,981,614,218,644,000 | 30.60274 | 78 | 0.579541 | false |
Ensembl/ensembl-hive | wrappers/python3/eHive/__init__.py | 1 | 1250 |
# Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2021] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We take all the interesting classes from both modules, i.e. BaseRunnable and all the exceptions
from eHive.Process import BaseRunnable, CompleteEarlyException, JobFailedException, __version__
from eHive.Params import ParamException, ParamNameException, ParamSubstitutionException, ParamInfiniteLoopException, ParamWarning
__all__ = ['BaseRunnable', 'CompleteEarlyException', 'JobFailedException', 'ParamException', 'ParamNameException', 'ParamSubstitutionException', 'ParamInfiniteLoopException', 'ParamWarning', '__version__']
| apache-2.0 | 5,271,706,095,723,022,000 | 55.818182 | 205 | 0.7904 | false |
DynamoDS/Coulomb | WorkspaceTools/uses_list_at_level.py | 1 | 2340 | # This tool walks over a dataset and reports how many workspaes have at least one use of
# list at level
import gzip
import json
import base64
import sys
import traceback
import time
from os import listdir
from os.path import isfile, join
from collections import defaultdict
import os
VERBOSE = True
def log(s):
if VERBOSE:
print time.strftime("%Y-%m-%d %H:%M:%S"), s
if len(sys.argv) != 3:
print "Usage: python export_workspaces.py path_to_data out_path"
print "Walk over sessions files to export whether it uses"
print "list at level"
exit(1)
path = sys.argv[1]
outPath = sys.argv[2]
linesCount = 0
dataLinesCount = 0
err = 0
dtWsCount = {}
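# dtWsCount maps a server date to {False: workspaces without list-at-level,
#                                  True: workspaces with it}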
def updateResultFile():
outF = open(outPath, 'w')
outF.write("Date, No L@L, L@L\n")
for k in sorted(dtWsCount):
v = dtWsCount[k]
outF.write(k + ", " + str(v[False]) + ", " + str(v[True]) + "\n")
outF.flush()
log("Start")
files = [ f for f in listdir(path) if isfile(join(path,f)) ]
for filePath in files:
f = gzip.open (join(path,filePath));
for ln in f:
linesCount += 1
if linesCount % 1000 == 0:
updateResultFile()
log (str(linesCount))
# log (str(linesCount) + "\t" + str(dataLinesCount) + "\t" + str(err) + "\tNew sessions:\t" + str(len(newSessionIDSet)) + "\tUpdated sessions:\t" + str(len(sessionIDSet)))
try:
if not ln.startswith("{"):
continue
dataLinesCount += 1
data = json.loads(ln)
session = data["SessionID"]
serverDate = data["b75e7a7f_ServerDate"]
tag = data["Tag"]
if (tag.startswith("Workspace")):
data = base64.b64decode(data["Data"])
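                # Workspace XML marks list-at-level nodes with useLevels="True".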
                usesListAtLevel = data.find('useLevels="True"') > -1
if not dtWsCount.has_key(serverDate):
dtWsCount[serverDate] = {}
dtWsCount[serverDate][False] = 0
dtWsCount[serverDate][True] = 0
dtWsCount[serverDate][usesListAtLevel] += 1
# print (dtWsCount)
# print (session + ",\t" + serverDate + ",\t")
        except Exception:  # `except err:` would raise a TypeError when triggered
            err += 1
            print err
log("Printing results")
| mit | 695,384,233,009,811,600 | 25.292135 | 174 | 0.560684 | false |