alladdin/plugin.video.primaplay | default.py | gpl-2.0
# -*- coding: utf-8 -*-
import os
import sys
import xbmc
import xbmcgui
import xbmcplugin
import xbmcaddon
import traceback
import time
from xbmcplugin import addDirectoryItem
from libPrimaPlay import PrimaPlay
import urllib
from urlparse import parse_qs
_addon_ = xbmcaddon.Addon('plugin.video.primaplay')
_scriptname_ = _addon_.getAddonInfo('name')
_version_ = _addon_.getAddonInfo('version')
###############################################################################
def log(msg, level=xbmc.LOGDEBUG):
if type(msg).__name__ == 'unicode':
msg = msg.encode('utf-8')
xbmc.log("[%s] %s" % (_scriptname_, msg.__str__()), level)
def logDbg(msg):
log(msg, level=xbmc.LOGDEBUG)
def logErr(msg):
log(msg, level=xbmc.LOGERROR)
def _exception_log(exc_type, exc_value, exc_traceback):
logErr(traceback.format_exception(exc_type, exc_value, exc_traceback))
xbmcgui.Dialog().notification(_scriptname_, _toString(exc_value), xbmcgui.NOTIFICATION_ERROR)
def _toString(text):
if type(text).__name__ == 'unicode':
output = text.encode('utf-8')
else:
output = str(text)
return output
try:
_icon_ = xbmc.translatePath(os.path.join(_addon_.getAddonInfo('path'), 'icon.png'))
_handle_ = int(sys.argv[1])
_baseurl_ = sys.argv[0]
    _hd_enabled = False
if (_addon_.getSetting('hd_enabled') == 'true'): _hd_enabled = True
_play_parser = PrimaPlay.Parser(hd_enabled=_hd_enabled)
_play_account = None
if (_addon_.getSetting('account_enabled') == 'true'):
_play_account = PrimaPlay.Account( _addon_.getSetting('account_email'), _addon_.getSetting('account_password'), _play_parser )
xbmcplugin.setContent(_handle_, 'tvshows')
def main_menu(pageurl, list_only = False):
page = _play_parser.get_page(pageurl+'?strana=1')
if not list_only:
if page.player:
add_player(page.player)
else:
add_search_menu()
add_account_menu()
add_filters(page, pageurl)
for video_list in page.video_lists:
if video_list.title: add_title(video_list)
add_item_list(video_list.item_list)
if video_list.next_link: add_next_link(video_list.next_link)
def shows_menu(pageurl, list_only = False):
page = _play_parser.get_shows(pageurl)
for video_list in page.video_lists:
if video_list.title: add_show(video_list)
add_item_list(video_list.item_list)
if video_list.next_link: add_next_link(video_list.next_link)
def show_navigation(pageurl, list_only = False):
page = _play_parser.get_show_navigation(pageurl)
for video_list in page.video_lists:
if video_list.title: add_title(video_list)
def next_menu(nexturl):
next_list = _play_parser.get_next_list(nexturl)
add_item_list(next_list.list)
if next_list.next_link: add_next_link(next_list.next_link)
def search():
keyboard = xbmc.Keyboard('',u'Hledej')
keyboard.doModal()
if (not keyboard.isConfirmed()): return
search_query = keyboard.getText()
if len(search_query) <= 1: return
main_menu(_play_parser.get_search_url(search_query))
def account():
if not _play_account.login():
li = list_item('[B]Chyba přihlášení![/B] Zkontrolujte e-mail a heslo.')
xbmcplugin.addDirectoryItem(handle=_handle_, url='#', listitem=li, isFolder=True)
return
main_menu(_play_account.video_list_url, True)
def remove_filter(removefilterurl):
link = _play_parser.get_redirect_from_remove_link(removefilterurl)
main_menu(link)
def manage_filter(pageurl, filterid):
if filterid is None:
main_menu(pageurl)
return
page = _play_parser.get_page(pageurl)
dlg = xbmcgui.Dialog()
filter_list = page.filter_lists[filterid]
add_id = dlg.select(filter_list.title, map(lambda x: x.title, filter_list.item_list))
if add_id < 0:
main_menu(pageurl)
return
main_menu(filter_list.item_list[add_id].link)
def add_filters(page, pageurl):
if page.current_filters:
li = list_item(u'[B]Odstranit nastavené filtry: [/B]' + ", ".join(map(lambda x: x.title, page.current_filters.item_list)))
url = get_menu_link( action = 'FILTER-REMOVE', linkurl = page.current_filters.link )
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=True)
for filterid, filter_list in enumerate(page.filter_lists):
li = list_item(u'[B]Nastav filtr: [/B]' + filter_list.title)
url = get_menu_link( action = 'FILTER-MANAGE', linkurl = pageurl, filterid = filterid )
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=True)
def add_search_menu():
li = list_item(u'[B]Hledej[/B]')
url = get_menu_link( action = 'SEARCH' )
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=True)
def add_account_menu():
if _play_account is None: return
li = list_item(u'[B]Můj PLAY[/B]')
url = get_menu_link( action = 'ACCOUNT' )
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=True)
def add_show(video_list):
url = '#'
thumbnail = None
if video_list.link:
url = get_menu_link( action = 'SHOW-NAV', linkurl = video_list.link )
if video_list.thumbnail:
thumbnail = video_list.thumbnail
li = list_item(video_list.title, thumbnail)
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=True)
def add_title(video_list):
li = list_item('[B]'+video_list.title+'[/B]')
url = '#'
if video_list.link:
url = get_menu_link( action = 'PAGE', linkurl = video_list.link )
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=True)
def add_item_list(item_list):
for item in item_list:
li = list_item(item.title, item.image_url, item.description, item.broadcast_date, item.year)
url = item.link
if item.isFolder: url = get_menu_link( action = 'PAGE', linkurl = item.link )
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=item.isFolder)
def add_next_link(next_link):
li = list_item(u'Další stránka')
url = get_menu_link( action = 'PAGE-NEXT', linkurl = next_link )
xbmcplugin.addDirectoryItem(handle=_handle_, url=url, listitem=li, isFolder=True)
def add_player(player):
li = list_item(u"[B]Přehraj:[/B] "+player.title, player.image_url, player.description, player.broadcast_date, player.year)
xbmcplugin.addDirectoryItem(handle=_handle_, url=player.video_link, listitem=li, isFolder=False)
def play_video(link):
product_id = _play_parser.get_productID(link)
video = _play_parser.get_video(product_id)
        if video.link is None:
            raise Exception('Video není dostupné')  # "Video is not available"
video_item = xbmcgui.ListItem(video.title)
video_item.setInfo('video', {'Title': video.title})
video_item.setThumbnailImage(video.image_url)
player = xbmc.Player()
player.play(video.link, video_item)
def list_item(label, thumbnail = None, description = None, broadcast_date = None, year = None):
li = xbmcgui.ListItem(label)
liVideo = {
'title': label,
'plot': description,
'year': year,
'aired': broadcast_date
}
if thumbnail:
li.setThumbnailImage(thumbnail)
li.setArt({'poster': thumbnail, 'fanart': thumbnail})
li.setInfo("video", liVideo)
return li
def get_menu_link(**kwargs):
return _baseurl_ + "?" + urllib.urlencode(kwargs)
def get_params():
if len(sys.argv[2])<2: return []
encoded_query = sys.argv[2].lstrip('?')
decoded_params = parse_qs(encoded_query)
param = {}
for key in decoded_params:
if len(decoded_params[key]) <= 0: continue
param[key] = decoded_params[key][0]
return param
def assign_params(params):
for param in params:
try:
globals()[param] = params[param]
except:
pass
action = None
linkurl = None
filterid = None
params = get_params()
assign_params(params)
logDbg("PrimaPlay Parameters!!!")
logDbg("action: "+str(action))
logDbg("linkurl: "+str(linkurl))
logDbg("filterid: "+str(filterid))
try:
if action == "FILTER-REMOVE":
remove_filter(linkurl)
xbmcplugin.endOfDirectory(_handle_, updateListing=True)
if action == "FILTER-MANAGE":
manage_filter(linkurl, int(filterid))
xbmcplugin.endOfDirectory(_handle_, updateListing=True)
elif action == "PAGE-NEXT":
next_menu(linkurl)
xbmcplugin.endOfDirectory(_handle_, updateListing=True)
elif action == "SEARCH":
search()
xbmcplugin.endOfDirectory(_handle_)
elif action == "ACCOUNT":
account()
xbmcplugin.endOfDirectory(_handle_)
elif action == "SHOW-NAV":
show_navigation(linkurl)
xbmcplugin.endOfDirectory(_handle_)
elif action == "PAGE":
main_menu(linkurl, list_only=True)
xbmcplugin.endOfDirectory(_handle_)
elif action == "PLAY":
play_video(linkurl)
else:
ts = int(time.time())
shows_menu("https://prima.iprima.cz/iprima-api/ListWithFilter/Series/Content?ts="+ str(ts) +"&filter=all&featured_queue_name=iprima:hp-featured-series")
xbmcplugin.endOfDirectory(_handle_)
except Exception as ex:
exc_type, exc_value, exc_traceback = sys.exc_info()
_exception_log(exc_type, exc_value, exc_traceback)
except Exception as ex:
exc_type, exc_value, exc_traceback = sys.exc_info()
_exception_log(exc_type, exc_value, exc_traceback)
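
# Illustrative sketch (not part of the original add-on): how the plugin
# round-trips its state through the URL query string. get_menu_link()
# urlencodes keyword arguments onto the plugin base URL, and get_params()
# decodes sys.argv[2] back into a flat dict via parse_qs. Values below are
# hypothetical; Kodi supplies the real sys.argv on each invocation.
#
#   url = _baseurl_ + '?' + urllib.urlencode({'action': 'PAGE',
#                                             'linkurl': 'http://example.com/show'})
#   # Kodi re-invokes the plugin with sys.argv[2] == '?action=PAGE&linkurl=...'
#   params = parse_qs('action=PAGE&linkurl=http%3A%2F%2Fexample.com%2Fshow')
#   # -> {'action': ['PAGE'], 'linkurl': ['http://example.com/show']}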
StingraySoftware/dave | src/main/python/utils/dataset_cache.py | apache-2.0
import hashlib
import logging
import utils.exception_helper as ExHelper
from random import randint
from config import CONFIG
import pylru
cached_datasets = pylru.lrucache(CONFIG.PYTHON_CACHE_SIZE)
# DATASET CACHE METHODS
def add(key, dataset):
try:
cached_datasets[key] = dataset
except:
logging.error(ExHelper.getException('dataset_cache.add'))
def contains(key):
try:
return key in cached_datasets
except:
logging.error(ExHelper.getException('dataset_cache.contains'))
return False
def get(key):
try:
if contains(key):
return cached_datasets[key]
except:
logging.error(ExHelper.getException('dataset_cache.get'))
return None
def remove(key):
try:
if contains(key):
del cached_datasets[key]
return True
except:
logging.error(ExHelper.getException('dataset_cache.remove'))
return False
def remove_with_prefix(key_prefix):
try:
remove_keys = []
for key in cached_datasets.keys():
if key.startswith(key_prefix):
remove_keys.append(key)
for key in remove_keys:
remove(key)
except:
logging.error(ExHelper.getException('dataset_cache.remove_with_prefix'))
def get_key(value, strict=False):
try:
m = hashlib.md5()
if strict:
m.update(str(value).encode('utf-8'))
else:
m.update(str(value + str(randint(0,99999))).encode('utf-8'))
ugly_key = str(m.digest())
return "".join(e for e in ugly_key if e.isalnum())
except:
logging.error(ExHelper.getException('dataset_cache.remove_with_prefix'))
return ""
def count():
return len(cached_datasets)
def clear():
cached_datasets.clear()
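
# Minimal usage sketch (illustrative, not part of the module): store a parsed
# dataset under a content-derived key and read it back. Assumes the module's
# own CONFIG/pylru environment is importable; the small dict stands in for a
# real dataset object. strict=True makes get_key() deterministic (no random
# salt), so the same value always maps to the same key.
if __name__ == '__main__':
    key = get_key('/data/obs_1.fits', strict=True)
    add(key, {'rows': 1790})
    assert contains(key)
    print(get(key))
    remove_with_prefix(key[:8])  # evicts this entry (and any sharing the prefix)
    print(count())               # -> 0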
py-mina-deploy/py-mina | py_mina/decorators/task.py | mit
"""
Task decorator (wrapper for `fabric3` @task decorator)
"""
from __future__ import with_statement
import timeit
import fabric.api
from py_mina.echo import echo_task, print_task_stats
def task(wrapped_function):
"""
Task function decorator
"""
wrapped_function_name = wrapped_function.__name__
def task_wrapper(*args):
"""
Runs task and prints stats at the end
"""
echo_task('Running "%s" task\n' % wrapped_function_name)
start_time = timeit.default_timer()
with fabric.api.settings(colorize_errors=True):
try:
wrapped_function(*args)
except Exception as e:
print_task_stats(wrapped_function_name, start_time, e)
raise e # escalate exception
else:
print_task_stats(wrapped_function_name, start_time)
# Copy __name__ and __doc__ from decorated function to wrapper function
task_wrapper.__name__ = wrapped_function_name or 'task'
if wrapped_function.__doc__: task_wrapper.__doc__ = wrapped_function.__doc__
# Decorate with `fabric3` task decorator
return fabric.api.task(task_wrapper)
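
# Illustrative usage (not part of py_mina itself): any function decorated with
# @task becomes a Fabric task that echoes a banner and timing stats around the
# wrapped body. `deploy` is a hypothetical example task.
@task
def deploy():
    """Example deploy task."""
    print('deploying...')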
GiulioGx/RNNs | sources/ObjectiveFunction.py | lgpl-3.0
import theano.tensor as TT
from theano.tensor.elemwise import TensorType
import theano as T
from infos.Info import Info
from infos.InfoElement import PrintableInfoElement
from infos.InfoGroup import InfoGroup
from infos.InfoList import InfoList
from infos.InfoProducer import SimpleInfoProducer
from infos.SymbolicInfo import SymbolicInfo
from lossFunctions.LossFunction import LossFunction
from model.Variables import Variables
__author__ = 'giulio'
class ObjectiveFunction(SimpleInfoProducer): # XXX is this class needed?
def __init__(self, loss_fnc: LossFunction, net, params: Variables, u, t, mask):
self.__net = net
self.__loss_fnc = loss_fnc
self.__u = u
self.__t = t
self.__params = params
# XXX
# XXX REMOVE (?)
self.failsafe_grad, _ = self.__net.symbols.failsafe_grad(u=u, t=t, mask=mask, params=self.__params,
obj_fnc=self)
self.__grad, self.__objective_value = self.__net.symbols.gradient(u=u, t=t, mask=mask, params=self.__params,
obj_fnc=self)
grad_norm = self.__grad.value.norm()
# separate
gradient_info = self.__grad.temporal_norms_infos
# DEBUG DIFF
# debug_diff = (self.grad.value - self.failsafe_grad).norm()
debug_diff = TT.alloc(-1)
self.__infos = ObjectiveFunction.Info(gradient_info, self.__objective_value, grad_norm, debug_diff,
net.symbols.mask)
@property
def current_loss(self):
return self.__objective_value
def value(self, y, t, mask):
return self.__loss_fnc.value(y=y, t=t, mask=mask)
@property
def loss_mask(self):
return self.__loss_fnc.mask
@property
def infos(self):
return self.__infos
@property
def grad(self):
return self.__grad
class Info(SymbolicInfo):
def __init__(self, gradient_info, objective_value, grad_norm, debug_diff, mask):
# n_selected_temporal_losses = TT.switch(mask.sum(axis=1) > 0, 1, 0).sum(axis=1).sum()
n_selected_temporal_losses = LossFunction.num_examples_insting_temp_loss(mask)
self.__symbols = [objective_value, grad_norm, debug_diff,
n_selected_temporal_losses] + gradient_info.symbols
self.__symbolic_gradient_info = gradient_info
def fill_symbols(self, symbols_replacements: list) -> Info:
loss_value_info = PrintableInfoElement('value', ':07.3f', symbols_replacements[0].item())
loss_grad_info = PrintableInfoElement('grad', ':07.3f', symbols_replacements[1].item())
norm_diff_info = PrintableInfoElement('@@', '', symbols_replacements[2].item())
n_loss_info = PrintableInfoElement('##n', '', symbols_replacements[3])
gradient_info = self.__symbolic_gradient_info.fill_symbols(symbols_replacements[4:])
loss_info = InfoGroup('loss', InfoList(loss_value_info, loss_grad_info))
obj_info = InfoGroup('obj', InfoList(loss_info, gradient_info))
info = InfoList(obj_info, norm_diff_info)
return info
@property
def symbols(self):
return self.__symbols
ntt-sic/taskflow | taskflow/tests/unit/test_check_transition.py | apache-2.0
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from taskflow import exceptions as exc
from taskflow import states
from taskflow import test
class CheckFlowTransitionTest(test.TestCase):
def test_same_state(self):
self.assertFalse(
states.check_flow_transition(states.SUCCESS, states.SUCCESS))
def test_rerunning_allowed(self):
self.assertTrue(
states.check_flow_transition(states.SUCCESS, states.RUNNING))
def test_no_resuming_from_pending(self):
self.assertFalse(
states.check_flow_transition(states.PENDING, states.RESUMING))
def test_resuming_from_running(self):
self.assertTrue(
states.check_flow_transition(states.RUNNING, states.RESUMING))
def test_bad_transition_raises(self):
with self.assertRaisesRegexp(exc.InvalidStateException,
'^Flow transition.*not allowed'):
states.check_flow_transition(states.FAILURE, states.SUCCESS)
hasgeek/funnel | funnel/forms/organization.py | agpl-3.0
from __future__ import annotations
from flask import Markup, url_for
from baseframe import _, __
from coaster.auth import current_auth
import baseframe.forms as forms
from ..models import Organization, Profile, Team
__all__ = ['OrganizationForm', 'TeamForm']
@Organization.forms('main')
class OrganizationForm(forms.Form):
title = forms.StringField(
__("Organization name"),
description=__(
"Your organization’s given name, without legal suffixes such as Pvt Ltd"
),
validators=[
forms.validators.DataRequired(),
forms.validators.Length(max=Organization.__title_length__),
],
filters=[forms.filters.strip()],
)
name = forms.AnnotatedTextField(
__("Username"),
description=__(
"A short name for your organization’s profile page."
" Single word containing letters, numbers and dashes only."
" Pick something permanent: changing it will break existing links from"
" around the web"
),
validators=[
forms.validators.DataRequired(),
forms.validators.Length(max=Profile.__name_length__),
],
filters=[forms.filters.strip()],
prefix="https://hasgeek.com/",
widget_attrs={'autocorrect': 'none', 'autocapitalize': 'none'},
)
def validate_name(self, field):
reason = Profile.validate_name_candidate(field.data)
if not reason:
return # name is available
if reason == 'invalid':
raise forms.ValidationError(
_(
"Names can only have letters, numbers and dashes (except at the"
" ends)"
)
)
if reason == 'reserved':
raise forms.ValidationError(_("This name is reserved"))
if self.edit_obj and field.data.lower() == self.edit_obj.name.lower():
# Name is not reserved or invalid under current rules. It's also not changed
# from existing name, or has only changed case. This is a validation pass.
return
if reason == 'user':
if (
current_auth.user.username
and field.data.lower() == current_auth.user.username.lower()
):
raise forms.ValidationError(
Markup(
_(
"This is <em>your</em> current username."
' You must change it first from <a href="{account}">your'
" account</a> before you can assign it to an organization"
).format(account=url_for('account'))
)
)
raise forms.ValidationError(_("This name has been taken by another user"))
if reason == 'org':
raise forms.ValidationError(
_("This name has been taken by another organization")
)
# We're not supposed to get an unknown reason. Flag error to developers.
raise ValueError(f"Unknown profile name validation failure reason: {reason}")
@Team.forms('main')
class TeamForm(forms.Form):
title = forms.StringField(
__("Team name"),
validators=[
forms.validators.DataRequired(),
forms.validators.Length(max=Team.__title_length__),
],
filters=[forms.filters.strip()],
)
users = forms.UserSelectMultiField(
__("Users"),
validators=[forms.validators.DataRequired()],
description=__("Lookup a user by their username or email address"),
)
is_public = forms.BooleanField(
__("Make this team public"),
description=__(
"Team members will be listed on the organization’s profile page"
),
default=True,
)
binary-array-ld/bald | ncldDump/ncldDump.py | bsd-3-clause
from __future__ import print_function
from six import string_types, PY2
import argparse
import jinja2
import json
import netCDF4
import numpy
import os
import re
import sys
import pprint
import traceback
def parseArgs(args):
'''
Parse the command line arguments into a dictionary object.
args [in] A list of command line arguments.
returns A dictionary of the parse results.
'''
parser = argparse.ArgumentParser(description = 'Generate web-linked CDL (without data) from a netCDF-LD file as HTML.',
epilog = 'If no output file is specified, the output will be written to the file ncldDump.html in the current folder.')
parser.add_argument('-a', metavar = '<alias file>', default = None,
dest = 'aliasFile', help = 'A JSON file containing alias definitions.')
parser.add_argument('-o', metavar = '<output file>', default = 'ncldDump.html',
dest = 'outputFile', help = 'The file to write the output to.')
parser.add_argument('inputFile', metavar = '<input file>', help = 'A netCDF-LD file.')
parsedArgs = parser.parse_args(args)
assert os.access(parsedArgs.inputFile, os.R_OK), 'Unable to read file ' + parsedArgs.inputFile
if parsedArgs.aliasFile is not None:
assert os.access(parsedArgs.aliasFile, os.R_OK), 'Unable to read file ' + parsedArgs.aliasFile
argDict = vars(parsedArgs)
return argDict
def parseDtype(dtype):
'''
Return a string representing the data type in the dtype argument.
dtype [in] A dtype object.
returns A string.
'''
# Get the basic two character type string for the type. Remove any
# byte-order information or other extraneous characters.
#
theType = dtype.str.strip('><=#')
# Map the type. If the type is not found, return '?'.
#
result = '?'
if 'i1' == theType:
result = 'byte'
elif 'u1' == theType:
result = 'ubyte'
elif 'i2' == theType:
result = 'short'
elif 'u2' == theType:
result = 'ushort'
elif 'i4' == theType:
result = 'int'
elif 'u4' == theType:
result = 'uint'
elif 'f4' == theType:
result = 'float'
elif 'i8' == theType:
result = 'int64'
elif 'u8' == theType:
result = 'uint64'
elif 'f8' == theType:
result = 'double'
elif 'S1' == theType:
result = 'char'
elif 'S' == theType:
result = 'string'
elif 'U' == theType:
result = 'string'
# Return the result.
#
return result
def parseType(obj):
'''
Return a string representing the data type of the obj argument.
dtype [in] A dtype object.
returns A string.
'''
# Map the type. If the type is not found, return '?'.
#
result = '?'
if True == isinstance(obj, string_types):
result = ''
elif True == isinstance(obj, numpy.int8):
result = 'b'
elif True == isinstance(obj, numpy.uint8):
result = 'ub'
elif True == isinstance(obj, numpy.int16):
result = 's'
elif True == isinstance(obj, numpy.uint16):
result = 'us'
elif True == isinstance(obj, numpy.int32):
result = ''
elif True == isinstance(obj, numpy.uint32):
result = 'u'
elif True == isinstance(obj, numpy.int64):
result = 'll'
elif True == isinstance(obj, numpy.uint64):
result = 'ull'
elif True == isinstance(obj, numpy.float32):
result = 'f'
elif True == isinstance(obj, numpy.float64):
result = ''
elif True == isinstance(obj, numpy.ndarray):
result = parseType(obj[0])
# Return the result.
#
return result
def convertToStringHook(item, ignoreDicts = False):
'''
This function is passed to the json load function as an object hook. It
converts any string_types strings into ASCII strings.
item [in] An item passed in for processing.
ignoreDicts [in] If this is set to True ignore any dict objects passed in.
returns Items with any string_types strings converted to ASCII.
'''
# If this is a string_types string, convert it. If this is a list, convert any
# contained string_types strings. If this is a dict and it hasn't been converted
# already, convert any contained string_types strings. Otherwise, leave the item
# alone.
#
if isinstance(item, string_types):
if PY2:
result = item.encode('utf-8')
else:
result = item
elif isinstance(item, list):
result = [ convertToStringHook(element, True) for element in item ]
elif isinstance(item, dict) and not ignoreDicts:
result = { convertToStringHook(key, True) : convertToStringHook(value, True) for key, value in item.items() }
else:
result = item
# Return the possibly converted item.
#
return result
def loadAliasDict(aliasFilePath):
'''
Load an alias dictionary from a JSON file. This is a temporary workaround
until it is decided how to store the information in a netCDF-LD file. The
alias dictionary is a mapping of URIs to context prefixes and words. The
words will be found in variable and attribute names, and in words found in
specified attribute values. If the file path is None, create a stubbed-out
dictionary and return it.
aliasFilePath [in] The path to the JSON file containing the alias
definitions.
returns The loaded dictionary.
'''
# If the file path is None, create a stubbed-out dictionary.
#
if aliasFilePath is None:
aliasDict = { 'contexts' : {}, 'names' : {}, 'values' : {} }
else:
# Open the file to parse.
#
aliasFile = open(aliasFilePath)
# Parse the contained JSON.
#
aliasDict = json.load(aliasFile, object_hook = convertToStringHook)
# Return the dictionary.
#
return aliasDict
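
# A hypothetical alias file illustrating the structure loadAliasDict expects
# (no such file ships with the tool). 'contexts' resolves '<context>__<name>'
# pairs, 'names' maps bare names, and 'values' maps attribute values keyed by
# attribute name, with '*' as a wildcard. Each pattern is a str.format() URL
# template with one replaceable field:
#
# {
#     "contexts": { "cf": "http://cfconventions.org/{}" },
#     "names": { "temperature": "http://example.com/def/{}" },
#     "values": { "standard_name": { "*": "http://vocab.example.com/{}" } }
# }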
def makeURL(word, pattern):
'''
Create a URL from the word and pattern.
word [in] The word to build the URL around.
pattern [in] The URL pattern to reference.
returns A URL.
'''
# Insert the word into any replaceable part in the pattern.
#
theURL = pattern.format(word)
# Return the URL
#
return theURL
def resolveName(name, aliasDict):
'''
Determine if the name has a context part (the form is <context>__<name>).
If it does and the context alias exists, use it to build a URL.
If not, attempt to resolve the name into a URL using the names
part of the alias dictionary.
name [in] A name to attempt to resolve into a URL string.
aliasDict [in] A dictionary of URI patterns keyed by the elements they
replace.
returns A URL, or None if there was no resolution.
'''
# Start with the result equal to None.
#
result = None
# Split the name on '__'.
#
nameParts = name.split('__')
# Breakout context.
#
for _x in [0]:
# If there is a context part, attempt to use it.
#
if 2 == len(nameParts):
# Get the name and context parts.
#
contextPart = nameParts[0]
namePart = nameParts[1]
# If the context exists in the alias dictionary, create a URL
# string using the pattern for the context and the name part.
#
if contextPart in aliasDict['contexts']:
pattern = aliasDict['contexts'][contextPart]
result = makeURL(namePart, pattern)
break
# If the name exists in the alias dictionary, create a URL string
# using the pattern for the name.
#
if name in aliasDict['names']:
pattern = aliasDict['names'][name]
result = makeURL(name, pattern)
break
# Return the resolved URL if one was found.
#
return result
def resolveValue(name, value, aliasDict):
'''
Determine if the value associated with the name has an entry in the alias
dictionary. If it does, build a URL and return it.
name [in] A name associated with the value to attempt to resolve into
a URL.
value [in] A value to attempt to resolve into a URL.
aliasDict [in] A dictionary of URI patterns keyed by the elements they
replace.
returns A URL, or None if there was no resolution.
'''
# Start with the result equal to None.
#
result = None
# Breakout context
#
done = False
while False == done:
done = True
# If the value is not a string, get a string representation.
#
if False == isinstance(value, str) and False == isinstance(value, string_types):
value = str(value)
# If the value starts with 'http', interpret the entire string as a
# resolved URL.
#
if value[0:4] == 'http':
result = value
break
# Attempt to split the value on '__' to see if there is a context
# part to the value.
#
valueParts = value.split('__')
# If there is a context part, resolve the value as a name.
#
if 2 == len(valueParts):
result = resolveName(value, aliasDict)
break
# If the name exists in the alias dictionary, and if the value exists
# in the sub-dictionary for the name, create a URL using the pattern
# for the value. A wildcard (*) for a value key in the dictionary
# matches any value.
#
if name in aliasDict['values']:
subDict = aliasDict['values'][name]
pattern = None
if value in subDict:
pattern = subDict[value]
elif '*' in subDict:
pattern = subDict['*']
if pattern is not None:
result = makeURL(value, pattern)
break
# Return the resolved name if one was found.
#
return result
def parseAttributes(ncObj, aliasDict):
'''
Build a list of dictionaries for each netCDF attribute on the object.
ncObj [in] A netCDF object with attributes.
aliasDict [in] A dictionary of URI patterns keyed by the elements they
replace.
returns A list of dictionaries for each attribute.
'''
# Create the attribute list.
#
attrList = []
# Fill the list with dictionaries describing each attribute.
#
for attrName in ncObj.ncattrs():
# Get the value and type for the attribute.
#
attrValue = ncObj.getncattr(attrName)
attrType = parseType(attrValue)
# If the value is an array, make it a list.
#
if True == isinstance(attrValue, numpy.ndarray):
attrValue = list(attrValue)
# Get the URL (if any) for the attribute.
#
nameURL = resolveName(attrName, aliasDict)
# Get the URL (if any) for the value.
#
valueURL = resolveValue(attrName, attrValue, aliasDict)
# If the value is a string, wrap it in '"' characters.
#
if True == isinstance(attrValue, str) or True == isinstance(attrValue, string_types):
attrValue = '"' + str(attrValue) + '"'
valueEntry = { 'element' : attrValue }
if valueURL is not None:
valueEntry['url'] = valueURL
# Build the attribute entry. If there is a name URL add it.
#
attrEntry = {'name' : attrName, 'value' : valueEntry, 'type' : attrType}
if nameURL is not None:
attrEntry['url'] = nameURL
# Add the entry to the list.
#
attrList.append(attrEntry)
# Return the list.
#
return attrList
def parseGroup(ncObj, aliasDict):
'''
Build dimension, variable, and attribute lists for the group object.
ncObj [in] The netCDF4 group object to parse.
aliasDict [in] A dictionary of URI patterns keyed by the elements they
replace.
returns A nested set of dictionaries and lists describing the object
contents.
'''
# Create the top-level dictionary.
#
dataDict = {}
# If there are any dimensions, add and populate a dimensions entry.
#
dimList = []
try:
for dimName, dimObj in ncObj.dimensions.items():
dimEntry = {'name' : dimName }
if True == dimObj.isunlimited():
dimEntry['value'] = 'UNLIMITED'
dimEntry['comment'] = str(dimObj.size) + ' currently'
else:
dimEntry['value'] = str(dimObj.size)
dimList.append(dimEntry)
except:
pass
if 0 < len(dimList):
dataDict['dimensions'] = dimList
# If there are any variables, add and populate a variables entry.
#
varList = []
try:
for varName, varObj in ncObj.variables.items():
varType = parseDtype(varObj.dtype)
varEntry = {'name' : varName, 'type' : varType}
dimList = []
for dimName in varObj.dimensions:
dimSize = ncObj.dimensions[dimName].size
dimList.append(dimName)
if 0 < len(dimList):
varEntry['dimensions'] = dimList
# If the variable name is in the alias dictionary names section,
# get a URL for it and add it to the entry for the variable.
#
if varName in aliasDict['names']:
pattern = aliasDict['names'][varName]
theURL = makeURL(varName, pattern)
varEntry['url'] = theURL
# If there are any attributes add and populate an attributes
# entry.
#
attrList = parseAttributes(varObj, aliasDict)
if 0 < len(attrList):
varEntry['attributes'] = attrList
varList.append(varEntry)
except:
#type_, value_, traceback_ = sys.exc_info()
#tb = traceback.format_tb(traceback_)
pass
if 0 < len(varList):
dataDict['variables'] = varList
# If there are any group-level attributes, add and populate an attributes
# entry.
#
attrList = parseAttributes(ncObj, aliasDict)
if 0 < len(attrList):
dataDict['attributes'] = attrList
# Return the dictionary.
#
return dataDict
def parseDataset(ncObj, aliasDict):
'''
Build a set of group dictionaries for the netCDF4 Dataset object.
ncObj [in] The netCDF4 Dataset object to parse.
aliasDict [in] A dictionary of URI patterns keyed by the elements they
replace.
returns A nested set of dictionaries and lists describing the object
contents.
'''
# Parse the contents of the root group of the netCDF file. Add a groupName
# element and store it in a groups list.
#
groupList = []
groupEntry = parseGroup(ncObj, aliasDict)
groupEntry['groupName'] = 'global'
groupList.append(groupEntry)
# If there are any other groups, add them as well.
#
for groupName, groupObj in ncObj.groups.items():
groupEntry = parseGroup(groupObj, aliasDict)
groupEntry['groupName'] = groupName
groupList.append(groupEntry)
# Add the group list to a top-level dictionary.
#
dataDict = { 'groups' : groupList }
# Return the dictionary.
#
return dataDict
def ncldDump(inputFile, aliasFile, outputFile):
'''
Generate an HTML page from a netCDF-LD file. The page will contain CDL
describing the structure and contents of the file (without data values),
similar to the output of ncdump. Any elements that have associated linked
data will be presented as hot links that will open a new browser tab that
shows the linked contents.
inputFile [in] The netCDF-LD file to parse and display.
aliasFile [in] A JSON file with alias definitions. If the value is None,
no aliases are defined.
outputFile [in] The output file to write to.
'''
# Load the alias dictionary.
#
aliasDict = loadAliasDict(aliasFile)
# Get a netCDF4 dataset object from the input file and open it.
#
ncObj = netCDF4.Dataset(inputFile, 'r')
# Parse the contents into a dictionary.
#
ncDict = parseDataset(ncObj, aliasDict)
# Add a filePath entry.
#
ncDict['filePath'] = os.path.split(inputFile)[-1]
# Create a jinja environment and template object.
#
envObj = jinja2.Environment(loader = jinja2.FileSystemLoader('./'))
templateObj = envObj.get_template('ncldDump_template.html')
# Render the template with the contents of the dictionary.
#
result = templateObj.render(**ncDict)
# Open the output file and write the rendered template into it.
#
outObj = open(outputFile, 'w')
outObj.write(result)
outObj.close()
if __name__ == '__main__':
try:
argDict = parseArgs(sys.argv[1:])
ncldDump(**argDict)
except Exception as exc:
print(exc)
if 'pdb' not in sys.modules:
sys.exit(1)
quimaguirre/diana | diana/toolbox/selection_utilities.py | mit
from random import shuffle, randint
from itertools import combinations
def main():
return
def get_subsamples_at_ratio(values, n_fold=1000, ratio=0.1):
n = int(round(len(values) * float(ratio)))
#for i in range(n_fold):
# yield random_combination(values, n, n_fold=1)
return get_subsamples(values, n_fold, n)
def get_subsamples(scores, n_fold=10000, n_sample=1000):
for i in range(n_fold):
#if with_replacement:
# size = len(scores)-1
# selected = empty(n_sample)
# for i in range(n_sample):
# selected[i] = scores[randint(0,size)]
shuffle(scores)
selected = scores[:n_sample]
yield selected
return
def random_combination(nodes, n, r):
"Random selection r times from itertools.combinations(nodes, n)"
shuffle(nodes)
values = []
for i, combination in enumerate(combinations(nodes, n)):
if randint(0, n) == 0:
values.append(combination)
if len(values) >= r:
break
if len(values) < r:
raise ValueError("Not enough combinations!")
return values
def k_fold_cross_validation(X, K, randomize = False, replicable = None):
"""
By John Reid (code.activestate.com)
Generates K (training, validation) pairs from the items in X.
Each pair is a partition of X, where validation is an iterable
of length len(X)/K. So each training iterable is of length (K-1)*len(X)/K.
If randomise is true, a copy of X is shuffled before partitioning,
otherwise its order is preserved in training and validation.
If replicable is not None, this number is used to create the same random splits at each call
"""
#if randomize: from random import shuffle; X=list(X); shuffle(X)
if randomize:
from random import seed
X=list(X)
if replicable is not None:
seed(replicable)
shuffle(X)
for k in range(K):
training = [x for i, x in enumerate(X) if i % K != k]
validation = [x for i, x in enumerate(X) if i % K == k]
yield k+1, training, validation
return
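
# Quick illustrative check (not part of the original module): a 3-fold split
# of ten items puts each item in exactly one validation fold.
# >>> for k, train, val in k_fold_cross_validation(list(range(10)), 3):
# ...     print(k, len(train), len(val))
# 1 6 4
# 2 7 3
# 3 7 3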
def generate_samples_from_list_without_replacement(elements, sample_size, n_folds = None, replicable = None):
"""
Iteratively returns (yields) n_folds sublists of elements with a size of sample_size
n_folds: If None calculated to cover as much elements as possible
replicable: If not None uses this replicable as the seed for random
"""
from random import seed
if replicable is not None:
seed(replicable)
shuffle(elements)
if n_folds is None:
from math import ceil
#n_folds = len(elements) / sample_size
n_folds = int(ceil(float(len(elements)) / sample_size))
for i in range(n_folds):
if (i+1)*sample_size < len(elements):
yield elements[i*sample_size:(i+1)*sample_size]
else:
yield elements[i*sample_size:]
return
if __name__ == "__main__":
main()
lemonad/my-django-skeleton | djangoproject/settings.py | bsd-2-clause
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import os
import posix
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
# logging.basicConfig(
# level = logging.DEBUG,
# format = '%(asctime)s %(levelname)s %(message)s',
# filename = '/tmp/djangoproject.log',
# filemode = 'w'
# )
PROJECT_ROOT = os.path.join(os.path.dirname(__file__), '/')
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'djangoproject.db'
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
TIME_ZONE = 'Europe/Stockholm'
# LANGUAGE_CODE = 'sv-SE'
LANGUAGE_CODE = 'en-US'
ugettext = lambda s: s
LANGUAGES = (
('en', ugettext('English')),
('sv', ugettext('Swedish')),
)
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/admin_media/'
# Don't share this with anybody.
SECRET_KEY = 'ChangeThisKeyToSomethingCompletelyDifferent'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'djangoflash.middleware.FlashMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.middleware.doc.XViewMiddleware',
)
ROOT_URLCONF = 'djangoproject.urls'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'djangoproject.exampleapp',
)
TEMPLATE_CONTEXT_PROCESSORS = TEMPLATE_CONTEXT_PROCESSORS + (
'djangoflash.context_processors.flash',
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
)
TEMPLATE_DIRS = (
os.path.join(os.path.dirname(__file__), "exampleapp/fixtures"),
os.path.join(os.path.dirname(__file__), "templates"),
)
SITE_ID = 1
DEFAULT_FROM_EMAIL = "[email protected]"
SERVER_EMAIL = "[email protected]"
EMAIL_SUBJECT_PREFIX = "Django: "
# The below LOGIN_URL and LOGOUT_URL doesn't seem to be used except
# when unit testing views.
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
LOGOUT_URL = '/logout/'
askcom/pybigip | pybigip/gtm.py | apache-2.0
'''
GTM Interfaces.
Example (Disable datacenter for a distributed application):
>>> con = pybigip.Connection('gtm.example.company', 'admin', 'foobarbaz')
>>> myapp = pybigip.gtm.Application(con, '/Common/myapp')
>>> myapp.datacenters['/Common/SFO'].enabled = False
'''
import itertools
from pybigip import core
class Applications(core.ObjectList):
'''
Access Distributed Applications.
'''
def __init__(self, con):
'''
'''
super(Applications, self).__init__(con)
self._lcon = self._con.GlobalLB.Application
def load(self, names):
'''
Override parent load method to preload Application datacenter status
data.
'''
ret = list()
app_dcs = self._lcon.get_data_centers(names)
app_desc = self._lcon.get_description(names)
for app, dcs, desc in itertools.izip(names, app_dcs, app_desc):
app_obj = Application(self._con, app)
app_obj._dcs = dict(((dc, Datacenter(app_obj, dc)) for dc in dcs))
app_obj._description = desc
ret.append(app_obj)
return ret
class Application(object):
'''
A Distributed Application.
'''
_description = None
_wips = None
def __init__(self, con, name, dcs=None):
'''
'''
self._con = con
self._lcon = self._con.GlobalLB.Application
self.name = name
self._dcs = dcs
def get_ctx(self, name, type):
'''
Get application object context status.
@param name: Object name
@param type: Object type
@return: dict containing object context status information.
'''
ctx = {
'application_name': self.name,
'object_name': name,
'object_type': type
}
return self._lcon.get_application_context_status([ctx])[0]
def enable_ctx(self, name, type):
'''
Enable an application object context.
@param name: Object name
@param type: Object type
'''
ctx = {
'application_name': self.name,
'object_name': name,
'object_type': type
}
self._lcon.enable_application_context_object([ctx])
def disable_ctx(self, name, type):
'''
Disable an application object context.
@param name: Object name
@param type: Object type
'''
ctx = {
'application_name': self.name,
'object_name': name,
'object_type': type
}
self._lcon.disable_application_context_object([ctx])
@property
def description(self):
'''
Lazy load application description value.
@return: application description from the bigip.
'''
if not self._description:
self._description = self._lcon.get_description([self.name])[0]
return self._description
@property
def datacenters(self):
'''
Lazy load application datacenter list.
@return: List of L{Datacenter} objects for this application.
'''
if not self._dcs:
dcs = self._lcon.get_data_centers([self.name])[0]
self._dcs = dict(((dc, Datacenter(self, dc)) for dc in dcs))
return self._dcs
def status(self):
'''
'''
return self._lcon.get_object_status([self.name])[0]
@property
def wips(self):
'''
'''
if not self._wips:
self._wips = self._lcon.get_wide_ips([self.name])[0]
return self._wips
class Datacenter(object):
'''
Application datacenter context object.
'''
_status = None
def __init__(self, app, name):
'''
@param app: Containing application
@param name: Datacenter name
'''
self._app = app
self.name = name
def enable(self):
'''
Enable this datacenter by enabling the coresponding application
context object in the Application.
'''
self._app.enable_ctx(self.name,
'APPLICATION_OBJECT_TYPE_DATACENTER')
def disable(self):
'''
Disable this datacenter by disabling the coresponding application
context object in the Application.
'''
self._app.disable_ctx(self.name,
'APPLICATION_OBJECT_TYPE_DATACENTER')
def toggle(self):
'''
Toggle enabled status
'''
self.enabled = not self.enabled
def status(self):
'''
Get status information for this datacenter.
'''
return self._app.get_ctx(self.name,
'APPLICATION_OBJECT_TYPE_DATACENTER')
@property
def enabled(self):
'''
@return: bool representation of datacenter enabled status.
'''
return self.status()['enabled_status'] == 'ENABLED_STATUS_ENABLED'
@enabled.setter
def enabled(self, value):
'''
Write property to allow setting the enable status for this datacenter.
@param value:
'''
value = bool(value)
if value:
self.enable()
else:
self.disable()
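
# Illustrative sketch (hypothetical host and credentials): load several
# distributed applications in one batch and toggle a datacenter for each,
# mirroring the module docstring example via the Applications loader.
#
# con = pybigip.Connection('gtm.example.company', 'admin', 'foobarbaz')
# apps = Applications(con).load(['/Common/myapp', '/Common/otherapp'])
# for app in apps:
#     app.datacenters['/Common/SFO'].toggle()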
mjoblin/neotiles | neotiles/matrixes/__init__.py | mit
try:
from neopixel import Adafruit_NeoPixel, ws
DEFAULT_STRIP_TYPE = ws.WS2811_STRIP_GRB
except ImportError:
DEFAULT_STRIP_TYPE = None
try:
from rgbmatrix import RGBMatrix, RGBMatrixOptions
except ImportError:
pass
from neotiles import MatrixSize
from neotiles.exceptions import NeoTilesError
__all__ = ['NTMatrix', 'NTNeoPixelMatrix', 'NTRGBMatrix']
class NTMatrix(object):
"""
Base class for the Neotiles Matrix interface.
"""
def __init__(self):
self._size = None
self._brightness = None
def setPixelColor(self, x, y, color):
raise NotImplementedError
def show(self):
raise NotImplementedError
@property
def brightness(self):
return self._brightness
@brightness.setter
def brightness(self, val):
self._brightness = val
@property
def size(self):
return self._size
class NTNeoPixelMatrix(NTMatrix):
"""
Represents a NeoPixel matrix.
You must specify a ``size`` matching your neopixel matrix (e.g. ``(8, 8)``)
as well as the ``led_pin`` you're using to talk to it (e.g. ``18``). The
other parameters can usually be left at their defaults. For more
information on the other parameters look at the ``Adafruit_NeoPixel``
class in the
`neopixel <https://github.com/jgarff/rpi_ws281x/tree/master/python>`_
module.
If your RGB values appear to be mixed up (e.g. red is showing as green)
then try using a different ``strip_type``. You can see a list of valid
strip type constants here (look for ``_STRIP_`` in the constant name):
https://docs.rs/ws281x/0.1.0/ws281x/ffi/index.html. Specify a strip type
like this: ``strip_type=ws.WS2811_STRIP_GRB``. For this to work you'll
need to ``import ws`` (which comes with the ``neopixel`` module) into your
code.
:param size: (:class:`MatrixSize`) Size of the neopixel matrix.
:param led_pin: (int) The pin you're using to talk to your neopixel matrix.
:param led_freq_hz: (int) LED frequency.
:param led_dma: (int) LED DMA.
:param led_brightness: (int) Brightness of the matrix display (0-255).
:param led_invert: (bool) Whether to invert the LEDs.
:param strip_type: (int) Neopixel strip type.
:raises: :class:`exceptions.NeoTilesError` if ``matrix_size`` or
``led_pin`` are not specified.
"""
def __init__(
self, size=None, led_pin=None,
led_freq_hz=800000, led_dma=5, led_brightness=64, led_invert=False,
strip_type=DEFAULT_STRIP_TYPE):
super(NTNeoPixelMatrix, self).__init__()
if size is None or led_pin is None:
raise NeoTilesError('size and led_pin must be specified')
self._size = MatrixSize(*size)
self._led_pin = led_pin
self._led_freq_hz = led_freq_hz
self._led_dma = led_dma
self._brightness = led_brightness
self._led_invert = led_invert
self._strip_type = strip_type
self._led_count = self.size.cols * self.size.rows
self.hardware_matrix = Adafruit_NeoPixel(
self._led_count, self._led_pin, freq_hz=self._led_freq_hz,
dma=self._led_dma, invert=self._led_invert,
brightness=self.brightness, strip_type=self._strip_type
)
self.hardware_matrix.begin()
def __repr__(self):
strip_name = self._strip_type
# Convert strip name from strip type integer to associated attribute
# name from ws module (if we can find it).
for strip_check in [attr for attr in dir(ws) if '_STRIP_' in attr]:
if getattr(ws, strip_check) == self._strip_type:
strip_name = 'ws.{}'.format(strip_check)
return (
'{}(size={}, led_pin={}, led_freq_hz={}, led_dma={}, '
'led_brightness={}, led_invert={}, strip_type={})'
).format(
self.__class__.__name__, self.size, self._led_pin,
self._led_freq_hz, self._led_dma, self.brightness,
self._led_invert, strip_name
)
def setPixelColor(self, x, y, color):
pixel_num = (y * self.size.cols) + x
self.hardware_matrix.setPixelColor(pixel_num, color.hardware_int)
def show(self):
self.hardware_matrix.show()
@property
def brightness(self):
return self._brightness
@brightness.setter
def brightness(self, val):
error_msg = 'Brightness must be between 0 and 255'
try:
if val >= 0 and val <= 255:
self._brightness = val
self.hardware_matrix.setBrightness(self._brightness)
else:
raise ValueError(error_msg)
except TypeError:
raise ValueError(error_msg)
class NTRGBMatrix(NTMatrix):
"""
Represents an RGB Matrix.
If no options are passed in then the matrix will be initialized with
default options. These options can be overridden either with ``options``
(which should be an ``RGBMatrixOptions`` object as provided by the
``rgbmatrix`` module); or individual options can be passed into the
constructor.
For example, the following are equivalent: ::
from rgbmatrix import RGBMatrixOptions
options = RGBMatrixOptions()
options.chain_length = 2
options.gpio_slowdown = 3
NTRGBMatrix(options=options)
and: ::
NTRGBMatrix(chain_length=2, gpio_slowdown=3)
:param options: (RGBMatrixOptions) Matrix options.
:param kwargs: (*) Individual matrix options.
"""
def __init__(self, options=None, **kwargs):
super(NTRGBMatrix, self).__init__()
if options is None:
options = RGBMatrixOptions()
for kwarg in kwargs:
setattr(options, kwarg, kwargs[kwarg])
self._size = MatrixSize(
options.rows * options.chain_length, options.rows)
self.options = options
self.hardware_matrix = RGBMatrix(options=options)
self.frame_canvas = self.hardware_matrix.CreateFrameCanvas()
def __repr__(self):
options = [
attr for attr in dir(self.options) if
not callable(getattr(self.options, attr)) and
not attr.startswith('_')
]
options_string = ', '.join([
'{}={}'.format(option, getattr(self.options, option))
for option in sorted(options)
])
return '{}({})'.format(self.__class__.__name__, options_string)
def setPixelColor(self, x, y, color):
cd = color.components_denormalized
self.frame_canvas.SetPixel(x, y, cd[0], cd[1], cd[2])
def show(self):
self.frame_canvas = self.hardware_matrix.SwapOnVSync(self.frame_canvas)
@property
def brightness(self):
return self.hardware_matrix.brightness
@brightness.setter
def brightness(self, val):
error_msg = 'Brightness must be between 0 and 100'
try:
if val >= 0 and val <= 100:
self._brightness = val
self.hardware_matrix.brightness = val
else:
raise ValueError(error_msg)
except TypeError:
raise ValueError(error_msg)
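
# Illustrative usage (hypothetical wiring): an 8x8 NeoPixel matrix driven from
# GPIO pin 18, mirroring the NTRGBMatrix examples in the docstrings above.
# Needs the `neopixel` module and real hardware, so shown as a sketch only;
# `color` is assumed to expose the .hardware_int used by setPixelColor().
#
# matrix = NTNeoPixelMatrix(size=(8, 8), led_pin=18, led_brightness=32)
# matrix.setPixelColor(0, 0, color)
# matrix.show()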
Valentijn1995/Kn0ckKn0ck | Proxies/MultiProxy.py | mit
from Proxy import Proxy
class MultiProxy(Proxy):
"""
Proxy which is composed of multiple proxy's. Another proxy will be used as soon as you call de connect method.
This class makes use of the Composite design pattern. You can use the MultiProxy class as if it is one proxy
but you are actually using multiple proxies.
"""
def __init__(self, proxy_list):
Proxy.__init__(self, None)
self._current_proxy = None
self._proxy_list = proxy_list
self._proxy_counter = 0
self._last_proxy_index = len(proxy_list)
def _get_next_proxy(self):
if self._proxy_counter >= self._last_proxy_index:
self._proxy_counter = 0
next_proxy = self._proxy_list[self._proxy_counter]
self._proxy_counter += 1
return next_proxy
def _receive(self):
return self._current_proxy.receive()
def copy(self):
return MultiProxy(self._proxy_list)
def _connect(self, destination):
self._current_proxy = self._get_next_proxy()
self._current_proxy.connect(destination)
def is_connected(self):
return self._current_proxy is not None
def _send(self, payload):
self._current_proxy.send(payload)
def _close(self):
self._current_proxy.close()
self._current_proxy = None
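
# Illustrative usage (hypothetical Proxy instances): wrap several proxies so
# that each connect() rotates to the next one, round-robin. Assumes the Proxy
# base class exposes public connect()/send()/close() wrappers around the
# _connect()/_send()/_close() hooks implemented above.
#
# pool = MultiProxy([proxy_a, proxy_b, proxy_c])
# pool.connect(('203.0.113.10', 80))  # uses proxy_a
# pool.send(payload)
# pool.close()
# pool.connect(('203.0.113.10', 80))  # rotates to proxy_b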
hirofumi0810/tensorflow_end2end_speech_recognition | examples/erato/evaluation/eval_julius.py | mit
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
import sys
import codecs
from glob import glob
import numpy as np
import pandas as pd
sys.path.append('../../../')
from experiments.erato.data.load_dataset_ctc import Dataset
def main():
results_paths = [path for path in glob(
'/home/lab5/inaguma/asru2017/erato_results_0710/test/*.log')]
# Julisu Rusults
for path in results_paths:
with codecs.open(path, 'r', 'euc_jp') as f:
start_flag = False
file_name = ''
output, output_pos = '', ''
result_dict = {}
for line in f:
line = line.strip()
if line == '----------------------- System Information end -----------------------':
start_flag = True
if start_flag:
if 'input MFCC file' in line:
file_name = line.split(': ')[-1]
file_name = '_'.join(file_name.split('/')[-2:])
file_name = re.sub('.wav', '', file_name)
if 'sentence1' in line:
output = line.split(': ')[-1]
output = re.sub('<s>', '', output)
output = re.sub('</s>', '', output)
output = re.sub('<sp>', '', output)
output = re.sub(r'[\sー]+', '', output)
if 'wseq1' in line:
output_pos = line.split(': ')[-1]
output_pos = re.sub('<s>', '', output_pos)
output_pos = re.sub('</s>', '', output_pos)
output_pos = re.sub('<sp>', '', output_pos)
                        output_pos = re.sub('感動詞', 'F', output_pos)  # 感動詞 = interjection -> filler tag 'F'
                        output_pos = re.sub('言いよどみ', 'D', output_pos)  # 言いよどみ = filled pause -> disfluency tag 'D'
result_dict[file_name] = [output, output_pos[1:]]
output, output_pos = '', ''
dataset = Dataset(data_type='test',
label_type='kana',
ss_type='insert_left',
batch_size=1,
max_epoch=1,
shuffle=False,
progressbar=True)
tp_f, fp_f, fn_f = 0., 0., 0.
tp_d, fp_d, fn_d = 0., 0., 0.
for data, is_new_epoch in dataset:
# Create feed dictionary for next mini batch
inputs, labels_true, inputs_seq_len, input_names = data
if input_names[0][0] not in result_dict.keys():
continue
output, output_pos = result_dict[input_names[0][0]]
detected_f_num = output_pos.count('F')
detected_d_num = output_pos.count('D')
if detected_f_num != 0 or detected_d_num != 0:
print(output_pos)
print(output)
str_true = labels_true[0][0][0]
print(str_true)
print('-----')
true_f_num = np.sum(labels_true[0][0][0].count('F'))
true_d_num = np.sum(labels_true[0][0][0].count('D'))
# Filler
if detected_f_num <= true_f_num:
tp_f += detected_f_num
fn_f += true_f_num - detected_f_num
else:
tp_f += true_f_num
fp_f += detected_f_num - true_f_num
# Disfluency
if detected_d_num <= true_d_num:
tp_d += detected_d_num
fn_d += true_d_num - detected_d_num
else:
tp_d += true_d_num
fp_d += detected_d_num - true_d_num
if is_new_epoch:
break
r_f = tp_f / (tp_f + fn_f) if (tp_f + fn_f) != 0 else 0
p_f = tp_f / (tp_f + fp_f) if (tp_f + fp_f) != 0 else 0
f_f = 2 * r_f * p_f / (r_f + p_f) if (r_f + p_f) != 0 else 0
r_d = tp_d / (tp_d + fn_d) if (tp_d + fn_d) != 0 else 0
p_d = tp_d / (tp_d + fp_d) if (tp_d + fp_d) != 0 else 0
f_d = 2 * r_d * p_d / (r_d + p_d) if (r_d + p_d) != 0 else 0
acc_f = [p_f, r_f, f_f]
acc_d = [p_d, r_d, f_d]
df_acc = pd.DataFrame({'Filler': acc_f, 'Disfluency': acc_d},
columns=['Filler', 'Disfluency'],
index=['Precision', 'Recall', 'F-measure'])
print(df_acc)
if __name__ == '__main__':
main()
DamienIrving/ocean-analysis | data_processing/calc_sfci.py | mit
"""Calculate the (binned) total internal surface forcing."""
import sys
script_dir = sys.path[0]
import os
import pdb
import argparse
import numpy as np
import iris
import cmdline_provenance as cmdprov
repo_dir = '/'.join(script_dir.split('/')[:-1])
module_dir = repo_dir + '/modules'
sys.path.append(module_dir)
try:
import general_io as gio
import convenient_universal as uconv
except ImportError:
raise ImportError('Script and modules in wrong directories')
def main(inargs):
"""Run the program."""
sfc_tbin_cube = iris.load_cube(inargs.sfc_file, 'total surface forcing binned by temperature')
wfo_tbin_cube = iris.load_cube(inargs.wfo_file, 'Water Flux into Sea Water binned by temperature')
cp = 3992.10322329649 #J kg-1 degC-1
lower_tos_bounds = sfc_tbin_cube.coord('sea_surface_temperature').bounds[:, 0]
coord_names_tbin = [coord.name() for coord in sfc_tbin_cube.dim_coords]
theta = uconv.broadcast_array(lower_tos_bounds,
coord_names_tbin.index('sea_surface_temperature'),
sfc_tbin_cube.shape)
sfci_tbin_cube = sfc_tbin_cube.copy()
sfci_tbin_cube.data = sfc_tbin_cube.data - (cp * theta * wfo_tbin_cube.data) # SFCI = SFC - Cp*THETA*SVF
sfci_tbin_cube.var_name = 'sfci_tbin'
sfci_tbin_cube.long_name = 'total internal surface forcing binned by temperature'
metadata = {inargs.sfc_file: sfc_tbin_cube.attributes['history'],
inargs.wfo_file: wfo_tbin_cube.attributes['history']}
log = cmdprov.new_log(infile_history=metadata, git_repo=repo_dir)
sfci_tbin_cube.attributes['history'] = log
sfc_tsbin_cube = iris.load_cube(inargs.sfc_file, 'total surface forcing binned by temperature and salinity')
wfo_tsbin_cube = iris.load_cube(inargs.wfo_file, 'Water Flux into Sea Water binned by temperature and salinity')
coord_names_tsbin = [coord.name() for coord in sfc_tsbin_cube.dim_coords]
theta = uconv.broadcast_array(lower_tos_bounds,
coord_names_tsbin.index('sea_surface_temperature'),
sfc_tsbin_cube.shape)
sfci_tsbin_cube = sfc_tsbin_cube.copy()
sfci_tsbin_cube.data = sfc_tsbin_cube.data - (cp * theta * wfo_tsbin_cube.data) # SFCI = SFC - Cp*THETA*SVF
sfci_tsbin_cube.var_name = 'sfci_tsbin'
sfci_tsbin_cube.long_name = 'total internal surface forcing binned by temperature and salinity'
sfci_tsbin_cube.attributes['history'] = log
cube_list = iris.cube.CubeList([sfci_tbin_cube, sfci_tsbin_cube])
iris.save(cube_list, inargs.sfci_file)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description=__doc__,
argument_default=argparse.SUPPRESS,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("sfc_file", type=str, help="Total surface forcing file")
parser.add_argument("wfo_file", type=str, help="Surface freshwater flux file")
parser.add_argument("sfci_file", type=str, help="Output file")
args = parser.parse_args()
main(args)
| mit | -7,283,266,626,185,533,000 | 46.238806 | 116 | 0.660664 | false |
gleb812/pch2csd | tests/test_csdgen.py | 1 | 3891 | from unittest import TestCase, skip
from pch2csd.csdgen import Udo, Csd, ZakSpace
from pch2csd.parse import parse_pch2
from pch2csd.patch import transform_in2in_cables
from pch2csd.resources import ProjectData
from tests.util import get_test_resource, cmp_str_lines
class TestPolymorphism(TestCase):
def setUp(self):
self.data = ProjectData()
self.poly_mix2 = parse_pch2(self.data, get_test_resource('test_poly_mix2.pch2'))
self.udo_mix2_k = """opcode Mix21A_v0, 0, iiiiiiiii
; TODO: lin/log scale, chain input
iLev1, iSw1, iLev2, iSw2, iScale, izIn1, izIn2, izInChain, izOut xin
k1 zkr izIn1
k2 zkr izIn2
k3 zkr izInChain
kout = k1 + k2*iLev1*iSW1 + k3*iLev2*iSW2
zkw kout, izOut
endop
"""
self.udo_mix2_a = """opcode Mix21A_v1, 0, iiiiiiiii
; TODO: lin/log scale, chain input
iLev1, iSw1, iLev2, iSw2, iScale, izIn1, izIn2, izInChain, izOut xin
a1 zar izIn1
a2 zar izIn2
a3 zar izInChain
aout = a1 + a2*iLev1*iSW1 + a3*iLev2*iSW2
zaw aout, izOut
endop
"""
@skip
def test_mix2__choose_right_templates(self):
p = self.poly_mix2
udo_s = [Udo(p, m) for m in p.modules][:2]
self.assertSequenceEqual([s.get_name() for s in udo_s],
['Mix21A_v0', 'Mix21A_v1'])
self.assertTrue(cmp_str_lines(udo_s[0].get_src(), self.udo_mix2_k))
self.assertTrue(cmp_str_lines(udo_s[1].get_src(), self.udo_mix2_a))
class TestParameterMapping(TestCase):
def setUp(self):
self.data = ProjectData()
self.poly_mix2 = parse_pch2(self.data, get_test_resource('test_poly_mix2.pch2'))
@skip
def test_poly_mix2(self):
p = self.poly_mix2
udo_s = [Udo(p, m) for m in p.modules]
params = [udo.get_params() for udo in udo_s]
self.assertSequenceEqual(params, [[-99.9, 0, -6.2, 1, 2],
[0.781, 1, 0.781, 1, 0],
[0., 0],
[2, 1, 1]])
class TestRateConversion(TestCase):
def setUp(self):
self.data = ProjectData()
self.r2b_b2r_fn = get_test_resource('test_convert_r2b_b2r.pch2')
def test_r2b_b2r(self):
p = parse_pch2(self.data, self.r2b_b2r_fn)
zak = ZakSpace()
udos = zak.connect_patch(p)
in2, envh, out2, a2k, k2a = sorted(udos, key=lambda x: x.mod.id)
# sends a
self.assertSequenceEqual(in2.outlets, [7, 0])
# a -> k
self.assertSequenceEqual(a2k.inlets, [7])
self.assertSequenceEqual(a2k.outlets, [7])
# receives k
self.assertSequenceEqual(envh.inlets, [1, 7, 1])
# sends k
self.assertSequenceEqual(envh.outlets, [0, 8])
# k -> a
self.assertSequenceEqual(k2a.inlets, [8])
self.assertSequenceEqual(k2a.outlets, [8])
# receives a
self.assertSequenceEqual(out2.inlets, [8, 1])
csd = Csd(p, zak, udos)
print(csd.get_code())
class TestUdoGen(TestCase):
def setUp(self):
self.data = ProjectData()
self.poly_mix2_fn = get_test_resource('test_poly_mix2.pch2')
self.modes_LfoC = get_test_resource('test_modes_LfoC.pch2')
self.LevAmp = get_test_resource('test_LevAmp.pch2')
def test_udo_statement_gen__not_raises(self):
p = parse_pch2(self.data, self.poly_mix2_fn)
p.cables = [transform_in2in_cables(p, c) for c in p.cables]
zak = ZakSpace()
udos = zak.connect_patch(p)
csd = Csd(p, zak, udos)
csd.get_code()
def test_patch_LevAmp(self):
p = parse_pch2(self.data, self.LevAmp)
zak = ZakSpace()
udos = [udo for udo in zak.connect_patch(p)
if udo.mod.type == 81]
amp_params = [u.get_params()[0] for u in udos]
self.assertEqual(len(amp_params), 2)
self.assertEqual(amp_params[0], 83)
| mit | -2,614,806,133,715,733,500 | 33.433628 | 88 | 0.598047 | false |
sfstpala/pcr | pcr/tests/test_maths.py | 1 | 2478 | # Copyright (c) 2013 Stefano Palazzo <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from pcr.maths import is_prime, get_prime, phi, mult_inv, make_rsa_keys
def is_really_prime(n):
for i in range(2, n):
if n % i == 0:
return False
return True
def gcd(a, b):
if b == 0:
return a
return gcd(b, a % b)
class MathsTest(unittest.TestCase):
def test_is_prime(self):
self.assertTrue(is_prime(2))
for i in range(3000 + 1, 4000 + 1):
self.assertEqual(is_prime(i), is_really_prime(i), i)
def test_get_prime(self):
self.assertRaises(ValueError, get_prime, 0)
self.assertRaises(ValueError, get_prime, 1)
self.assertRaises(ValueError, get_prime, 4)
for i in range(32):
n = get_prime(16)
self.assertTrue(is_really_prime(n), n)
def test_phi(self):
# ps and qs are prime
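        # for distinct primes p and q, phi(p*q) = (p-1)*(q-1); the brute-force
        # coprime count below should reproduce exactly that value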
ps = 233, 239, 241, 251
qs = 257, 263, 269, 271
for p, q in zip(ps, qs):
n = p * q
            t = 0  # number of k with gcd(n, k) = 1 for 0 <= k <= n
c = phi(n, p, q) # test candidate
for k in range(0, (p * q) + 1):
if gcd(n, k) == 1:
t += 1
self.assertEqual(t, c, (p, q))
def test_mult_inv(self):
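        # the modular inverse satisfies (n * mult_inv(n, m)) % m == 1; it
        # exists here since n is an odd prime, hence coprime to 8192 (= 2**13)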
for n in range(4096, 8192):
if not is_prime(n):
continue
m = mult_inv(n, 8192)
self.assertEqual(n * mult_inv(n, 8192) % 8192, 1, m)
def test_make_rsa_keys(self):
n, e, d = make_rsa_keys(512)
self.assertEqual(e, 65537)
self.assertNotEqual(n, d)
self.assertLess(len(bin(n)[2:]), 513)
self.assertGreater(len(bin(n)[2:]), 384)
self.assertLess(len(bin(d)[2:]), 513)
self.assertGreater(len(bin(d)[2:]), 384)
| gpl-3.0 | 4,466,378,599,322,320,000 | 31.605263 | 71 | 0.58636 | false |
Bazzzzzinga/Election-Portal | Election_Portal/models.py | 1 | 1767 | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
class Election(models.Model):
election_name=models.CharField(max_length=50)
nom_start_time=models.DateTimeField('Nominations start time')
nom_end_time=models.DateTimeField('Nominations end time')
vote_start_time=models.DateTimeField('Voting start time')
vote_end_time=models.DateTimeField('Voting end time')
desc=models.TextField()
def __str__(self):
return self.election_name
def nomval(self):
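		# "1": nominations not open yet, "2": nominations running, "3": closed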
if self.nom_start_time>timezone.now():
return "1"
elif self.nom_end_time>=timezone.now():
return "2"
else:
return "3"
def winner(self):
x=self.candidate_set.all().order_by('-vote_count')
if x:
return x[0]
else:
return None
class Branch(models.Model):
name=models.CharField(max_length=50)
def __str__(self):
return self.name
class Candidate(models.Model):
myid=models.AutoField(primary_key=True)
election=models.ForeignKey(Election,on_delete=models.CASCADE)
name=models.CharField(max_length=50)
branch=models.CharField(max_length=50)
work_experience=models.TextField()
user=models.CharField(max_length=30)
vote_count=models.IntegerField(default=0)
profile_pic=models.ImageField(upload_to='media/',blank=True)
def __str__(self):
return self.name
class Comment(models.Model):
candidate=models.ForeignKey(Candidate,on_delete=models.CASCADE)
user=models.CharField(max_length=30)
comment_content=models.CharField(max_length=3000)
comment_time=models.DateTimeField('Comment Time')
def __str__(self):
return self.comment_content
def isCandidate(self):
		return self.candidate.user==self.user
class Voter(models.Model):
election=models.ForeignKey(Election,on_delete=models.CASCADE)
user=models.CharField(max_length=30) | mit | -7,296,916,089,330,264,000 | 31.740741 | 64 | 0.760611 | false |
mpapazog/meraki-python | deploydevices.py | 1 | 16568 | # This is a script to claim a number of devices into Dashboard, create a network for them and bind
# the network to a pre-existing template. Optionally you can also claim a license key. Switch networks
# must be eligible for auto-bind (Auto-bind is not valid unless the switch template has at least
# one profile and has at most one profile per switch model.)
#
# You need to have Python 3 and the Requests module installed. You
# can download the module here: https://github.com/kennethreitz/requests
# or install it using pip.
#
# To run the script, enter:
# python deploydevices.py -k <key> -o <org> -s <sn> -n <netw> -c <cfg_tmpl> [-t <tags>] [-a <addr>] [-m ignore_error]
#
# To make script chaining easier, all lines containing informational messages to the user
# start with the character @
#
# This file was last modified on 2017-07-05
import sys, getopt, requests, json
def printusertext(p_message):
#prints a line of text that is meant for the user to read
#do not process these lines when chaining scripts
print('@ %s' % p_message)
def printhelp():
#prints help text
printusertext('This is a script to claim MR, MS and MX devices into Dashboard, create a new network for them')
printusertext(' and bind the network to a pre-existing template. The script can also claim license capacity.')
printusertext('')
printusertext('To run the script, enter:')
printusertext('python deploydevices.py -k <key> -o <org> -s <sn> -n <netw> -c <cfg_tmpl> [-t <tags>] [-a <addr>] [-m ignore_error]')
printusertext('')
printusertext('<key>: Your Meraki Dashboard API key')
printusertext('<org>: Name of the Meraki Dashboard Organization to modify')
printusertext('<sn>: Serial number of the devices to claim. Use double quotes and spaces to enter')
printusertext(' multiple serial numbers. Example: -s "AAAA-BBBB-CCCC DDDD-EEEE-FFFF"')
printusertext(' You can also enter a license key as a serial number to claim along with devices')
printusertext('<netw>: Name the new network will have')
printusertext('<cfg_template>: Name of the config template the new network will bound to')
printusertext('-t <tags>: Optional parameter. If defined, network will be tagged with the given tags')
printusertext('-a <addr>: Optional parameter. If defined, devices will be moved to given street address')
printusertext('-m ignore_error: Optional parameter. If defined, the script will not stop if network exists')
printusertext('')
printusertext('Example:')
printusertext('python deploydevices.py -k 1234 -o MyCustomer -s XXXX-YYYY-ZZZZ -n "SF Branch" -c MyCfgTemplate')
printusertext('')
printusertext('Use double quotes ("") in Windows to pass arguments containing spaces. Names are case-sensitive.')
def getorgid(p_apikey, p_orgname):
#looks up org id for a specific org name
#on failure returns 'null'
r = requests.get('https://dashboard.meraki.com/api/v0/organizations', headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
if r.status_code != requests.codes.ok:
return 'null'
rjson = r.json()
for record in rjson:
if record['name'] == p_orgname:
return record['id']
return('null')
def getshardurl(p_apikey, p_orgid):
#Looks up shard URL for a specific org. Use this URL instead of 'dashboard.meraki.com'
# when making API calls with API accounts that can access multiple orgs.
#On failure returns 'null'
r = requests.get('https://dashboard.meraki.com/api/v0/organizations/%s/snmp' % p_orgid, headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
if r.status_code != requests.codes.ok:
return 'null'
rjson = r.json()
return(rjson['hostname'])
def getnwid(p_apikey, p_shardurl, p_orgid, p_nwname):
#looks up network id for a network name
#on failure returns 'null'
r = requests.get('https://%s/api/v0/organizations/%s/networks' % (p_shardurl, p_orgid), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
if r.status_code != requests.codes.ok:
return 'null'
rjson = r.json()
for record in rjson:
if record['name'] == p_nwname:
return record['id']
return('null')
def createnw(p_apikey, p_shardurl, p_dstorg, p_nwdata):
#creates network if one does not already exist with the same name
#check if network exists
getnwresult = getnwid(p_apikey, p_shardurl, p_dstorg, p_nwdata['name'])
if getnwresult != 'null':
printusertext('WARNING: Skipping network "%s" (Already exists)' % p_nwdata['name'])
return('null')
if p_nwdata['type'] == 'combined':
#find actual device types
nwtype = 'wireless switch appliance'
else:
nwtype = p_nwdata['type']
if nwtype != 'systems manager':
r = requests.post('https://%s/api/v0/organizations/%s/networks' % (p_shardurl, p_dstorg), data=json.dumps({'timeZone': p_nwdata['timeZone'], 'tags': p_nwdata['tags'], 'name': p_nwdata['name'], 'organizationId': p_dstorg, 'type': nwtype}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
else:
printusertext('WARNING: Skipping network "%s" (Cannot create SM networks)' % p_nwdata['name'])
return('null')
return('ok')
def gettemplateid(p_apikey, p_shardurl, p_orgid, p_tname):
#looks up config template id for a config template name
#on failure returns 'null'
r = requests.get('https://%s/api/v0/organizations/%s/configTemplates' % (p_shardurl, p_orgid), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
if r.status_code != requests.codes.ok:
return 'null'
rjson = r.json()
for record in rjson:
if record['name'] == p_tname:
return record['id']
return('null')
def bindnw(p_apikey, p_shardurl, p_nwid, p_templateid, p_autobind):
#binds a network to a template
if p_autobind:
autobindvalue = 'true'
else:
autobindvalue = 'false'
r = requests.post('https://%s/api/v0/networks/%s/bind' % (p_shardurl, p_nwid), data=json.dumps({'configTemplateId': p_templateid, 'autoBind': autobindvalue}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
if r.status_code != requests.codes.ok:
return 'null'
return('ok')
def claimdeviceorg(p_apikey, p_shardurl, p_orgid, p_devserial):
#claims a device into an org without adding to a network
r = requests.post('https://%s/api/v0/organizations/%s/claim' % (p_shardurl, p_orgid), data=json.dumps({'serial': p_devserial}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
return(0)
def claimlicenseorg(p_apikey, p_shardurl, p_orgid, p_licensekey):
#claims a license key into an org
r = requests.post('https://%s/api/v0/organizations/%s/claim' % (p_shardurl, p_orgid), data=json.dumps({'licenseKey': p_licensekey, 'licenseMode': 'addDevices'}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
return(0)
def claimdevice(p_apikey, p_shardurl, p_nwid, p_devserial):
#claims a device into a network
r = requests.post('https://%s/api/v0/networks/%s/devices/claim' % (p_shardurl, p_nwid), data=json.dumps({'serial': p_devserial}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
return(0)
def getdeviceinfo(p_apikey, p_shardurl, p_nwid, p_serial):
#returns info for a single device
#on failure returns lone device record, with serial number 'null'
r = requests.get('https://%s/api/v0/networks/%s/devices/%s' % (p_shardurl, p_nwid, p_serial), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
returnvalue = []
if r.status_code != requests.codes.ok:
returnvalue = {'serial':'null', 'model':'null'}
return(returnvalue)
rjson = r.json()
return(rjson)
def setdevicedata(p_apikey, p_shardurl, p_nwid, p_devserial, p_field, p_value, p_movemarker):
#modifies value of device record. Returns the new value
#on failure returns one device record, with all values 'null'
#p_movemarker is boolean: True/False
movevalue = "false"
if p_movemarker:
movevalue = "true"
r = requests.put('https://%s/api/v0/networks/%s/devices/%s' % (p_shardurl, p_nwid, p_devserial), data=json.dumps({p_field: p_value, 'moveMapMarker': movevalue}), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
if r.status_code != requests.codes.ok:
return ('null')
return('ok')
def getorgdeviceinfo (p_apikey, p_shardurl, p_orgid, p_devserial):
#gets basic device info from org inventory. device does not need to be part of a network
r = requests.get('https://%s/api/v0/organizations/%s/inventory' % (p_shardurl, p_orgid), headers={'X-Cisco-Meraki-API-Key': p_apikey, 'Content-Type': 'application/json'})
returnvalue = {}
if r.status_code != requests.codes.ok:
returnvalue = {'serial':'null', 'model':'null'}
return(returnvalue)
rjson = r.json()
foundserial = False
for record in rjson:
if record['serial'] == p_devserial:
foundserial = True
returnvalue = {'mac': record['mac'], 'serial': record['serial'], 'networkId': record['networkId'], 'model': record['model'], 'claimedAt': record['claimedAt'], 'publicIp': record['publicIp']}
if not foundserial:
returnvalue = {'serial':'null', 'model':'null'}
return(returnvalue)
def main(argv):
#set default values for command line arguments
arg_apikey = 'null'
arg_orgname = 'null'
arg_serial = 'null'
arg_nwname = 'null'
arg_template = 'null'
arg_modexisting = 'null'
arg_address = 'null'
arg_nwtags = 'null'
#get command line arguments
try:
opts, args = getopt.getopt(argv, 'hk:o:s:n:c:m:a:t:')
except getopt.GetoptError:
printhelp()
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
printhelp()
sys.exit()
elif opt == '-k':
arg_apikey = arg
elif opt == '-o':
arg_orgname = arg
elif opt == '-s':
arg_serial = arg
elif opt == '-n':
arg_nwname = arg
elif opt == '-c':
arg_template = arg
elif opt == '-m':
arg_modexisting = arg
elif opt == '-a':
arg_address = arg
elif opt == '-t':
arg_nwtags = arg
    #check if all required parameters have been given
if arg_apikey == 'null' or arg_orgname == 'null' or arg_serial == 'null' or arg_nwname == 'null' or arg_template == 'null':
printhelp()
sys.exit(2)
#set optional flag to ignore error if network already exists
stoponerror = True
if arg_modexisting == 'ignore_error':
stoponerror = False
#get organization id corresponding to org name provided by user
orgid = getorgid(arg_apikey, arg_orgname)
if orgid == 'null':
printusertext('ERROR: Fetching organization failed')
sys.exit(2)
#get shard URL where Org is stored
shardurl = getshardurl(arg_apikey, orgid)
if shardurl == 'null':
printusertext('ERROR: Fetching Meraki cloud shard URL failed')
sys.exit(2)
#make sure that a network does not already exist with the same name
nwid = getnwid(arg_apikey, shardurl, orgid, arg_nwname)
if nwid != 'null' and stoponerror:
printusertext('ERROR: Network with that name already exists')
sys.exit(2)
#get template ID for template name argument
templateid = gettemplateid(arg_apikey, shardurl, orgid, arg_template)
if templateid == 'null':
printusertext('ERROR: Unable to find template: ' + arg_template)
sys.exit(2)
#get serial numbers from parameter -s
devicelist = {}
devicelist['serial'] = arg_serial.split(" ")
devicelist['model'] = []
for i in range (0, len(devicelist['serial']) ):
claimdeviceorg(arg_apikey, shardurl, orgid, devicelist['serial'][i])
#check if device has been claimed successfully
deviceinfo = getorgdeviceinfo (arg_apikey, shardurl, orgid, devicelist['serial'][i])
if deviceinfo['serial'] == 'null':
printusertext('INFO: Serial number %s is a license or unsupported device' % devicelist['serial'][i])
claimlicenseorg(arg_apikey, shardurl, orgid, devicelist['serial'][i])
devicelist['model'].append(deviceinfo['model'])
#compile list of different product types in order to create correct type of network
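    # (Z-series devices are Meraki teleworker gateways, treated as MX/appliance)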
devicetypes = {'mx': False, 'ms': False, 'mr': False}
for record in devicelist['model']:
if record [:2] == 'MX' or record [:1] == 'Z':
devicetypes['mx'] = True
elif record [:2] == 'MS':
devicetypes['ms'] = True
elif record [:2] == 'MR':
devicetypes['mr'] = True
#build network type string for network creation
nwtypestring = ''
if devicetypes['mr']:
nwtypestring += 'wireless'
if len(nwtypestring) > 0:
nwtypestring += ' '
if devicetypes['ms']:
nwtypestring += 'switch'
if len(nwtypestring) > 0:
nwtypestring += ' '
if devicetypes['mx']:
nwtypestring += 'appliance'
#compile parameters to create network
nwtags = ''
if arg_nwtags != 'null':
nwtags = arg_nwtags
### NOTE THAT TIMEZONE IS HARDCODED IN THIS SCRIPT. EDIT THE LINE BELOW TO MODIFY ###
nwparams = {'name': arg_nwname, 'timeZone': 'Europe/Helsinki', 'tags': nwtags, 'organizationId': orgid, 'type': nwtypestring}
#create network and get its ID
if nwid == 'null':
createstatus = createnw (arg_apikey, shardurl, orgid, nwparams)
if createstatus == 'null':
printusertext('ERROR: Unable to create network')
sys.exit(2)
nwid = getnwid(arg_apikey, shardurl, orgid, arg_nwname)
if nwid == 'null':
printusertext('ERROR: Unable to get ID for new network')
sys.exit(2)
#clean up serials list to filter out licenses, MVs, etc
validserials = []
for i in range (0, len(devicelist['serial']) ):
if devicelist['model'][i][:2] == 'MR' or devicelist['model'][i][:2] == 'MS' or devicelist['model'][i][:2] == 'MX' or devicelist['model'][i][:1] == 'Z':
validserials.append(devicelist['serial'][i])
for devserial in validserials:
#claim device into newly created network
claimdevice(arg_apikey, shardurl, nwid, devserial)
#check if device has been claimed successfully
deviceinfo = getdeviceinfo(arg_apikey, shardurl, nwid, devserial)
if deviceinfo['serial'] == 'null':
printusertext('ERROR: Claiming or moving device unsuccessful')
sys.exit(2)
#set device hostname
hostname = deviceinfo['model'] + '_' + devserial
setdevicedata(arg_apikey, shardurl, nwid, devserial, 'name', hostname, False)
#if street address is given as a parameter, set device location
if arg_address != 'null':
setdevicedata(arg_apikey, shardurl, nwid, devserial, 'address', arg_address, True)
#bind network to template. If switches in template, attempt to autobind them
bindstatus = bindnw(arg_apikey, shardurl, nwid, templateid, devicetypes['ms'])
if bindstatus == 'null' and stoponerror:
printusertext('ERROR: Unable to bind network to template')
sys.exit(2)
printusertext('End of script.')
if __name__ == '__main__':
main(sys.argv[1:]) | mit | -7,332,367,188,420,823,000 | 41.038961 | 328 | 0.612808 | false |
ppnchb/python-automata-simulator | automata/reader.py | 1 | 1056 | __author__ = 'Hyunsoo'
import re
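# Assumed input layout (hypothetical example, inferred from the accessors
# below): row 0 is a header whose cells from column 2 onwards hold the
# vocabulary symbols; each remaining row is [final-marker, state, cells...],
# and a cell may pack values for several automata separated by '; ' --
# getPartialData() selects the index-th one, e.g.:
#   [['',  '',   'a',      'b' ],
#    ['',  'q0', 'q1',     'q0'],
#    ['F', 'q1', 'q1; q0', 'q0']]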
def getStates(data):
states = []
for line in data[1:]:
state = line[1]
assert state not in states
states.append(state)
return states
def getVocabulary(data):
vocabulary = []
line = data[0][2:]
for symbol in line:
assert len(symbol) <= 1 and symbol not in vocabulary
vocabulary.append(symbol)
return vocabulary
def getInitialState(data):
return data[1][1]
def getFinalState(data):
finalStates = []
for line in data[1:]:
if len(line[0])>0:
finalStates.append(line[1])
return finalStates
def getTable(data):
table = [line[1:] for line in data]
return table
def getPartialData(data, index):
height, width = len(data), len(data[0])
result = [row[:] for row in data]
for row in range(1, height):
for column in range(2, width):
tableData = re.split(';\s+', result[row][column])
assert len(tableData)>index
result[row][column]=tableData[index]
return result | gpl-2.0 | 8,983,291,796,607,273,000 | 26.815789 | 61 | 0.606061 | false |
PnX-SI/GeoNature | backend/geonature/utils/env.py | 1 | 1539 | """ Helpers to manipulate the execution environment """
import os
import subprocess
from pathlib import Path
import pkg_resources
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
from flask_mail import Mail
from flask_migrate import Migrate
# Must be at top of this file. I don't know why (?)
MAIL = Mail()
from flask import current_app
# Define GEONATURE_VERSION before import config_shema module
# because GEONATURE_VERSION is imported in this module
ROOT_DIR = Path(__file__).absolute().parent.parent.parent.parent
try:
GEONATURE_VERSION = pkg_resources.get_distribution("geonature").version
except pkg_resources.DistributionNotFound:
with open(str((ROOT_DIR / "VERSION"))) as v:
GEONATURE_VERSION = v.read()
BACKEND_DIR = ROOT_DIR / "backend"
DEFAULT_CONFIG_FILE = ROOT_DIR / "config/geonature_config.toml"
os.environ['FLASK_SQLALCHEMY_DB'] = 'geonature.utils.env.DB'
os.environ['FLASK_MARSHMALLOW'] = 'geonature.utils.env.MA'
DB = SQLAlchemy()
MA = Marshmallow()
migrate = Migrate()
GN_MODULE_FILES = (
"manifest.toml",
"__init__.py",
"backend/__init__.py",
"backend/blueprint.py",
)
GN_EXTERNAL_MODULE = ROOT_DIR / "external_modules"
GN_MODULE_FE_FILE = "frontend/app/gnModule.module"
def import_requirements(req_file):
from geonature.utils.errors import GeoNatureError
cmd_return = subprocess.call(["pip", "install", "-r", req_file])
if cmd_return != 0:
raise GeoNatureError("Error while installing module backend dependencies")
| gpl-3.0 | -4,124,683,430,628,901,000 | 25.534483 | 82 | 0.725146 | false |
SKIRT/PTS | modeling/component/images.py | 1 | 4916 | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.component.sed Contains the ImagesModelingComponent class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
from abc import ABCMeta
# Import astronomical modules
from astropy.io.fits import Header
# Import the relevant PTS classes and modules
from .component import ModelingComponent
from ...core.tools import filesystem as fs
from ...core.simulation.skifile import SkiFile
from ...magic.basics.coordinatesystem import CoordinateSystem
from pts.core.tools.utils import lazyproperty
# -----------------------------------------------------------------
class ImagesModelingComponent(ModelingComponent):
"""
This class...
"""
__metaclass__ = ABCMeta
# -----------------------------------------------------------------
def __init__(self, *args, **kwargs):
"""
The constructor ...
:param args:
:param kwargs:
:return:
"""
# Call the constructor of the base class
super(ImagesModelingComponent, self).__init__(*args, **kwargs)
# -----------------------------------------------------------------
def setup(self, **kwargs):
"""
This function ...
:return:
"""
# Call the setup function of the base class
super(ImagesModelingComponent, self).setup()
# Load the environment
# NO WE NEED TO DO IT IN THE BASE CLASS BECAUSE E.G. THE FITTINGCOMPONENT DIRECTLY INHERITS FROM THIS CLASS BUT ALSO NEEDS THE ENVIRONMENT
#self.environment = ImagesModelingEnvironment(self.config.path)
# -----------------------------------------------------------------
@lazyproperty
def images_header(self):
"""
This function ...
:return:
"""
return Header.fromtextfile(self.environment.images_header_path)
# -----------------------------------------------------------------
@lazyproperty
def images_wcs(self):
"""
This function ...
:return:
"""
return CoordinateSystem.from_file(self.environment.images_header_path)
# -----------------------------------------------------------------
def get_images_path(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
return fs.join(modeling_path, "images")
# -----------------------------------------------------------------
def get_images_paths(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
return fs.files_in_path(get_images_path(modeling_path), extension="fits")
# -----------------------------------------------------------------
def get_images_header_path(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
return fs.join(get_images_path(modeling_path), "header.txt")
# -----------------------------------------------------------------
def get_images_header(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
    return Header.fromtextfile(get_images_header_path(modeling_path))
# -----------------------------------------------------------------
def get_images_wcs(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
return CoordinateSystem.from_file(get_images_header_path(modeling_path))
# -----------------------------------------------------------------
def get_ski_template_path(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
return fs.join(modeling_path, "template.ski")
# -----------------------------------------------------------------
def get_ski_template(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
return SkiFile(get_ski_template_path(modeling_path))
# -----------------------------------------------------------------
def get_ski_input_path(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
return fs.join(modeling_path, "input")
# -----------------------------------------------------------------
def get_ski_input_paths(modeling_path):
"""
This function ...
:param modeling_path:
:return:
"""
path = get_ski_input_path(modeling_path)
if not fs.is_directory(path): return None
else: return fs.files_in_path(path)
# -----------------------------------------------------------------
| agpl-3.0 | 6,299,047,027,472,003,000 | 23.331683 | 146 | 0.467548 | false |
Ultimaker/Uranium | plugins/LocalFileOutputDevice/LocalFileOutputDevice.py | 1 | 10399 | # Copyright (c) 2021 Ultimaker B.V.
# Uranium is released under the terms of the LGPLv3 or higher.
import os
import sys
from PyQt5.QtCore import QUrl
from PyQt5.QtGui import QDesktopServices
from PyQt5.QtWidgets import QFileDialog, QMessageBox
from UM.Application import Application
from UM.FileHandler.WriteFileJob import WriteFileJob
from UM.Logger import Logger
from UM.Mesh.MeshWriter import MeshWriter
from UM.Message import Message
from UM.OutputDevice import OutputDeviceError
from UM.OutputDevice.OutputDevice import OutputDevice
from UM.OutputDevice.ProjectOutputDevice import ProjectOutputDevice
from UM.i18n import i18nCatalog
catalog = i18nCatalog("uranium")
class LocalFileOutputDevice(ProjectOutputDevice):
"""Implements an OutputDevice that supports saving to arbitrary local files."""
def __init__(self, add_to_output_devices: bool = True, parent = None):
super().__init__(device_id = "local_file", add_to_output_devices = add_to_output_devices, parent = parent)
self.setName(catalog.i18nc("@item:inmenu", "Local File"))
self.setShortDescription(catalog.i18nc("@action:button Preceded by 'Ready to'.", "Save to Disk"))
self.setDescription(catalog.i18nc("@info:tooltip", "Save to Disk"))
self.setIconName("save")
self.shortcut = "Ctrl+S"
self.menu_entry_text = catalog.i18nc("@item:inmenu About saving files to the hard drive", "To Disk")
self._writing = False
def requestWrite(self, nodes, file_name = None, limit_mimetypes = None, file_handler = None, **kwargs):
"""Request the specified nodes to be written to a file.
:param nodes: A collection of scene nodes that should be written to the
file.
:param file_name: A suggestion for the file name to write
to. Can be freely ignored if providing a file name makes no sense.
:param limit_mimetypes: Should we limit the available MIME types to the
MIME types available to the currently active machine?
:param kwargs: Keyword arguments.
"""
if self._writing:
raise OutputDeviceError.DeviceBusyError()
# Set up and display file dialog
dialog = QFileDialog()
dialog.setWindowTitle(catalog.i18nc("@title:window", "Save to Disk"))
dialog.setFileMode(QFileDialog.AnyFile)
dialog.setAcceptMode(QFileDialog.AcceptSave)
# Ensure platform never ask for overwrite confirmation since we do this ourselves
dialog.setOption(QFileDialog.DontConfirmOverwrite)
if sys.platform == "linux" and "KDE_FULL_SESSION" in os.environ:
dialog.setOption(QFileDialog.DontUseNativeDialog)
filters = []
mime_types = []
selected_filter = None
if "preferred_mimetypes" in kwargs and kwargs["preferred_mimetypes"] is not None:
preferred_mimetypes = kwargs["preferred_mimetypes"]
else:
preferred_mimetypes = Application.getInstance().getPreferences().getValue("local_file/last_used_type")
preferred_mimetype_list = preferred_mimetypes.split(";")
if not file_handler:
file_handler = Application.getInstance().getMeshFileHandler()
file_types = file_handler.getSupportedFileTypesWrite()
file_types.sort(key = lambda k: k["description"])
if limit_mimetypes:
file_types = list(filter(lambda i: i["mime_type"] in limit_mimetypes, file_types))
file_types = [ft for ft in file_types if not ft["hide_in_file_dialog"]]
if len(file_types) == 0:
Logger.log("e", "There are no file types available to write with!")
raise OutputDeviceError.WriteRequestFailedError(catalog.i18nc("@info:warning", "There are no file types available to write with!"))
# Find the first available preferred mime type
preferred_mimetype = None
for mime_type in preferred_mimetype_list:
if any(ft["mime_type"] == mime_type for ft in file_types):
preferred_mimetype = mime_type
break
extension_added = False
for item in file_types:
type_filter = "{0} (*.{1})".format(item["description"], item["extension"])
filters.append(type_filter)
mime_types.append(item["mime_type"])
if preferred_mimetype == item["mime_type"]:
selected_filter = type_filter
if file_name and not extension_added:
extension_added = True
file_name += "." + item["extension"]
# CURA-6411: This code needs to be before dialog.selectFile and the filters, because otherwise in macOS (for some reason) the setDirectory call doesn't work.
stored_directory = Application.getInstance().getPreferences().getValue("local_file/dialog_save_path")
if stored_directory and stored_directory != "":
dialog.setDirectory(stored_directory)
# Add the file name before adding the extension to the dialog
if file_name is not None:
dialog.selectFile(file_name)
dialog.setNameFilters(filters)
if selected_filter is not None:
dialog.selectNameFilter(selected_filter)
if not dialog.exec_():
raise OutputDeviceError.UserCanceledError()
save_path = dialog.directory().absolutePath()
Application.getInstance().getPreferences().setValue("local_file/dialog_save_path", save_path)
selected_type = file_types[filters.index(dialog.selectedNameFilter())]
Application.getInstance().getPreferences().setValue("local_file/last_used_type", selected_type["mime_type"])
# Get file name from file dialog
file_name = dialog.selectedFiles()[0]
Logger.log("d", "Writing to [%s]..." % file_name)
if os.path.exists(file_name):
result = QMessageBox.question(None, catalog.i18nc("@title:window", "File Already Exists"), catalog.i18nc("@label Don't translate the XML tag <filename>!", "The file <filename>{0}</filename> already exists. Are you sure you want to overwrite it?").format(file_name))
if result == QMessageBox.No:
raise OutputDeviceError.UserCanceledError()
self.writeStarted.emit(self)
# Actually writing file
if file_handler:
file_writer = file_handler.getWriter(selected_type["id"])
else:
file_writer = Application.getInstance().getMeshFileHandler().getWriter(selected_type["id"])
try:
mode = selected_type["mode"]
if mode == MeshWriter.OutputMode.TextMode:
Logger.log("d", "Writing to Local File %s in text mode", file_name)
stream = open(file_name, "wt", encoding = "utf-8")
elif mode == MeshWriter.OutputMode.BinaryMode:
Logger.log("d", "Writing to Local File %s in binary mode", file_name)
stream = open(file_name, "wb")
else:
Logger.log("e", "Unrecognised OutputMode.")
return None
job = WriteFileJob(file_writer, stream, nodes, mode)
job.setFileName(file_name)
job.setAddToRecentFiles(True) # The file will be added into the "recent files" list upon success
job.progress.connect(self._onJobProgress)
job.finished.connect(self._onWriteJobFinished)
message = Message(catalog.i18nc("@info:progress Don't translate the XML tags <filename>!", "Saving to <filename>{0}</filename>").format(file_name),
0, False, -1 , catalog.i18nc("@info:title", "Saving"))
message.show()
job.setMessage(message)
self._writing = True
job.start()
except PermissionError as e:
Logger.log("e", "Permission denied when trying to write to %s: %s", file_name, str(e))
raise OutputDeviceError.PermissionDeniedError(catalog.i18nc("@info:status Don't translate the XML tags <filename>!", "Permission denied when trying to save <filename>{0}</filename>").format(file_name)) from e
except OSError as e:
Logger.log("e", "Operating system would not let us write to %s: %s", file_name, str(e))
raise OutputDeviceError.WriteRequestFailedError(catalog.i18nc("@info:status Don't translate the XML tags <filename> or <message>!", "Could not save to <filename>{0}</filename>: <message>{1}</message>").format(file_name, str(e))) from e
def _onJobProgress(self, job, progress):
self.writeProgress.emit(self, progress)
def _onWriteJobFinished(self, job):
self._writing = False
self.writeFinished.emit(self)
if job.getResult():
self.writeSuccess.emit(self)
message = Message(catalog.i18nc("@info:status Don't translate the XML tags <filename>!", "Saved to <filename>{0}</filename>").format(job.getFileName()), title = catalog.i18nc("@info:title", "File Saved"))
message.addAction("open_folder", catalog.i18nc("@action:button", "Open Folder"), "open-folder", catalog.i18nc("@info:tooltip", "Open the folder containing the file"))
message._folder = os.path.dirname(job.getFileName())
message.actionTriggered.connect(self._onMessageActionTriggered)
message.show()
else:
message = Message(catalog.i18nc("@info:status Don't translate the XML tags <filename> or <message>!", "Could not save to <filename>{0}</filename>: <message>{1}</message>").format(job.getFileName(), str(job.getError())), lifetime = 0, title = catalog.i18nc("@info:title", "Warning"))
message.show()
self.writeError.emit(self)
try:
job.getStream().close()
except (OSError, PermissionError): #When you don't have the rights to do the final flush or the disk is full.
message = Message(catalog.i18nc("@info:status", "Something went wrong saving to <filename>{0}</filename>: <message>{1}</message>").format(job.getFileName(), str(job.getError())), title = catalog.i18nc("@info:title", "Error"))
message.show()
self.writeError.emit(self)
def _onMessageActionTriggered(self, message, action):
if action == "open_folder" and hasattr(message, "_folder"):
QDesktopServices.openUrl(QUrl.fromLocalFile(message._folder))
| lgpl-3.0 | 5,876,116,264,392,756,000 | 48.755981 | 294 | 0.650832 | false |
halbbob/dff | modules/viewer/cat.py | 1 | 5050 | # DFF -- An Open Source Digital Forensics Framework
# Copyright (C) 2009-2011 ArxSys
# This program is free software, distributed under the terms of
# the GNU General Public License Version 2. See the LICENSE file
# at the top of the source tree.
#
# See http://www.digital-forensic.org for more information about this
# project. Please do not directly contact any of the maintainers of
# DFF for assistance; the project provides a web site, mailing lists
# and IRC channels for your use.
#
# Author(s):
# Solal Jacob <[email protected]>
# Jeremy MOUNIER <[email protected]>
__dff_module_cat_version__ = "1.0.0"
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from api.vfs import *
from api.module.module import *
from api.module.script import *
from api.types.libtypes import Argument, typeId
class TextEdit(QTextEdit):
def __init__(self, cat):
QTextEdit.__init__(self)
self.cat = cat
self.scroll = self.cat.scroll
self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.setReadOnly(1)
self.setWordWrapMode(QTextOption.NoWrap)
def wheelEvent(self, event):
v = self.scroll.value()
if event.delta() > 0:
trig = v - 5
if trig >= self.scroll.min:
self.cat.read(trig)
self.scroll.setValue(trig)
else:
trig = v + 5
if trig < self.scroll.max:
self.cat.read(trig)
self.scroll.setValue(trig)
class Scroll(QScrollBar):
def __init__(self, parent):
QScrollBar.__init__(self, parent)
self.cat = parent
self.init()
self.initCallBacks()
self.setValues()
def init(self):
self.min = 0
self.single = 1
self.page = 32
self.max = self.cat.lines - 1
def initCallBacks(self):
self.connect(self, SIGNAL("sliderMoved(int)"), self.moved)
self.connect(self, SIGNAL("actionTriggered(int)"), self.triggered)
def setValues(self):
self.setMinimum(self.min)
self.setMaximum(self.max)
self.setSingleStep(self.single)
self.setPageStep(self.page)
self.setRange(self.min, self.max)
def triggered(self, action):
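        # translate scrollbar actions into line jumps (arrows: 1 line,
        # page up/down: 5 lines) and re-read the buffer at the new line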
if action == QAbstractSlider.SliderSingleStepAdd:
trig = self.value() + 1
if trig <= self.max:
self.cat.read(trig)
elif action == QAbstractSlider.SliderSingleStepSub:
trig = self.value() - 1
if trig >= self.min:
self.cat.read(trig)
elif action == QAbstractSlider.SliderPageStepSub:
trig = self.value() - 5
if trig >= self.min:
self.cat.read(trig)
elif action == QAbstractSlider.SliderPageStepAdd:
trig = self.value() + 5
if trig <= self.max:
self.cat.read(trig)
def moved(self, value):
if value == self.max:
value -= 5
self.cat.read(value)
class CAT(QWidget, Script):
def __init__(self):
Script.__init__(self, "cat")
self.vfs = vfs.vfs()
self.type = "cat"
self.icon = None
def start(self, args):
self.args = args
try:
self.node = args["file"].value()
except:
pass
def g_display(self):
QWidget.__init__(self)
self.vfile = self.node.open()
self.offsets = self.linecount()
self.initShape()
self.read(0)
def initShape(self):
self.hbox = QHBoxLayout()
self.hbox.setContentsMargins(0, 0, 0, 0)
self.scroll = Scroll(self)
self.text = TextEdit(self)
self.hbox.addWidget(self.text)
self.hbox.addWidget(self.scroll)
self.setLayout(self.hbox)
def read(self, line):
padd = 0
if line > padd:
padd = 1
self.vfile.seek(self.offsets[line]+padd)
self.text.clear()
self.text.textCursor().insertText(QString.fromUtf8(self.vfile.read(1024*10)))
self.text.moveCursor(QTextCursor.Start)
def linecount(self):
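        # offsets[0] is the start of the file, the rest are newline positions;
        # read() seeks one byte past them to start at the following line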
offsets = [0]
offsets.extend(self.vfile.indexes('\n'))
self.lines = len(offsets)
return offsets
def updateWidget(self):
pass
def c_display(self):
file = self.node.open()
fsize = self.node.size()
size = 0
self.buff = ""
while size < fsize:
try:
tmp = file.read(4096)
except vfsError, e:
print self.buff
break
if len(tmp) == 0:
print tmp
break
size += len(tmp)
self.buff += tmp
print tmp
file.close()
if len(self.buff):
return self.buff
class cat(Module):
"""Show text file content
ex:cat /myfile.txt"""
def __init__(self):
Module.__init__(self, "text", CAT)
self.conf.addArgument({"name": "file",
"description": "Text file to display",
"input": Argument.Required|Argument.Single|typeId.Node})
self.conf.addConstant({"name": "mime-type",
"type": typeId.String,
"description": "managed mime type",
"values": ["HTML", "ASCII", "XML", "text"]})
self.tags = "Viewers"
self.flags = ["console", "gui"]
self.icon = ":text"
| gpl-2.0 | -1,013,139,386,503,417,700 | 25.719577 | 83 | 0.611089 | false |
gitcoinco/web | app/grants/management/commands/find_deadbeat_grants.py | 1 | 1292 | '''
Copyright (C) 2021 Gitcoin Core
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from django.core.management.base import BaseCommand
from marketing.mails import notify_deadbeat_grants
class Command(BaseCommand):
    help = 'finds grants stuck without a deployed contract and notifies about them'
def handle(self, *args, **options):
from grants.models import Grant
from django.utils import timezone
before = timezone.now() - timezone.timedelta(hours=6)
grants = Grant.objects.filter(contract_address='0x0', contract_version__lt=2, active=True, created_on__lt=before)
if grants.count():
notify_deadbeat_grants(grants)
| agpl-3.0 | -7,652,814,746,059,988,000 | 35.914286 | 121 | 0.720588 | false |
laszlokiraly/OffenesParlament | offenesparlament/op_scraper/scraper/parlament/resources/extractors/prelaw.py | 2 | 3197 | import datetime
from django.utils.html import remove_tags
from scrapy import Selector
from parlament.resources.extractors import SingleExtractor
from parlament.resources.extractors import MultiExtractor
from parlament.resources.extractors.law import LAW
from parlament.resources.util import _clean
# import the logging library
import logging
# Get an instance of a logger
logger = logging.getLogger(__name__)
class PRELAW:
class DESCRIPTION(SingleExtractor):
XPATH = "//div[contains(concat(' ', normalize-space(@class), ' '), ' c_2 ')]/h3/following-sibling::p/text()"
@classmethod
def xt(cls, response):
            try:
                description = response.xpath(cls.XPATH).extract()[0]
            except IndexError:
                logger.warning("no pre-law description found")
                return u""
            return remove_tags(description, 'p')
class STEPS(MultiExtractor):
XPATH = "//table[contains(@class,'tabelleHistorie')]"
@classmethod
def xt(cls, response):
steps = []
raw_table = response.xpath(cls.XPATH)[0]
raw_steps = Selector(text=raw_table.extract()).xpath('//tr')[1:] # ignore header
for index, step in enumerate(raw_steps, start=1):
step_selector = Selector(text=step.extract())
title = LAW.PHASES.STEPS.TITLE.xt(step_selector)
date_str = LAW.PHASES.STEPS.DATE.xt(step_selector)
date = datetime.datetime.strptime(
date_str, "%d.%m.%Y").date()
protocol_url = LAW.PHASES.STEPS.PROTOCOL.xt(step_selector)
steps.append({
'date': date,
'title': title['text'],
'sortkey': str(index).zfill(3),
'protocol_url': protocol_url
})
return steps
class OPINIONS(MultiExtractor):
XPATH = "//div[contains(@class,'filterListe')]//table[contains(@class,'filter')]//tr"
@classmethod
def xt(cls, response):
ops = []
raw_ops = response.xpath(cls.XPATH).extract()
for raw_op in raw_ops[1:]:
op_sel = Selector(text=raw_op)
date = op_sel.xpath('//td[1]').xpath("normalize-space()").extract()[0]
url = op_sel.xpath('//td[2]/a/@href').extract()[0]
parl_id = u"({})".format(
op_sel.xpath('//td[3]/a').xpath('normalize-space()').extract()[0])
title = op_sel.xpath('//td[2]').xpath('normalize-space()').extract()[0]
if title:
title = _clean(title).replace("*", ", ")
else:
title = None
email = None
try:
date = datetime.datetime.strptime(
_clean(date), "%d.%m.%Y").date()
except:
date = None
ops.append({
'date': date,
'url': url,
'email': email,
'title': title,
'parl_id': parl_id
})
return ops
| bsd-2-clause | 2,424,237,312,767,877,000 | 33.376344 | 116 | 0.506099 | false |
MTgeophysics/mtpy | legacy/beta_tests_before_merge/plot_strike.py | 1 | 1281 | # -*- coding: utf-8 -*-
"""
Created on Wed Sep 18 15:35:39 2013
@author: Alison Kirkby
plots strike
fails with error:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\Users\u64125\AppData\Local\Continuum\Miniconda2\envs\mtpy27\lib\site-packages\spyderlib\widgets\externalshell\sitecustomize.py", line 714, in runfile
execfile(filename, namespace)
File "C:\Users\u64125\AppData\Local\Continuum\Miniconda2\envs\mtpy27\lib\site-packages\spyderlib\widgets\externalshell\sitecustomize.py", line 74, in execfile
exec(compile(scripttext, filename, 'exec'), glob, loc)
File "C:/Git/mtpy/examples/tests/plot_strike.py", line 21, in <module>
plotstrike = PlotStrike(fn_list=elst)
File "mtpy\imaging\plotstrike.py", line 240, in __init__
self.plot()
File "mtpy\imaging\plotstrike.py", line 307, in plot
zinv = mt.get_Zinvariants()
AttributeError: 'MTplot' object has no attribute 'get_Zinvariants'
"""
import os
os.chdir(r'C:\Git\mtpy')
from mtpy.imaging.plotstrike import PlotStrike
import os.path as op
import matplotlib.pyplot as plt
# path to edis
epath = r'C:\Git\mtpy\examples\data\edi_files'
elst=[op.join(epath,edi) for edi in os.listdir(epath) if edi.endswith('.edi')][::4]
plotstrike = PlotStrike(fn_list=elst) | gpl-3.0 | 8,677,624,270,188,862,000 | 32.736842 | 160 | 0.733802 | false |
UltrosBot/Ultros | plugins/urls/handlers/handler.py | 1 | 4865 | # coding=utf-8
from kitchen.text.converters import to_unicode
from plugins.urls.matching import REGEX_TYPE
__author__ = 'Gareth Coles'
class URLHandler(object):
"""
URL handler. Subclass this!
You'll want to override both the `call` method and the *criteria* dict.
The former is called if the criteria matches the URL.
In the criteria dict, you're expected to provide values to test equality
for. However, there are a few things to be aware of.
* Leave a key out if you don't care about matching it - None will be
matched against.
* You may provide a compiled regular expression to test against as well.
* Finally, you may provide a callable (function or class), which will be
run for the comparison instead, and should return either True or False.
>>> criteria = {
... # No protocol here, since we don't care about it
    ...     "auth": lambda x: x is not None,
... "domain": re.compile(u"[a-zA-Z]+"),
... "port": lambda x: x > 8080,
... "path": lambda x: len(x) > 10,
... "permission": "urls.trigger.example" # If you need one
... }
...
>>>
Additionally, if the above matching is somehow not good enough for you, you
may override the `match` function.
"""
# Remember to set this, so that there are no conflicting handlers - only
# one handler per name!
name = ""
plugin = None
urls_plugin = None
criteria = {
"protocol": None,
"auth": None,
"domain": None,
"port": None,
"path": None,
# Check the user and source for a permission - This is not a URL field
"permission": None,
}
def __init__(self, plugin):
"""
Initializer. The plugin here is your plugin, not the URLs plugin.
You're expected to initialize this object yourself, so feel free to
override this.
"""
self.plugin = plugin
def call(self, url, context):
"""
Called if the URL matches. Override this or there's basically no point
in having a handler.
*context* here is a dict containing "protocol", "source", and "target"
keys, which you can use to respond to whoever sent the message which
contained the URL.
Return True if this should cascade to any other handlers, or False
if it should end here.
If an exception is raised, it will be caught and we'll move on to the
next handler.
:param url: The URL object that was matched
:param context: Dictionary with the current context, contains
the MessageReceived event under "event" in normal
circumstances
:type url: plugins.urls.url.URL
:type context: dict
:return: constants.STOP_HANDLING or constants.CASCADE
:rtype: int
"""
raise NotImplementedError()
def match(self, url, context):
"""
Decide whether to handle this URL.
This should return True if this handler should handle the URL, or
False if not.
Do not do any actual handling here. You should only override this if
the built-in handling doesn't cover your needs for some reason.
:param url: The URL object to match
:param context: Dictionary with the current context
:return: True if this handler should handle the URL, False otherwise
"""
for key in self.criteria.iterkeys():
value = self.criteria.get(key)
if key == "permission":
event = context["event"]
result = self.plugin.commands.perm_handler.check(
value, event.source, event.target, event.caller
)
if not result:
return False
continue
if callable(value): # Function, lambda, etc
if value(getattr(url, key)):
continue
else:
return False
elif isinstance(value, REGEX_TYPE): # Compiled regex
# Casting due to port, None, etc
if value.match(to_unicode(getattr(url, key))):
continue
else:
return False
elif value == getattr(url, key): # Standard equality test
continue
else:
return False
return True
def teardown(self):
"""
Called when the URLs plugin unloads - Do any saving or cleanup you
need to do here
"""
pass
def reload(self):
"""
Called when the URLs plugin has its configuration reloaded - You are
free to leave this as it is if it isn't relevant to your plugin
"""
pass
| artistic-2.0 | 6,549,978,788,208,118,000 | 29.791139 | 79 | 0.578623 | false |
abinit/abinit | scripts/post_processing/nonlop_dfpt_test/dfpt_test.py | 1 | 13397 | #! /usr/bin/env python
# -*- coding: iso-8859-15 -*-
#set -x
# ===============================================================
# = Tool for ABINIT nonlop routine testing =
# = =
# = Uses an instrumented version of forstrnps routine; =
# = To activate this instrumented version, uncomment all =
# = 'TESTDFPT' sections in forstrnps.F90 file. =
# ===============================================================
import os,sys
# TO BE CUSTOMIZED BY USER
ABINIT_EXE='../../../build/atlas-fftw3/src/98_main/abinit'
#Read argument(s)
n_arg=len(sys.argv)-1
if n_arg < 1 or sys.argv[1] == "--help" or sys.argv[1] == "-help":
print >> sys.stderr, 'Syntax: '+ sys.argv[0]+' --choice choice [--test_case test_case]'
print >> sys.stderr, ' [--signs signs] [--signsdfpt signsdfpt]'
print >> sys.stderr, ' [--idir1 idir1] [--idir2 idir2] [--iatom iatom]'
print >> sys.stderr, ' [--iband iband] --enl [enl]'
print >> sys.stderr, ' choice= 2 : d/d_atm (force)'
print >> sys.stderr, ' 3 : d/d_+strain (stress)'
print >> sys.stderr, ' 5 : d/d_k (ddk)'
print >> sys.stderr, ' 51 : d(right)/d_k, partial dk derivative'
print >> sys.stderr, ' 54k: d2/d_atm.d_k, finite difference on k (effective charge)'
print >> sys.stderr, ' 54a: d2/d_atm.d_k, finite difference on atm (effective charge)'
print >> sys.stderr, ' 55k: d2/d_strain.d_k, finite difference on k (piezo)'
print >> sys.stderr, ' 55s: d2/d_strain.d_k, finite difference on strain (piezo)'
print >> sys.stderr, ' 8 : d2/d_k.d_k, full dk derivative (effective mass)'
print >> sys.stderr, ' 81 : d/d_k[.d(right)_k], partial dk derivative (d_k.d_field)'
print >> sys.stderr, ' test_case = which test case to use (see input_template directory), optional (default=TEST_CAO-1) '
print >> sys.stderr, ' signs = option for nonlop calculation (1:<Psi|Vnl|Psi> or 2:<g|Vnl|Psi>), optional (default=1) '
print >> sys.stderr, ' signsdfpt= option for DFPT nonlop calc. (1:<Psi|Vnl^(1)|Psi> or 2:<g|Vnl^(1)|Psi>), optional (default=signs) '
print >> sys.stderr, ' idir1 = direction of 1st perturbation, optional (default=1)'
print >> sys.stderr, ' idir2 = direction of 2nd perturbation, optional (default=1) '
print >> sys.stderr, ' iatom = index of perturbed atom (direction idir1), optional (default=1) '
print >> sys.stderr, ' iband = compute only band iband, optional (default=all bands)'
print >> sys.stderr, ' enl = option for NL operator (sij: use Sij, dij: use Dij), optional (default=sij)'
sys.exit()
test_type='$';test_case='TEST_CAO-1'
dir1=0;dir2=0;atom=0;band=0;signs=1;signsdfpt=-1;enl='sij'
for ii in range(n_arg+1)[1:]:
arg=sys.argv[ii]
if arg=='-c' or arg=='-choice' or arg=='--choice':
test_type=sys.argv[ii+1]
if arg=='-t' or arg=='-test_case' or arg=='--test_case':
test_case=sys.argv[ii+1]
if arg=='-i1' or arg=='-idir1' or arg=='--idir1':
dir1=int(sys.argv[ii+1])
if arg=='-i2' or arg=='-idir2' or arg=='--idir2':
dir2=int(sys.argv[ii+1])
if arg=='-ia' or arg=='-iatom' or arg=='--iatom':
atom=int(sys.argv[ii+1])
if arg=='-ib' or arg=='-iband' or arg=='--iband':
band=int(sys.argv[ii+1])
if arg=='-s' or arg=='-signs' or arg=='--signs':
signs=int(sys.argv[ii+1])
if arg=='-s' or arg=='-signsdfpt' or arg=='--signsdfpt':
signsdfpt=int(sys.argv[ii+1])
if arg=='-e' or arg=='-enl' or arg=='--enl':
enl=sys.argv[ii+1]
if signsdfpt==-1:signsdfpt=signs
if test_type!='2' and test_type!='3' and \
test_type!='5' and test_type!='51' and \
test_type!='54k' and test_type!='54a' and \
test_type!='55k' and test_type!='55s' and \
test_type!='8' and test_type!='81':
print >> sys.stderr, 'Error: wrong value for choice!'
sys.exit()
if dir2<0 or dir2>6 or dir1<0 or dir1>6:
print >> sys.stderr, 'Error: wrong values for dir1/dir2!'
sys.exit()
if atom<0:
print >> sys.stderr, 'Error: wrong value for iatom!'
sys.exit()
if band<0:
print >> sys.stderr, 'Error: wrong value for iband!'
sys.exit()
if signs!=1 and signs!=2:
print >> sys.stderr, 'Error: wrong value for signs!'
sys.exit()
if signsdfpt!=1 and signsdfpt!=2:
print >> sys.stderr, 'Error: wrong value for signsdfpt!'
sys.exit()
if enl!='sij' and enl!='dij':
print >> sys.stderr, 'Error: wrong value for enl!'
sys.exit()
if (signsdfpt==2 and (test_type=='55k' or test_type=='55s')):
print >> sys.stderr, 'Error: signsdfpt=%s not allowed with choice=%s!' %(signsdfpt,test_type)
sys.exit()
#Name of input dir
GENERIC_INPUT_DIR=test_case
if test_type =='2': # Force
INPUT_DIR=GENERIC_INPUT_DIR+'_ATM'
elif test_type=='3': # Stress
INPUT_DIR=GENERIC_INPUT_DIR+'_STR'
elif test_type=='5': # ddk
INPUT_DIR=GENERIC_INPUT_DIR+'_K'
elif test_type=='51': # d(right)dk
INPUT_DIR=GENERIC_INPUT_DIR+'_K'
elif test_type=='54k': # Effective charge
INPUT_DIR=GENERIC_INPUT_DIR+'_K'
elif test_type=='54a': # Effective charge
INPUT_DIR=GENERIC_INPUT_DIR+'_ATM'
elif test_type=='55k': # Piezo
INPUT_DIR=GENERIC_INPUT_DIR+'_K'
elif test_type=='55s': # Piezo
INPUT_DIR=GENERIC_INPUT_DIR+'_STR'
elif test_type=='8': # D2dk
INPUT_DIR=GENERIC_INPUT_DIR+'_K'
elif test_type=='81': # D2dk partial
INPUT_DIR=GENERIC_INPUT_DIR+'_K'
#natom,nband, delta:
#These values should be consistent with the ABINIT input file
#Eventually read the values from data file
natom=2 ; nband=8 ; DELTA=0.0001 ; pseudos=['Ca.LDA_PW-JTH.xml']
ff=open('./input_template/'+INPUT_DIR+'/data','r')
fflines=ff.readlines()
ff.close()
for lgn in fflines:
if lgn.find('natom=')!=-1: natom=int(lgn.split()[1])
if lgn.find('nband=')!=-1: nband=int(lgn.split()[1])
if lgn.find('delta=')!=-1: DELTA=float(lgn.split()[1])
if lgn.find('pseudo=')!=-1: pseudos=(lgn.split()[1:])
if atom>natom:
print >> sys.stderr, 'Error: iatom>natom!'
sys.exit()
if band>nband:
print >> sys.stderr, 'Error: iband>nband!'
sys.exit()
#Select nonlop input arguments
dir1_list=[1];dir2_list=[1];atom_list=[0]
if test_type=='2': # Force
choice='choice1'
choicedfpt='choicedfpt2'
dir1_list=[1,2,3]
dir2_list=[0]
atom_list=range(natom+1)[1:]
df_conv_factor=1.
elif test_type=='3': # Stress
choice='choice1'
choicedfpt='choicedfpt3'
dir1_list=[1,2,3,4,5,6]
dir2_list=[0]
atom_list=[0]
df_conv_factor=1.
elif test_type=='5': # ddk
choice='choice1'
choicedfpt='choicedfpt5'
dir1_list=[1,2,3]
dir2_list=[0]
atom_list=[0]
df_conv_factor=1.
elif test_type=='51': # d(right)dk
choice='choice1'
choicedfpt='choicedfpt51'
dir1_list=[1,2,3]
dir2_list=[0]
atom_list=[0]
df_conv_factor=2.
elif test_type=='54k': # Effective charge
choice='choice2'
choicedfpt='choicedfpt54'
dir1_list=[1,2,3]
dir2_list=[1,2,3]
atom_list=range(natom+1)[1:]
df_conv_factor=2.
elif test_type=='54a': # Effective charge
choice='choice51'
choicedfpt='choicedfpt54'
dir1_list=[1,2,3]
dir2_list=[1,2,3]
atom_list=range(natom+1)[1:]
df_conv_factor=1.
elif test_type=='55k': # Piezo
choice='choice3'
choicedfpt='choicedfpt55'
dir1_list=[1,2,3,4,5,6]
dir2_list=[1,2,3]
atom_list=[0]
df_conv_factor=2.
elif test_type=='55s': # Piezo
choice='choice5'
choicedfpt='choicedfpt55'
dir1_list=[1,2,3,4,5,6]
dir2_list=[1,2,3]
atom_list=[0]
df_conv_factor=2.
elif test_type=='8': # D2dk
choice='choice5'
choicedfpt='choicedfpt8'
dir1_list=[1,2,3]
dir2_list=[1,2,3]
atom_list=[0]
df_conv_factor=1.
elif test_type=='81': # D2dk partial
choice='choice51'
choicedfpt='choicedfpt81'
dir1_list=[1,2,3]
dir2_list=[1,2,3]
atom_list=[0]
df_conv_factor=1.
# choice='choice5'
# df_conv_factor=2.
#If requested, overwrite default values for pert. dirs
if dir1>0: dir1_list=[dir1]
if dir2>0: dir2_list=[dir2]
if atom>0: atom_list=[atom]
#Print title
print("===========================================")
print("NONLOP TEST, CHOICE=%s, SIGNS=%d/%d, ENL=%s" % (test_type,signs,signsdfpt,enl))
sys.stdout.flush()
ab=[[1,1],[2,2],[3,3],[3,2],[3,1],[2,1]] # Voigt index -> (alpha,beta) strain pair
ba=[1,6,5,6,2,4,5,4,3] # from xy to Voigt
#Loop on perturbations
for iatom in atom_list: # Atom (optional)
for idir1 in dir1_list: # Perturbation 1
for idir2 in dir2_list: # Perturbation 2 (optional)
print("===========================================")
if test_type=='2': # Force
idir =0
idirdfpt =idir1
input_index=3*(iatom-1)+idir1
print ("atm=%d, atm_dir=%d" %(iatom,idir1))
elif test_type=='3': # Stress
idir =0
idirdfpt =idir1
input_index=idir1
alpha=ab[idir1-1][0];beta =ab[idir1-1][1]
print ("str1_dir=%d, str2_dir=%d" %(alpha,beta))
elif test_type=='5': # ddk
idir =0
idirdfpt =idir1
input_index=idir1
print ("k_dir=%d" %(idir1))
elif test_type=='51': # d(right)dk
idir =0
idirdfpt =idir1
input_index=idir1
print ("k_dir=%d" %(idir1))
elif test_type=='54k': # Effective charge
idir =idir1
idirdfpt =3*(idir1-1)+idir2
input_index=idir2
print ("atm=%d, atm_dir=%d, k_dir=%d" %(iatom,idir1,idir2))
elif test_type=='54a': # Effective charge
idir =idir2
idirdfpt =3*(idir1-1)+idir2
input_index=3*(iatom-1)+idir1
print ("atm=%d, atm_dir=%d, k_dir=%d" %(iatom,idir1,idir2))
elif test_type=='55k': # Piezo
idir =idir1
idirdfpt =3*(idir1-1)+idir2
input_index=idir2
alpha=ab[idir1-1][0];beta =ab[idir1-1][1]
print ("str1_dir=%d, str2_dir=%d, k_dir=%d" %(alpha,beta,idir2))
			elif test_type=='55s': # Piezo
idir =idir2
idirdfpt =3*(idir1-1)+idir2
input_index=idir1
alpha=ab[idir1-1][0];beta =ab[idir1-1][1]
print ("str1_dir=%d, str2_dir=%d, k_dir=%d" %(alpha,beta,idir2))
elif test_type=='8': # D2dk
idir =idir2
if signsdfpt==1:
idirdfpt =ba[3*(idir1-1)+idir2-1]
if signsdfpt==2:
idirdfpt =3*(idir1-1)+idir2
input_index=idir1
print ("k1_dir=%d, k2_dir=%d" %(idir1,idir2))
elif test_type=='81': # D2dk partial
idir =idir2
idirdfpt =3*(idir1-1)+idir2
input_index=idir1
print ("k1_dir=%d, k2_dir=%d" %(idir1,idir2))
sys.stdout.flush()
# Create temporary files
os.system('rm -rf config')
os.system('mkdir config')
os.system('touch config/'+choice)
os.system('touch config/'+choicedfpt)
os.system('cp -rf input_template/'+INPUT_DIR+'/inputDFk'+str(input_index)+'.in config/inputDF.in')
os.system('touch config/idir'+str(idir))
os.system('touch config/idirdfpt'+str(idirdfpt))
os.system('touch config/signs'+str(signs))
os.system('touch config/signsdfpt'+str(signsdfpt))
if iatom>0:
os.system('touch config/iatom'+str(iatom))
if band>0:
os.system('touch config/iband'+str(band))
if enl=='sij':
os.system('touch config/sij')
if enl=='dij':
os.system('touch config/dij')
# Add "useria" flag in input file (to activate nonlop_test routine)
if 'useria 112233' not in open('config/inputDF.in').read():
with open('config/inputDF.in', "a") as ff:
ff.write('\nuseria 112233 ! Activate nonlop_test routine\n')
# Create files filesdf
ff=open('./exec/filesdf','w')
ff.write('config/inputDF.in\n')
ff.write('exec/outputDF.out\n')
ff.write('temp/nonlop_test_inp\n')
ff.write('temp/nonlop_test_out\n')
ff.write('temp/nonlop_test_tmp\n')
for pseudo in pseudos:
ff.write('pseudo/'+pseudo+'\n')
ff.close()
# Run ABINIT
os.system('rm -rf ./temp/* ./exec/outputDF.out* ./exec/logdf')
os.system(ABINIT_EXE+'< ./exec/filesdf > ./exec/logdf')
os.system('rm -rf malloc.prc')
# Extract relevant lines from ABINIT log
os.system('grep TESTDFPT ./exec/logdf > ./exec/res')
ff=open('./exec/res','r')
fflines=ff.readlines()
ff.close()
# Set some data to locate results in log
dtset_shift=nband+1
if band>0:dtset_shift=2
first_line=1+dtset_shift
df1_shift =first_line
df2_shift =first_line+ dtset_shift
dfpt_shift=first_line+2*dtset_shift
# Print result for each band
band_list=range(nband)
if band>0:band_list=[band-1]
for iband in band_list:
band_indx=band_list.index(iband)
df1 =float(fflines[df1_shift +band_indx].split()[2])
df2 =float(fflines[df2_shift +band_indx].split()[2])
dfpt=float(fflines[dfpt_shift+band_indx].split()[2])
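				# df below is a centered finite difference,
				# (E(+DELTA)-E(-DELTA))/(2*DELTA), rescaled by df_conv_factor so it
				# can be compared directly against the analytic DFPT derivative dfpt.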
df=float((df2-df1)/(2.*DELTA)/df_conv_factor)
if abs(df)>5.e-8 or abs(dfpt)>5.e-8:
diff=abs(df-dfpt)/abs(df)*100.
else:
diff=0.
if diff>0.001:
print (" band=%d: diff=%15.12g perc. !!!!!" %(iband+1,diff))
else:
print (" band=%d: diff=%15.12g perc." %(iband+1,diff))
sys.stdout.flush()
| gpl-3.0 | -8,391,554,192,785,976,000 | 35.208108 | 137 | 0.581623 | false |
rupertotorres1/UBCPreReqTool | main.py | 1 | 1241 | try:
input = raw_input
except NameError:
pass
import sys
from pickle import load
# Load dictionary with courses and their pre-requisites and co-requisites
dict = load(open("dictCoursesPreCoReqs.p", "rb"))
print ("Welcome! This tool helps you find out which courses require the given course as a pre-req or co-req. I hope it is useful.")
def interaction():
print ("")
course_input = input("Which course would you like to look up? (Enter x to exit): ").upper()
if (course_input != "X"):
print("")
pre_co_req_for = []
# If the course that the user provided is not in the loaded dictionary, ask again
if (not(course_input in dict)):
print ("That is not a valid course")
interaction()
# Else, search the courses for which the provided course is a pre-requisite or co-requisite
# and add them to a list.
else:
for course, pre_co_reqs in dict.items():
if (course_input in pre_co_reqs):
pre_co_req_for.append(course)
sys.stdout.write(course_input + " is a pre-req or co-req for:")
print("")
pre_co_req_for.sort()
for p in pre_co_req_for:
sys.stdout.write("| " + str(p) + " |")
print("")
print("")
interaction()
interaction()
| mit | -385,972,854,799,941,900 | 26.204545 | 131 | 0.641418 | false |
synapse-wireless/snap-to-cloud-examples | exosite/exosite_connector.py | 1 | 2078 | import json
from pyonep import onep # Exosite Python Library
from tornado import httpclient
# TODO: Replace these with values from your own Exosite account and resource
# We want to map SN171 SNAP addresses to Exosite CIKs
# Addresses should not have any separators (no "." or ":", etc.). The hexadecimal digits a-f must be entered in lower case.
EXOSITE_CIKS = {"XXXXXX": 'unique Exosite CIK here',
"YYYYYY": 'another unique Exosite CIK here'} # yapf: disable
class ExositeConnector(object):
def __init__(self):
self.exosite = onep.OnepV1()
def publish(self, thing_id, state):
"""Publish a message to Exosite API.
:param str thing_id: The 6-character SNAP MAC Address
:param dict state: A dictionary containing the new state values for a thing
"""
# Use the Exosite Python Library to format the message
jsonreq = {"auth": {"cik": EXOSITE_CIKS[thing_id.lower()]},
"calls": self.exosite._composeCalls([('writegroup',
[[[{"alias": "batt"}, int(state['batt'])],
[{"alias": "state"}, int(state['button_state'])],
[{"alias": "count"}, state['button_count']]]])])} # yapf: disable
# Create a Tornado HTTPRequest
request = httpclient.HTTPRequest(url=self.exosite.onephttp.host + self.exosite.url,
method='POST',
headers=self.exosite.headers,
body=json.dumps(jsonreq))
http_client = httpclient.AsyncHTTPClient()
http_client.fetch(request, self._handle_request)
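    # Illustrative usage (hypothetical values; the address must be a key of
    # EXOSITE_CIKS above):
    #   connector = ExositeConnector()
    #   connector.publish('xxxxxx', {'batt': 87, 'button_state': 1, 'button_count': 5})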
@staticmethod
def _handle_request(response):
"""Prints the response of a HTTPRequest.
:param response: HTTPRequest
:return:
"""
if response.error:
print "Error:", response.error
else:
print response.body
| apache-2.0 | -1,340,095,892,736,022,300 | 42.291667 | 125 | 0.552936 | false |
mathause/regionmask | regionmask/defined_regions/_ar6_pre_revisions.py | 1 | 4711 | import geopandas as gp
from shapely import geometry
from ..core._geopandas import _enumerate_duplicates, from_geopandas
from ._ressources import read_remote_shapefile
REPR = """
pre-revision version of 'AR6 reference regions - Iturbide et al., 2020'
These are the regions as originally submitted by Iturbide et al., 2020. During
the revisions regions were added and existing regions were adapted. The originally
submitted regions are provided here for completeness. Use the revised regions
i.e. ``regionmask.defined_regions.ar6``.
Attributes
----------
all : Regions
All regions (land + ocean), regions split along the date line
are combined (see below).
land : Regions
Land regions only, regions split along the date line
are combined (see below).
ocean : Regions
Ocean regions only, regions split along the date line
are combined (see below).
separate_pacific : Regions
Original definitions of the regions, no combination of the Pacific
regions.
Combined Regions
----------------
SPO and SPO*; EPO and EPO*; NPO and NPO*
Note
----
The region numbers for ``all``, ``land``, and ``ocean`` are consistent. The
region numbers for ``separate_pacific`` and all others are not.
"""
def _combine_to_multipolygon(df, column, *names):
all_poly = [df[df[column] == name].geometry.values[0] for name in names]
combined_poly = geometry.MultiPolygon(all_poly)
df.loc[df[column] == names[0], "geometry"] = gp.GeoSeries(combined_poly).values
for name in names[1:]:
df = df.loc[df[column] != name]
return df
land = [
"GIC",
"NEC",
"CNA",
"ENA",
"NWN",
"WNA",
"NCA",
"SCA",
"CAR",
"NWS",
"SAM",
"SSA",
"SWS",
"SES",
"NSA",
"NES",
"NEU",
"CEU",
"EEU",
"MED",
"WAF",
"SAH",
"NEAF",
"CEAF",
"SWAF",
"SEAF",
"CAF",
"RAR",
"RFE",
"ESB",
"WSB",
"WCA",
"TIB",
"EAS",
"ARP",
"SAS",
"SEA",
"NAU",
"CAU",
"SAU",
"NZ",
"EAN",
"WAN",
]
ocean = [
"ARO",
"SPO",
"EPO",
"NPO",
"SAO",
"EAO",
"NAO",
"EIO",
"SIO",
"ARS",
"BOB",
"SOO",
]
class ar6_pre_revisions_cls:
"""docstring for ar6"""
def __init__(self):
self.__df = None
self.__df_combined = None
self._all = None
self._land = None
self._ocean = None
self._separate_pacific = None
self._name = "pre-revision version of 'AR6 reference regions'"
self._source = "Iturbide et al., 2020 (Earth Syst. Sci. Data)"
@property
def _df(self):
if self.__df is None:
self.__df = read_remote_shapefile("IPCC-WGI-reference-regions-v1.zip")
return self.__df
@property
def _df_combined(self):
if self.__df_combined is None:
_df_combined = self._df.copy()
_df_combined = _combine_to_multipolygon(_df_combined, "V3", "SPO", "SPO*")
_df_combined = _combine_to_multipolygon(_df_combined, "V3", "EPO", "EPO*")
_df_combined = _combine_to_multipolygon(_df_combined, "V3", "NPO", "NPO*")
# make sure the index goes from 0 to n - 1
_df_combined = _df_combined.reset_index().drop("index", axis=1)
self.__df_combined = _df_combined
return self.__df_combined
@property
def all(self):
if self._all is None:
self._all = from_geopandas(
self._df_combined,
names="V2",
abbrevs="V3",
name=self._name,
source=self._source,
)
return self._all
@property
def land(self):
if self._land is None:
r = self.all[land]
r.name = self._name + " (land only)"
self._land = r
return self._land
@property
def ocean(self):
if self._ocean is None:
r = self.all[ocean]
r.name = self._name + " (ocean only)"
self._ocean = r
return self._ocean
@property
def separate_pacific(self):
if self._separate_pacific is None:
# need to fix the duplicates
df = self._df.copy()
df["V2"] = _enumerate_duplicates(df["V2"])
self._separate_pacific = from_geopandas(
df,
names="V2",
abbrevs="V3",
name=self._name + "(separate Pacific regions)",
source=self._source,
)
return self._separate_pacific
def __repr__(self): # pragma: no cover
return REPR
_ar6_pre_revisions = ar6_pre_revisions_cls()
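# Illustrative usage (module is private; REPR above recommends the revised
# regionmask.defined_regions.ar6 for real work):
#   land_regions = _ar6_pre_revisions.land   # Regions object, land only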
| mit | 2,731,936,561,561,596,000 | 21.327014 | 86 | 0.544258 | false |
m-ober/byceps | tests/services/shop/order/actions/test_create_ticket_bundles.py | 1 | 2006 | """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.shop.order import action_registry_service
from byceps.services.shop.order import event_service as order_event_service
from byceps.services.ticketing import (
category_service as ticket_category_service,
ticket_service,
)
from .base import OrderActionTestBase
class CreateTicketBundlesActionTest(OrderActionTestBase):
def setUp(self):
super().setUp()
self.article = self.create_article(self.shop.id, quantity=10)
self.ticket_category = ticket_category_service.create_category(
self.party.id, 'Deluxe'
)
def test_create_ticket_bundles(self):
ticket_quantity = 5
bundle_quantity = 2
action_registry_service.register_ticket_bundles_creation(
self.article.item_number, self.ticket_category.id, ticket_quantity
)
articles_with_quantity = [(self.article, bundle_quantity)]
self.order = self.place_order(articles_with_quantity)
tickets_before_paid = self.get_tickets_for_order()
assert len(tickets_before_paid) == 0
self.mark_order_as_paid()
tickets_after_paid = self.get_tickets_for_order()
assert len(tickets_after_paid) == 10
for ticket in tickets_after_paid:
assert ticket.owned_by_id == self.buyer.id
assert ticket.used_by_id == self.buyer.id
events = order_event_service.get_events_for_order(self.order.id)
ticket_bundle_created_events = {
event
for event in events
if event.event_type == 'ticket-bundle-created'
}
assert len(ticket_bundle_created_events) == bundle_quantity
# -------------------------------------------------------------------- #
# helpers
def get_tickets_for_order(self):
return ticket_service.find_tickets_created_by_order(
self.order.order_number
)
| bsd-3-clause | -5,490,505,122,318,828,000 | 30.34375 | 78 | 0.633599 | false |
emmanuel-santos/GEM | trucoGemSite/truco/views.py | 1 | 4850 | from django.shortcuts import render
from django.contrib.auth.models import User
from django.contrib.auth import logout
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django.http import HttpResponseRedirect, HttpResponse
from truco.forms import *
from truco.models import Partida, Jugador, Carta
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import login
from django.db.models import F, Count
def vista_principal(request):
if request.user.is_authenticated():
return hall(request)
else:
return login(request,template_name='login.html')
@login_required
def salir_partida(request, ident):
try:
partida = Partida.objects.get(id = ident)
except Partida.DoesNotExist:
return HttpResponseRedirect('/')
jugador = partida.jugador(request.user)
equipo = jugador.equipo
partida.equipos.exclude(id=equipo.id).update(puntos_partida=30)
partida.terminada = True
partida.save()
return HttpResponseRedirect('/')
def new_user(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
user = User.objects.create_user(form.cleaned_data['username'], '[email protected]', form.cleaned_data['password1'])
user.save()
return HttpResponseRedirect("volver")
else:
form = UserCreationForm()
return render(request, 'form.html', {'form':form})
def volver(request):
if request.method == 'POST':
return HttpResponseRedirect("/")
else:
return render(request, 'volver.html')
@login_required
def sala_input(request, ident, input):
try:
partida = Partida.objects.get(id = ident)
except Partida.DoesNotExist:
return HttpResponseRedirect('/')
jugador = partida.jugador(request.user)
partida.procesar_entrada(jugador,input)
return HttpResponseRedirect('/sala/'+ str(ident))
@login_required
def partida_refresh(request, ident):
return sala(request, ident, True)
@login_required
def sala(request, ident, refresh=False):
try:
partida = Partida.objects.get(id = ident)
except Partida.DoesNotExist:
return HttpResponseRedirect('/')
jugador = partida.jugador(request.user)
    # If there are empty slots, the user takes one.
if jugador == None:
if not partida.esta_llena():
partida.sumar_jugador(request.user)
jugador = partida.jugador(request.user)
    # Build the response
resp = partida.mostrar_partida(jugador)
    # Form for the "mentir" (bluff) call
if request.method == 'POST':
form = MentirForm(request.POST)
if form.is_valid():
jugador.puntos_cantados = form.cleaned_data['Puntos']
jugador.save()
partida.ultima_ronda.accion(jugador, 'mentir')
return HttpResponseRedirect("/sala/" + str(ident))
else:
form = MentirForm()
resp.update({'form':form})
    # Choose the template
template = resp["template"] if refresh else "sala.html"
return render(request, template, resp)
@login_required
def nueva_sala(request):
if request.method == 'POST':
form = NuevaSalaForm(request.POST)
if form.is_valid():
sala = Partida()
sala.nombre=form.cleaned_data['Nombre']
sala.puntos_max=form.cleaned_data['Puntos']
sala.cantidad_jugadores=form.cleaned_data['num_jug']
sala.save()
return HttpResponseRedirect("sala/" + str(sala.id))
else:
form = NuevaSalaForm()
return render(request, 'form.html', {'form':form})
@login_required
def hall(request):
resp = {
'hay_salas' : Partida.objects.all().count() != 0,
'lista_salas' : Partida.objects.filter(terminada=False),
}
return render(request, 'hall.html', resp)
@login_required
def hall_select(request, categoria):
categorias = {
'0' : Partida.objects.filter(terminada=False),
'1' : Partida.objects.annotate(Count('jugadores')).filter(jugadores__count__lt=2),
'2' : Partida.objects.all(),
}
return render(request, 'hall_content.html', {'lista_salas':categorias[categoria]})
@login_required
def user(request,id):
partidas = Partida.objects.annotate(Count('jugadores')).annotate(Count('rondas')).filter(jugadores__user__id=id).exclude(rondas__count=0)
stats = {
'totales' : partidas.count(),
'ganadas' : Jugador.objects.filter(equipo__puntos_partida__gte=F('partida__puntos_max'),user__id=id).count(),
'jugando' : partidas.filter(terminada=False).count(),
'partidas' : partidas,
'pageuser' : User.objects.get(id=id),
}
stats['perdidas'] = stats['totales'] - (stats['ganadas'] + stats['jugando'])
return render(request, 'usuario.html', stats)
| mit | -8,096,884,938,365,961,000 | 32.680556 | 141 | 0.66 | false |
zentropi/python-uzentropi | tests/test_uzentropi.py | 1 | 6545 | # coding=utf-8
import sys
import unittest
try: # pragma: no cover
import os
sys.path.append(os.path.join(
os.path.dirname(__file__),
'..', 'src',
))
ZENSOCKET_TEST_TOKEN = os.getenv('ZENSOCKET_TEST_TOKEN')
except ImportError: # pragma: no cover
ZENSOCKET_TEST_TOKEN = sys.argv[1]
try:
import json
import asyncio
except ImportError: # pragma: no cover
import ujson as json
import asyncio_priority as asyncio
from uzentropi import (
time,
Frame,
Handler,
Kind,
Session,
RateLimitError,
WebsocketConnection,
async_rate_limit_error
)
frame_as_dict = {'kind': Kind.EVENT, 'name': 'hello', 'data': {'a': 1}, 'meta': {'source': 'test_source'}}
class TestUZentropi(unittest.TestCase):
def test_exceptions(self):
e = RateLimitError()
self.assertTrue(isinstance(e, Exception))
def test_async_rate_limit_error(self):
async def test_arle():
e = await async_rate_limit_error()
self.assertTrue(isinstance(e, Exception))
asyncio.get_event_loop().run_until_complete(test_arle())
def test_kind(self):
self.assertEqual(Kind.TIMER, -1)
self.assertEqual(Kind.COMMAND, 0)
self.assertEqual(Kind.EVENT, 1)
self.assertEqual(Kind.REQUEST, 2)
self.assertEqual(Kind.RESPONSE, 3)
self.assertEqual(Kind.STATE, 4)
self.assertEqual(Kind.MESSAGE, 5)
def test_frame(self):
frame = Frame.from_dict(frame_as_dict)
self.assertEqual(frame.kind, Kind.EVENT)
self.assertEqual(frame.name, 'hello')
self.assertEqual(frame.data, {'a': 1})
self.assertEqual(frame.get_data('a'), 1)
self.assertEqual(frame.get_meta('source'), 'test_source')
self.assertEqual(frame.as_dict(), frame_as_dict)
self.assertEqual(json.loads(frame.as_json()), frame_as_dict)
frame_from_json = frame.from_json(frame.as_json())
self.assertEqual(frame_from_json.as_dict(), frame_as_dict)
frame_copy = frame.copy()
self.assertEqual(frame_copy.name, frame.name)
self.assertEqual(frame_copy.kind, frame.kind)
self.assertEqual(frame_copy.data, frame.data)
self.assertEqual(frame_copy.meta, frame.meta)
frame_as_dict.update({'unknown': 'key'})
safe_frame = Frame.from_dict(frame_as_dict)
self.assertEqual(safe_frame.name, frame.name)
self.assertEqual(safe_frame.kind, frame.kind)
with self.assertRaises(AttributeError):
safe_frame.unknown
with self.assertRaises(TypeError):
Frame(**frame_as_dict)
del frame_as_dict['unknown']
def test_session_recv(self):
session = Session()
frame, send = session.recv(frame_as_dict)
self.assertEqual(send, None)
self.assertEqual(frame.name, 'hello')
self.assertEqual(frame.data, {'a': 1})
self.assertEqual(frame.get_data('a'), 1)
self.assertEqual(frame.get_meta('source'), 'test_source')
def test_session_send(self):
session = Session()
recv, frame = session.send(Kind.EVENT,
name='hello',
data={'a': 1},
meta={'source': 'test_source'},
)
self.assertEqual(recv, None)
self.assertEqual(frame.name, 'hello')
self.assertEqual(frame.data, {'a': 1})
self.assertEqual(frame.get_data('a'), 1)
self.assertEqual(frame.get_meta('source'), 'test_source')
# without meta
recv, frame = session.send(Kind.EVENT,
name='hello',
data={'a': 1},
)
self.assertEqual(recv, None)
self.assertEqual(frame.name, 'hello')
self.assertEqual(frame.data, {'a': 1})
self.assertEqual(frame.get_data('a'), 1)
self.assertEqual(frame.get_meta('source'), None)
recv, frame = session.send(Kind.EVENT, space='test_space')
self.assertEqual(frame.get_meta('space'), 'test_space')
recv, frame = session.send(Kind.EVENT, target='test_target')
self.assertEqual(frame.get_meta('target'), 'test_target')
recv, frame = session.send(Kind.EVENT, reply_to='test_id')
self.assertEqual(frame.get_meta('reply_to'), 'test_id')
def test_handler(self):
async def _test(agent):
self.assertTrue(agent)
handler = Handler(Kind.EVENT, 'test_name', _test, rate_limit=0.1)
self.assertEqual(handler.name, 'test_name')
self.assertEqual(handler.kind, Kind.EVENT)
self.assertEqual(handler._callable, _test)
asyncio.get_event_loop().run_until_complete(handler(True))
time.sleep(0.12)
asyncio.get_event_loop().run_until_complete(handler(True))
async def verify_error():
r = await handler(True)
self.assertTrue(isinstance(r, RateLimitError))
raise r
with self.assertRaises(RateLimitError):
asyncio.get_event_loop().run_until_complete(verify_error())
self.assertEqual(handler.call_total, 3)
self.assertEqual(handler.describe(),
{
'kind': Kind.EVENT,
'name': 'test_name',
'expects': None,
})
def test_websocket(self):
ws = WebsocketConnection()
asyncio.get_event_loop().run_until_complete(
ws.send(Kind.EVENT, name='test'))
self.assertTrue(True)
asyncio.get_event_loop().run_until_complete(
ws.send_frame(frame_as_dict))
self.assertTrue(True)
def test_websocket_connection(self):
ws = WebsocketConnection()
asyncio.get_event_loop().run_until_complete(
ws.connect('ws://192.168.1.7:9000/zensocket', ZENSOCKET_TEST_TOKEN)
)
async def test_recv(frame):
self.assertEqual(frame.name, 'test')
await ws.close()
asyncio.get_event_loop().stop()
async def test_send():
await asyncio.sleep(0.1)
await ws.send(Kind.EVENT, name='test')
asyncio.get_event_loop().create_task(ws.listen(test_recv))
asyncio.get_event_loop().create_task(test_send())
asyncio.get_event_loop().run_forever()
if __name__ == '__main__': # pragma: no cover
unittest.main()
| apache-2.0 | 4,058,507,473,805,783,600 | 34 | 106 | 0.581207 | false |
kdyq007/cmdb-api | core/ci_relation.py | 1 | 2400 | # -*- coding:utf-8 -*-
from flask import Blueprint
from flask import jsonify
from flask import request
from lib.ci import CIRelationManager
from lib.utils import get_page
from lib.utils import get_per_page
from lib.auth import auth_with_key
cirelation = Blueprint("cirelation", __name__)
@cirelation.route("/types", methods=["GET"])
def get_types():
manager = CIRelationManager()
return jsonify(relation_types=manager.relation_types)
@cirelation.route("/<int:first_ci>/second_cis", methods=["GET"])
def get_second_cis_by_first_ci(first_ci=None):
page = get_page(request.values.get("page", 1))
count = get_per_page(request.values.get("count"))
relation_type = request.values.get("relation_type", "contain")
manager = CIRelationManager()
numfound, total, second_cis = manager.get_second_cis(
first_ci, page=page, per_page=count, relation_type=relation_type)
return jsonify(numfound=numfound, total=total,
page=page, second_cis=second_cis)
@cirelation.route("/<int:second_ci>/first_cis", methods=["GET"])
def get_first_cis_by_second_ci(second_ci=None):
page = get_page(request.values.get("page", 1))
count = get_per_page(request.values.get("count"))
relation_type = request.values.get("relation_type", "contain")
manager = CIRelationManager()
numfound, total, first_cis = manager.get_first_cis(
second_ci, per_page=count, page=page, relation_type=relation_type)
return jsonify(numfound=numfound, total=total,
page=page, first_cis=first_cis)
@cirelation.route("/<int:first_ci>/<int:second_ci>", methods=["POST"])
@auth_with_key
def create_ci_relation(first_ci=None, second_ci=None):
relation_type = request.values.get("relation_type", "contain")
manager = CIRelationManager()
res = manager.add(first_ci, second_ci, relation_type=relation_type)
return jsonify(cr_id=res)
@cirelation.route("/<int:cr_id>", methods=["DELETE"])
@auth_with_key
def delete_ci_relation(cr_id=None):
manager = CIRelationManager()
manager.delete(cr_id)
return jsonify(message="CIType Relation is deleted")
@cirelation.route("/<int:first_ci>/<int:second_ci>", methods=["DELETE"])
@auth_with_key
def delete_ci_relation_2(first_ci, second_ci):
manager = CIRelationManager()
manager.delete_2(first_ci, second_ci)
return jsonify(message="CIType Relation is deleted") | gpl-2.0 | -2,730,147,931,960,538,000 | 33.3 | 74 | 0.6975 | false |
creasyw/IMTAphy | framework/library/PyConfig/openwns/probebus.py | 1 | 9523 | ###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: [email protected]
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import openwns
import openwns.logger
from openwns.pyconfig import attrsetter
import openwns.interface
class NeedsFilename(openwns.interface.Interface):
@openwns.interface.abstractmethod
def setFilename(self, filename):
pass
class MeasurementSource(object):
def __init__(self):
object.__init__(self)
self.observers = []
def addObserver(self, probeBus):
self.observers.append(probeBus)
return probeBus
class ProbeBus(MeasurementSource):
def __init__(self):
MeasurementSource.__init__(self)
def observe(self, probeBus):
probeBus.addObserver(self)
return probeBus
class ProbeBusRegistry(object):
def __init__(self):
super(ProbeBusRegistry, self).__init__()
self.measurementSources = {}
self.logger = openwns.logger.Logger("WNS", "ProbeBusRegistry", True)
def getMeasurementSource(self, probeBusID):
if not self.measurementSources.has_key(probeBusID):
self.measurementSources[probeBusID] = MeasurementSource()
return self.measurementSources[probeBusID]
def removeMeasurementSource(self, probeBusID):
self.measurementSources.pop(probeBusID)
def getMeasurementSources(self):
return self.measurementSources
class PassThroughProbeBus(ProbeBus):
""" The PassThroughProbeBus always accepts and always forwards. """
nameInFactory = "PassThroughProbeBus"
def __init__(self):
ProbeBus.__init__(self)
class SettlingTimeGuardProbeBus(ProbeBus):
""" The SettlingTimeGuardProbeBus only accepts if the global settling time (transient phase)
has elapsed"""
nameInFactory = "SettlingTimeGuardProbeBus"
def __init__(self, settlingTime):
ProbeBus.__init__(self)
self.settlingTime = settlingTime
class LoggingProbeBus(ProbeBus):
""" The LoggingProbeBus always accepts and logs the message to the logging subsystem.
"""
nameInFactory = "LoggingProbeBus"
def __init__(self, probeName='', parentLogger=None):
ProbeBus.__init__(self)
if len(probeName) > 0:
probeName = '.' + probeName
self.logger = openwns.logger.Logger("WNS", "LoggingProbeBus"+probeName, True, parentLogger)
class PythonProbeBus(ProbeBus):
""" Use the PythonProbeBus to do all your probing work in python. Specify what to do
in accepts, onMeasurement, output from within your configuration file."""
nameInFactory = "PythonProbeBus"
def _dummyOnMeasurement(timestamp, value, reg):
pass
def _dummyOutput():
pass
def __init__(self, acceptsFunction, onMeasurementFunction = _dummyOnMeasurement, outputFunction = _dummyOutput):
ProbeBus.__init__(self)
self.accepts = acceptsFunction
self.onMeasurement = onMeasurementFunction
self.output = outputFunction
self.reportErrors = True
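# Illustrative PythonProbeBus wiring (callable signatures are assumptions,
# modelled on _dummyOnMeasurement above):
#   bus = PythonProbeBus(
#       acceptsFunction=lambda timestamp, reg: True,
#       onMeasurementFunction=lambda timestamp, value, reg: None)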
class TimeWindowProbeBus(ProbeBus):
""" Only accepts for a certain time window given by start and end time"""
nameInFactory = "TimeWindowProbeBus"
def __init__(self, start, end):
ProbeBus.__init__(self)
self.start = start
self.end = end
class TimeSeriesProbeBus(ProbeBus):
""" The LogEval ProbeBus always accepts and logs the values into a file.
"""
nameInFactory = "TimeSeriesProbeBus"
outputFilename = None
format = None
timePrecision = None
valuePrecision = None
name = None
description = None
contextKeys = None
def __init__(self, outputFilename, format, timePrecision, valuePrecision, name, desc, contextKeys):
ProbeBus.__init__(self)
self.outputFilename = outputFilename
self.format = format
self.timePrecision = timePrecision
self.valuePrecision = valuePrecision
self.name = name
self.description = desc
self.contextKeys = contextKeys
class ContextFilterProbeBus(ProbeBus):
nameInFactory = "ContextFilterProbeBus"
idName = None
idValues = None
def __init__(self, _idName, _idValues, _outputName = None):
ProbeBus.__init__(self)
self.idName = _idName
self.idValues = _idValues
class ConstantContextProvider(object):
__plugin__ = "wns.ProbeBus.ConstantContextProvider"
""" Name in the static factory """
key = None
""" The name of the context """
value = None
""" A constant integer value """
def __init__(self, key, value):
super(ConstantContextProvider, self).__init__()
self.key = key
self.value = value
class StatEvalProbeBus(ProbeBus):
nameInFactory = "StatEvalProbeBus"
statEval = None
appendFlag = None
def __init__(self, outputFilename, statEvalConfig):
ProbeBus.__init__(self)
self.outputFilename = outputFilename
self.statEval = statEvalConfig
if (statEvalConfig.appendFlag == None):
self.appendFlag = False
else:
self.appendFlag = statEvalConfig.appendFlag
class TabPar:
"""
Helper Class to configure the TableProbeBus.
Configure one of these for each dimension of your table.
Parameters:
idName: the name in the IDregistry/Context under which the
value for this axis should be searched
minimum: min value of the axis
maximum: max value of the axis
resolution: number of equidistant intervals into which the
range from min to max will be divided. Note that
the maximum value will be counted into the last interval
"""
idName = None
minimum = None
maximum = None
resolution = None
def __init__(self, idName, minimum, maximum, resolution):
self.idName = idName
self.minimum = minimum
self.maximum = maximum
self.resolution = resolution
class TableProbeBus(ProbeBus):
"""
The TableProbeBus consumes measurement values and sorts them
into n-dimensional tables of statistical evaluation objects.
Parameters:
axisParams: list of TabPar objecst, one for each dimension of the desired table
outputFilename: base name of the output files produced by the TableProbeBus
evals: list of strings with the requested statistics, possible values are:
'mean', 'variance', 'relativeVariance', 'coeffOfVariation', 'M2', 'M3', 'Z3',
'skewness', 'deviation', 'relativeDeviation', 'trials', 'min', 'max'
formats: list of strings with the requested output formats, possible values are:
'HumanReadable', 'PythonReadable', 'MatlabReadable', 'MatlabReadableSparse'
"""
nameInFactory = "TableProbeBus"
axisParams = None
outputFilename = None
evals = None
formats = None
def __init__(self, axisParams, outputFilename, evals = ['mean'], formats = ['HumanReadable']):
ProbeBus.__init__(self)
self.axisParams = axisParams
self.outputFilename = outputFilename
self.evals = list(set(evals)) # filter out potential duplicates
self.formats = list(set(formats)) # filter out potential duplicates
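    # Illustrative TableProbeBus setup (axis names and ranges are assumptions):
    #   axes = [TabPar('MAC.Id', 0, 10, 10), TabPar('SINR', -10, 30, 40)]
    #   table = TableProbeBus(axes, 'throughput',
    #                         evals=['mean', 'trials'], formats=['HumanReadable'])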
class TextProbeBus(ProbeBus):
"""
Wrapper for a ProbeText StatEval
"""
nameInFactory = "TextProbeBus"
key = None
outputFilename = None
evalConfig = None
writeHeader = None
prependSimTimeFlag = None
simTimePrecision = None
simTimeWidth = None
skipInterval = None
def __init__(self, outputFilename, key, description):
ProbeBus.__init__(self)
self.key = key
self.outputFilename = outputFilename
self.writeHeader = True
self.prependSimTimeFlag = True
self.simTimePrecision = 7
self.simTimeWidth = 10
self.skipInterval = 0
self.isJSON = False
class JSONProbeBus(TextProbeBus):
def __init__(self, name, key, description):
TextProbeBus.__init__(self, name, key, description)
self.isJSON = True | gpl-2.0 | -5,343,076,373,274,479,000 | 32.893238 | 120 | 0.623438 | false |
JianboTang/modified_GroundHog | fork_process/dataPreprocess/result_analysis/analysis_2.py | 1 | 3000 | import numpy
import pickle
readcmnt = open('../used/test/cmnt_inside.txt','r')
readtran = open('../used/test/cmnt_outside.txt','r');
def preprocess(line):
lline = list(line.decode("utf-8"));
lline = [x for x in lline if x != u' ']
del lline[-1]
return lline
def static(dictionary,lline):
for i in xrange(len(lline) - 1):
if lline[i] in dictionary:
if lline[i + 1] in dictionary[lline[i]]:
dictionary[lline[i]][lline[i + 1]] += 1
else:
dictionary[lline[i]][lline[i + 1]] = 1;
else:
dictionary[lline[i]] = {}
dictionary[lline[i]][lline[i + 1]] = 1;
return dictionary,len(lline)
def fileStatic(fileHandle,count):
statDict = {}
number = 0;
i = 0;
while i < count:
line = fileHandle.readline();
if not line:
print "touch the end of file"
break
statDict,temp = static(statDict,preprocess(line))
number += temp
i += 1
print "total number is : ",number
return statDict
def extractDict(dict1,dict2):
common = [];
dict_x = []
dict_y = []
for x in dict1:
for y in dict1[x]:
if x in dict2 and y in dict2[x]:
if x not in dict_x:
dict_x.append(x)
if y not in dict_y:
dict_y.append(y)
common.append([x,y])
matrix1 = numpy.zeros((len(dict_x),len(dict_y)));
matrix2 = numpy.zeros((len(dict_x),len(dict_y)));
for i,x in enumerate(dict_x):
for j,y in enumerate(dict_y):
if x in dict1 and y in dict1[x]:
matrix1[i,j] = dict1[x][y];
			if x in dict2 and y in dict2[x]: # fixed: was checking dict1 before indexing dict2
matrix2[i,j] = dict2[x][y];
return matrix1,matrix2
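# similarityMatrix computes, per row i, the cosine similarity
# <m1[i], m2[i]> / (||m1[i]|| * ||m2[i]||) between the two count matrices.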
def similarityMatrix(matrix1,matrix2):
similar = numpy.zeros(matrix1.shape[0])
for i in xrange(matrix1.shape[0]):
temp = numpy.zeros(1);
temp1 = numpy.zeros(1);
temp2 = numpy.zeros(1);
for j in xrange(matrix1.shape[1]):
temp += matrix1[i,j] * matrix2[i,j];
temp1 += matrix1[i,j] ** 2;
temp2 += matrix2[i,j] ** 2;
similar[i] = temp / (numpy.sqrt(temp1) * numpy.sqrt(temp2));
return similar
def main(count):
cmnt_dict = fileStatic(readcmnt,count);
tran_dict = fileStatic(readtran,count);
matrix1,matrix2 = extractDict(cmnt_dict,tran_dict);
# writeMatrix = open('matrix.pkl','w');
# pickle.dump(matrix1,writeMatrix);
# pickle.dump(matrix2,writeMatrix);
# writeMatrix.close();
# readMatrix = open('matrix.pkl','r');
# matrix1 = pickle.load(readMatrix)
# matrix2 = pickle.load(readMatrix);
similar = similarityMatrix(matrix1,matrix2);
print sum(matrix1)
print sum(matrix2)
print float(sum(similar >= 0.8)) / float(len(similar))
print float(sum(similar >= 0.5)) / float(len(similar))
if __name__ == '__main__':
main(1000000);
| bsd-3-clause | -4,999,212,649,414,244,000 | 31.967033 | 68 | 0.553 | false |
ddw/python-tdl | examples/life.py | 1 | 5907 | #!/usr/bin/env python
import random
import time
import tdl
WIDTH = 80
HEIGHT = 40
class LifeBoard():
def __init__(self, width, height):
self.width = width
self.height = height
self.live_cells = set()
self.wrap = True
def set(self, x, y, value):
if value:
self.live_cells.add((x, y))
else:
self.live_cells.discard((x, y))
def set_batch(self, x, y, batch):
for y_, line in enumerate(batch):
for x_, char in enumerate(line):
self.set(x + x_, y + y_, char != ' ')
def get(self, x, y):
if(self.wrap is False
and not (0 <= x < self.width and 0 <= y < self.height)):
return False
return (x % self.width, y % self.height) in self.live_cells
def clear(self):
self.live_cells.clear()
def toggle(self, x, y):
self.live_cells.symmetric_difference_update([(x, y)])
def wrap_edges(self):
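        # Copy the opposite edges into a one-cell border around the board so
        # that the raw set lookups in get_neighbours() see wrapped (toroidal)
        # neighbours; with self.wrap False, get() returns False and the border
        # is cleared instead.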
for x in range(-1, self.width + 1):
self.set(x, -1, self.get(x, -1))
self.set(x, self.height, self.get(x, self.height))
for y in range(self.height):
self.set(-1, y, self.get(-1, y))
self.set(self.width, y, self.get(self.width, y))
def get_neighbours(self, x, y):
return len(self.live_cells & {(x - 1, y - 1), (x, y - 1),
(x + 1,y - 1), (x + 1, y),
(x + 1, y + 1), (x, y + 1),
(x - 1, y + 1), (x - 1, y)})
def rule(self, is_alive, neighbours):
"""
1. Any live cell with fewer than two live neighbours dies, as if caused
by under-population.
2. Any live cell with two or three live neighbours lives on to the next
generation.
3. Any live cell with more than three live neighbours dies, as if by
overcrowding.
4. Any dead cell with exactly three live neighbours becomes a live
cell, as if by reproduction.
"""
if is_alive:
return 2 <= neighbours <= 3
else:
return neighbours == 3
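    # Illustrative outcomes of rule(): rule(True, 1) -> False (under-population),
    # rule(True, 2) -> True (survival), rule(False, 3) -> True (reproduction).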
def step(self):
self.wrap_edges()
next_generation = set()
for x in range(self.width):
for y in range(self.height):
if self.rule(self.get(x, y), self.get_neighbours(x, y)):
next_generation.add((x, y))
self.live_cells = next_generation
def main():
console = tdl.init(WIDTH, HEIGHT)
board = LifeBoard(WIDTH, HEIGHT - 1)
# The R-pentomino
#board.set_batch(WIDTH // 2 - 2,HEIGHT // 2 - 2,
# [' **',
# '** ',
# ' * '])
# Diehard
#board.set_batch(WIDTH // 2 - 5,HEIGHT // 2 - 2,
# [' * ',
# '** ',
# ' * ***'])
# Gosper glider gun
board.set_batch(1, 1,
[' ',
' * ',
' * * ',
' ** ** **',
' * * ** **',
'** * * ** ',
'** * * ** * * ',
' * * * ',
' * * ',
' ** '])
play = False
redraw = True
mouse_drawing = None
mouse_x = -1
mouse_y = -1
while True:
for event in tdl.event.get():
if event.type == 'QUIT':
return
elif event.type == 'KEYDOWN':
if event.key == 'SPACE':
play = not play
redraw = True
elif event.char.upper() == 'S':
board.step()
redraw = True
elif event.char.upper() == 'C':
board.clear()
redraw = True
elif event.char.upper() == 'W':
board.wrap = not board.wrap
redraw = True
elif event.type == 'MOUSEDOWN':
x, y, = event.cell
board.toggle(x, y)
mouse_drawing = event.cell
redraw = True
elif event.type == 'MOUSEUP':
mouse_drawing = None
elif event.type == 'MOUSEMOTION':
if(mouse_drawing and mouse_drawing != event.cell):
x, y = mouse_drawing = event.cell
board.toggle(x, y)
mouse_x, mouse_y = event.cell
redraw = True
if play and mouse_drawing is None:
board.step()
redraw = True
if redraw:
redraw = False
console.clear()
for x, y in board.live_cells:
console.draw_char(x, y, '*')
console.draw_rect(0, -1, None, None, None, bg=(64, 64, 80))
console.draw_str(0, -1, "Mouse:Toggle Cells, Space:%5s, [S]tep, [C]lear, [W]rap Turn %s" % (['Play', 'Pause'][play], ['On', 'Off'][board.wrap]), None, None)
if (mouse_x, mouse_y) in console:
console.draw_char(mouse_x, mouse_y,
None, (0, 0, 0), (255, 255, 255))
else:
time.sleep(0.01)
tdl.flush()
tdl.set_title("Conway's Game of Life - %i FPS" % tdl.get_fps())
if __name__ == '__main__':
main()
| bsd-2-clause | 8,701,432,477,316,274,000 | 34.371257 | 168 | 0.402912 | false |
vinay-qa/vinayit-android-server-apk | py/test/selenium/webdriver/common/proxy_tests.py | 1 | 1561 | #!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.ftp_proxy = 'ftp.proxy:1234'
proxy.no_proxy = 'localhost, foo.localhost'
proxy.sslProxy = 'ssl.proxy:1234'
proxy.autodetect = 'True'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'MANUAL',
'httpProxy': 'some.url:1234',
'ftpProxy': 'ftp.proxy:1234',
'noProxy': 'localhost, foo.localhost',
'sslProxy': 'ssl.proxy:1234',
'autodetect': 'True'
}
}
print 'descap', desired_capabilities
self.assertEqual(expected_capabilities, desired_capabilities)
| apache-2.0 | 6,617,160,217,035,852,000 | 32.934783 | 74 | 0.650865 | false |
shiina/invariant-object-recognition | gabor.py | 1 | 4221 | import numpy as np
import matplotlib.pyplot as pl
import Image
import scipy.signal as sg
#some variable initializations
#resolution of gabor filter
resolution = 1.
#size of gabor filter
gsize = 30
#Number of gabor filter orientations with cosine in the gabor bank
N_Greal = 8
#Number of gabor filter orientations with sine in the gabor bank
N_Gimag = 0
#number of different wave vectors in the gabor bank
N_Size = 8
#total number of gabor filters
N_Gabor = N_Greal*N_Size+N_Gimag*N_Size
# return 2D Gabor Filter with cosine. Uses multivariate Gaussian with standard deviations "sigmax" and "sigmay" and has a mean of 0. Cosine has wave vector "k", phase "phi and is rotated around angle "theta". Filter has "size" as size with resolution "res".
def Gabor_real(size, sigmax, sigmay, k, phi, theta, res):
x,y = np.mgrid[-size/2:size/2:res,-size/2:size/2:res]
xrot = x*np.cos(theta) + y*np.sin(theta)
return (1/(2.*np.pi*sigmax*sigmay))*np.exp(-(x**2/(2.0*sigmax**2))-(y**2/(2.0*sigmay**2)))*np.cos((k*xrot)-phi)
# return 2D Gabor Filter with sine. Uses multivariate Gaussian with standard deviations "sigmax" and "sigmay" and has a mean of 0. Sine has wave vector "k", phase "phi and is rotated around angle "theta". Filter has "size" as size with resolution "res".
def Gabor_imag(size, sigmax, sigmay, k, phi, theta, res):
# return 2D Gabor Filter
x,y = np.mgrid[-size/2:size/2:res,-size/2:size/2:res]
xrot = x*np.cos(theta) + y*np.sin(theta)
return (1/(2.*np.pi*sigmax*sigmay))*np.exp(-(x**2/(2.0*sigmax**2))-(y**2/(2.0*sigmay**2)))*np.sin((k*xrot)-phi)
# return gabor bank of "n_real" cosine gabor filters and "n_imag" sine gabor filters with "n_size" wave vektors and size "size" and resolution "res". returns array of gabor filters with shape (N_Gabor,int(size/res),int(size/res) such that gabor_bank[i] is the i-th gabor filter. gabor_bank[0:nsize*n_real] contains the real gabor filters where gabor_bank[0:n_real] contains n_real differently sized filters of the same orientation and so on. gabor_bank[nsize*n_real:nsize*(n_real+n_imag)] contains the imaginary gabor filters.
def Gabor_Bank(n_real, n_imag, n_size, size, res):
#total number of gabor filters in the gabor bank
N_Gabor = n_real*n_size+n_imag*n_size
gabor_bank = np.zeros((N_Gabor,int(size/res),int(size/res)))
for i in range(n_real):
for j in range(n_size):
gabor_bank[i*n_size+j] = Gabor_real(size,j/4.+1/2.,j/4.+1/2.,n_size/2.+1-j/2.,0,i*np.pi/n_real,res)
for i in range(n_imag):
for j in range(n_size):
gabor_bank[i*n_size+j+n_real*n_size] = Gabor_imag(size,j/4.+1/4.,j/4.+1/4.,n_size/2.+1-j/2.,0,i*2*np.pi/n_imag,res)
return gabor_bank
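# Illustrative usage (module-level parameters defined at the top of this file):
#   gabor_bank = Gabor_Bank(N_Greal, N_Gimag, N_Size, gsize, resolution)
#   Gabor_Plot(gabor_bank, 0)   # first orientation, first four sizes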
#Plots the four gabor filters at indices N..N+3 (the first four sizes of one orientation) in a 2x2 grid.
def Gabor_Plot(gabor_bank,N):
f,ar = pl.subplots(2,2)
ar[0,0].imshow(gabor_bank[N+0])
ar[0,1].imshow(gabor_bank[N+1])
ar[1,0].imshow(gabor_bank[N+2])
ar[1,1].imshow(gabor_bank[N+3])
f.show()
#reads png image with name "image_name". returns a 2D numpy array
def Read_Image(img_name):
img = Image.open(img_name).convert('LA')
	img = np.array(img)
	img = img[:,:,0] # keep luminance only: convolve2d in Convolve_Data needs a 2-D array
#img = np.dot(img[:,:,:3], [0.299, 0.587, 0.144])
return img
#plots image after reading. also plots convolved image if given cimg[i] as argument
def Plot_Image(img):
pl.figure()
pl.imshow(img,cmap='gray')
pl.show()
#convolve data
def Convolve_Data(img,gabor_bank):
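	# scipy.signal.convolve2d defaults to mode='full', hence each output
	# plane has shape (filter + image - 1) along both axes.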
cimg = np.zeros((gabor_bank.shape[0],gabor_bank.shape[1]+img.shape[0]-1,gabor_bank.shape[2]+img.shape[1]-1))
for i in range(gabor_bank.shape[0]):
cimg[i]=sg.convolve2d(img, gabor_bank[i])
#return status of convolution (since it is soo slow)
print N_Gabor, i
return cimg
#write "data" into "filename". checks data after writing with assertion.
def Write_Data(data,filename):
with file(filename, 'w') as outfile:
for i in range(data.shape[0]):
np.savetxt(outfile, data[i])
new_data = np.loadtxt(filename)
new_data = new_data.reshape((data.shape[0],data.shape[1],data.shape[2]))
assert np.all(new_data == data)
def Read_Img_Database():
for i in range(1,101):
for j in range(356):
filename="coil-100/obj"+str(i)+"__"+str(j)+".png"
			img = Read_Image(filename) # use the filename built above, not a fixed test image
Plot_Image(img)
| lgpl-3.0 | 2,869,530,535,805,990,400 | 43.904255 | 526 | 0.695096 | false |
aggrent/cab | cab/forms.py | 1 | 3329 | from django import forms
from django.contrib import admin
from haystack.forms import SearchForm
from cab.models import Language, Snippet, SnippetFlag, VERSIONS
from registration.forms import RegistrationFormUniqueEmail
from taggit_autosuggest.widgets import TagAutoSuggest
from epiceditor.widgets import AdminEpicEditorWidget
from codemirror.widgets import CodeMirrorTextarea
haskell_textarea = CodeMirrorTextarea(mode="haskell", theme="ambiance", config={ 'fixedGutter': True, 'lineNumbers': False, 'viewportMargin': 5000 })
def validate_non_whitespace_only_string(value):
"""
Additionally to requiring a non-empty string, this validator also strips
the string to treat strings with only whitespaces in them as empty.
"""
if not value or not value.strip():
raise forms.ValidationError(u'This field is required', code='required')
class SnippetForm(forms.ModelForm):
title = forms.CharField(
validators=[validate_non_whitespace_only_string])
description = forms.CharField(
validators=[validate_non_whitespace_only_string],
widget=AdminEpicEditorWidget)
code = forms.CharField(
validators=[validate_non_whitespace_only_string],
widget=haskell_textarea)
class Meta:
model = Snippet
exclude = ('author', 'bookmark_count', 'rating_score', 'version', 'language')
class SnippetFlagForm(forms.ModelForm):
class Meta:
model = SnippetFlag
fields = ('flag',)
class AdvancedSearchForm(SearchForm):
language = forms.ModelChoiceField(
queryset=Language.objects.all(), required=False)
version = forms.MultipleChoiceField(choices=VERSIONS, required=False)
minimum_pub_date = forms.DateTimeField(
widget=admin.widgets.AdminDateWidget, required=False)
minimum_bookmark_count = forms.IntegerField(required=False)
minimum_rating_score = forms.IntegerField(required=False)
def search(self):
# First, store the SearchQuerySet received from other processing.
sqs = super(AdvancedSearchForm, self).search()
if not self.is_valid():
return sqs
if self.cleaned_data['language']:
sqs = sqs.filter(language=self.cleaned_data['language'].name)
if self.cleaned_data['version']:
sqs = sqs.filter(
version__in=self.cleaned_data['version'])
if self.cleaned_data['minimum_pub_date']:
sqs = sqs.filter(
pub_date__gte=self.cleaned_data['minimum_pub_date'])
if self.cleaned_data['minimum_bookmark_count']:
sqs = sqs.filter(
bookmark_count__gte=self.cleaned_data['minimum_bookmark_count'])
if self.cleaned_data['minimum_rating_score']:
sqs = sqs.filter(
rating_score__gte=self.cleaned_data['minimum_rating_score'])
return sqs
class RegisterForm(RegistrationFormUniqueEmail):
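    # 'your_name' acts as a spam honeypot: clean() below rejects any
    # submission that fills it, since autocomplete is disabled and legitimate
    # users are told to leave it blank.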
your_name = forms.CharField(label='Your Name', required=False,
widget=forms.TextInput(attrs={
'autocomplete': 'off',
}))
def clean(self):
if self.cleaned_data.get('your_name'):
raise forms.ValidationError('Please keep the Name field blank')
return self.cleaned_data
| bsd-3-clause | -2,306,287,639,573,231,000 | 35.184783 | 149 | 0.665966 | false |
galaxor/Nodewatcher | generator/gennyd.py | 1 | 9945 | #!/usr/bin/python
#
# nodewatcher firmware generator daemon
#
# Copyright (C) 2009 by Jernej Kos <[email protected]>
#
# First parse options (this must be done here since they contain import paths
# that must be parsed before Django models can be imported)
import sys, os, re
from optparse import OptionParser
print "============================================================================"
print " nodewatcher firmware generator daemon "
print "============================================================================"
parser = OptionParser()
parser.add_option('--path', dest = 'path', help = 'Path that contains nodewatcher "web" Python module')
parser.add_option('--settings', dest = 'settings', help = 'Django settings to use')
parser.add_option('--destination', dest = 'destination', help = 'Firmware destination directory')
options, args = parser.parse_args()
if not options.path:
print "ERROR: Path specification is required!\n"
parser.print_help()
exit(1)
elif not options.settings:
print "ERROR: Settings specification is required!\n"
parser.print_help()
exit(1)
elif not options.destination:
print "ERROR: Firmware destination directory is required!\n"
parser.print_help()
exit(1)
# Setup import paths, since we are using Django models
sys.path.append(options.path)
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
# Django stuff
from django.core.mail import send_mail
from django.utils.translation import ugettext as _
from django.template import loader, Context
from django.conf import settings
# Other stuff
from beanstalk import serverconn
from beanstalk import job
from config_generator import OpenWrtConfig, portLayouts
import logging
import hashlib
from traceback import format_exc
import pwd
from zipfile import ZipFile, ZIP_DEFLATED
from base64 import urlsafe_b64encode
from glob import glob
WORKDIR = os.getcwd()
DESTINATION = options.destination
IMAGEBUILDERS = (
"imagebuilder.atheros",
"imagebuilder.brcm24",
"imagebuilder.broadcom",
"imagebuilder.ar71xx"
)
def no_unicodes(x):
"""
Converts all unicodes to str instances.
"""
for k, v in x.iteritems():
if isinstance(v, unicode):
x[k] = v.encode('utf8')
return x
def generate_image(d):
"""
Generates an image accoording to given configuration.
"""
logging.debug(repr(d))
if d['imagebuilder'] not in IMAGEBUILDERS:
raise Exception("Invalid imagebuilder specified!")
x = OpenWrtConfig()
x.setUUID(d['uuid'])
x.setOpenwrtVersion(d['openwrt_ver'])
x.setArch(d['arch'])
x.setPortLayout(d['port_layout'])
x.setWifiIface(d['iface_wifi'], d['driver'], d['channel'])
x.setWifiAnt(d['rx_ant'], d['tx_ant'])
x.setLanIface(d['iface_lan'])
x.setNodeType("adhoc")
x.setPassword(d['root_pass'])
x.setHostname(d['hostname'])
x.setIp(d['ip'])
x.setSSID(d['ssid'])
# Add WAN interface and all subnets
if d['wan_dhcp']:
x.addInterface("wan", d['iface_wan'], init = True)
else:
x.addInterface("wan", d['iface_wan'], d['wan_ip'], d['wan_cidr'], d['wan_gw'], init = True)
for subnet in d['subnets']:
x.addSubnet(str(subnet['iface']), str(subnet['network']), subnet['cidr'], subnet['dhcp'], True)
x.setCaptivePortal(d['captive_portal'])
if d['vpn']:
x.setVpn(d['vpn_username'], d['vpn_password'], d['vpn_mac'], d['vpn_limit'])
if d['lan_wifi_bridge']:
x.enableLanWifiBridge()
if d['lan_wan_switch']:
x.switchWanToLan()
# Add optional packages
for package in d['opt_pkg']:
x.addPackage(package)
# Cleanup stuff from previous builds
os.chdir(WORKDIR)
os.system("rm -rf build/files/*")
os.system("rm -rf build/%s/bin/*" % d['imagebuilder'])
os.mkdir("build/files/etc")
x.generate("build/files/etc")
if d['only_config']:
# Just pack configuration and send it
prefix = hashlib.md5(os.urandom(32)).hexdigest()[0:16]
tempfile = os.path.join(DESTINATION, prefix + "-config.zip")
zip = ZipFile(tempfile, 'w', ZIP_DEFLATED)
os.chdir('build/files')
for root, dirs, files in os.walk("etc"):
for file in files:
zip.write(os.path.join(root, file))
zip.close()
# Generate checksum
f = open(tempfile, 'r')
checksum = hashlib.md5(f.read())
f.close()
# We can take just first 22 characters as checksums are fixed size and we can reconstruct it
filechecksum = urlsafe_b64encode(checksum.digest())[:22]
checksum = checksum.hexdigest()
result = "%s-%s-config-%s.zip" % (d['hostname'], d['router_name'], filechecksum)
destination = os.path.join(DESTINATION, result)
os.rename(tempfile, destination)
# Send an e-mail
t = loader.get_template('generator/email_config.txt')
c = Context({
'hostname' : d['hostname'],
'ip' : d['ip'],
'username' : d['vpn_username'],
'config' : result,
'checksum' : checksum,
'network' : { 'name' : settings.NETWORK_NAME,
'home' : settings.NETWORK_HOME,
'contact' : settings.NETWORK_CONTACT,
'description' : getattr(settings, 'NETWORK_DESCRIPTION', None)
},
'images_bindist_url' : getattr(settings, 'IMAGES_BINDIST_URL', None)
})
send_mail(
settings.EMAIL_SUBJECT_PREFIX + (_("Configuration for %s/%s") % (d['hostname'], d['ip'])),
t.render(c),
settings.EMAIL_IMAGE_GENERATOR_SENDER,
[d['email']],
fail_silently = False
)
else:
# Generate full image
x.build("build/%s" % d['imagebuilder'])
# Read image version
try:
f = open(glob('%s/build/%s/build_dir/target-*/root-*/etc/version' % (WORKDIR, d['imagebuilder']))[0], 'r')
version = f.read().strip()
version = re.sub(r'\W+', '_', version)
version = re.sub(r'_+', '_', version)
f.close()
except:
version = 'unknown'
# Get resulting image
files = []
for file, type in d['imagefiles']:
file = str(file)
source = "%s/build/%s/bin/%s" % (WORKDIR, d['imagebuilder'], file)
f = open(source, 'r')
checksum = hashlib.md5(f.read())
f.close()
# We can take just first 22 characters as checksums are fixed size and we can reconstruct it
filechecksum = urlsafe_b64encode(checksum.digest())[:22]
checksum = checksum.hexdigest()
ext = os.path.splitext(file)[1]
router_name = d['router_name'].replace('-', '')
result = "%s-%s-%s%s-%s%s" % (d['hostname'], router_name, version, ("-%s" % type if type else "-all"), filechecksum, ext)
destination = os.path.join(DESTINATION, result)
os.rename(source, destination)
files.append({ 'name' : result, 'checksum' : checksum })
# Send an e-mail
t = loader.get_template('generator/email.txt')
c = Context({
'hostname' : d['hostname'],
'ip' : d['ip'],
'username' : d['vpn_username'],
'files' : files,
'network' : { 'name' : settings.NETWORK_NAME,
'home' : settings.NETWORK_HOME,
'contact' : settings.NETWORK_CONTACT,
'description' : getattr(settings, 'NETWORK_DESCRIPTION', None)
},
'images_bindist_url' : getattr(settings, 'IMAGES_BINDIST_URL', None)
})
send_mail(
settings.EMAIL_SUBJECT_PREFIX + (_("Router images for %s/%s") % (d['hostname'], d['ip'])),
t.render(c),
settings.EMAIL_IMAGE_GENERATOR_SENDER,
[d['email']],
fail_silently = False
)
# Configure logger
logging.basicConfig(level = logging.DEBUG,
format = '%(asctime)s %(levelname)-8s %(message)s',
datefmt = '%a, %d %b %Y %H:%M:%S',
filename = os.path.join(WORKDIR, 'generator.log'),
filemode = 'a')
if settings.IMAGE_GENERATOR_USER:
# Change ownership for the build directory
os.system("chown -R {0}:{0} build".format(settings.IMAGE_GENERATOR_USER))
# Drop user privileges
try:
info = pwd.getpwnam(settings.IMAGE_GENERATOR_USER)
os.setgid(info.pw_gid)
os.setuid(info.pw_uid)
except:
print "ERROR: Unable to change to '{0}' user!".format(settings.IMAGE_GENERATOR_USER)
exit(1)
logging.info("nodewatcher firmware generator daemon v0.1 starting up...")
c = serverconn.ServerConn("127.0.0.1", 11300)
c.job = job.Job
c.use("generator")
logging.info("Connected to local beanstalkd instance.")
try:
while True:
j = c.reserve()
j.Finish()
try:
logging.info("Generating an image for '%s/%s'..." % (j.data['vpn_username'], j.data['ip']))
generate_image(no_unicodes(j.data))
logging.info("Image generation successful!")
except:
logging.error(format_exc())
logging.warning("Image generation has failed!")
# Send an e-mail
d = no_unicodes(j.data)
t = loader.get_template('generator/email_failed.txt')
ctx = Context({
'hostname' : d['hostname'],
'ip' : d['ip'],
'username' : d['vpn_username'],
'network' : { 'name' : settings.NETWORK_NAME,
'home' : settings.NETWORK_HOME,
'contact' : settings.NETWORK_CONTACT,
'description' : getattr(settings, 'NETWORK_DESCRIPTION', None)
},
'images_bindist_url' : getattr(settings, 'IMAGES_BINDIST_URL', None)
})
send_mail(
settings.EMAIL_SUBJECT_PREFIX + (_("Image generation failed for %s/%s") % (d['hostname'], d['ip'])),
t.render(ctx),
settings.EMAIL_IMAGE_GENERATOR_SENDER,
[d['email']],
fail_silently = False
)
except KeyboardInterrupt:
logging.info("Terminating due to user abort.")
except:
logging.error(format_exc())
logging.warning("We are going down!")
| agpl-3.0 | -8,465,503,166,224,846,000 | 31.713816 | 133 | 0.604424 | false |
OCA/purchase-workflow | purchase_location_by_line/models/purchase.py | 1 | 2524 | # © 2016 Eficent Business and IT Consulting Services S.L.
# (<http://www.eficent.com>)
# © 2018 Hizbul Bahar <[email protected]>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class PurchaseOrderLine(models.Model):
_inherit = 'purchase.order.line'
location_dest_id = fields.Many2one(
comodel_name='stock.location', string='Destination',
domain=[('usage', 'in', ['internal', 'transit'])])
@api.model
def _first_picking_copy_vals(self, key, lines):
"""The data to be copied to new pickings is updated with data from the
grouping key. This method is designed for extensibility, so that
other modules can store more data based on new keys."""
vals = super(PurchaseOrderLine, self)._first_picking_copy_vals(
key, lines)
for key_element in key:
if 'location_dest_id' in key_element.keys():
vals['location_dest_id'] = key_element['location_dest_id'].id
return vals
@api.model
def _get_group_keys(self, order, line, picking=False):
"""Define the key that will be used to group. The key should be
defined as a tuple of dictionaries, with each element containing a
dictionary element with the field that you want to group by. This
method is designed for extensibility, so that other modules can add
additional keys or replace them by others."""
key = super(PurchaseOrderLine, self)._get_group_keys(
order, line, picking=picking)
default_picking_location_id = line.order_id._get_destination_location()
default_picking_location = self.env['stock.location'].browse(
default_picking_location_id)
location = line.location_dest_id or default_picking_location
return key + ({'location_dest_id': location},)
@api.multi
def _create_stock_moves(self, picking):
res = super(PurchaseOrderLine, self)._create_stock_moves(picking)
for line in self:
default_picking_location_id = \
line.order_id._get_destination_location()
default_picking_location = self.env['stock.location'].browse(
default_picking_location_id)
location = line.location_dest_id or default_picking_location
if location:
line.move_ids.filtered(lambda m: m.state != 'done').write(
{'location_dest_id': location.id})
return res
| agpl-3.0 | -5,849,242,406,780,434,000 | 43.245614 | 79 | 0.639572 | false |
Ignotus/bookclub | app.py | 1 | 2062 | from flask import Flask, render_template, redirect, url_for, request
from flask_login import LoginManager, current_user
from flaskext.markdown import Markdown
from flask_assets import Environment, Bundle
route_modules = ["auth", "blog", "progress", "home", "calendar", "books"]
for module in route_modules:
exec("from routes.%s import %s" % (module, module))
from core.config import *
from core.db import db
from core.tables import User
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://" + DB_USER + ":" + DB_PASSWORD + "@" + DB_HOST + "/" + DB
db.init_app(app)
Markdown(app)
login_manager = LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(userid):
user = User.query.get(int(userid))
if user:
return user
app.debug = DEBUG
app.secret_key = SECRET_KEY
@app.before_request
def before_request():
app.config["SERVER_NAME"] = request.host
@app.route("/")
def main():
if current_user.is_authenticated():
return redirect(url_for("home.main"))
return redirect(url_for("blog.blog_main"))
modules = [auth, blog, progress, home, calendar, books]
for module in modules:
app.register_blueprint(module)
@app.errorhandler(401)
def custom_401(error):
return render_template("unauthorized.html")
assets = Environment(app)
css = Bundle("css/blog.css", "css/style.css",
filters="cssmin", output="gen/style.min.css")
js_markdown = Bundle("js/to-markdown.js", "js/markdown.js",
filters="jsmin", output="gen/markdown.min.js")
css_tagsinput = Bundle("css/bootstrap-tagsinput.css", filters="cssmin",
output="gen/bootstrap-tagsinput.min.css")
js_tagsinput = Bundle("js/bootstrap-tagsinput.js", filters="jsmin",
output="gen/bootstrap-tagsinput.min.js")
assets.register("css_all", css)
assets.register("js_markdown", js_markdown)
assets.register("css_tagsinput", css_tagsinput)
assets.register("js_tagsinput", js_tagsinput)
if __name__ == "__main__":
app.run(port=PORT)
| mit | -5,505,833,248,113,537,000 | 26.493333 | 112 | 0.677983 | false |
pombredanne/pywb | pywb/rewrite/url_rewriter.py | 1 | 5978 | from six.moves.urllib.parse import urljoin, urlsplit, urlunsplit
from pywb.rewrite.wburl import WbUrl
from pywb.rewrite.cookie_rewriter import get_cookie_rewriter
#=================================================================
class UrlRewriter(object):
"""
Main pywb UrlRewriter which rewrites absolute and relative urls
to be relative to the current page, as specified via a WbUrl
instance and an optional full path prefix
"""
NO_REWRITE_URI_PREFIX = ('#', 'javascript:', 'data:',
'mailto:', 'about:', 'file:', '{')
PROTOCOLS = ('http:', 'https:', 'ftp:', 'mms:', 'rtsp:', 'wais:')
REL_SCHEME = ('//', r'\/\/', r'\\/\\/')
def __init__(self, wburl, prefix='', full_prefix=None, rel_prefix=None,
root_path=None, cookie_scope=None, rewrite_opts=None):
self.wburl = wburl if isinstance(wburl, WbUrl) else WbUrl(wburl)
self.prefix = prefix
self.full_prefix = full_prefix or prefix
self.rel_prefix = rel_prefix or prefix
self.root_path = root_path or '/'
if self.full_prefix and self.full_prefix.startswith(self.PROTOCOLS):
self.prefix_scheme = self.full_prefix.split(':')[0]
else:
self.prefix_scheme = None
self.prefix_abs = self.prefix and self.prefix.startswith(self.PROTOCOLS)
self.cookie_scope = cookie_scope
self.rewrite_opts = rewrite_opts or {}
if self.rewrite_opts.get('punycode_links'):
self.wburl._do_percent_encode = False
def rewrite(self, url, mod=None):
# if special protocol, no rewriting at all
if url.startswith(self.NO_REWRITE_URI_PREFIX):
return url
if (self.prefix and
self.prefix != '/' and
url.startswith(self.prefix)):
return url
if (self.full_prefix and
self.full_prefix != self.prefix and
url.startswith(self.full_prefix)):
return url
wburl = self.wburl
is_abs = url.startswith(self.PROTOCOLS)
scheme_rel = False
if url.startswith(self.REL_SCHEME):
is_abs = True
scheme_rel = True
# if prefix starts with a scheme
#if self.prefix_scheme:
# url = self.prefix_scheme + ':' + url
#url = 'http:' + url
# optimize: join if not absolute url, otherwise just use as is
if not is_abs:
new_url = self.urljoin(wburl.url, url)
else:
new_url = url
if mod is None:
mod = wburl.mod
final_url = self.prefix + wburl.to_str(mod=mod, url=new_url)
if not is_abs and self.prefix_abs and not self.rewrite_opts.get('no_match_rel'):
parts = final_url.split('/', 3)
final_url = '/'
if len(parts) == 4:
final_url += parts[3]
# experiment for setting scheme rel url
elif scheme_rel and self.prefix_abs:
final_url = final_url.split(':', 1)[1]
return final_url
def get_new_url(self, **kwargs):
return self.prefix + self.wburl.to_str(**kwargs)
def rebase_rewriter(self, new_url):
if new_url.startswith(self.prefix):
new_url = new_url[len(self.prefix):]
elif new_url.startswith(self.rel_prefix):
new_url = new_url[len(self.rel_prefix):]
new_wburl = WbUrl(new_url)
return self._create_rebased_rewriter(new_wburl, self.prefix)
def _create_rebased_rewriter(self, new_wburl, prefix):
return UrlRewriter(new_wburl, prefix)
def get_cookie_rewriter(self, scope=None):
# collection scope overrides rule scope?
if self.cookie_scope:
scope = self.cookie_scope
cls = get_cookie_rewriter(scope)
return cls(self)
def deprefix_url(self):
return self.wburl.deprefix_url(self.full_prefix)
def __repr__(self):
return "UrlRewriter('{0}', '{1}')".format(self.wburl, self.prefix)
@staticmethod
def urljoin(orig_url, url): # pragma: no cover
new_url = urljoin(orig_url, url)
if '../' not in new_url:
return new_url
# only needed in py2 as py3 urljoin resolves '../'
parts = urlsplit(new_url)
scheme, netloc, path, query, frag = parts
path_parts = path.split('/')
i = 0
n = len(path_parts) - 1
while i < n:
if path_parts[i] == '..':
del path_parts[i]
n -= 1
if i > 0:
del path_parts[i - 1]
n -= 1
i -= 1
else:
i += 1
if path_parts == ['']:
path = '/'
else:
path = '/'.join(path_parts)
parts = (scheme, netloc, path, query, frag)
new_url = urlunsplit(parts)
return new_url
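# A small worked example of the '../' collapsing above (illustrative
# values):
#
# UrlRewriter.urljoin('http://example.com/a/b/c', '../d')
# -> 'http://example.com/a/d'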
#=================================================================
class SchemeOnlyUrlRewriter(UrlRewriter):
"""
A url rewriter which ensures that any urls have the same
scheme (http or https) as the base url.
Other urls/input is unchanged.
"""
def __init__(self, *args, **kwargs):
super(SchemeOnlyUrlRewriter, self).__init__(*args, **kwargs)
self.url_scheme = self.wburl.url.split(':')[0]
if self.url_scheme == 'https':
self.opposite_scheme = 'http'
else:
self.opposite_scheme = 'https'
def rewrite(self, url, mod=None):
if url.startswith(self.opposite_scheme + '://'):
url = self.url_scheme + url[len(self.opposite_scheme):]
return url
def get_new_url(self, **kwargs):
return kwargs.get('url', self.wburl.url)
def rebase_rewriter(self, new_url):
return self
def get_cookie_rewriter(self, scope=None):
return None
def deprefix_url(self):
return self.wburl.url
| gpl-3.0 | 326,196,317,442,210,560 | 30.967914 | 88 | 0.546002 | false |
betoesquivel/CIE | app/models.py | 1 | 1574 | #!flask/bin/python
from app import db
from werkzeug import generate_password_hash, check_password_hash
COMISSIONER_ROLE = 0
MARKETING_ROLE = 1
ADMIN_ROLE = 2
class Staff(db.Model):
__tablename__ = 'staff'
id = db.Column(db.Integer, primary_key = True)
studentNumber = db.Column(db.Integer)
name = db.Column(db.String(64, convert_unicode=True))
email = db.Column(db.String(64, convert_unicode=True), unique = True)
pwdhash = db.Column(db.String(54, convert_unicode=True))
role = db.Column(db.Integer)
council = db.relationship('Council', uselist=False, backref='staff')
def __init__(self, name, studentNumber, email, password, role):
self.name = name
self.studentNumber = studentNumber
self.email = email.lower()
self.set_password(password)
self.role = role
def set_password(self, password):
self.pwdhash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.pwdhash, password)
def __repr__(self):
return '<Staff %r %r>' % (self.role, self.name)
class Council(db.Model):
__tablename__ = 'council'
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.String(64, convert_unicode=True), unique = True)
description = db.Column(db.String(1000, convert_unicode=True))
website = db.Column(db.String(64, convert_unicode=True), unique = True)
comissionerId = db.Column(db.Integer, db.ForeignKey('staff.id'))
def __repr__(self):
return '<Council %r>' % (self.name)
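# Minimal usage sketch (values are illustrative, not from the project):
#
# staff = Staff('Ada', 12345, '[email protected]', 's3cret', ADMIN_ROLE)
# staff.check_password('s3cret') # -> True; only the hash is stored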
| mit | 419,842,146,019,052,300 | 33.977778 | 75 | 0.662643 | false |
mybluevan/gospel-preaching | gospel_preaching/simple_orders/admin.py | 1 | 1882 | from models import Product, Order, OrderItem
from django.contrib import admin
from datetime import date
from calc_fields import CalcAdmin
class OrderItemInline(admin.TabularInline):
model = OrderItem
extra = 10
class ProductAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("title",)}
search_fields = ['title', 'description']
list_display = ('title', 'price', 'qoh')
class OrderAdmin(CalcAdmin):
calc_defs = {'shipping_cost': ('ship_cost','$%#.2f'), 'total': ('total','$%#.2f')}
fields = ('user', 'ship_date', 'ship_name', 'ship_addr', 'ship_city', 'ship_state', 'ship_zip', 'phone', 'email', 'instructions', 'shipped', 'paid', 'payment_method')
calc_fields = fields + ('shipping_cost', 'total')
date_hierarchy = 'date'
list_display = ('date', 'ship_name', 'phone', 'email', 'shipped', 'paid', 'total')
list_filter = ('shipped', 'paid')
actions = ['mark_shipped', 'mark_paid']
inlines = [OrderItemInline]
save_on_top = True
def mark_shipped(self, request, queryset):
rows_updated = queryset.update(shipped=True, ship_date=date.today())
if rows_updated == 1:
message_bit = "1 order was"
else:
message_bit = "%s orders were" % rows_updated
self.message_user(request, "%s successfully marked as shipped." % message_bit)
mark_shipped.short_description = "Mark selected orders as shipped"
def mark_paid(self, request, queryset):
rows_updated = queryset.update(paid=True)
if rows_updated == 1:
message_bit = "1 order was"
else:
message_bit = "%s orders were" % rows_updated
self.message_user(request, "%s successfully marked as paid." % message_bit)
mark_paid.short_description = "Mark selected orders as paid"
admin.site.register(Product, ProductAdmin)
admin.site.register(Order, OrderAdmin)
| gpl-3.0 | -6,975,373,972,628,733,000 | 42.767442 | 170 | 0.643464 | false |
google-research/google-research | tunas/schema_io.py | 1 | 8358 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Utilities for schema serialization and deserialization.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
from typing import Any, ByteString, Callable, Dict, Iterable, Optional, Sequence, Text, Tuple, Type, TypeVar, Union
import six
from six.moves import map
from six.moves import zip
from tunas import schema
# Primitive types (like integers or strings) that are supported in both Python
# and JSON.
_PRIMITIVE_TYPES = (int, float) + six.string_types
# We don't have a good way to identify namedtuples to the Python type system,
# except that they're subclasses of tuple.
_NamedTupleTypeVar = TypeVar('_NamedTupleTypeVar', bound=Tuple[Any, Ellipsis])
# Registration interface for namedtuple serialization and deserialization.
# typing.Type[] is not currently supported inside type annotation comments,
# so we use the annotation Any instead.
_NAMEDTUPLE_NAME_TO_CLASS = {} # type: Dict[Text, Any]
_NAMEDTUPLE_CLASS_TO_NAME = {} # type: Dict[Any, Text]
_NAMEDTUPLE_CLASS_TO_DEFAULTS = {} # type: Dict[Any, Dict[Text, Any]]
def register_namedtuple(
name,
deprecated_names = None,
defaults = None,
):
"""Register a namedtuple class for serialization/deserialization.
Namedtuples that are registered can be serialized and deserialized using
the utilities in this file.
Example usage:
@schema_io.register_namedtuple('package.C')
class C(collections.namedtuple('C', ['field1'])):
pass
# Later in the code
serialized = schema_io.serialize(C('foo')) # returns a serialized string
restored = schema_io.deserialize(serialized) # returns a namedtuple
Args:
name: String, globally unique identifier for the registered class.
deprecated_names: Optional list of Strings containing deprecated names for
the registered class.
defaults: Optional dict mapping field names to default values. This makes it possible
to add new fields to a namedtuple while preserving backwards-compatibility
for old objects which are loaded from disk.
Returns:
A class decorator.
"""
def decorator(cls):
"""Register a new class instance."""
if name in _NAMEDTUPLE_NAME_TO_CLASS:
raise ValueError('Duplicate name in registry: {:s}'.format(name))
if cls in _NAMEDTUPLE_CLASS_TO_NAME:
raise ValueError('Duplicate class in registry: {:s}'.format(name))
if not issubclass(cls, tuple) or not hasattr(cls, '_fields'):
raise ValueError(
'Cannot register class {}.{} because it is not a namedtuple'
.format(cls.__module__, cls.__name__))
_NAMEDTUPLE_NAME_TO_CLASS[name] = cls
_NAMEDTUPLE_CLASS_TO_NAME[cls] = name
if deprecated_names:
for deprecated_name in deprecated_names:
if deprecated_name in _NAMEDTUPLE_NAME_TO_CLASS:
raise ValueError(
'Duplicate name registered: {:s}'.format(deprecated_name))
_NAMEDTUPLE_NAME_TO_CLASS[deprecated_name] = cls
if defaults:
for field in sorted(defaults.keys()):
if field not in cls._fields:
raise ValueError(
'Field {} appears in defaults but not in class {}.{}'
.format(field, cls.__module__, cls.__name__))
_NAMEDTUPLE_CLASS_TO_DEFAULTS[cls] = dict(defaults)
return cls
return decorator
def namedtuple_class_to_name(cls):
if cls not in _NAMEDTUPLE_CLASS_TO_NAME:
raise KeyError(
'Namedtuple class {}.{} is not registered. Did you forget to use a '
'@schema_io.register_namedtuple() decorator?'
.format(cls.__module__, cls.__name__))
return _NAMEDTUPLE_CLASS_TO_NAME[cls]
def namedtuple_name_to_class(name):
if name not in _NAMEDTUPLE_NAME_TO_CLASS:
raise KeyError(
'Namedtuple name {} is not registered. Did you forget to use a '
'@schema_io.register_namedtuple() decorator?'
.format(repr(name)))
return _NAMEDTUPLE_NAME_TO_CLASS[name]
def _to_json(structure):
"""Convert a nested datastructure to pure JSON."""
if structure is None or isinstance(structure, _PRIMITIVE_TYPES):
return structure
elif isinstance(structure, schema.OneOf):
result = ['oneof']
result.append(['choices', _to_json(structure.choices)])
result.append(['tag', _to_json(structure.tag)])
return result
elif isinstance(structure, list):
result = ['list']
result.extend(map(_to_json, structure))
return result
elif isinstance(structure, tuple) and hasattr(structure, '_fields'):
result = ['namedtuple:' + namedtuple_class_to_name(structure.__class__)]
result.extend(zip(structure._fields, map(_to_json, structure)))
return result
elif isinstance(structure, tuple):
result = ['tuple']
result.extend(map(_to_json, structure))
return result
elif isinstance(structure, dict):
result = ['dict']
for k in sorted(structure):
result.append((_to_json(k), _to_json(structure[k])))
return result
else:
raise ValueError('Unrecognized type: {}'.format(type(structure)))
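# For reference, the encoded shape looks like this (illustrative values):
# _to_json((1, 'a')) -> ['tuple', 1, 'a']
# _to_json({'k': 2}) -> ['dict', ('k', 2)] (tuples become JSON arrays on dump)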
def _namedtuple_from_json(
cls, kv_pairs):
"""Convert a JSON data structure to a namedtuple."""
# Start with a list of default keyword arguments.
if cls in _NAMEDTUPLE_CLASS_TO_DEFAULTS:
kwargs = dict(_NAMEDTUPLE_CLASS_TO_DEFAULTS[cls])
else:
kwargs = dict()
# Add all the user-provided key-value pairs.
for key, value in kv_pairs:
if key not in cls._fields:
raise ValueError(
'Invalid field: {} for class: {}, permitted values: {}'
.format(key, cls, cls._fields))
kwargs[key] = value
# Make sure we've provided all the arguments we need.
for field in cls._fields:
if field not in kwargs:
raise ValueError(
'Missing field: {} for class: {}'.format(field, cls))
# Now wrap the key-value pairs in a namedtuple.
return cls(**kwargs)
def _from_json(structure):
"""Converted a pure JSON data structure to one with namedtuples and OneOfs."""
if structure is None or isinstance(structure, _PRIMITIVE_TYPES):
return structure
elif isinstance(structure, list):
assert structure
typename = structure[0]
structure = structure[1:]
if typename == 'dict':
return {_from_json(k): _from_json(v) for (k, v) in structure}
elif typename.startswith('namedtuple:'):
cls = namedtuple_name_to_class(typename[len('namedtuple:'):])
kv_pairs = [(_from_json(k), _from_json(v)) for (k, v) in structure]
return _namedtuple_from_json(cls, kv_pairs)
elif typename == 'oneof':
keys = tuple(_from_json(k) for (k, v) in structure)
assert keys == ('choices', 'tag'), keys
return schema.OneOf(*(_from_json(v) for (k, v) in structure))
elif typename == 'list':
return list(map(_from_json, structure))
elif typename == 'tuple':
return tuple(map(_from_json, structure))
else:
raise ValueError('Unsupported __type: {}'.format(typename))
else:
raise ValueError('Unrecognized JSON type: {}'.format(type(structure)))
def serialize(structure):
"""Serialize a nested data structure to a string.
Args:
structure: A recursive data structure, possibly consisting of integers,
strings, tuples, dictionaries, and namedtuples. Namedtuples must be
registered with the @register_namedtuple decorator above.
Returns:
A json-serialized string.
"""
return json.dumps(_to_json(structure), sort_keys=True, indent=2)
def deserialize(serialized):
"""Convert a serialized string to a nested data structure.
Args:
serialized: A json-serialized string returned by serialize().
Returns:
A (possibly nested) data structure.
"""
return _from_json(json.loads(serialized))
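# Round-trip sketch (the `Point` class here is hypothetical):
#
# import collections
#
# @register_namedtuple('example.Point')
# class Point(collections.namedtuple('Point', ['x', 'y'])):
# pass
#
# text = serialize(Point(x=1, y=2))
# assert deserialize(text) == Point(x=1, y=2)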
| apache-2.0 | 1,837,907,914,271,440,400 | 33.53719 | 115 | 0.68581 | false |
StartTheShift/thunderdome | thunderdome/tests/groovy/test_scanner.py | 1 | 1663 | # Copyright (c) 2012-2013 SHIFT.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
from unittest import TestCase
from thunderdome.gremlin import parse
class GroovyScannerTest(TestCase):
"""
Test Groovy language scanner
"""
def test_parsing_complicated_function(self):
groovy_file = os.path.join(os.path.dirname(__file__), 'groovy_test_model.groovy')
result = parse(groovy_file)
assert len(result[6].body.split('\n')) == 8
result_map = {x.name: x for x in result}
assert 'get_self' in result_map
assert 'return_value' in result_map
assert 'long_func' in result_map
| mit | -4,721,672,207,574,906,000 | 42.763158 | 89 | 0.734215 | false |
ragupta-git/ImcSdk | imcsdk/imcfilter.py | 1 | 7963 | # Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import pyparsing as pp
from . import imcgenutils
from . import imccoreutils
from .imcfiltertype import OrFilter, AndFilter, NotFilter
from .imcbasetype import FilterFilter
types = {"eq": "EqFilter",
"ne": "NeFilter",
"ge": "GeFilter",
"gt": "GtFilter",
"le": "LeFilter",
"lt": "LtFilter",
"re": "WcardFilter"
}
class ParseFilter(object):
"""
Supporting class to parse filter expression.
"""
def __init__(self, class_id, is_meta_classid):
self.class_id = class_id
self.is_meta_classid = is_meta_classid
def parse_filter_obj(self, toks):
"""
Builds a single property sub-filter from one parsed expression.
"""
# print toks[0] #logger
prop_ = toks[0]["prop"]
value_ = toks[0]["value"]
type_ = "re"
if "type_exp" in toks[0]:
type_ = toks[0]["type_exp"]["types"]
flag_ = "C"
if "flag_exp" in toks[0]:
flag_ = toks[0]["flag_exp"]["flags"]
# print prop_, value_, type_, flag_ #logger
if flag_ == "I":
value_ = re.sub(
r"[a-zA-Z]",
lambda x: "[" +
x.group().upper() +
x.group().lower() +
"]",
value_)
if self.is_meta_classid:
class_obj = imccoreutils.load_class(self.class_id)
prop_ = imccoreutils.get_prop_meta(class_obj, prop_)
prop_ = prop_.xml_attribute
sub_filter = create_basic_filter(types[type_],
class_=imcgenutils.word_l(
self.class_id),
property=prop_,
value=value_)
return sub_filter
@staticmethod
def and_operator(toks):
"""
method to support logical 'and' operator expression
"""
# print str, loc, toks
# print toks[0][0::2]
and_filter = AndFilter()
for op_filter in toks[0][0::2]:
and_filter.child_add(op_filter)
return and_filter
@staticmethod
def or_operator(toks):
"""
method to support logical 'or' operator expression
"""
# print str, loc, toks
# print toks[0][0::2]
or_filter = OrFilter()
for op_filter in toks[0][0::2]:
or_filter.child_add(op_filter)
return or_filter
@staticmethod
def not_operator(toks):
"""
method to support logical 'not' operator expression
"""
not_filter = NotFilter()
for op_filter in toks[0][1:]:
not_filter.child_add(op_filter)
return not_filter
def parse_filter_str(self, filter_str):
"""
method to parse filter string
"""
prop = pp.WordStart(pp.alphas) + pp.Word(pp.alphanums +
"_").setResultsName("prop")
value = (pp.QuotedString("'") | pp.QuotedString('"') | pp.Word(
pp.printables, excludeChars=",")).setResultsName("value")
types_ = pp.oneOf("re eq ne gt ge lt le").setResultsName("types")
flags = pp.oneOf("C I").setResultsName("flags")
comma = pp.Literal(',')
quote = (pp.Literal("'") | pp.Literal('"')).setResultsName("quote")
type_exp = pp.Group(pp.Literal("type") + pp.Literal(
"=") + quote + types_ + quote).setResultsName("type_exp")
flag_exp = pp.Group(pp.Literal("flag") + pp.Literal(
"=") + quote + flags + quote).setResultsName("flag_exp")
semi_expression = pp.Forward()
semi_expression << pp.Group(pp.Literal("(") +
prop + comma + value +
pp.Optional(comma + type_exp) +
pp.Optional(comma + flag_exp) +
pp.Literal(")")
).setParseAction(
self.parse_filter_obj).setResultsName("semi_expression")
expr = pp.Forward()
expr << pp.operatorPrecedence(semi_expression, [
("not", 1, pp.opAssoc.RIGHT, self.not_operator),
("and", 2, pp.opAssoc.LEFT, self.and_operator),
("or", 2, pp.opAssoc.LEFT, self.or_operator)
])
result = expr.parseString(filter_str)
return result
def generate_infilter(class_id, filter_str, is_meta_class_id):
"""
Create FilterFilter object
Args:
class_id (str): class_id
filter_str (str): filter expression
is_meta_class_id (bool)
Returns:
FilterFilter: object wrapping the parsed filter tree
Example:
generate_infilter("LsServer",
'("usr_lbl, "mysp", type="eq", flag="I)',
True)
"""
parse_filter = ParseFilter(class_id=class_id,
is_meta_classid=is_meta_class_id)
result = parse_filter.parse_filter_str(filter_str)
in_filter = FilterFilter()
in_filter.child_add(result[0])
return in_filter
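# Composite expressions are also accepted by the grammar above; a hedged
# sketch (the property names and values are illustrative):
#
# in_filter = generate_infilter(
# "LsServer",
# '("dn", "sys/rack-unit-1", type="eq") and ("usr_lbl", "web.*")',
# False)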
def handle_filter_max_component_limit(handle, l_filter):
"""
Checks the filter count; if it exceeds max_components (the maximum
number of child filters), the given filter objects are distributed
among smaller groups which are then bound together again in composite
filters (and/or) so that no single filter exceeds the limit.
"""
from .imccore import AbstractFilter
from .imcfiltertype import AndFilter, OrFilter
max_components = 10
if l_filter is None or l_filter.child_count() <= max_components:
return l_filter
if not isinstance(l_filter, AndFilter) and not isinstance(l_filter,
OrFilter):
return l_filter
if isinstance(l_filter, AndFilter):
parent_filter = AndFilter()
child_filter = AndFilter()
parent_filter.child_add(child_filter)
for childf in l_filter.child:
if isinstance(childf, AbstractFilter):
if child_filter.child_count() == max_components:
child_filter = AndFilter()
parent_filter.child_add(child_filter)
child_filter.child_add(childf)
result_filter = parent_filter
else:
parent_filter = OrFilter()
child_filter = OrFilter()
parent_filter.child_add(child_filter)
for childf in l_filter.child:
if isinstance(childf, AbstractFilter):
if child_filter.child_count() == max_components:
child_filter = OrFilter()
parent_filter.child_add(child_filter)
child_filter.child_add(childf)
result_filter = parent_filter
return handle_filter_max_component_limit(handle, result_filter)
def create_basic_filter(filter_name, **kwargs):
"""
Loads filter class
"""
from . import imcmeta
fq_module_name = imcmeta.OTHER_TYPE_CLASS_ID[filter_name]
module_import = __import__(fq_module_name, globals(), locals(),
[filter_name], level=1)
filter_obj = getattr(module_import, filter_name)()
filter_obj.create(**kwargs)
return filter_obj
| apache-2.0 | 4,576,562,915,621,964,000 | 31.635246 | 76 | 0.550044 | false |
hopped/wikipedia-edit-wars | wikiparser.py | 1 | 2271 | import xml.sax.handler
__author__ = 'Dennis Hoppe ([email protected])'
class WikiArticle(object):
title = ""
id = 0
revisions = []
class WikiRevision(object):
timestamp = ""
username = ""
userid = 0
revid = 0
comment = ""
text = ""
class WikiParser(xml.sax.handler.ContentHandler):
def __init__(self):
self.wikiArticle = WikiArticle()
self.wikiRevision = WikiRevision()
self.inPage = 0
self.inTitle = 0
self.inRevision = 0
self.inText = 0
self.inId = 0
self.inUsername = 0
self.inContributor = 0
self.inTimestamp = 0
self.inComment = 0
def startElement(self, name, attributes):
self.buffer = ""
if name == "page":
self.inPage = 1
elif name == "title":
self.inTitle = 1
elif name == "revision":
self.inRevision = 1
self.wikiRevision = WikiRevision()
elif name == "username":
self.inUsername = 1
elif name == "contributor":
self.inContributor = 1
elif name == "text":
self.inText = 1
elif name == "id":
self.inId = 1
elif name == "timestamp":
self.inTimestamp = 1
elif name == "comment":
self.inComment = 1
def characters(self, data):
self.buffer += data
def endElement(self, name):
if name == "page":
self.inPage = 0
elif name == "title":
self.inTitle = 0
self.wikiArticle.title = self.buffer
elif name == "revision":
self.inRevision = 0
self.wikiArticle.revisions.append(self.wikiRevision)
elif name == "username":
self.inUsername = 0
self.wikiRevision.username = self.buffer
elif name == "contributor":
self.inContributor = 0
elif name == "id":
self.inId = 0
if self.inRevision:
if self.inContributor:
self.wikiRevision.userid = self.buffer
else:
self.wikiRevision.revid = self.buffer
else:
self.wikiArticle.id = self.buffer
print self.buffer
elif name == "text":
self.inText = 0
self.wikiRevision.text = self.buffer
elif name == "timestamp":
self.inTimestamp = 0
self.wikiRevision.timestamp = self.buffer
elif name == "comment":
self.inComment = 0
self.wikiRevision.comment = self.buffer
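# Minimal driver sketch (the dump file name is illustrative):
#
# import xml.sax
#
# handler = WikiParser()
# xml.sax.parse(open('enwiki-pages-articles.xml'), handler)
# print handler.wikiArticle.title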
| mit | -2,681,335,567,877,862,400 | 23.419355 | 58 | 0.601057 | false |
mirusresearch/staticdhcpd | libpydhcpserver/libpydhcpserver/dhcp.py | 1 | 34945 | # -*- encoding: utf-8 -*-
"""
libpydhcpserver.dhcp
====================
Handles send/receive and internal routing for DHCP packets.
Legal
-----
This file is part of libpydhcpserver.
libpydhcpserver is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2014 <[email protected]>
(C) Matthew Boedicker, 2011 <[email protected]>
(C) Mathieu Ignacio, 2008 <[email protected]>
"""
import collections
import platform
import select
import socket
import threading
from dhcp_types.ipv4 import IPv4
from dhcp_types.mac import MAC
from dhcp_types.packet import (DHCPPacket, FLAGBIT_BROADCAST)
from dhcp_types.constants import (
FIELD_CIADDR, FIELD_YIADDR, FIELD_SIADDR, FIELD_GIADDR,
)
#IP constants
_IP_GLOB = IPv4('0.0.0.0') #: The internal "everything" address.
_IP_BROADCAST = IPv4('255.255.255.255') #: The broadcast address.
IP_UNSPECIFIED_FILTER = (_IP_GLOB, _IP_BROADCAST, None) #: A tuple of addresses that reflect non-unicast targets.
_ETH_P_SNAP = 0x0005
"""
Internal-only Ethernet-frame-grabbing for Linux.
Nothing should be addressable to the special response socket, but better to avoid wasting memory.
"""
Address = collections.namedtuple("Address", ('ip', 'port'))
"""
An inet layer-3 address.
.. py:attribute:: ip
An :class:`IPv4 <dhcp_types.ipv4.IPv4>` address
.. py:attribute:: port
A numeric port value.
"""
class DHCPServer(object):
"""
Handles internal packet-path-routing logic.
"""
_server_address = None #: The IP associated with this server.
_network_link = None #: The I/O-handler; you don't want to touch this.
def __init__(self, server_address, server_port, client_port, pxe_port=None, response_interface=None, response_interface_qtags=None):
"""
Sets up the DHCP network infrastructure.
:param server_address: The IP address on which to run the DHCP service.
:type server_address: :class:`IPv4 <dhcp_types.ipv4.IPv4>`
:param int server_port: The port on which DHCP servers and relays listen in this network.
:param int client_port: The port on which DHCP clients listen in this network.
:param int pxe_port: The port on which DHCP servers listen for PXE traffic in this
network; ``None`` to disable.
:param str response_interface: The interface on which to provide raw packet support,
like ``"eth0"``, or ``None`` if not requested.
:param sequence response_interface_qtags: Any qtags to insert into raw packets, in
order of appearance. Definitions take the following form:
(pcp:`0-7`, dei:``bool``, vid:`1-4094`)
:except Exception: A problem occurred during setup.
"""
self._server_address = server_address
self._network_link = _NetworkLink(str(server_address), server_port, client_port, pxe_port, response_interface, response_interface_qtags=response_interface_qtags)
def _getNextDHCPPacket(self, timeout=60, packet_buffer=2048):
"""
Blocks for up to ``timeout`` seconds while waiting for a packet to
arrive; if one does, a thread is spawned to process it.
Have a thread blocking on this at all times; restart it immediately after it returns.
:param int timeout: The number of seconds to wait before returning.
:param int packet_buffer: The size of the buffer to use for receiving packets.
:return tuple(2): (DHCP-packet-received:``bool``,
:class:`Address <dhcp.Address>` or ``None`` on
timeout)
"""
(source_address, data, pxe) = self._network_link.getData(timeout=timeout, packet_buffer=packet_buffer)
if data:
try:
packet = DHCPPacket(data=data)
except ValueError:
pass
else:
if packet.isDHCPRequestPacket():
threading.Thread(target=self._handleDHCPRequest, args=(packet, source_address, pxe)).start()
elif packet.isDHCPDiscoverPacket():
threading.Thread(target=self._handleDHCPDiscover, args=(packet, source_address, pxe)).start()
elif packet.isDHCPInformPacket():
threading.Thread(target=self._handleDHCPInform, args=(packet, source_address, pxe)).start()
elif packet.isDHCPReleasePacket():
threading.Thread(target=self._handleDHCPRelease, args=(packet, source_address, pxe)).start()
elif packet.isDHCPDeclinePacket():
threading.Thread(target=self._handleDHCPDecline, args=(packet, source_address, pxe)).start()
elif packet.isDHCPLeaseQueryPacket():
threading.Thread(target=self._handleDHCPLeaseQuery, args=(packet, source_address, pxe)).start()
return (True, source_address)
return (False, source_address)
def _handleDHCPDecline(self, packet, source_address, pxe):
"""
Processes a DECLINE packet.
Override this with your own logic to handle DECLINEs.
:param packet: The packet to be processed.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param source_address: The address from which the request was received.
:type source_address: :class:`Address <dhcp.Address>`
:param bool pxe: ``True`` if the packet was received on the PXE port.
"""
def _handleDHCPDiscover(self, packet, source_address, pxe):
"""
Processes a DISCOVER packet.
Override this with your own logic to handle DISCOVERs.
:param packet: The packet to be processed.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param source_address: The address from which the request was received.
:type source_address: :class:`Address <dhcp.Address>`
:param bool pxe: ``True`` if the packet was received on the PXE port.
"""
def _handleDHCPInform(self, packet, source_address, pxe):
"""
Processes an INFORM packet.
Override this with your own logic to handle INFORMs.
:param packet: The packet to be processed.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param source_address: The address from which the request was received.
:type source_address: :class:`Address <dhcp.Address>`
:param bool pxe: ``True`` if the packet was received on the PXE port.
"""
def _handleDHCPLeaseQuery(self, packet, source_address, pxe):
"""
Processes a LEASEQUERY packet.
Override this with your own logic to handle LEASEQUERYs.
:param packet: The packet to be processed.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param source_address: The address from which the request was received.
:type source_address: :class:`Address <dhcp.Address>`
:param bool pxe: ``True`` if the packet was received on the PXE port.
"""
def _handleDHCPRelease(self, packet, source_address, pxe):
"""
Processes a RELEASE packet.
Override this with your own logic to handle RELEASEs.
:param packet: The packet to be processed.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param source_address: The address from which the request was received.
:type source_address: :class:`Address <dhcp.Address>`
:param bool pxe: ``True`` if the packet was received on the PXE port.
"""
def _handleDHCPRequest(self, packet, source_address, pxe):
"""
Processes a REQUEST packet.
Override this with your own logic to handle REQUESTs.
:param packet: The packet to be processed.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param source_address: The address from which the request was received.
:type source_address: :class:`Address <dhcp.Address>`
:param bool pxe: ``True`` if the packet was received on the PXE port.
"""
def _sendDHCPPacket(self, packet, source_address, pxe):
"""
Encodes and sends a DHCP packet to its destination.
**Important**: during this process, the packet may be modified, but
will be restored to its initial state by the time this method returns.
If any threadsafing is required, it must be handled in calling logic.
:param packet: The packet to be processed.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param source_address: The address from which the request was received.
:type source_address: :class:`Address <dhcp.Address>`
:param bool pxe: ``True`` if the packet was received on the PXE port.
:return int: The number of bytes transmitted.
:except Exception: A problem occurred during serialisation or
transmission.
"""
return self._network_link.sendData(packet, source_address, pxe)
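# A minimal serving loop, as the docstrings above suggest (the subclass
# and addresses are illustrative):
#
# class Server(DHCPServer):
# def _handleDHCPDiscover(self, packet, source_address, pxe):
# pass #build and send an OFFER here
#
# server = Server(IPv4('192.168.0.1'), 67, 68)
# while True:
# server._getNextDHCPPacket()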
class _NetworkLink(object):
"""
Handles network I/O.
"""
_client_port = None #: The port on which clients expect to receive DHCP traffic.
_server_port = None #: The port on which servers expect to receive DHCP traffic.
_pxe_port = None #: The port on which PXE clients expect to receive traffic.
_pxe_socket = None #: The internal socket to use for PXE traffic.
_responder_dhcp = None #: The internal socket to use for responding to DHCP requests.
_responder_pxe = None #: The internal socket to use for responding to PXE requests.
_responder_broadcast = None #: The internal socket to use for responding to broadcast requests.
_listening_sockets = None #: All sockets on which to listen for activity.
_unicast_discover_supported = False #: Whether unicast responses to DISCOVERs are supported.
def __init__(self, server_address, server_port, client_port, pxe_port, response_interface=None, response_interface_qtags=None):
"""
Sets up the DHCP network infrastructure.
:param str server_address: The IP address on which to run the DHCP service.
:param int server_port: The port on which DHCP servers and relays listen in this network.
:param int client_port: The port on which DHCP clients listen in this network.
:param int|None pxe_port: The port on which DHCP servers listen for PXE traffic in this
network.
:param str|None response_interface: The interface on which to provide raw packet support,
like 'eth0', or None if not requested.
:param sequence|None response_interface_qtags: Any qtags to insert into raw packets, in
order of appearance. Definitions take the following form:
(pcp:`0-7`, dei:``bool``, vid:`1-4094`)
:except Exception: A problem occurred during setup.
"""
self._client_port = client_port
self._server_port = server_port
self._pxe_port = pxe_port
#Create and bind unicast sockets
(dhcp_socket, pxe_socket) = self._setupListeningSockets(server_port, pxe_port)
if pxe_socket:
self._listening_sockets = (dhcp_socket, pxe_socket)
self._pxe_socket = pxe_socket
else:
self._listening_sockets = (dhcp_socket,)
#Wrap the sockets with appropriate logic and set options
self._responder_dhcp = _L3Responder(socketobj=dhcp_socket)
self._responder_pxe = _L3Responder(socketobj=pxe_socket)
#Either create a raw-response socket or a generic broadcast-response socket
if response_interface:
try:
self._responder_broadcast = _L2Responder_AF_PACKET(server_address, response_interface, qtags=response_interface_qtags)
except Exception:
try:
self._responder_broadcast = _L2Responder_pcap(server_address, response_interface, qtags=response_interface_qtags)
except Exception, e:
import errno
raise EnvironmentError(errno.ELIBACC, "Raw response-socket requested on %(interface)s, but neither AF_PACKET/PF_PACKET nor libpcap are available, or the interface does not exist" % {'interface': response_interface,})
self._unicast_discover_supported = True
else:
self._responder_broadcast = _L3Responder(server_address=server_address)
def _setupListeningSockets(self, server_port, pxe_port):
"""
Creates and binds the listening sockets.
:param int server_port: The port on which to listen for DHCP traffic.
:param int pxe_port: The port on which to listen for PXE traffic.
:return tuple(2): The DHCP and PXE sockets, the latter of which may be ``None`` if not
requested.
:except socket.error: Sockets could not be created or bound.
"""
dhcp_socket = pxe_socket = None
try:
dhcp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if pxe_port:
pxe_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except socket.error, msg:
raise Exception('Unable to create socket: %(err)s' % {'err': str(msg),})
try:
dhcp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if pxe_socket:
pxe_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except socket.error, msg:
import warnings
warnings.warn('Unable to set SO_REUSEADDR; multiple DHCP servers cannot be run in parallel: %(err)s' % {'err': str(msg),})
if platform.system() != 'Linux':
try:
dhcp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if pxe_port:
pxe_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
except socket.error, msg:
import warnings
warnings.warn('Unable to set SO_REUSEPORT; multiple DHCP servers cannot be run in parallel: %(err)s' % {'err': str(msg),})
try:
dhcp_socket.bind(('', server_port))
if pxe_port:
pxe_socket.bind(('', pxe_port))
except socket.error, e:
raise Exception('Unable to bind sockets: %(error)s' % {
'error': str(e),
})
return (dhcp_socket, pxe_socket)
def getData(self, timeout, packet_buffer):
"""
Runs `select()` over all relevant sockets, providing data if available.
:param int timeout: The number of seconds to wait before returning.
:param int packet_buffer: The size of the buffer to use for receiving packets.
:return tuple(3):
0. :class:`Address <dhcp.Address>` or ``None``: None if the timeout was reached.
1. The received data as a ``str`` or ``None`` if the timeout was reached.
2. A ``bool`` indicating whether the data was received via PXE.
:except select.error: The `select()` operation did not complete gracefully.
"""
pxe = False
active_sockets = select.select(self._listening_sockets, [], [], timeout)[0]
if active_sockets:
active_socket = active_sockets[0]
pxe = active_socket == self._pxe_socket
(data, source_address) = active_socket.recvfrom(packet_buffer)
if data:
return (Address(IPv4(source_address[0]), source_address[1]), data, pxe)
return (None, None, False)
def sendData(self, packet, address, pxe):
"""
Writes the packet to to appropriate socket, addressed to the appropriate recipient.
:param packet: The packet to be written.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param address: The address from which the original packet was received.
:type address: :class:`Address <dhcp.Address>`
:param bool pxe: Whether the request was received via PXE.
:return tuple(2):
0. The number of bytes written to the network.
1. The :class:`Address <dhcp.Address>` ultimately used.
:except Exception: A problem occurred during serialisation or transmission.
"""
ip = None
relayed = False
port = self._client_port
source_port = self._server_port
responder = self._responder_dhcp
if address.ip in IP_UNSPECIFIED_FILTER: #Broadcast source; this is never valid for PXE
if (not self._unicast_discover_supported #All responses have to be via broadcast
or packet.getFlag(FLAGBIT_BROADCAST)): #Broadcast bit set; respond in kind
ip = _IP_BROADCAST
else: #The client wants unicast and this host can handle it
ip = packet.extractIPOrNone(FIELD_YIADDR)
responder = self._responder_broadcast
else: #Unicast source
ip = address.ip
relayed = bool(packet.extractIPOrNone(FIELD_GIADDR))
if relayed: #Relayed request.
port = self._server_port
else: #Request directly from client, routed or otherwise.
if pxe:
ip = packet.extractIPOrNone(FIELD_CIADDR) or ip
port = address.port or self._pxe_port #BSD doesn't seem to preserve port information
source_port = self._pxe_port
responder = self._responder_pxe
return responder.send(packet, ip, port, relayed, source_port=source_port)
class _Responder(object):
"""
A generic responder-template, which defines common logic.
"""
def send(self, packet, ip, port, relayed, **kwargs):
"""
Performs final sanity-checking and address manipulation, then submits the packet for
transmission.
:param packet: The packet to be written.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param ip: The address to which the packet should be sent.
:type ip: :class:`IPv4 <dhcp_types.IPv4>`
:param int port: The port to which the packet should be sent.
:param bool relayed: ``True`` if the packet came from a relay.
:param \*\*kwargs: Any technology-specific arguments.
:return tuple(2):
0. The number of bytes written to the network.
1. The :class:`Address <dhcp.Address>` ultimately used.
:except Exception: An error occurred during serialisation or transmission.
"""
if relayed:
broadcast_source = packet.extractIPOrNone(FIELD_CIADDR) in IP_UNSPECIFIED_FILTER
else:
broadcast_source = ip in IP_UNSPECIFIED_FILTER
(broadcast_changed, original_was_broadcast) = packet.setFlag(FLAGBIT_BROADCAST, broadcast_source)
#Perform any necessary packet-specific address-changes
if not original_was_broadcast: #Unicast behaviour permitted; use the packet's IP override, if set
ip = packet.response_ip or ip
port = packet.response_port or port
if packet.response_source_port is not None:
kwargs['source_port'] = packet.response_source_port
bytes_sent = self._send(packet, str(ip), port, **kwargs)
if broadcast_changed: #Restore the broadcast bit, in case the packet needs to be used for something else
packet.setFlag(FLAGBIT_BROADCAST, original_was_broadcast)
return (bytes_sent, Address(IPv4(ip), port))
def _send(self, packet, ip, port, **kwargs):
"""
Handles technology-specific transmission; must be implemented by subclasses.
:param packet: The packet to be written.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param ip: The address to which the packet should be sent.
:type ip: :class:`IPv4 <dhcp_types.IPv4>`
:param int port: The port to which the packet should be sent.
:param \*\*kwargs: Any technology-specific arguments.
:return int: The number of bytes written to the network.
:except Exception: An error occurred during serialisation or transmission.
"""
raise NotImplementedError("_send() must be implemented in subclasses")
class _L3Responder(_Responder):
"""
Defines rules and logic needed to respond at layer 3.
"""
_socket = None #: The socket used for responses.
def __init__(self, socketobj=None, server_address=None):
"""
Wraps an existing socket or creates an arbitrarily bound new socket with broadcast
capabilities.
:param socket.socket|None socketobj: The socket to be bound; if ``None``, a new one is
created.
:param str|None server_address: The address to which a new socket should be bound.
:except Exception: Unable to bind a new socket.
"""
if socketobj:
self._socket = socketobj
else:
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
except socket.error, e:
raise Exception('Unable to set SO_BROADCAST: %(err)s' % {'err': e,})
try:
self._socket.bind((server_address or '', 0))
except socket.error, e:
raise Exception('Unable to bind socket: %(error)s' % {'error': e,})
def _send(self, packet, ip, port, **kwargs):
"""
Serialises and sends the packet.
:param packet: The packet to be written.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param str ip: The address to which the packet should be sent.
:param int port: The port to which the packet should be sent.
:param \*\*kwargs: Any technology-specific arguments.
:return int: The number of bytes written to the network.
:except Exception: An error occurred during serialisation or transmission.
"""
return self._socket.sendto(packet.encodePacket(), (ip, port))
class _L2Responder(_Responder):
"""
Defines rules and logic needed to respond at layer 2.
"""
_ethernet_id = None #: The source MAC and Ethernet payload-type (and qtags, if applicable).
_server_address = None #: The server's IP.
#Locally cached module functions
_array_ = None #: `array.array`
_pack_ = None #: `struct.pack`
def __init__(self, server_address, mac, qtags=None):
"""
Constructs the Ethernet header for all L2 communication.
:param str server_address: The server's IP as a dotted quad.
:param str mac: The MAC of the responding interface, in network-byte order.
:param sequence qtags: Any qtags to insert into raw packets, in order of appearance.
Definitions take the following form: (pcp:`0-7`, dei:``bool``, vid:`1-4094`)
"""
import struct
self._pack_ = struct.pack
import array
self._array_ = array.array
self._server_address = socket.inet_aton(str(server_address))
ethernet_id = [mac,] #Source MAC
if qtags:
for (pcp, dei, vid) in qtags:
ethernet_id.append("\x81\x00") #qtag payload-type
qtag_value = pcp << 13 #Priority-code-point (0-7)
qtag_value += int(dei) << 12 #Drop-eligible-indicator
qtag_value += vid #vlan-identifier
ethernet_id.append(self._pack_('!H', qtag_value))
ethernet_id.append("\x08\x00") #IP payload-type
self._ethernet_id = ''.join(ethernet_id)
def _checksum(self, data):
"""
Computes the RFC768 checksum of ``data``.
:param sequence data: The data to be checksummed.
:return int: The data's checksum.
"""
if sum(len(i) for i in data) & 1: #Odd
checksum = sum(self._array_('H', ''.join(data)[:-1]))
checksum += ord(data[-1][-1]) #Add the final byte
else: #Even
checksum = sum(self._array_('H', ''.join(data)))
checksum = (checksum >> 16) + (checksum & 0xffff)
checksum += (checksum >> 16)
return ~checksum & 0xffff
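# Worked example (little-endian host assumed): _checksum(['\x45\x00'])
# loads the word 0x0045; there is no carry to fold, so the result is
# ~0x0045 & 0xffff == 0xffba.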
def _ipChecksum(self, ip_prefix, ip_destination):
"""
Computes the checksum of the IPv4 header.
:param str ip_prefix: The portion of the IPv4 header preceding the `checksum` field.
:param str ip_destination: The destination address, in network-byte order.
:return int: The IPv4 checksum.
"""
return self._checksum([
ip_prefix,
'\0\0', #Empty checksum field
self._server_address,
ip_destination,
])
def _udpChecksum(self, ip_destination, udp_addressing, udp_length, packet):
"""
Computes the checksum of the UDP header and payload.
:param str ip_destination: The destination address, in network-byte order.
:param str udp_addressing: The UDP header's port section.
:param str udp_length: The length of the UDP payload plus header.
:param str packet: The serialised packet.
:return int: The UDP checksum.
"""
return self._checksum([
self._server_address,
ip_destination,
'\0\x11', #UDP spec padding and protocol
udp_length,
udp_addressing,
udp_length,
'\0\0', #Dummy UDP checksum
packet,
])
def _assemblePacket(self, packet, mac, ip, port, source_port):
"""
Assembles the Ethernet, IPv4, and UDP headers, serialises the packet, and provides a
complete Ethernet frame for injection into the network.
:param packet: The packet to be written.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param mac: The MAC to which the packet is addressed.
:type mac: :class:`MAC <dhcp_types.mac.MAC>`
:param str ip: The IPv4 to which the packet is addressed, as a dotted quad.
:param int port: The port to which the packet is addressed.
:param int source_port: The port from which the packet is addressed.
:return str: The complete binary packet.
"""
binary = []
#<> Ethernet header
if _IP_BROADCAST == ip:
binary.append('\xff\xff\xff\xff\xff\xff') #Broadcast MAC
else:
binary.append(''.join(chr(i) for i in mac)) #Destination MAC
binary.append(self._ethernet_id) #Source MAC and Ethernet payload-type
#<> Prepare packet data for transmission and checksumming
binary_packet = packet.encodePacket()
packet_len = len(binary_packet)
#<> IP header
binary.append(self._pack_("!BBHHHBB",
69, #IPv4 + length=5
0, #DSCP/ECN aren't relevant
28 + packet_len, #The UDP and packet lengths in bytes
0, #ID, which is always 0 because we're the origin
packet_len <= 560 and 0b0100000000000000 or 0, #Flags and fragmentation
128, #Make the default TTL sane, but not maximum
0x11, #Protocol=UDP
))
ip_destination = socket.inet_aton(ip)
binary.extend((
self._pack_("<H", self._ipChecksum(binary[-1], ip_destination)),
self._server_address,
ip_destination
))
#<> UDP header
binary.append(self._pack_("!HH", source_port, port))
binary.append(self._pack_("!H", packet_len + 8)) #8 for the header itself
binary.append(self._pack_("<H", self._udpChecksum(ip_destination, binary[-2], binary[-1], binary_packet)))
#<> Payload
binary.append(binary_packet)
return ''.join(binary)
def _send(self, packet, ip, port, source_port=0, **kwargs):
"""
Serialises and sends the packet.
:param packet: The packet to be written.
:type packet: :class:`DHCPPacket <dhcp_types.packet.DHCPPacket>`
:param str ip: The address to which the packet should be sent.
:param int port: The port to which the packet should be sent.
:param int source_port: The UDP port from which to claim the packet originated.
:param \*\*kwargs: Any technology-specific arguments.
:return int: The number of bytes written to the network.
:except Exception: An error occurred during serialisation or transmission.
"""
mac = (packet.response_mac and MAC(packet.response_mac)) or packet.getHardwareAddress()
binary_packet = self._assemblePacket(packet, mac, ip, port, source_port)
return self._send_(binary_packet)
class _L2Responder_AF_PACKET(_L2Responder):
"""
A Linux-specific layer 2 responder that uses AF_PACKET/PF_PACKET.
"""
_socket = None #: The socket used for responses.
def __init__(self, server_address, response_interface, qtags=None):
"""
Creates and configures a raw socket on an interface.
:param str server_address: The server's IP as a dotted quad.
:param str response_interface: The interface on which to provide raw packet support, like
``"eth0"``.
:param sequence qtags: Any qtags to insert into raw packets, in order of appearance.
Definitions take the following form: (pcp:`0-7`, dei:``bool``, vid:`1-4094`)
:except socket.error: The socket could not be configured.
"""
socket_type = ((hasattr(socket, 'AF_PACKET') and socket.AF_PACKET) or (hasattr(socket, 'PF_PACKET') and socket.PF_PACKET))
if not socket_type:
raise Exception("Neither AF_PACKET nor PF_PACKET found")
self._socket = socket.socket(socket_type, socket.SOCK_RAW, socket.htons(_ETH_P_SNAP))
self._socket.bind((response_interface, _ETH_P_SNAP))
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 2 ** 12)
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 2 ** 12)
mac = self._socket.getsockname()[4]
_L2Responder.__init__(self, server_address, mac, qtags=qtags)
def _send_(self, packet):
"""
Sends the packet.
:param str packet: The packet to be written.
:return int: The number of bytes written to the network.
:except Exception: An error occurred during transmission.
"""
return self._socket.send(packet)
class _L2Responder_pcap(_L2Responder):
"""
A more general Unix-oriented layer 2 responder that uses libpcap.
"""
_fd = None #: The file-descriptor of the socket used for responses.
_inject = None #: The "send" function to invoke from libpcap.
#Locally cached module functions
_c_int_ = None #: `ctypes.c_int`
def __init__(self, server_address, response_interface, qtags=None):
"""
Creates and configures a raw socket on an interface.
:param str server_address: The server's IP as a dotted quad.
:param str response_interface: The interface on which to provide raw packet support, like
``"eth0"``.
:param sequence qtags: Any qtags to insert into raw packets, in order of appearance.
Definitions take the following form: (pcp:`0-7`, dei:``bool``, vid:`1-4094`)
:except Exception: Interfacing with libpcap failed.
"""
import ctypes
self._c_int_ = ctypes.c_int
import ctypes.util
pcap = ctypes.util.find_library('pcap')
if not pcap:
raise Exception("libpcap not found")
pcap = ctypes.cdll.LoadLibrary(pcap)
errbuf = ctypes.create_string_buffer(256)
self._fd = pcap.pcap_open_live(response_interface, ctypes.c_int(0), ctypes.c_int(0), ctypes.c_int(0), errbuf)
if not self._fd:
import errno
raise IOError(errno.EACCES, errbuf.value)
elif errbuf.value:
import warnings
warnings.warn(errbuf.value)
try:
mac = self._getMAC(response_interface)
except Exception:
pcap.pcap_close(self._fd)
raise
else:
_L2Responder.__init__(self, server_address, mac, qtags=qtags)
self._inject = pcap.pcap_inject
def _getMAC(self, response_interface):
"""
Mostly portable means of getting the MAC address for the interface.
:param str response_interface: The interface on which to provide raw packet support, like
``"eth0"``.
:return str: The MAC address, in network-byte order.
:except Exception: The MAC could not be retrieved.
"""
import subprocess
import re
if platform.system() == 'Linux':
command = ('/sbin/ip', 'link', 'show', response_interface)
else:
command = ('/sbin/ifconfig', response_interface)
ifconfig_output = subprocess.check_output(command)
m = re.search(r'\b(?P<mac>(?:[0-9A-Fa-f]{2}:){5}(?:[0-9A-Fa-f]{2}))\b', ifconfig_output)
if not m:
raise Exception("Unable to determine MAC of %(interface)s" % {
'interface': response_interface,
})
return ''.join(chr(i) for i in MAC(m.group('mac')))
def _send_(self, packet):
"""
Sends the packet.
:param str packet: The packet to be written.
:return int: The number of bytes written to the network.
:except Exception: An error occurred during transmission.
"""
return self._inject(self._fd, packet, self._c_int_(len(packet)))
| gpl-3.0 | -4,686,634,937,923,367,000 | 44.443433 | 236 | 0.61614 | false |
fujii-team/Henbun | testing/test_nn_model.py | 1 | 1040 | from __future__ import print_function
import tensorflow as tf
import numpy as np
import unittest
import Henbun as hb
class SquareModel(hb.model.Model):
def setUp(self):
self.p = hb.param.Variable([2,3])
def likelihood(self):
return -tf.reduce_sum(tf.square(self.p))
class test_square(unittest.TestCase):
def setUp(self):
tf.set_random_seed(0)
self.m = SquareModel()
def test_manual_optimize(self):
trainer = tf.train.AdamOptimizer(learning_rate=0.01)
self.m.initialize()
with self.m.tf_mode():
op = tf.negative(self.m.likelihood())
opt_op = trainer.minimize(op, var_list = self.m.get_tf_variables())
self.m._session.run(tf.variables_initializer(tf.global_variables()))
for i in range(1000):
self.m._session.run(opt_op)
self.assertTrue(np.allclose(self.m.p.value, np.zeros((2,3)), atol=1.0e-4))
"""
In this test, we make sure the neural network (n.n.) model works fine.
"""
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -7,322,589,570,255,599,000 | 29.588235 | 82 | 0.624038 | false |
gratefulfrog/ArduGuitar | Ardu2/design/POC-3_MAX395/pyboard/DraftDevt/illuminator.py | 1 | 1619 | # illuminator.py
# support leds or other pin controlled lights
class Illuminator__:
"""
Helper class for Illuminator, see below
"""
def __init__(self,pin):
"""create an instance of a LED connected to the
pin provided.
Pins should be configured as Pin('X1', Pin.OUT_PP)
"""
self.p = pin
self.off()
def off(self):
""" set pin to low
"""
self.p.low()
def on(self):
""" set pin to high
"""
self.p.high()
def value(self):
""" returns 0 or 1 depending on state of pin
"""
return self.p.value()
class Illuminator(Illuminator__):
""" Usage:
    >> pinID = 'X1'
>> i = Illuminator(Pin(pinID, Pin.OUT_PP))
>> i.value()
0
>> i.on()
>> i.value()
1
>> i.off()
>> i.value()
0
-----
wiring:
from pin to LED+
from LED- to current limiting resistor
from current limiting resistor to ground
"""
toggleFuncs = (Illuminator__.on, Illuminator__.off) # for use in toggle
def __init__(self,pin):
"""create an instance of a LED connected to the
pin provided.
Pin should be configured as Pin('X1', Pin.OUT_PP)
"""
Illuminator__.__init__(self,pin)
def toggle(self):
""" toggles the value of the pin
"""
type(self).toggleFuncs[self.value()](self)
def __repr__(self):
return 'Illuminator:' + \
'\n\tpin:\t' + str(self.p) +\
'\n\tvalue:\t' + str(self.p.value()) + '\n'
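# Illustrative demo (added; not part of the original file; needs pyboard hardware):
#   from pyb import Pin
#   led = Illuminator(Pin('X1', Pin.OUT_PP))
#   led.toggle()   # value was 0, so this turns the LED on
#   led.toggle()   # and this turns it off again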
| gpl-2.0 | -6,721,304,239,893,838,000 | 22.463768 | 76 | 0.502779 | false |
djaodjin/envconnect | envconnect/urls/api/editors.py | 1 | 2225 | # Copyright (c) 2020, DjaoDjin inc.
# see LICENSE.
from django.conf.urls import url, include
from pages.settings import PATH_RE, SLUG_RE
from ...api.benchmark import (DisableScorecardAPIView, EnableScorecardAPIView,
ScoreWeightAPIView)
from ...api.best_practices import (BestPracticeAPIView,
BestPracticeMirrorAPIView, BestPracticeMoveAPIView,
EnableContentAPIView, DisableContentAPIView)
from ...api.columns import ColumnAPIView
from ...api.consumption import (ConsumptionListAPIView,
ConsumptionEditableDetailAPIView)
urlpatterns = [
# XXX move into editables/score/
# url(r'^editables/enable/(?P<path>%s)$' % PATH_RE,
# EnableContentAPIView.as_view(), name="api_enable"),
# url(r'^editables/disable/(?P<path>%s)$' % PATH_RE,
# DisableContentAPIView.as_view(), name="api_disable"),
# url(r'^editables/scorecard/enable/(?P<path>%s)$' % PATH_RE,
# EnableScorecardAPIView.as_view(), name="api_scorecard_enable"),
# url(r'^editables/scorecard/disable/(?P<path>%s)$' % PATH_RE,
# DisableScorecardAPIView.as_view(), name="api_scorecard_disable"),
url(r'^editables/(?P<organization>%s)/score/(?P<path>%s)$' % (
SLUG_RE, PATH_RE),
ScoreWeightAPIView.as_view(), name="api_score"),
url(r'^editables/column/(?P<path>%s)$' % PATH_RE,
ColumnAPIView.as_view(), name="api_column"),
url(r'^editables/(?P<organization>%s)/values$' % SLUG_RE,
ConsumptionListAPIView.as_view(), name="api_consumption_base"),
url(r'^editables/(?P<organization>%s)/values/(?P<path>%s)$' % (
SLUG_RE, PATH_RE),
ConsumptionEditableDetailAPIView.as_view(), name="api_consumption"),
url(r'^editables/(?P<organization>%s)/(?P<path>%s)$' % (SLUG_RE, PATH_RE),
BestPracticeAPIView.as_view(), name='pages_api_edit_element'),
url(r'^editables/(?P<organization>%s)/attach/(?P<path>%s)$' % (
SLUG_RE, PATH_RE),
BestPracticeMoveAPIView.as_view(), name='api_move_node'),
url(r'^editables/(?P<organization>%s)/mirror/(?P<path>%s)$' % (
SLUG_RE, PATH_RE),
BestPracticeMirrorAPIView.as_view(), name='api_mirror_node'),
url(r'^editables/(?P<organization>%s)/',
include('pages.urls.api.editables')),
]
| gpl-2.0 | 807,961,651,836,970,400 | 43.5 | 78 | 0.666517 | false |
erigones/esdc-ce | api/mon/base/tasks.py | 1 | 5346 | from celery.utils.log import get_task_logger
from api.mon import get_monitoring, del_monitoring
from api.mon.exceptions import RemoteObjectDoesNotExist, RemoteObjectAlreadyExists
from api.mon.vm.tasks import mon_vm_sync
from api.mon.node.tasks import mon_node_sync
# noinspection PyProtectedMember
from api.mon.alerting.tasks import mon_all_groups_sync
from api.task.utils import mgmt_lock, mgmt_task
from que.exceptions import MgmtTaskException
from que.erigonesd import cq
from que.internal import InternalTask
from que.mgmt import MgmtTask
from vms.models import Dc, Node
__all__ = (
'mon_sync_all',
'mon_template_list',
'mon_hostgroup_list',
'mon_hostgroup_get',
'mon_hostgroup_create',
'mon_hostgroup_delete',
)
logger = get_task_logger(__name__)
def mon_clear_zabbix_cache(dc, full=True):
"""
Clear Zabbix instance from global zabbix cache used by get_monitoring() if full==True.
Reset internal zabbix instance cache if full==False and the zabbix instance exists in global zabbix cache.
Should be reviewed with every new backend implemented.
"""
if full:
if del_monitoring(dc):
logger.info('Zabbix instance for DC "%s" was successfully removed from global cache', dc)
else:
logger.info('Zabbix instance for DC "%s" was not found in global cache', dc)
else:
zx = get_monitoring(dc)
zx.reset_cache()
logger.info('Cleared cache for zabbix instance %s in DC "%s"', zx, dc)
# noinspection PyUnusedLocal
@cq.task(name='api.mon.base.tasks.mon_sync_all', base=InternalTask)
@mgmt_lock(key_args=(1,), wait_for_release=True)
def mon_sync_all(task_id, dc_id, clear_cache=True, sync_groups=True, sync_nodes=True, sync_vms=True, **kwargs):
"""
Clear Zabbix cache and sync everything in Zabbix.
Related to a specific DC.
Triggered by dc_settings_changed signal.
"""
dc = Dc.objects.get_by_id(int(dc_id))
if clear_cache:
logger.info('Clearing zabbix cache in DC %s', dc)
mon_clear_zabbix_cache(dc)
get_monitoring(dc) # Cache new Zabbix instance for tasks below
if sync_groups:
logger.info('Running monitoring group synchronization for all user groups in DC %s', dc)
mon_all_groups_sync.call(task_id, dc_name=dc.name)
if sync_nodes:
logger.info('Running monitoring host synchronization for all compute nodes')
for node in Node.all():
mon_node_sync.call(task_id, node_uuid=node.uuid)
if sync_vms:
logger.info('Running monitoring host synchronization for all VMs in DC %s', dc)
for vm_uuid in dc.vm_set.values_list('uuid', flat=True):
mon_vm_sync.call(task_id, vm_uuid=vm_uuid)
# noinspection PyUnusedLocal
@cq.task(name='api.mon.base.tasks.mon_template_list', base=MgmtTask)
@mgmt_task(log_exception=False)
def mon_template_list(task_id, dc_id, full=False, extended=False, **kwargs):
"""
Return list of templates available in Zabbix.
"""
dc = Dc.objects.get_by_id(int(dc_id))
return get_monitoring(dc).template_list(full=full, extended=extended)
# noinspection PyUnusedLocal
@cq.task(name='api.mon.base.tasks.mon_hostgroup_list', base=MgmtTask)
@mgmt_task(log_exception=False)
def mon_hostgroup_list(task_id, dc_id, dc_bound=True, full=False, extended=False, **kwargs):
"""
Return list of hostgroups available in Zabbix.
"""
dc = Dc.objects.get_by_id(int(dc_id))
return get_monitoring(dc).hostgroup_list(dc_bound=dc_bound, full=full, extended=extended)
# noinspection PyUnusedLocal
@cq.task(name='api.mon.base.tasks.mon_hostgroup_get', base=MgmtTask)
@mgmt_task(log_exception=False)
def mon_hostgroup_get(task_id, dc_id, hostgroup_name, dc_bound=True, **kwargs):
dc = Dc.objects.get_by_id(int(dc_id))
mon = get_monitoring(dc)
try:
return mon.hostgroup_detail(hostgroup_name, dc_bound=dc_bound)
except RemoteObjectDoesNotExist as exc:
raise MgmtTaskException(exc.detail)
# noinspection PyUnusedLocal
@cq.task(name='api.mon.base.tasks.mon_hostgroup_create', base=MgmtTask)
@mgmt_task(log_exception=True)
def mon_hostgroup_create(task_id, dc_id, hostgroup_name, dc_bound=True, **kwargs):
dc = Dc.objects.get_by_id(int(dc_id))
mon = get_monitoring(dc)
try:
result = mon.hostgroup_create(hostgroup_name, dc_bound=dc_bound)
except RemoteObjectAlreadyExists as exc:
raise MgmtTaskException(exc.detail)
detail = 'Monitoring hostgroup "%s" was successfully created' % hostgroup_name
mon.task_log_success(task_id, obj=mon.server_class(dc), detail=detail, **kwargs['meta'])
return result
# noinspection PyUnusedLocal
@cq.task(name='api.mon.base.tasks.mon_hostgroup_delete', base=MgmtTask)
@mgmt_task(log_exception=True)
def mon_hostgroup_delete(task_id, dc_id, hostgroup_name, dc_bound=True, **kwargs):
dc = Dc.objects.get_by_id(int(dc_id))
mon = get_monitoring(dc)
try:
        result = mon.hostgroup_delete(hostgroup_name, dc_bound=dc_bound) # Fail loudly if it doesn't exist
except RemoteObjectDoesNotExist as exc:
raise MgmtTaskException(exc.detail)
detail = 'Monitoring hostgroup "%s" was successfully deleted' % hostgroup_name
mon.task_log_success(task_id, obj=mon.server_class(dc), detail=detail, **kwargs['meta'])
return result
| apache-2.0 | -342,738,251,245,723,970 | 35.616438 | 111 | 0.701459 | false |
matthew-brett/pymc | pymc/examples/model_1_missing.py | 1 | 1501 | """
A model for the disasters data with a changepoint, with missing data
changepoint ~ U(0,110)
early_mean ~ Exp(1.)
late_mean ~ Exp(1.)
disasters[t] ~ Po(early_mean if t <= switchpoint, late_mean otherwise)
"""
__all__ = ['s', 'e', 'l', 'r', 'D']  # the model's actual variable names
from pymc import DiscreteUniform, Exponential, deterministic, Poisson, Uniform, Lambda, MCMC, observed, poisson_like
from pymc.distributions import Impute
import numpy as np
# Missing values indicated by None placeholders
disasters_array = np.array([ 4, 5, 4, 0, 1, 4, 3, 4, 0, 6, 3, 3, 4, 0, 2, 6,
3, 3, 5, 4, 5, 3, 1, 4, 4, 1, 5, 5, 3, 4, 2, 5,
2, 2, 3, 4, 2, 1, 3, None, 2, 1, 1, 1, 1, 3, 0, 0,
1, 0, 1, 1, 0, 0, 3, 1, 0, 3, 2, 2, 0, 1, 1, 1,
0, 1, 0, 1, 0, 0, 0, 2, 1, 0, 0, 0, 1, 1, 0, 2,
3, 3, 1, None, 2, 1, 1, 1, 1, 2, 4, 2, 0, 0, 1, 4,
0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1])
# Switchpoint
s = DiscreteUniform('s', lower=0, upper=110)
# Early mean
e = Exponential('e', beta=1)
# Late mean
l = Exponential('l', beta=1)
@deterministic(plot=False)
def r(s=s, e=e, l=l):
"""Allocate appropriate mean to time series"""
out = np.empty(len(disasters_array))
# Early mean prior to switchpoint
out[:s] = e
# Late mean following switchpoint
out[s:] = l
return out
# Where the mask is true, the value is taken as missing.
D = Impute('D', Poisson, disasters_array, mu=r)
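# Illustrative sampling run (added; not part of the original example; MCMC is
# already imported above):
#   M = MCMC([s, e, l, r, D])
#   M.sample(iter=10000, burn=1000, thin=10)
#   print M.trace('s')[:]   # posterior samples of the switchpoint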
| mit | -4,282,114,124,814,546,000 | 32.355556 | 116 | 0.56429 | false |
cdeboever3/cdpybio | cdpybio/ldsc.py | 1 | 2980 | import datetime as dt
import pandas as pd
def parse_ldsc_rg_log(fn):
conv_month = {'': 0, 'Apr': 4, 'Aug': 8, 'Dec': 12, 'Feb': 2,
'Jan': 1, 'Jul': 7, 'Jun': 6, 'Mar': 3,
'May': 5, 'Nov': 11, 'Oct': 10, 'Sep': 9}
with open(fn) as f:
fcontents = f.read()
lines = fcontents.split(69 * '*' + '\n')[-1].strip().split('\n')
month, day, time, year = [x.split() for x in lines if x[0:10] == 'Beginning '][0][4:]
hour, minute, second = time.split(':')
begin = dt.datetime(int(year), int(conv_month[month]), int(day), int(hour), int(minute), int(second))
month, day, time, year = [x.split() for x in lines if x[0:17] == 'Analysis finished'][0][4:]
hour, minute, second = time.split(':')
end = dt.datetime(int(year), int(conv_month[month]), int(day), int(hour), int(minute), int(second))
num_snps = int([x for x in lines if 'valid' in x][0].split()[0])
# Pheno 1
lines = fcontents.split(69 * '*' + '\n')[-1].split(29 * '-' + '\n')[0].strip().split('\n')
p1_h2, p1_h2_se = [x for x in lines if x[0:5] == 'Total'][0].split()[-2:]
p1_h2 = float(p1_h2)
p1_h2_se = float(p1_h2_se[1:-1])
p1_lambda_gc = float([x for x in lines if x[0:6] == 'Lambda'][0].strip().split()[-1])
p1_mean_chi2 = float([x for x in lines if x[0:4] == 'Mean'][0].strip().split()[-1])
p1_intercept, p1_intercept_se = [x for x in lines if x[0:9] == 'Intercept'][0].strip().split()[-2:]
p1_intercept = float(p1_intercept)
p1_intercept_se = float(p1_intercept_se[1:-1])
# Pheno 2
    # Phenotype 2 values live in the second dash-delimited block (index 1)
    lines = fcontents.split(69 * '*' + '\n')[-1].split(29 * '-' + '\n')[1].strip().split('\n')
p2_h2, p2_h2_se = [x for x in lines if x[0:5] == 'Total'][0].split()[-2:]
p2_h2 = float(p2_h2)
p2_h2_se = float(p2_h2_se[1:-1])
p2_lambda_gc = float([x for x in lines if x[0:6] == 'Lambda'][0].strip().split()[-1])
p2_mean_chi2 = float([x for x in lines if x[0:4] == 'Mean'][0].strip().split()[-1])
p2_intercept, p2_intercept_se = [x for x in lines if x[0:9] == 'Intercept'][0].strip().split()[-2:]
p2_intercept = float(p2_intercept)
p2_intercept_se = float(p2_intercept_se[1:-1])
vals = [begin, end, num_snps]
ind = ['start_time', 'end_time', 'num_snps']
vals += [p1_h2, p1_h2_se, p1_lambda_gc, p1_mean_chi2, p1_intercept,
p1_intercept_se]
ind += ['h2_p1', 'h2_se_p1', 'lambda_gc_p1', 'mean_chi2_p1', 'intercept_p1',
'intercept_se_p1']
vals += [p2_h2, p2_h2_se, p2_lambda_gc, p2_mean_chi2, p2_intercept,
p2_intercept_se]
ind += ['h2_p2', 'h2_se_p2', 'lambda_gc_p2', 'mean_chi2_p2', 'intercept_p2',
'intercept_se_p2']
lines = fcontents.split(69 * '*' + '\n')[-1].strip().split('\n')
vals += lines[-4].split()[0:2]
ind += lines[-5].split()[0:2]
vals += [float(x) for x in lines[-4].split()[2:]]
ind += lines[-5].split()[2:]
out = pd.Series(vals, index=ind)
return(out)
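# Example usage (illustrative; the log filename is hypothetical):
#   stats = parse_ldsc_rg_log('pheno1_pheno2.rg.log')
#   print(stats[['h2_p1', 'h2_p2', 'num_snps']])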
| mit | -9,163,779,512,349,474,000 | 47.852459 | 105 | 0.538926 | false |
demharters/git_scripts | super_all.py | 1 | 2172 | #! /usr/bin/env python
# original Written by Jules Jacobsen ([email protected]). Feel free to do whatever you like with this code.
# extensively modified by Robert L. Campbell ([email protected])
from pymol import cmd
def super_all(target=None,mobile_selection='name ca',target_selection='name ca',cutoff=2, cycles=5,cgo_object=0):
"""
Superimposes all models in a list to one target using the "super" algorithm
usage:
super_all [target][target_selection=name ca][mobile_selection=name ca][cutoff=2][cycles=5][cgo_object=0]
        where target is the model id you want to superimpose all others against,
and selection, cutoff and cycles are options passed to the super command.
By default the selection is all C-alpha atoms and the cutoff is 2 and the
number of cycles is 5.
Setting cgo_object to 1, will cause the generation of an superposition object for
each object. They will be named like <object>_on_<target>, where <object> and
<target> will be replaced by the real object and target names.
Example:
super_all target=name1, mobile_selection=c. b & n. n+ca+c+o,target_selection=c.a & n. n+ca+c+o
"""
cutoff = int(cutoff)
cycles = int(cycles)
cgo_object = int(cgo_object)
object_list = cmd.get_names()
object_list.remove(target)
rmsd = {}
rmsd_list = []
objectname = 'super_on_%s' % target
for i in range(len(object_list)):
if cgo_object:
# objectname = 'super_%s_on_%s' % (object_list[i],target)
rms = cmd.super('%s & %s'%(object_list[i],mobile_selection),'%s & %s'%(target,target_selection),cutoff=cutoff,cycles=cycles,object=objectname)
else:
rms = cmd.super('%s & %s'%(object_list[i],mobile_selection),'%s & %s'%(target,target_selection),cutoff=cutoff,cycles=cycles)
rmsd[object_list[i]] = rms[0]
rmsd_list.append((rms[0],object_list[i]))
rmsd_list.sort()
# loop over dictionary and print out matrix of final rms values
print "Superimposing against:",target
for object_name in object_list:
print "%s: %6.3f" % (object_name,rmsd[object_name])
for r in rmsd_list:
print "%6.3f %s" % r
cmd.extend('super_all',super_all)
| apache-2.0 | -2,541,234,397,714,040,300 | 39.222222 | 148 | 0.685543 | false |
zengchunyun/s12 | day5/Day5/CreditCard/modules/creditcard.py | 1 | 9215 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from datetime import datetime, date, timedelta
from conf import settings, errorcode
from modules import common
from dbhelper import dbapi
class CreditCard(object):
__database = "{0}.db".format(os.path.join(settings.DATABASE['dbpath'], settings.DATABASE["tables"]["creditcard"]))
def __init__(self, cardno):
        # Credit card number
        self.cardno = cardno
        # Card password
        self.password = ""
        # Card owner
        self.owner = ""
        # Credit limit
        self.credit_total = settings.CREDIT_TOTAL
        # Available (overdraft) balance
        self.credit_balance = settings.CREDIT_TOTAL
        # Daily interest rate
        self.dayrate = settings.EXPIRE_DAY_RATE
        # Cash-withdrawal fee rate
        self.feerate = settings.FETCH_MONEY_RATE
        # All credit card data
        self.credit_card = {}
        # Flag: does this card exist
        self.card_is_exists = True
        # Card status (frozen or not)
        self.frozenstatus = 0
        # Load the card's info
        self._load_card_info()
def _load_card_info(self):
"""
根据用户输入的卡号获取信用卡信息,如果卡号不存在就返回False
:return: 信用卡对象
"""
exists_flag = False
self.credit_card = dbapi.load_data_from_db(self.__database)
for key, items in self.credit_card.items():
if key == self.cardno:
self.password = self.credit_card[self.cardno]['password']
self.credit_total = self.credit_card[self.cardno]['credit_total']
self.credit_balance = self.credit_card[self.cardno]['credit_balance']
self.owner = self.credit_card[self.cardno]['owner']
self.frozenstatus = self.credit_card[self.cardno]['frozenstatus']
exists_flag = True
break
self.card_is_exists = exists_flag
"""
@property
def card_is_exists(self):
if self.cardno in list(self.credit_card.keys()):
return True
else:
return False
"""
def card_pay(self, cost, paytype, sereialno):
"""
信用卡支付,从信用卡可透支余额中扣费
:param sereialno: 流水号
:param cost: 消费金额 float类型
:param paytype: 消费类型 int类型 ( 1:消费、2:转账、3:提现、4:手续费 ) 对于2,3类型的支付要扣手续费,单记录一条流水单
:return:
"""
        if paytype == 1:
            payfor = "消费"      # purchase
        elif paytype == 2:
            payfor = "转账"      # transfer
        elif paytype == 3:
            payfor = "提现"      # cash withdrawal
        elif paytype == 4:
            payfor = "手续费"    # fee
        else:
            payfor = "未知"      # unknown
        # Deduct the payment
        self.credit_balance -= cost
        # Record the transaction: append the not-yet-repaid charge to the report_bill file
_tmp_bill_record = dict(cardno="{0}".format(self.cardno),
starttime=datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M"),
payfor=payfor,
cost=cost,
serialno=sereialno)
dbapi.append_db_json(_tmp_bill_record, os.path.join(settings.REPORT_PATH, "report_bill"))
        # Persist the updated available balance to the creditcard.db database
self.credit_card[self.cardno]["credit_balance"] = self.credit_balance
dbapi.write_db_json(self.credit_card, self.__database)
def create_card(self):
"""
新发行一张行用卡
:return:
"""
password = common.encrypt(self.password)
self.credit_card[self.cardno] = dict(password=password,
credit_total=self.credit_total,
credit_balance=self.credit_balance,
owner=self.owner,
frozenstatus=self.frozenstatus)
        # Save to the database
dbapi.write_db_json(self.credit_card, self.__database)
def update_card(self):
password = common.encrypt(self.password)
self.credit_card[self.cardno]["password"] = password
self.credit_card[self.cardno]["owner"] = self.owner
self.credit_card[self.cardno]["credit_total"] = self.credit_total
self.credit_card[self.cardno]["credit_balance"] = self.credit_balance
self.credit_card[self.cardno]["frozenstatus"] = self.frozenstatus
        # Write to the database
dbapi.write_db_json(self.credit_card, self.__database)
def _pay_check(self, cost, password):
"""
转账、提现时验证操作,判断卡的余额与支付密码是否正确。并返回错误类型码
:param cost: 转账、提现金额(包含手续费)
:param password: 支付密码
:return: 错误码
"""
totalfee = cost
        # The withdrawal amount plus fees exceeds the balance
if totalfee > self.credit_balance:
return errorcode.BALANCE_NOT_ENOUGHT
elif common.encrypt(password) != self.password:
return errorcode.CARD_PASS_ERROR
else:
return errorcode.NO_ERROR
def fetch_money(self, count, passwd):
"""
提现
:param count: 提现金额
:param passwd:信用卡提现密码
:return: 返回错误类型码
"""
totalfee = count + count * self.feerate
check_result = self._pay_check(totalfee, passwd)
if check_result == errorcode.NO_ERROR:
            # Deduct the withdrawal amount, persist it and generate a bill record
            self.card_pay(count, 3, common.create_serialno())
            # Deduct the fee, persist it and generate a bill record
self.card_pay(count * self.feerate, 4, common.create_serialno())
return errorcode.NO_ERROR
else:
return check_result
def translate_money(self, trans_count, passwd, trans_cardobj):
"""
信用卡转账模块
:param trans_count: 要转账的金额
:param passwd: 信用卡密码
:param trans_cardobj: 对方卡号对应的卡对象
:return: 转账结果
"""
totalfee = trans_count + trans_count * self.feerate
check_result = self._pay_check(totalfee, passwd)
if check_result == errorcode.NO_ERROR:
            # Deduct the amount first and generate the transaction record
            self.card_pay(trans_count, 2, common.create_serialno())
            # Deduct the fee and generate the transaction record
            self.card_pay(trans_count * self.feerate, 4, common.create_serialno())
            # Credit the receiving card and persist it to the database file
trans_cardobj.credit_balance += totalfee
trans_cardobj.update_card()
return errorcode.NO_ERROR
else:
return check_result
def load_statement_list(self):
"""
获取要还款的对账单列表数据,仅包含对账单号、还款日、应还款额、已还款额
:return: 对账单列表
"""
# 获取要显示的信息
list_info = dbapi.load_statement_list(self.cardno)
return list_info
def recreate_statement(self):
"""
根据今天的日期将当前卡的对账单重新生成,主要对过了还款日的账单重新生成利息信息
:return:
"""
# 获取当前日期
today = datetime.strptime(date.today().strftime("%Y-%m-%d"), "%Y-%m-%d")
# 获取所有卡的对账单信息
card_statement = dbapi.load_statement_list(self.cardno)
tmp_list = list()
# 如果有记录
if len(card_statement) > 0:
for record in card_statement:
for k, v in record.items():
# 如果已经还款了,将对账单放入临时列表中
if v["isfinished"] == 1:
tmp_list.append(record)
else:
# 还未还款? 获取还款日期
pay_day = datetime.strptime(v["pdate"], "%Y-%m-%d")
# 如果还款日大于当前日期,无利息
day_delta = (today - pay_day).days
if day_delta > 0:
# 过了还款日了,计算利息 = 总费用 * 日息 * 超过天数
interest = v["total"] * settings.EXPIRE_DAY_RATE * day_delta
# 更新利息信息记录
record[k]["interest"] = interest
# 将更新过的记录写入临时列表
tmp_list.append(record)
else:
# 没有过还款日直接写入临时列表
tmp_list.append(record)
# 都处理完了,将更新过的列表写入文件,替换原有信息
dbapi.write_statement_list(self.cardno, tmp_list)
else:
# 此卡没有对账单记录
pass
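# Illustrative usage sketch (added; not part of the original module; assumes a
# populated creditcard.db plus the settings/common helpers imported above):
#   card = CreditCard("6225880137907543")        # hypothetical card number
#   if card.card_is_exists and card.frozenstatus == 0:
#       err = card.fetch_money(100.0, "123456")  # withdraw 100 plus the fee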
| gpl-2.0 | -345,904,432,875,530,240 | 33.827434 | 118 | 0.526363 | false |
parente/clique | Interface.py | 1 | 4948 | '''
Defines Clique interfaces.
@author: Peter Parente <[email protected]>
@copyright: Copyright (c) 2008 Peter Parente
@license: BSD License
All rights reserved. This program and the accompanying materials are made
available under the terms of The BSD License which accompanies this
distribution, and is available at
U{http://www.opensource.org/licenses/bsd-license.php}
'''
from protocols import Interface
class IOption(Interface):
'''
Allows an object to be listed by a L{Chooser} instance.
'''
def GetObject(): pass
def GetName(): pass
class IContext(Interface):
'''Allows access to child objects at a given path.'''
def GetObjectAt(path): pass
class IInteractive(Interface):
'''
Allows activation and deactivation of an object. Defines methods for getting
an object's name and determining if an object has changed.
'''
def Activate(): pass
def Deactivate(): pass
def GetName(override, default): pass
def HasChanged(): pass
class ISeekable(Interface):
'''
Allows seeking to an item in this object given a single character.
'''
BACKWARD, FORWARD = 0, 1
def SeekToItem(pred, direction=FORWARD): pass
class ISearchable(Interface):
'''
Allows searching to an item in this object given a string. Supports
navigation to the next and previous matching item.
'''
def SearchStart(): pass
def SearchForNextMatch(text, current): pass
def SearchForPrevMatch(text, current): pass
def SearchReset(): pass
class ISortable(Interface):
'''
Allows sorting of items based on one or more criteria.
'''
def GetSortName(): pass
def SortNext(): pass
def SortPrev(): pass
class ISelectable(Interface):
'''
Allows the selection of one or all items managed by an object.
'''
def Reselect(): pass
def SelectAllItems(): pass
def UnselectItems(): pass
class IDeletable(Interface):
'''
Allows the deletion of one item managed by an object.
'''
def Delete(): pass
class IDetailable(Interface):
'''
Allows access to additional information about the currently selected item
managed by an object.
'''
def GetFields(): pass
def GetInheritedFields(): pass
class IStrideable(Interface):
'''
Allows variable levels of navigation through items.
'''
def NextLevel(): pass
def PrevLevel(): pass
def GetLevel(): pass
class IInfiniteCollection(Interface):
'''
Allows navigation through items via previous and next commands. Allows access
to the currently selected item and its name.
'''
def GetSelectedName(default=''): pass
def NextItem(): pass
def PrevItem(): pass
class IFiniteCollection(IInfiniteCollection):
'''
Allows navigation to the first item in a bounded collection. Provides methods
for getting the total number of items and the currently selected item's index.
'''
def GetItemCount(): pass
def GetIndex(): pass
def FirstItem(): pass
class IList(IFiniteCollection):
'''
Allows navigation to the last item in a bounded collection.
'''
def LastItem(): pass
class ITree(IFiniteCollection):
'''
Allows access to information about items managed at higher and lower levels.
'''
def GetParentName(default=''): pass
def GetChildCount(): pass
def HasChildren(): pass
def HasParent(): pass
class ILabel(Interface):
'''
Allows read-only access to an entire body of text that can only be retrieved
as one large string.
'''
def __str__(self): pass
def GetAllText(): pass
class IText(Interface):
'''
Allows read-only access to properties of a body of text and navigation by
character, word, and chunk.
'''
BOTH, FROM_START, TO_END = 0, 1, 2
CURR, PREV, NEXT = 0, 2, 4
def GetAllText(): pass
def GetWordCount(all=True): pass
def GetChunkText(which): pass
def GetWordText(which): pass
def GetCharText(which): pass
def NextChunk(skip=False): pass
def PrevChunk(): pass
def NextWord(): pass
def PrevWord(): pass
def PrevChar(): pass
def NextChar(): pass
def IsLastChunk(): pass
def IsFirstChunk(): pass
def MoveXChars(diff): pass
def MoveStart(self): pass
def MoveEnd(self): pass
def MoveStartChunk(self): pass
def MoveEndChunk(self): pass
class IHypertext(IDetailable, IText):
'''
Allows read-only access to extended properties and actions of rich text.
'''
def IsLink(): pass
def FollowLink(): pass
def GetTitle(): pass
class IEditableText(IText):
'''
Allows write access to a body of text with methods to replace all text, insert
a character, delete a character, and insert a new chunk.
'''
def SetText(): pass
def DeleteNext(): pass
def DeletePrev(): pass
def InsertChar(char): pass
def InsertText(text): pass
def InsertChunk(): pass
class ISound(Interface):
'''
Provides a mapping from an object state, action, warn, and identity to a sound
representing it.
'''
def State(name): pass
def Action(name): pass
def Warn(name): pass
def Identity(name=''): pass
| bsd-3-clause | 1,599,633,449,436,640,000 | 25.459893 | 80 | 0.708165 | false |
JesseScott/PolyglotVancouver-Analysis | util.py | 1 | 1542 | #!/usr/bin/python
# Util file to import in all of the notebooks to allow for easy code re-use
# Calculate Percent of Attendees that did not speak
def percent_silent(df):
total = len(df)
silent = 0
for row in df.iteritems():
if row[1] == 0:
silent = silent + 1
percent = {}
percent['TOTAL'] = total
percent['SILENT'] = silent
percent['VERBOSE'] = total - silent
return percent
# Calculate Percent of Attendees that left
def percent_left(df):
total = len(df)
left = 0
for row in df.iteritems():
if row[1] == 0:
left = left + 1
percent = {}
percent['TOTAL'] = total
percent['LEFT'] = left
percent['STAYED'] = total - left
return percent
# Calculate Percent of Attendees along gender
def percent_gender(df):
total = len(df)
female = 0
for row in df.iteritems():
if row[1] == 1:
female = female + 1
percent = {}
percent['TOTAL'] = total
percent['FEMALE'] = female
percent['MALE'] = total - female
return percent
# Calculate percent of talking points by gender
def percent_talking_gender(df):
total = 0
male = 0
female = 0
for talks, gender in df.itertuples(index=False):
if talks > 0:
total = total + 1
if gender == 0:
male = male + 1
elif gender == 1:
female = female + 1
percent = {}
percent['TOTAL'] = total
percent['FEMALE'] = female
percent['MALE'] = male
return percent
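# Illustrative usage (hypothetical data; the analysis notebooks pass pandas
# Series built from the meetup spreadsheets):
#   import pandas as pd
#   talks = pd.Series([0, 3, 1, 0])
#   percent_silent(talks)  # -> {'TOTAL': 4, 'SILENT': 2, 'VERBOSE': 2}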
| gpl-3.0 | -1,612,622,575,943,453,700 | 22.723077 | 75 | 0.56939 | false |
EdinburghGenomics/clarity_scripts | tests/test_prepare_discard_plate.py | 1 | 5619 | from unittest.mock import Mock, patch, PropertyMock
from EPPs.common import StepEPP
from scripts import prepare_discard_plate
from scripts.prepare_discard_plate import sample_discard_wf_stage_name
from tests.test_common import TestEPP, NamedMock
class FakeContainer(NamedMock):
@property
def placements(self):
return {
'1': NamedMock(
real_name=self.name + ' placement 1',
samples=[Mock(
artifact=Mock(workflow_stages_and_statuses=[(Mock(), 'COMPLETE', sample_discard_wf_stage_name)]))]
),
'2': NamedMock(
real_name=self.name + ' placement 2',
samples=[Mock(
artifact=Mock(workflow_stages_and_statuses=[(Mock(), 'COMPLETE', sample_discard_wf_stage_name)]))]
)
}
def fake_get_artifacts(samplelimsid, type):
return [NamedMock(real_name=type + ' ' + a, container=FakeContainer(real_name=a + '-DNA')) for a in samplelimsid]
class TestPrepareDiscardPlate(TestEPP):
def setUp(self):
self.epp = prepare_discard_plate.FindPlateToRoute(self.default_argv)
self.patched_process = patch.object(
StepEPP,
'process',
new_callable=PropertyMock(
return_value=Mock(
all_inputs=Mock(
return_value=[
Mock(samples=[Mock(id='LP1234567')]),
Mock(samples=[Mock(id='LP1234568')])
]
)
)
)
)
self.patched_lims = patch.object(
StepEPP,
'lims',
new_callable=PropertyMock(return_value=Mock(get_artifacts=fake_get_artifacts))
)
def test_discard(self):
patched_stage = patch('scripts.prepare_discard_plate.get_workflow_stage', return_value=Mock(uri='a_uri'))
patched_log = patch('scripts.prepare_discard_plate.FindPlateToRoute.info')
exp_log_messages = (
('Found Stage %s uri: %s', 'Dispose of Samples EG 1.0 ST', 'a_uri'),
('Found %d Samples in the step', 2),
('Found %d Analytes (derived samples)', 2),
('Found %d containers', 2),
('Found %d valid containers to potentially discard', 2),
('Found %d others associated with the container but not associated with discarded samples', 4),
('Test container %s, with %s artifacts', 'LP1234567-DNA', 2),
(
'Container %s might route because artifact %s in step_associated_artifacts (%s) or has been discarded before (%s)',
'LP1234567-DNA',
'LP1234567-DNA placement 2',
False,
True
),
('Will route container: %s', 'LP1234567-DNA'),
('Route %s containers with %s artifacts', 2, 4)
)
with patched_log as l, patched_stage as p, self.patched_lims as plims, self.patched_process:
self.epp._run()
p.assert_called_with(
self.epp.lims,
workflow_name='Sample Disposal EG 1.0 WF',
stage_name='Dispose of Samples EG 1.0 ST'
)
for m in exp_log_messages:
l.assert_any_call(*m)
# Has route the artifacts from the containers
assert plims.route_artifacts.call_count == 1
assert len(plims.route_artifacts.call_args[0][0]) == 4
assert plims.route_artifacts.call_args[1] == {'stage_uri': 'a_uri'}
def test_fetch_all_artifacts_for_samples():
lims = Mock(get_artifacts=Mock(return_value=[1, 2, 3]))
# 100 samples passed in, window size = 50, so lims should be called twice
samples = [Mock(id=str(x)) for x in range(100)]
limsids = [s.id for s in samples]
assert prepare_discard_plate.fetch_all_artifacts_for_samples(lims, samples) == [1, 2, 3, 1, 2, 3]
lims.get_artifacts.assert_any_call(samplelimsid=limsids[0:50], type='Analyte')
lims.get_artifacts.assert_any_call(samplelimsid=limsids[50:100], type='Analyte')
def test_is_valid_container():
valid_container = NamedMock(real_name='LP1234567-GTY')
assert prepare_discard_plate.is_valid_container(valid_container)
valid_container = NamedMock(real_name='LP1234567-DNA')
assert prepare_discard_plate.is_valid_container(valid_container)
valid_container = NamedMock(real_name='LP1234567P001')
assert prepare_discard_plate.is_valid_container(valid_container)
invalid_container = NamedMock(real_name='LP1234567-QNT')
assert not prepare_discard_plate.is_valid_container(invalid_container)
def test_has_workflow_stage():
valid_workflow_stage = ('w', 'COMPLETE', 'a_workflow_step_name')
invalid_workflow_stage = ('w', 'INCOMPLETE', 'a_workflow_step_name')
another_invalid_workflow_stage = ('w', 'COMPLETE', 'another_workflow_step_name')
def artifact(stages):
a = Mock(workflow_stages_and_statuses=stages)
return Mock(samples=[Mock(artifact=a)])
valid_artifact = artifact([invalid_workflow_stage, valid_workflow_stage])
invalid_artifact = artifact([invalid_workflow_stage, invalid_workflow_stage])
another_invalid_artifact = artifact([another_invalid_workflow_stage])
assert prepare_discard_plate.has_workflow_stage(valid_artifact, 'a_workflow_step_name')
assert not prepare_discard_plate.has_workflow_stage(invalid_artifact, 'a_workflow_step_name')
assert not prepare_discard_plate.has_workflow_stage(another_invalid_artifact, 'a_workflow_step_name')
| mit | -3,781,380,545,526,792,000 | 42.55814 | 131 | 0.61381 | false |
originaltebas/chmembers | app/familias/forms.py | 1 | 2788 | # app/familias/forms.py
# coding: utf-8
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, HiddenField, SelectField
from wtforms.validators import InputRequired, Length
class FamiliasForm(FlaskForm):
"""
    Form for families
"""
id = HiddenField("id")
id_direccion = HiddenField("idDir")
    # Familia model fields
apellidos_familia = StringField(u'Apellido de la Familia',
validators=[InputRequired(),
Length(min=1, max=60)])
descripcion_familia = StringField(u'Descripción de la Familia',
validators=[InputRequired(),
Length(min=0, max=200)])
telefono_familia = StringField(u'Teléfono de la Familia',
validators=[InputRequired(),
Length(min=0, max=15)])
TipoFamilia = SelectField(u'Tipo de Familia', coerce=int)
submit = SubmitField(u'Aceptar')
class DireccionModalForm(FlaskForm):
    # Direccion (address) model fields
tipo_via = StringField(u'Tipo de vía',
validators=[InputRequired(),
Length(min=1, max=20)])
nombre_via = StringField(u'Nombre de la vía',
validators=[InputRequired(),
Length(min=1, max=100)])
nro_via = StringField(u'Nro',
validators=[InputRequired(),
Length(min=1, max=10)])
portalescalotros_via = StringField(u'Portal/Esc/Otro')
piso_nroletra_via = StringField(u'Nro/Letra del Piso')
cp_via = StringField(u'CP',
validators=[InputRequired(),
Length(min=1, max=10)])
ciudad_via = StringField(u'Ciudad',
validators=[InputRequired(),
Length(min=1, max=50)])
provincia_via = StringField(u'Provincia',
validators=[InputRequired(),
Length(min=1, max=50)])
pais_via = StringField(u'País',
validators=[InputRequired(),
Length(min=1, max=50)])
submit = SubmitField(u'Crear Dirección')
class AsignacionMiembrosForm(FlaskForm):
"""
    Form for assigning people to the ggcc.
    The people must already exist as members.
"""
ids_in = HiddenField('Ids IN')
ids_out = HiddenField('Ids OUT')
submit = SubmitField(u'Aceptar')
| mit | -5,743,181,648,455,708,000 | 35.093333 | 74 | 0.502876 | false |
jingsam/tianditu | CheckRoadName.py | 1 | 2300 | # -*- coding: utf-8 -*-
__author__ = '[email protected]'
import os
import arcpy
from parallel import check_parallel
def check_road_name_task(args, cpus, pid):
in_fc = args[0]
fields = args[1]
error_id = "ERR06"
layer = os.path.basename(in_fc)
content = "NAME填写位置正确性检查"
description = "图层【{0}】的ID为【{1}】的要素,【{2}】填写不正确。"
warning = "不忽略"
desc = arcpy.Describe(in_fc)
errors = []
_fields = ["OID@", "SHAPE@XY"] + fields
cursor = arcpy.da.SearchCursor(in_fc, _fields, spatial_reference=desc.spatialReference.GCS)
for row in cursor:
if row[0] % cpus != pid:
continue
all_names = [row[i] for i in xrange(2, len(row))]
names = [name for name in all_names if name]
if len(names) == 0:
continue
if len(set(names)) < len(names):
errors.append('{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}\n'
.format(row[0], error_id, layer, content, description.format(layer, row[0], ';'.join(fields)), row[1][0], row[1][1], warning))
continue
for name in names:
if all_names.index(name) >= len(names):
errors.append('{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}\n'
.format(row[0], error_id, layer, content, description.format(layer, row[0], ';'.join(fields)), row[1][0], row[1][1], warning))
break
del cursor
return ''.join(errors)
def check_road_name(in_fc, fields, out_chk):
if not arcpy.Exists(in_fc):
arcpy.AddIDMessage("ERROR", 110, in_fc)
raise SystemExit()
ext = os.path.splitext(out_chk)[1]
if ext != '.csv':
out_chk += '.csv'
f = open(out_chk, 'w')
f.write('OID, ErrorID, Layer, InspectionContent, Description, X, Y, Warning\n')
# result = check_road_name_task((in_fc, fields), 1, 0)
result = check_parallel(check_road_name_task, (in_fc, fields))
f.write(result)
f.close()
if __name__ == "__main__":
in_fc = arcpy.GetParameterAsText(0)
fields = arcpy.GetParameterAsText(1)
out_chk = arcpy.GetParameterAsText(2)
check_road_name(in_fc, fields.split(';'), out_chk) | mit | -4,269,319,113,325,853,000 | 30.434783 | 152 | 0.54025 | false |
JulyKikuAkita/PythonPrac | cs15211/LowestCommonAncestorofaBinaryTree.py | 1 | 5267 | __source__ = 'https://leetcode.com/problems/lowest-common-ancestor-of-a-binary-tree/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/lowest-common-ancestor-of-a-binary-tree.py
# Time:  O(n)
# Space: O(h)
#
# Description: Leetcode # 236. Lowest Common Ancestor of a Binary Tree
#
# Given a binary tree, find the lowest common ancestor (LCA)
# of two given nodes in the tree.
#
# According to the definition of LCA on Wikipedia: "The lowest
# common ancestor is defined between two nodes v and w as the
# lowest node in T that has both v and w as descendants (where we
# allow a node to be a descendant of itself)."
#
# _______3______
# / \
# ___5__ ___1__
# / \ / \
# 6 _2 0 8
# / \
# 7 4
#
# For example, the lowest common ancestor (LCA) of nodes 5 and 1 is 3.
# Another example is LCA of nodes 5 and 4 is 5, since a node can be a
# descendant of itself according to the LCA definition.
#
# Companies
# Amazon LinkedIn Apple Facebook Microsoft
# Related Topics
# Tree
# Similar Questions
# Lowest Common Ancestor of a Binary Search Tree
#
import unittest
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
#DFS
class Solution:
# @param {TreeNode} root
# @param {TreeNode} p
# @param {TreeNode} q
# @return {TreeNode}
def lowestCommonAncestor(self, root, p, q):
if root in (None, p, q):
return root
left, right = [self.lowestCommonAncestor(child, p, q) for child in (root.left, root.right)]
# 1. If the current subtree contains both p and q,
# return their LCA.
# 2. If only one of them is in that subtree,
# return that one of them.
# 3. If neither of them is in that subtree,
# return the node of that subtree.
return root if left and right else left or right
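# Note (added): the recursion above visits each node at most once, so it runs
# in O(n) time for a tree of n nodes, with O(h) recursion stack (h = height).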
#BFS
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
#
# 60ms 98.74%
class Solution(object):
def lowestCommonAncestor(self, root, p, q):
"""
:type root: TreeNode
:type p: TreeNode
:type q: TreeNode
:rtype: TreeNode
"""
stack = [root]
parent = {root:None}
while q not in parent or p not in parent:
node = stack.pop()
if node.left:
parent[node.left] = node
stack.append(node.left)
if node.right:
parent[node.right] = node
stack.append(node.right)
ancestor = set()
while p:
ancestor.add(p)
p = parent[p]
while q not in ancestor:
q = parent[q]
return q
def lowestCommonAncestor2(self, root, p, q):
if root in (None, p, q): return root
left, right = (self.lowestCommonAncestor(kid, p, q)
for kid in (root.left, root.right))
return root if left and right else left or right
def lowestCommonAncestor3(self, root, p, q):
if root in (None, p, q): return root
subs = [self.lowestCommonAncestor(kid, p, q)
for kid in (root.left, root.right)]
return root if all(subs) else max(subs)
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/lowest-common-ancestor-of-a-binary-tree/solution/
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode(int x) { val = x; }
* }
*/
# DFS
# 6ms 99.80%
class Solution {
public TreeNode lowestCommonAncestor(TreeNode root, TreeNode p, TreeNode q) {
if (root == null || root == p || root == q) return root;
TreeNode left = lowestCommonAncestor(root.left, p, q);
TreeNode right = lowestCommonAncestor(root.right, p, q);
if (left != null && right != null) return root; //we don't care if left == null && right == null, just return null
return left == null ? right : left;
}
}
# BFS
# 30ms 8.14%
class Solution {
public TreeNode lowestCommonAncestor(TreeNode root, TreeNode p, TreeNode q) {
Map<TreeNode, TreeNode> parent = new HashMap<>();
parent.put(root, null);
Deque<TreeNode> stack = new ArrayDeque<>();
stack.push(root);
while(!parent.containsKey(p) || !parent.containsKey(q)){
TreeNode node = stack.pop();
if(node.left != null){
parent.put(node.left, node);
stack.push(node.left);
}
if(node.right != null){
parent.put(node.right, node);
stack.push(node.right);
}
}
Set<TreeNode> ancestors = new HashSet<>();
while(p != null){
ancestors.add(p);
p = parent.get(p);
}
while(!ancestors.contains(q)){
q = parent.get(q);
}
return q;
}
}
'''
| apache-2.0 | -252,872,569,561,111,650 | 28.424581 | 122 | 0.567686 | false |
willowd878/nca47 | nca47/agent/dns_driver/fake_driver.py | 1 | 5969 | from oslo_config import cfg
from oslo_log import log as logging
from nca47.common.i18n import _
from nca47.common.i18n import _LI
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
DNS_DRIVER = None
ZONES_AGENT_OPTS = [
cfg.StrOpt('host_ip',
default='0.0.0.0',
help=_('The IP address on which nca47-zdns_driver listens.')),
cfg.PortOpt('port',
default=20120,
help=_('The TCP port on which nca47-zdns_driver listens.')),
cfg.StrOpt('view_id',
default='telecom',
help=_('The TCP view_id on which nca47-zdns_driver listens.')),
cfg.StrOpt('auth_name',
default='admin',
help=_('The TCP auth_name on which nca47-zdns_driver'
'listens.')),
cfg.StrOpt('auth_pw',
default='zdns',
help=_('The TCP auth_pw on which nca47-zdns_driver listens.')),
]
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='zdns',
title='Options for the nca47-zdns_driver service')
CONF.register_group(opt_group)
CONF.register_opts(ZONES_AGENT_OPTS, opt_group)
class fake_dns_driver():
def __init__(self):
self.host = 'https://fake_ip'
self.port = CONF.zdns.port
self.view_id = CONF.zdns.view_id
self.auth_name = CONF.zdns.auth_name
self.auth_pw = CONF.zdns.auth_pw
@classmethod
def get_instance(cls):
global DNS_DRIVER
if not DNS_DRIVER:
DNS_DRIVER = cls()
return DNS_DRIVER
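    # Note (added): get_instance caches a single driver object in the
    # module-global DNS_DRIVER, so every caller shares one fake driver
    # (a simple singleton pattern).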
def create_zone(self, context, zone):
url = (self.host + ":" + str(self.port) +
'/views/' + self.view_id + '/zones')
LOG.info(_LI("create zones:"+url))
return {" fake create zone": "success"}
def update_zone_owners(self, context, zone, zone_id):
url = (self.host + ":" + str(self.port) + '/views/' +
self.view_id + '/zones/' + zone_id + '/owners')
LOG.info(_LI("update_zone_owners:"+url))
return {"fake update zone owners zone": "success"}
def update_zone(self, context, zone, zone_id):
url = (self.host + ":" + str(self.port) +
'/views/' + self.view_id + '/zones/' + zone_id)
LOG.info(_LI("update zones :"+url))
return {"fake update_zone zone": "success"}
def delete_zone(self, context, zone, zone_id):
url = (self.host + ":" + str(self.port) + '/views/' + self.view_id +
'/zones/' + zone_id)
LOG.info(_LI("delete zones :" + url))
return {"fake delete_zone zone": "success"}
def create_rrs(self, context, rrs, zone_id):
url = (str(self.host) + ":" + str(self.port) + '/views/' +
self.view_id + '/zones/' + str(zone_id) + '/rrs')
LOG.info(_LI("create rrs:" + url))
res = {
"fake comment": "", "name": "www.baidu.", "type": "A",
"ttl": 1200, "state": "",
"href": "/views/default/zones/www.baidu/rrs/"
"www.baidu.$1200$A$MTk4LjIwMi4zOC40OA==",
"klass": "IN", "rdata": "198.202.38.48",
"reverse_name": "baidu.www",
"id": "www.baidu.$1200$A$MTk4LjIwMi4zOC40OA==",
"is_shared": ""
}
return res
def update_rrs(self, context, rrs, zone_id, rrs_id):
url = (self.host + ":" + str(self.port) + '/views/' + self.view_id +
'/zones/' + zone_id + '/rrs/' + rrs_id)
LOG.info(_LI("update rrs:" + url))
return {"fake id": "update_rrs"}
def delete_rrs(self, context, rrs, zone_id, rrs_id):
url = (self.host + ":" + str(self.port) + '/views/' + self.view_id +
'/zones/' + zone_id + '/rrs/' + rrs_id)
LOG.info(_LI("delete rrs :" + url))
return {"fake delete_rss": "success"}
def del_cache(self, context, cache_dic):
url = (self.host + ":" + str(self.port) + '/cache/clean')
LOG.info(_LI("delete cache :" + url))
return {"fake clean cache": "success"}
def get_zone_one(self, context, zone_id):
url = (self.host + ":" + str(self.port) +
'/views/' + self.view_id + '/zones/' + zone_id)
LOG.info(_LI("view one zone :" + url))
return {"fake get_zone_one": "success"}
def get_zones(self, context):
url = (self.host + ":" + str(self.port) +
'/views/' + self.view_id + '/zones')
LOG.info(_LI("view all zone :" + url))
return {"fake get_zones": "success"}
def get_rrs(self, context, zone_id):
url = (self.host + ":" + str(self.port) + '/views/' + self.view_id +
'/zones/' + zone_id + '/rrs')
LOG.info(_LI("get_rrs :" + url))
res = {
"total_size": 2, "page_num": 1,
"resources":
[
{
"comment": "", "name": "www.baidu.",
"type": "NS", "ttl": 3600, "state": "",
"href": "/views/default/zones/www.baidu/rrs/"
"www.baidu.$3600$NS$bnMud3d3LmJhaWR1Lg==",
"klass": "IN", "rdata": "ns.www.baidu.",
"reverse_name": "baidu.www",
"id": "www.baidu.$3600$NS$bnMud3d3LmJhaWR1Lg==",
"is_shared": ""
},
{
"comment": "", "name": "ns.www.baidu.",
"type": "A", "ttl": 3600, "state": "",
"href": "/views/default/zones/www.baidu/rrs/"
"ns.www.baidu.$3600$A$MTI3LjAuMC4x",
"klass": "IN", "rdata": "127.0.0.1",
"reverse_name": "baidu.www.ns",
"id": "ns.www.baidu.$3600$A$MTI3LjAuMC4x",
"is_shared": ""
}
],
"page_size": 2
}
return res
| apache-2.0 | -7,830,155,788,497,134,000 | 38.269737 | 78 | 0.482493 | false |
hunger/cleanroom | cleanroom/commands/set.py | 1 | 1120 | # -*- coding: utf-8 -*-
"""set command.
@author: Tobias Hunger <[email protected]>
"""
from cleanroom.command import Command
from cleanroom.location import Location
from cleanroom.systemcontext import SystemContext
import typing
class SetCommand(Command):
"""The set command."""
def __init__(self, **services: typing.Any) -> None:
"""Constructor."""
super().__init__(
"set",
syntax="<KEY> <VALUE>",
help_string="Set up a substitution.",
file=__file__,
**services
)
def validate(
self, location: Location, *args: typing.Any, **kwargs: typing.Any
) -> None:
"""Validate the arguments."""
self._validate_arguments_exact(
location, 2, '"{}" needs a key and a value.', *args, **kwargs
)
def __call__(
self,
location: Location,
system_context: SystemContext,
*args: typing.Any,
**kwargs: typing.Any
) -> None:
"""Execute command."""
print(args)
system_context.set_substitution(args[0], args[1])
| gpl-3.0 | 94,178,301,023,868,740 | 23.888889 | 73 | 0.55625 | false |
stamparm/tsusen | core/settings.py | 1 | 3668 | #!/usr/bin/env python
"""
Copyright (c) 2015-2016 Miroslav Stampar (@stamparm)
See the file 'LICENSE' for copying permission
"""
import os
import re
import socket
import stat
import subprocess
from core.attribdict import AttribDict
config = AttribDict()
NAME = "tsusen"
VERSION = "0.3.6"
DEBUG = False
SNAP_LEN = 100
IPPROTO = 8
ETH_LENGTH = 14
IPPROTO_LUT = dict(((getattr(socket, _), _.replace("IPPROTO_", "")) for _ in dir(socket) if _.startswith("IPPROTO_")))
LOCAL_ADDRESSES = []
DATE_FORMAT = "%Y-%m-%d"
TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
SYSTEM_LOG_DIRECTORY = "/var/log" if not subprocess.mswindows else "C:\\Windows\\Logs"
LOG_DIRECTORY = os.path.join(SYSTEM_LOG_DIRECTORY, NAME)
DEFAULT_LOG_PERMISSIONS = stat.S_IREAD | stat.S_IWRITE | stat.S_IRGRP | stat.S_IROTH
CSV_HEADER = "proto dst_port dst_ip src_ip first_seen last_seen count"
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
HTML_DIR = os.path.join(ROOT_DIR, "html")
CONFIG_FILE = os.path.join(ROOT_DIR, "tsusen.conf")
DISABLED_CONTENT_EXTENSIONS = (".py", ".pyc", ".md", ".bak", ".conf", ".zip", "~")
SERVER_HEADER = "%s/%s" % (NAME, VERSION)
MAX_IP_FILTER_RANGE = 2 ** 16
MAX_PUT_SIZE = 5 * 1024 * 1024
# Reference: https://sixohthree.com/media/2003/06/26/lock_your_doors/portscan.txt
MISC_PORTS = { 17: "qotd", 53: "dns", 135: "dcom-rpc", 502: "modbus", 623: "ipmi", 1433: "mssql", 1723: "pptp", 1900: "upnp", 3128: "squid", 3389: "rdesktop", 5351: "nat-pmp", 5357: "wsdapi", 5631: "pc-anywhere", 5800: "vnc", 5900: "vnc", 5901: "vnc-1", 5902: "vnc-2", 5903: "vnc-3", 6379: "redis", 7547: "cwmp", 8118: "privoxy", 8338: "maltrail", 8339: "tsusen", 8443: "https-alt", 9200: "wap-wsp", 11211: "memcached", 17185: "vxworks", 27017: "mongo", 53413: "netis" }
def read_config(config_file):
global config
if not os.path.isfile(config_file):
exit("[!] missing configuration file '%s'" % config_file)
config.clear()
try:
array = None
content = open(config_file, "rb").read()
for line in content.split("\n"):
line = re.sub(r"#.+", "", line)
if not line.strip():
continue
if line.count(' ') == 0:
array = line.upper()
config[array] = []
continue
if array and line.startswith(' '):
config[array].append(line.strip())
continue
else:
array = None
try:
name, value = line.strip().split(' ', 1)
except ValueError:
name = line.strip()
value = ""
finally:
name = name.upper()
value = value.strip("'\"")
if name.startswith("USE_"):
value = value.lower() in ("1", "true")
elif value.isdigit():
value = int(value)
else:
for match in re.finditer(r"\$([A-Z0-9_]+)", value):
if match.group(1) in globals():
value = value.replace(match.group(0), globals()[match.group(1)])
else:
value = value.replace(match.group(0), os.environ.get(match.group(1), match.group(0)))
if subprocess.mswindows and "://" not in value:
value = value.replace("/", "\\")
config[name] = value
except (IOError, OSError):
pass
for option in ("MONITOR_INTERFACE",):
        if option not in config:
exit("[!] missing mandatory option '%s' in configuration file '%s'" % (option, config_file))
| mit | 5,561,311,949,671,512,000 | 36.050505 | 470 | 0.549891 | false |
f3at/feat | src/feat/agents/base/sender.py | 1 | 8431 | # F3AT - Flumotion Asynchronous Autonomous Agent Toolkit
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# See "LICENSE.GPL" in the source distribution for more information.
# Headers in this file shall remain intact.
from feat.agents.base import task, replay, requester
from feat.common import defer, fiber, formatable
from feat.agents.application import feat
from feat.agents.monitor.interface import IClerk, DEFAULT_NOTIFICATION_PERIOD
from feat.agents.monitor.interface import PatientState
from feat.interface.protocols import ProtocolFailed
from feat.interface.recipient import IRecipient
from feat.database.interface import NotFoundError
class AgentMixin(object):
def initiate(self):
desc = self.get_descriptor()
if not hasattr(desc, 'pending_notifications'):
raise ValueError("Agent using this mixin, should have "
"'pending_notification' dictionary field"
"in his descriptor")
@replay.mutable
def startup(self, state):
config = state.medium.get_configuration()
period = config.notification_period
clerk = getattr(state, 'clerk', None)
proto = state.medium.initiate_protocol(NotificationSender,
clerk=clerk,
period=period)
state.notification_sender = proto
@replay.immutable
def has_empty_outbox(self, state):
return state.notification_sender.has_empty_outbox()
@feat.register_restorator
class PendingNotification(formatable.Formatable):
type_name = 'notification'
formatable.field('type', None)
formatable.field('origin', None)
formatable.field('payload', None)
formatable.field('recipient', None)
class NotificationSender(task.StealthPeriodicTask):
protocol_id = 'notification-sender'
@replay.entry_point
def initiate(self, state, clerk=None, period=None):
state.clerk = clerk and IClerk(clerk)
period = period or DEFAULT_NOTIFICATION_PERIOD
# IRecipient -> list of PendingNotifications
return task.StealthPeriodicTask.initiate(self, period)
@replay.immutable
def run(self, state):
defers = list()
for agent_id, notifications in self._iter_outbox():
if not notifications:
continue
if state.clerk and state.clerk.has_patient(agent_id):
status = state.clerk.get_patient(agent_id)
if status.state == PatientState.alive:
defers.append(self.flush_notifications(agent_id))
else:
defers.append(self.flush_notifications(agent_id))
return defer.DeferredList(defers)
@replay.mutable
def flush_notifications(self, state, agent_id):
return self._flush_next(agent_id)
@replay.immutable
def has_empty_outbox(self, state):
desc = state.agent.get_descriptor()
if desc.pending_notifications:
self.debug('Pending notifications keys are: %r',
desc.pending_notifications.keys())
return False
return True
### flushing notifications ###
@replay.mutable
def _flush_next(self, state, agent_id):
notification = self._get_first_pending(agent_id)
if notification:
recp = notification.recipient
f = requester.notify_partner(
state.agent, recp, notification.type,
notification.origin, notification.payload)
f.add_callbacks(fiber.drop_param, self._sending_failed,
cbargs=(self._sending_cb, recp, notification, ),
ebargs=(recp, ))
return f
@replay.mutable
def _sending_cb(self, state, recp, notification):
f = self._remove_notification(recp, notification)
f.add_both(fiber.drop_param, self._flush_next, str(recp.key))
return f
@replay.mutable
def _sending_failed(self, state, fail, recp):
fail.trap(ProtocolFailed)
# check that the document still exists, if not it means that this
# agent got buried
f = state.agent.get_document(recp.key)
f.add_callbacks(self._check_recipient, self._handle_not_found,
ebargs=(recp, ), cbargs=(recp, ))
return f
@replay.journaled
def _handle_not_found(self, state, fail, recp):
fail.trap(NotFoundError)
return self._forget_recipient(recp)
@replay.journaled
def _check_recipient(self, state, desc, recp):
self.log("Descriptor is still there, waiting patiently for the agent.")
new_recp = IRecipient(desc)
if recp != new_recp and new_recp.route is not None:
return self._update_recipient(recp, new_recp)
### methods for handling the list of notifications ###
@replay.journaled
def notify(self, state, notifications):
'''
        Call this to schedule the sending of partner notifications.
'''
def do_append(desc, notifications):
for notification in notifications:
if not isinstance(notification, PendingNotification):
raise ValueError("Expected notify() params to be a list "
"of PendingNotification instance, got %r."
% notification)
key = str(notification.recipient.key)
if key not in desc.pending_notifications:
desc.pending_notifications[key] = list()
desc.pending_notifications[key].append(notification)
return state.agent.update_descriptor(do_append, notifications)
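    # Example (hypothetical) of scheduling a notification from agent code,
    # assuming the sender was initiated in AgentMixin.startup():
    #
    #   state.notification_sender.notify([
    #       PendingNotification(type='died', origin=some_recipient,
    #                           payload=None, recipient=partner_recipient)])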
@replay.immutable
def _iter_outbox(self, state):
desc = state.agent.get_descriptor()
return desc.pending_notifications.iteritems()
@replay.immutable
def _get_first_pending(self, state, agent_id):
desc = state.agent.get_descriptor()
pending = desc.pending_notifications.get(agent_id, list())
if pending:
return pending[0]
@replay.journaled
def _remove_notification(self, state, recp, notification):
def do_remove(desc, recp, notification):
try:
desc.pending_notifications[recp.key].remove(notification)
if not desc.pending_notifications[recp.key]:
del(desc.pending_notifications[recp.key])
except (ValueError, KeyError, ):
self.warning("Tried to remove notification %r for "
"agent_id %r from %r, but not found",
notification, recp.key,
desc.pending_notifications)
return state.agent.update_descriptor(do_remove, recp, notification)
@replay.journaled
def _forget_recipient(self, state, recp):
def do_remove(desc, recp):
desc.pending_notifications.pop(str(recp.key))
return state.agent.update_descriptor(do_remove, recp)
@replay.journaled
def _update_recipient(self, state, old, new):
old = IRecipient(old)
new = IRecipient(new)
if old.key != new.key:
raise AttributeError("Tried to subsituted recipient %r with %r, "
"the key should be the same!" % (old, new))
def do_update(desc, recp):
if not desc.pending_notifications.get(recp.key, None):
return
for notification in desc.pending_notifications[recp.key]:
notification.recipient = recp
return state.agent.update_descriptor(do_update, new)
| gpl-2.0 | 1,688,419,642,821,006,000 | 36.638393 | 79 | 0.629225 | false |
mastacheata/tvheadend-xz.bundle | Contents/Code/__init__.py | 1 | 2752 | import htsp
TITLE = 'XZ'
PREFIX = '/video/xz'
ART = 'art-default.jpg'
ICON = 'tvheadend.png'
ICON_LIVE = 'televisions.png'
ICON_REC = 'rec.png'
tvh = None
def Start():
ObjectContainer.art = R(ART)
HTTP.CacheTime = 1
Log.Debug('XZ start')
global tvh
tvh = TVheadend()
ValidatePrefs()
@route(PREFIX + '/validate')
def ValidatePrefs():
if not Prefs['tvheadend-url']:
Log.Error('Please specify a URL to TVheadend in the settings')
return False
if not Prefs['tvheadend-http-port']:
Log.Error('Please specify the TVheadend HTTP port in the settings')
return False
if not Prefs['tvheadend-login']:
Log.Warning('Please specify your TVheadend username in the settings')
login = ''
# return False
else:
login = Prefs['tvheadend-login']
if not Prefs['tvheadend-password']:
Log.Warning('Please specify your TVheadend password in the settings')
password = ''
# return False
else:
password = Prefs['tvheadend-password']
global tvh
tvh.connect(Prefs['tvheadend-url'], int(Prefs['tvheadend-http-port'])+1)
return tvh.login(login, password)
@handler(PREFIX, TITLE, ICON, ART)
def main_menu():
main = ObjectContainer()
main.title1 = 'XZ'
main.no_cache = True
main.header = None
main.message = None
main.add(DirectoryObject(
key=Callback(epg_menu),
title='EPG / Live TV',
thumb=R(ICON_LIVE),
))
main.add(DirectoryObject(
key=Callback(dvr_menu),
title='Rec Timers',
thumb=R(ICON_REC),
))
return main
@route(PREFIX + '/live')
def epg_menu():
global tvh
tvh.get_channel_list()
epg = ObjectContainer(
)
return epg
@route(PREFIX + '/rec')
def dvr_menu():
dvr = ObjectContainer(
)
return dvr
class TVheadend:
tvh = None
channels = {}
channelNumbers = []
def __init__(self):
pass
def connect(self, host, port):
address = (host, port)
self.tvh = htsp.HTSPClient(address, 'TVheadend Plex Client')
def login(self, login, password):
self.tvh.hello()
response = self.tvh.authenticate(login, password)
if 'noaccess' in response:
Log.Error('Authentication with TVheadend server failed')
return False
else:
return True
def get_channel_list(self):
self.tvh.send('enableAsyncMetadata')
while True:
msg = self.tvh.recv()
if 'error' in msg:
Log.Error(msg['error'])
                raise Exception(msg['error'])
elif 'method' in msg:
Log.Info(msg)
return msg['method']
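# Example (hypothetical) of driving the client outside of Plex; note that
# ValidatePrefs() derives the HTSP port as the configured HTTP port + 1:
#
#   tvh = TVheadend()
#   tvh.connect('localhost', 9982)
#   if tvh.login('user', 'secret'):
#       tvh.get_channel_list()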
| lgpl-3.0 | -8,167,687,725,349,745,000 | 21.743802 | 77 | 0.587573 | false |
groceryheist/UWBotThings | tests/testBotUserStream.py | 1 | 3811 | import time
import sys
import unittest
import random
sys.path.append("../code/")
sys.path.append("../code/models/")
from ModelBase import SessionFactory
from Bot import Bot, BotUserListener
import tweepy
class TestBotUserStream(unittest.TestCase):
def setUp(self):
self.sesh = SessionFactory()
self.testbotA = self.sesh.query(Bot).filter(
Bot.alias == 'testvaxBot').first()
self.testbotB = self.sesh.query(Bot).filter(
Bot.alias == 'Bernardo,Officer').first()
self.sesh.commit()
self.sesh.close()
    def tearDown(self):
self.sesh.close()
# def test_wakeUp(self):
# self.testbotA.wakeUp()
# print(self.testbotA)
# #test that we can wake up
# self.assertTrue(hasattr(
# self.testbotA,'api'))
# time.sleep(5)
# #test that we can run a search
# self.assertEqual(len(self.testbotA.api.search(q='bieber', count=1)), 1)
class TestDirectMessage(unittest.TestCase):
class testListener(BotUserListener):
def __init__(self, Api, testObj):
self.testObj = testObj
super(TestDirectMessage.testListener, self).__init__(self)
def on_connect(self):
self.testObj.ImReady()
        def on_data(self, raw_data):
            super(TestDirectMessage.testListener, self).on_data(raw_data)
            print raw_data
        def on_direct_message(self, message):
            print "simple message partly received"
            self.testObj.testDMCallback()
            super(TestDirectMessage.testListener, self).on_direct_message(
                message=message)
# def on_connect(self):
# self.testObj.ImReady()
def setUpClassOnce(self):
self.sesh = SessionFactory()
self.testbotA = self.sesh.query(Bot).filter(
Bot.alias == 'testvaxBot').first()
self.testbotB = self.sesh.query(Bot).filter(
Bot.alias == 'Bernardo,Officer').first()
self.isComplete = False
self.readyBots = 0
self.count = 0
self.testbotA.wakeUp()
self.testbotB.wakeUp()
#self.testbotA.follow(self.testbotB)
#self.testbotB.follow(self.testbotA)
print ("creating stream B")
self.testbotB.activateUserStream(TestDirectMessage.testListener(self.testbotB.api, self))
self.testbotB.closeUserStream()
self.testbotB.activateUserStream(TestDirectMessage.testListener(self.testbotB.api, self))
# print ('creating stream A')
# self.testbotA.activateUserStream(TestDirectMessage.testListener(self.testbotA.api, self))
def testDMCallback(self):
self.count += 1
if(self.count == 1):
self.testbotB.closeUserStream()
self.testbotA.activateUserStream(TestDirectMessage.testListener(self.testbotA.api,self))
if(self.count == 2):
self.testbotA.closeUserStream()
self.isComplete = True
def ImReady(self):
print ('im ready')
self.readyBots += 1
if self.readyBots == 1:
self.testbotA.sendDirectMesssage(
target=self.testbotB, text='can you hear me bot?' + str(random.randint(0,1000)))
print "message sent"
def test_directMessage(self):
self.setUpClassOnce()
tries = 0
        while not self.isComplete and tries < 20:
            time.sleep(1)
            tries += 1
        self.assertEqual(self.count, 2)
    def tearDown(self):
self.testbotA.closeUserStream()
        self.testbotB.closeUserStream()
self.sesh.commit()
self.sesh.close()
if __name__ == '__main__':
unittest.main()
| mit | -6,619,130,527,017,587,000 | 31.02521 | 100 | 0.592495 | false |
praekeltfoundation/ndoh-hub | registrations/tasks.py | 1 | 60994 | import json
import random
import re
import uuid
from datetime import datetime, timedelta
from functools import partial
import phonenumbers
import requests
from celery import chain
from celery.exceptions import SoftTimeLimitExceeded
from celery.task import Task
from celery.utils.log import get_task_logger
from demands import HTTPServiceError
from django.conf import settings
from django.contrib.auth.models import User
from django.utils import timezone, translation
from requests.exceptions import ConnectionError, HTTPError, RequestException
from seed_services_client.identity_store import IdentityStoreApiClient
from seed_services_client.service_rating import ServiceRatingApiClient
from temba_client.exceptions import TembaHttpError
from wabclient.exceptions import AddressException
from ndoh_hub import utils
from ndoh_hub.celery import app
from ndoh_hub.utils import rapidpro, redis
from .models import ClinicCode, JembiSubmission, Registration, Source, WhatsAppContact
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
is_client = IdentityStoreApiClient(
api_url=settings.IDENTITY_STORE_URL, auth_token=settings.IDENTITY_STORE_TOKEN
)
sr_client = ServiceRatingApiClient(
api_url=settings.SERVICE_RATING_URL, auth_token=settings.SERVICE_RATING_TOKEN
)
def get_risk_status(reg_type, mom_dob, edd):
""" Determine the risk level of the mother """
# high risk if postbirth registration
if "postbirth" in reg_type:
return "high"
# high risk if age < 18
age = utils.get_mom_age(utils.get_today(), mom_dob)
if age < 18:
return "high"
# high risk if registering after 20 weeks pregnant
weeks = utils.get_pregnancy_week(utils.get_today(), edd)
if weeks >= 20:
return "high"
# otherwise normal risk
return "normal"
class HTTPRetryMixin(object):
"""
A mixin for exponential delay retries on retriable http errors
"""
max_retries = 10
delay_factor = 1
jitter_percentage = 0.25
def on_failure(self, exc, task_id, args, kwargs, einfo):
delay = (2 ** self.request.retries) * self.delay_factor
delay *= 1 + (random.random() * self.jitter_percentage)
if (
isinstance(exc, HTTPError)
and self.request.retries < self.max_retries
and 500 <= exc.response.status_code < 600
):
raise self.retry(countdown=delay, exc=exc)
if isinstance(exc, ConnectionError) and self.request.retries < self.max_retries:
raise self.retry(countdown=delay, exc=exc)
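    # Sketch of the resulting backoff with delay_factor=1 (jitter adds up to
    # 25% on top): retry 0 -> ~1s, retry 1 -> ~2s, retry 2 -> ~4s, and so on
    # up to max_retries.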
class ValidateSubscribe(Task):
""" Task to validate a registration model entry's registration
data.
"""
name = "ndoh_hub.registrations.tasks.validate_subscribe"
log = get_task_logger(__name__)
# Validation checks
def check_lang(self, data_fields, registration):
if "language" not in data_fields:
return ["Language is missing from data"]
elif not utils.is_valid_lang(registration.data["language"]):
return ["Language not a valid option"]
else:
return []
def check_mom_dob(self, data_fields, registration):
if "mom_dob" not in data_fields:
return ["Mother DOB missing"]
elif not utils.is_valid_date(registration.data["mom_dob"]):
return ["Mother DOB invalid"]
else:
return []
def check_edd(self, data_fields, registration):
if "edd" not in data_fields:
return ["Estimated Due Date missing"]
elif not utils.is_valid_edd(registration.data["edd"]):
return ["Estimated Due Date invalid"]
else:
return []
def check_baby_dob(self, data_fields, registration):
if "baby_dob" not in data_fields:
return ["Baby Date of Birth missing"]
elif not utils.is_valid_date(registration.data["baby_dob"]):
return ["Baby Date of Birth invalid"]
else:
return []
def check_operator_id(self, data_fields, registration):
if "operator_id" not in data_fields:
return ["Operator ID missing"]
elif not utils.is_valid_uuid(registration.data["operator_id"]):
return ["Operator ID invalid"]
else:
return []
def check_msisdn_registrant(self, data_fields, registration):
if "msisdn_registrant" not in data_fields:
return ["MSISDN of Registrant missing"]
elif not utils.is_valid_msisdn(registration.data["msisdn_registrant"]):
return ["MSISDN of Registrant invalid"]
else:
return []
def check_msisdn_device(self, data_fields, registration):
if "msisdn_device" not in data_fields:
return ["MSISDN of device missing"]
elif not utils.is_valid_msisdn(registration.data["msisdn_device"]):
return ["MSISDN of device invalid"]
else:
return []
def check_faccode(self, data_fields, registration):
if "faccode" not in data_fields:
return ["Facility (clinic) code missing"]
elif not utils.is_valid_faccode(registration.data["faccode"]):
return ["Facility code invalid"]
else:
return []
def check_consent(self, data_fields, registration):
if "consent" not in data_fields:
return ["Consent is missing"]
elif registration.data["consent"] is not True:
return ["Cannot continue without consent"]
else:
return []
def check_sa_id_no(self, data_fields, registration):
if "sa_id_no" not in data_fields:
return ["SA ID number missing"]
elif not utils.is_valid_sa_id_no(registration.data["sa_id_no"]):
return ["SA ID number invalid"]
else:
return []
def check_passport_no(self, data_fields, registration):
if "passport_no" not in data_fields:
return ["Passport number missing"]
elif not utils.is_valid_passport_no(registration.data["passport_no"]):
return ["Passport number invalid"]
else:
return []
def check_passport_origin(self, data_fields, registration):
if "passport_origin" not in data_fields:
return ["Passport origin missing"]
elif not utils.is_valid_passport_origin(registration.data["passport_origin"]):
return ["Passport origin invalid"]
else:
return []
def check_id(self, data_fields, registration):
if "id_type" not in data_fields:
return ["ID type missing"]
elif not utils.is_valid_id_type(registration.data["id_type"]):
return ["ID type should be one of {}".format(utils.ID_TYPES)]
else:
id_errors = []
if registration.data["id_type"] == "sa_id":
id_errors += self.check_sa_id_no(data_fields, registration)
id_errors += self.check_mom_dob(data_fields, registration)
elif registration.data["id_type"] == "passport":
id_errors += self.check_passport_no(data_fields, registration)
id_errors += self.check_passport_origin(data_fields, registration)
elif registration.data["id_type"] == "none":
id_errors += self.check_mom_dob(data_fields, registration)
return id_errors
# Validate
def validate(self, registration):
""" Validates that all the required info is provided for a
registration.
"""
self.log.info("Starting registration validation")
validation_errors = []
# Check if registrant_id is a valid UUID
if not utils.is_valid_uuid(registration.registrant_id):
validation_errors += ["Invalid UUID registrant_id"]
# Check that required fields are provided and valid
data_fields = registration.data.keys()
if "pmtct_prebirth" in registration.reg_type:
validation_errors += self.check_lang(data_fields, registration)
validation_errors += self.check_mom_dob(data_fields, registration)
validation_errors += self.check_edd(data_fields, registration)
validation_errors += self.check_operator_id(data_fields, registration)
elif "pmtct_postbirth" in registration.reg_type:
validation_errors += self.check_lang(data_fields, registration)
validation_errors += self.check_mom_dob(data_fields, registration)
validation_errors += self.check_baby_dob(data_fields, registration)
validation_errors += self.check_operator_id(data_fields, registration)
elif "nurseconnect" in registration.reg_type:
validation_errors += self.check_faccode(data_fields, registration)
validation_errors += self.check_operator_id(data_fields, registration)
validation_errors += self.check_msisdn_registrant(data_fields, registration)
validation_errors += self.check_msisdn_device(data_fields, registration)
validation_errors += self.check_lang(data_fields, registration)
elif registration.reg_type in ["momconnect_prebirth", "whatsapp_prebirth"]:
# Checks that apply to clinic, chw, public
validation_errors += self.check_operator_id(data_fields, registration)
validation_errors += self.check_msisdn_registrant(data_fields, registration)
validation_errors += self.check_msisdn_device(data_fields, registration)
validation_errors += self.check_lang(data_fields, registration)
validation_errors += self.check_consent(data_fields, registration)
# Checks that apply to clinic, chw
if registration.source.authority in ["hw_full", "hw_partial"]:
validation_errors += self.check_id(data_fields, registration)
# Checks that apply to clinic only
if registration.source.authority == "hw_full":
validation_errors += self.check_edd(data_fields, registration)
validation_errors += self.check_faccode(data_fields, registration)
elif registration.reg_type in ("momconnect_postbirth", "whatsapp_postbirth"):
if registration.source.authority == "hw_full":
validation_errors += self.check_operator_id(data_fields, registration)
validation_errors += self.check_msisdn_registrant(
data_fields, registration
)
validation_errors += self.check_msisdn_device(data_fields, registration)
validation_errors += self.check_lang(data_fields, registration)
validation_errors += self.check_consent(data_fields, registration)
validation_errors += self.check_id(data_fields, registration)
validation_errors += self.check_baby_dob(data_fields, registration)
validation_errors += self.check_faccode(data_fields, registration)
else:
validation_errors += [
"Momconnect postbirth not yet supported for public or CHW"
]
elif registration.reg_type == "loss_general":
validation_errors.append("Loss general not yet supported")
# Evaluate if there were any problems, save and return
if len(validation_errors) == 0:
self.log.info(
"Registration validated successfully - updating " "registration object"
)
registration.validated = True
registration.save()
self.log.info("Registration object updated.")
return True
else:
self.log.info(
"Registration validation failed - updating " "registration object"
)
registration.data["invalid_fields"] = validation_errors
registration.save()
self.log.info("Registration object updated.")
return False
def create_popi_subscriptionrequest(self, registration):
"""
Creates a new subscription request for the POPI message set. This
message set tells the user how to access the POPI required services.
This should only be sent for Clinic or CHW registrations.
"""
if registration.reg_type not in (
"momconnect_prebirth",
"momconnect_postbirth",
"whatsapp_prebirth",
"whatsapp_postbirth",
) or registration.source.authority not in ["hw_partial", "hw_full"]:
return "POPI Subscription request not created"
self.log.info("Fetching messageset")
r = ""
msgset_id, msgset_schedule, next_sequence_number = r
self.log.info("Creating subscription request")
from .models import SubscriptionRequest
SubscriptionRequest.objects.create(
identity=registration.registrant_id,
messageset=msgset_id,
next_sequence_number=next_sequence_number,
lang=registration.data["language"],
schedule=msgset_schedule,
)
self.log.info("POPI Subscription request created")
return "POPI Subscription Request created"
def create_service_info_subscriptionrequest(self, registration):
"""
Creates a new subscription request for the service info message set.
This should only be created for momconnect whatsapp registrations.
"""
if registration.reg_type not in (
"whatsapp_prebirth",
"whatsapp_postbirth",
) or registration.source.authority in ["hw_partial", "patient"]:
return
self.log.info("Fetching messageset")
if registration.reg_type == "whatsapp_prebirth":
weeks = utils.get_pregnancy_week(
utils.get_today(), registration.data["edd"]
)
else:
weeks = (
utils.get_baby_age(utils.get_today(), registration.data["baby_dob"])
+ 40
)
msgset_short_name = utils.get_messageset_short_name(
"whatsapp_service_info", registration.source.authority, weeks
)
r = utils.get_messageset_schedule_sequence(msgset_short_name, weeks)
msgset_id, msgset_schedule, next_sequence_number = r
self.log.info("Creating subscription request")
from .models import SubscriptionRequest
SubscriptionRequest.objects.create(
identity=registration.registrant_id,
messageset=msgset_id,
next_sequence_number=next_sequence_number,
lang=registration.data["language"],
schedule=msgset_schedule,
)
self.log.info("Service Info Subscription request created")
# Create SubscriptionRequest
def create_subscriptionrequests(self, registration):
""" Create SubscriptionRequest(s) based on the
validated registration.
"""
self.log.info("Starting subscriptionrequest creation")
self.log.info("Calculating weeks")
weeks = 1 # default week number
# . calculate weeks along
if registration.reg_type in (
"momconnect_prebirth",
"whatsapp_prebirth",
) and registration.source.authority not in ["hw_partial", "patient"]:
weeks = utils.get_pregnancy_week(
utils.get_today(), registration.data["edd"]
)
elif "pmtct_prebirth" in registration.reg_type:
weeks = utils.get_pregnancy_week(
utils.get_today(), registration.data["edd"]
)
elif "pmtct_postbirth" in registration.reg_type:
weeks = utils.get_baby_age(utils.get_today(), registration.data["baby_dob"])
elif (
registration.reg_type in ("momconnect_postbirth", "whatsapp_postbirth")
and registration.source.authority == "hw_full"
):
weeks = utils.get_baby_age(utils.get_today(), registration.data["baby_dob"])
# . determine messageset shortname
self.log.info("Determining messageset shortname")
short_name = utils.get_messageset_short_name(
registration.reg_type, registration.source.authority, weeks
)
# . determine sbm details
self.log.info("Determining SBM details")
r = utils.get_messageset_schedule_sequence(short_name, weeks)
msgset_id, msgset_schedule, next_sequence_number = r
subscription = {
"identity": registration.registrant_id,
"messageset": msgset_id,
"next_sequence_number": next_sequence_number,
"lang": registration.data["language"],
"schedule": msgset_schedule,
}
self.log.info("Creating SubscriptionRequest object")
from .models import SubscriptionRequest
SubscriptionRequest.objects.create(**subscription)
self.log.info("SubscriptionRequest created")
return "SubscriptionRequest created"
# Create ServiceRating Invite
def create_servicerating_invite(self, registration):
""" Create a new servicerating invite
"""
invite_data = {
"identity": registration.registrant_id
# could provide "invite" to override servicerating defaults
}
self.log.info("Creating ServiceRating invite")
response = sr_client.create_invite(invite_data)
self.log.info("Created ServiceRating invite")
return response
# Set risk status
def set_risk_status(self, registration):
""" Determine the risk status of the mother and save it to her identity
"""
self.log.info("Calculating risk level")
risk = get_risk_status(
registration.reg_type,
registration.data["mom_dob"],
registration.data["edd"],
)
self.log.info("Reading the identity")
identity = is_client.get_identity(registration.registrant_id)
details = identity["details"]
if "pmtct" in details:
details["pmtct"]["risk_status"] = risk
else:
details["pmtct"] = {"risk_status": risk}
self.log.info("Saving risk level to the identity")
is_client.update_identity(registration.registrant_id, {"details": details})
self.log.info("Identity updated with risk level")
return risk
def opt_in_identity(self, registration):
"""
Opts in the identity if they've previously opted out
"""
try:
msisdn = registration.data["msisdn_registrant"]
except KeyError:
return
opt_in_identity.delay(
registration.registrant_id, msisdn, registration.source_id
)
def send_welcome_message(self, registration):
"""
If this is a prebirth momconnect registration, send the welcome message
"""
if registration.reg_type not in ("momconnect_prebirth", "whatsapp_prebirth"):
return
if registration.source.authority != "hw_full":
# Only clinic registrations should get this message
return
try:
msisdn = registration.data["msisdn_registrant"]
language = registration.data["language"]
except KeyError:
return
send_welcome_message.delay(
language=language,
channel="WHATSAPP" if "whatsapp" in registration.reg_type else "JUNE_TEXT",
msisdn=msisdn,
identity_id=registration.registrant_id,
)
# Run
def run(self, registration_id, **kwargs):
""" Sets the registration's validated field to True if
validation is successful.
"""
self.log = self.get_logger(**kwargs)
self.log.info("Looking up the registration")
from .models import Registration
registration = Registration.objects.get(id=registration_id)
if registration.reg_type == "jembi_momconnect":
            # We do this validation in its own task
return
reg_validates = self.validate(registration)
if reg_validates:
self.create_subscriptionrequests(registration)
self.create_popi_subscriptionrequest(registration)
self.create_service_info_subscriptionrequest(registration)
self.opt_in_identity(registration)
self.send_welcome_message(registration)
# NOTE: disable service rating for now
# if registration.reg_type == "momconnect_prebirth" and\
# registration.source.authority == "hw_full":
# self.create_servicerating_invite(registration)
if "pmtct" in registration.reg_type:
self.set_risk_status(registration)
self.log.info("Scheduling registration push to Jembi")
jembi_task = BasePushRegistrationToJembi.get_jembi_task_for_registration(
registration
)
task = chain(
jembi_task.si(str(registration.pk)),
remove_personally_identifiable_fields.si(str(registration.pk)),
)
task.delay()
self.log.info("Task executed successfully")
return True
else:
self.log.info("Task terminated due to validation issues")
return False
validate_subscribe = ValidateSubscribe()
@app.task()
def remove_personally_identifiable_fields(registration_id):
"""
Saves the personally identifiable fields to the identity, and then
removes them from the registration object.
"""
registration = Registration.objects.get(id=registration_id)
fields = set(
(
"id_type",
"mom_dob",
"passport_no",
"passport_origin",
"sa_id_no",
"language",
"consent",
"mom_given_name",
"mom_family_name",
"mom_email",
)
).intersection(registration.data.keys())
if fields:
identity = is_client.get_identity(registration.registrant_id)
for field in fields:
# Language is stored as 'lang_code' in the Identity Store
if field == "language":
identity["details"]["lang_code"] = registration.data.pop(field)
continue
identity["details"][field] = registration.data.pop(field)
is_client.update_identity(identity["id"], {"details": identity["details"]})
msisdn_fields = set(("msisdn_device", "msisdn_registrant")).intersection(
registration.data.keys()
)
for field in msisdn_fields:
msisdn = registration.data.pop(field)
identities = is_client.get_identity_by_address("msisdn", msisdn)
try:
field_identity = next(identities["results"])
except StopIteration:
field_identity = is_client.create_identity(
{"details": {"addresses": {"msisdn": {msisdn: {}}}}}
)
field = field.replace("msisdn", "uuid")
registration.data[field] = field_identity["id"]
registration.save()
def add_personally_identifiable_fields(registration):
"""
Sometimes we might want to rerun the validation and subscription, and for
that we want to put back any fields that we placed on the identity when
anonymising the registration.
This function just adds those fields to the 'registration' object, it
doesn't save those fields to the database.
"""
identity = is_client.get_identity(registration.registrant_id)
if not identity:
return registration
fields = (
set(
(
"id_type",
"mom_dob",
"passport_no",
"passport_origin",
"sa_id_no",
"lang_code",
"consent",
"mom_given_name",
"mom_family_name",
"mom_email",
)
)
.intersection(identity["details"].keys())
.difference(registration.data.keys())
)
for field in fields:
if field == "lang_code":
registration.data["language"] = identity["details"][field]
continue
registration.data[field] = identity["details"][field]
uuid_fields = set(("uuid_device", "uuid_registrant")).intersection(
registration.data.keys()
)
for field in uuid_fields:
msisdn = utils.get_identity_msisdn(registration.data[field])
if msisdn:
field = field.replace("uuid", "msisdn")
registration.data[field] = msisdn
return registration
class ValidateSubscribeJembiAppRegistration(HTTPRetryMixin, ValidateSubscribe):
"""
Validates and creates subscriptions for registrations coming from the
Jembi application.
"""
def is_primary_address(self, addr_type, address, identity):
"""
Returns whether `address` is the primary address for `identity`
Arguments:
addr_type {string} -- The type of address to check for
address {string} -- The address to check for
identity {dict} -- The identity that has addresses to check
Returns:
A bool which is `True` when the address is the identity's primary
address.
"""
return all(
map(
lambda addr: address == addr[0] or not addr[1].get("default"),
identity.get("details", {})
.get("addresses", {})
.get(addr_type, {})
.items(),
)
)
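        # Example (hypothetical) identity structure this checks against:
        #
        #   {"details": {"addresses": {"msisdn": {
        #       "+27820001001": {"default": True},
        #       "+27820001002": {},
        #   }}}}
        #
        # is_primary_address("msisdn", "+27820001001", identity) -> True
        # is_primary_address("msisdn", "+27820001002", identity) -> False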
def get_or_update_identity_by_address(self, address):
"""
Gets the first identity with the given primary address, or if no
identity exists, creates an identity with the given address
Arguments:
address {string} -- The MSISDN to search for
Returns:
A dict representing the identity for `address`
"""
identities = filter(
partial(self.is_primary_address, "msisdn", address),
is_client.get_identity_by_address("msisdn", address)["results"],
)
try:
return next(identities)
except StopIteration:
identity = {
"details": {
"default_addr_type": "msisdn",
"addresses": {"msisdn": {address: {"default": True}}},
}
}
return is_client.create_identity(identity)
def is_opted_out(self, identity, address):
"""
Returns whether or not an address on an identity is opted out
"""
addr_details = identity["details"]["addresses"]["msisdn"][address]
return "optedout" in addr_details and addr_details["optedout"] is True
def opt_in(self, identity, address, source):
"""
Opts in a previously opted out identity
"""
optin = {
"identity": identity["id"],
"address_type": "msisdn",
"address": address,
"request_source": source.name,
"requestor_source_id": source.id,
}
return is_client.create_optin(optin)
def fail_validation(self, registration, reason):
"""
Validation for the registration has failed
"""
registration.data["invalid_fields"] = reason
registration.save()
return self.send_webhook(registration)
def fail_error(self, registration, reason):
"""
Uncaught error that caused the registration to fail
"""
registration.data["error_data"] = reason
registration.save()
return self.send_webhook(registration)
def registration_success(self, registration):
"""
Registration has been successfully processed
"""
return self.send_webhook(registration)
def send_webhook(self, registration):
"""
        Sends the registration status as a webhook, if a callback URL is
        specified for the given registration
"""
url = registration.data.get("callback_url", None)
token = registration.data.get("callback_auth_token", None)
headers = {}
if token is not None:
headers["Authorization"] = "Bearer {}".format(token)
if url is not None:
http_request_with_retries.delay(
method="POST", url=url, headers=headers, payload=registration.status
)
def is_registered_on_whatsapp(self, address):
"""
        Returns whether or not the number is registered on WhatsApp, checked
        via the Engage contacts endpoint
"""
r = requests.post(
urljoin(settings.ENGAGE_URL, "v1/contacts"),
json={"blocking": "wait", "contacts": [address]},
headers={"Authorization": "Bearer {}".format(settings.ENGAGE_TOKEN)},
)
r.raise_for_status()
data = r.json()
existing = filter(lambda d: d.get("status", False) == "valid", data["contacts"])
return any(existing)
def create_pmtct_registration(self, registration, operator):
if "whatsapp" in registration.reg_type:
reg_type = "whatsapp_pmtct_prebirth"
else:
reg_type = "pmtct_prebirth"
data = {
"language": registration.data["language"],
"mom_dob": registration.data["mom_dob"],
"edd": registration.data["edd"],
"operator_id": operator["id"],
}
Registration.objects.create(
reg_type=reg_type,
registrant_id=registration.registrant_id,
source=registration.source,
created_by=registration.created_by,
data=data,
)
def is_identity_subscribed(self, identity, regex):
"""
Checks to see if the identity is subscribed to the specified
messageset. Check is done on the short name of the messageset matching
the given regular expression
"""
active_subs = utils.sbm_client.get_subscriptions(
{"identity": identity["id"], "active": True}
)["results"]
messagesets = utils.sbm_client.get_messagesets()["results"]
messagesets = {ms["id"]: ms["short_name"] for ms in messagesets}
for sub in active_subs:
short_name = messagesets[sub["messageset"]]
if re.search(regex, short_name):
return True
return False
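    # Called below with patterns like r"prebirth\.hw_full" to detect an
    # existing active MomConnect clinic subscription for the registrant.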
def is_valid_clinic_code(self, code):
"""
Checks to see if the specified clinic code is recognised or not
"""
return ClinicCode.objects.filter(code=code).exists()
def run(self, registration_id, **kwargs):
registration = Registration.objects.get(id=registration_id)
msisdn_registrant = registration.data["msisdn_registrant"]
registrant = self.get_or_update_identity_by_address(msisdn_registrant)
device = self.get_or_update_identity_by_address(
registration.data["msisdn_device"]
)
registration.registrant_id = registrant["id"]
# Check for existing subscriptions
if self.is_identity_subscribed(registrant, r"prebirth\.hw_full"):
self.fail_validation(
registration,
{
"mom_msisdn": "Number is already subscribed to MomConnect "
"messaging"
},
)
return
# Check for previously opted out
if self.is_opted_out(registrant, msisdn_registrant):
if registration.data["mom_opt_in"]:
self.opt_in(registrant, msisdn_registrant, registration.source)
else:
self.fail_validation(
registration,
{
"mom_opt_in": "Mother has previously opted out and has "
"not chosen to opt back in again"
},
)
return
# Determine WhatsApp vs SMS registration
registration.data["registered_on_whatsapp"] = self.is_registered_on_whatsapp(
msisdn_registrant
)
if (
registration.data["mom_whatsapp"]
and registration.data["registered_on_whatsapp"]
):
registration.reg_type = "whatsapp_prebirth"
else:
registration.reg_type = "momconnect_prebirth"
# Check clinic code
if not self.is_valid_clinic_code(registration.data["faccode"]):
self.fail_validation(
registration, {"clinic_code": "Not a recognised clinic code"}
)
return
registration.validated = True
registration.save()
# Create subscriptions
self.create_subscriptionrequests(registration)
self.create_popi_subscriptionrequest(registration)
self.create_service_info_subscriptionrequest(registration)
# Send welcome message
send_welcome_message(
language=registration.data["language"],
channel="WHATSAPP" if "whatsapp" in registration.reg_type else "JUNE_TEXT",
msisdn=msisdn_registrant,
identity_id=registration.registrant_id,
)
# Push to Jembi and remove personally identifiable information
jembi_task = BasePushRegistrationToJembi.get_jembi_task_for_registration(
registration
)
task = chain(
jembi_task.si(str(registration.pk)),
remove_personally_identifiable_fields.si(str(registration.pk)),
)
task.delay()
# Create PMTCT registration if required
if registration.data["mom_pmtct"]:
self.create_pmtct_registration(registration, device)
# Send success webhook
self.registration_success(registration)
def on_failure(self, exc, task_id, args, kwargs, einfo):
super(ValidateSubscribeJembiAppRegistration, self).on_failure(
exc, task_id, args, kwargs, einfo
)
# Send failure webhook
registration_id = kwargs.get("registration_id", None) or args[0]
registration = Registration.objects.get(id=registration_id)
self.fail_error(
registration,
{
"type": einfo.type.__name__,
"message": str(exc),
"traceback": einfo.traceback,
},
)
validate_subscribe_jembi_app_registration = ValidateSubscribeJembiAppRegistration()
@app.task(
autoretry_for=(RequestException, SoftTimeLimitExceeded, TembaHttpError),
retry_backoff=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def submit_jembi_registration_to_rapidpro(data):
rapidpro.create_flow_start(
settings.RAPIDPRO_JEMBI_REGISTRATION_FLOW,
urns=[f"whatsapp:{data['msisdn_registrant'].strip('+')}"],
extra=data,
)
class BasePushRegistrationToJembi(object):
"""
Base class that contains helper functions for pushing registration data
to Jembi.
"""
name = "ndoh_hub.registrations.tasks.base_push_registration_to_jembi"
log = get_task_logger(__name__)
def get_patient_id(
self, id_type, id_no=None, passport_origin=None, mom_msisdn=None
):
if id_type == "sa_id":
return id_no + "^^^ZAF^NI"
elif id_type == "passport":
return id_no + "^^^" + passport_origin.upper() + "^PPN"
elif mom_msisdn:
return mom_msisdn.replace("+", "") + "^^^ZAF^TEL"
def get_dob(self, mom_dob):
if mom_dob is not None:
return mom_dob.strftime("%Y%m%d")
else:
return None
def get_today(self):
return datetime.today()
def get_timestamp(self, registration):
return registration.created_at.strftime("%Y%m%d%H%M%S")
@staticmethod
def get_jembi_task_for_registration(registration):
"""
NOTE: this is a convenience method for getting the relevant
Jembi task to fire for a registration.
"""
if "nurseconnect" in registration.reg_type:
return push_nurse_registration_to_jembi
if "pmtct" in registration.reg_type:
return push_pmtct_registration_to_jembi
return push_registration_to_jembi
@staticmethod
def get_authority_from_source(source):
"""
NOTE: this is a convenience method to map the new "source"
back to ndoh-control's "authority" fields to maintain
backwards compatibility with existing APIs
"""
source_name = source.name.upper()
if source_name.startswith("EXTERNAL CHW"):
# catch all external chw sources
return "chw"
elif source_name.startswith("EXTERNAL CLINIC"):
# catch all external clinic sources
return "clinic"
else:
return {
"PUBLIC USSD APP": "personal",
"OPTOUT USSD APP": "optout",
"CLINIC USSD APP": "clinic",
"CHW USSD APP": "chw",
"NURSE USSD APP": "nurse",
"PMTCT USSD APP": "pmtct",
"PUBLIC WHATSAPP APP": "personal",
"CLINIC WHATSAPP APP": "clinic",
}.get(source_name)
def run(self, registration_id, **kwargs):
from .models import Registration
registration = Registration.objects.get(pk=registration_id)
authority = self.get_authority_from_source(registration.source)
if authority is None:
self.log.error(
"Unable to establish authority for source %s. Skipping."
% (registration.source)
)
return
json_doc = self.build_jembi_json(registration)
request_to_jembi_api.delay(self.URL, json_doc)
class PushRegistrationToJembi(BasePushRegistrationToJembi, Task):
""" Task to push registration data to Jembi
"""
name = "ndoh_hub.registrations.tasks.push_registration_to_jembi"
log = get_task_logger(__name__)
URL = "subscription"
def get_subscription_type(self, authority):
authority_map = {
"personal": 1,
"chw": 2,
"clinic": 3,
"optout": 4,
# NOTE: these are other valid values recognised by Jembi but
# currently not used by us.
# 'babyloss': 5,
# 'servicerating': 6,
# 'helpdesk': 7,
"pmtct": 9,
}
return authority_map[authority]
def get_software_type(self, registration):
""" Get the software type (swt) code Jembi expects """
if registration.data.get("swt", None):
return registration.data.get("swt")
if "whatsapp" in registration.reg_type:
registration.data["swt"] = 7 # USSD4WHATSAPP
registration.save()
return 7
return 1 # Default 1
def transform_language_code(self, lang):
return {
"zul_ZA": "zu",
"xho_ZA": "xh",
"afr_ZA": "af",
"eng_ZA": "en",
"nso_ZA": "nso",
"tsn_ZA": "tn",
"sot_ZA": "st",
"tso_ZA": "ts",
"ssw_ZA": "ss",
"ven_ZA": "ve",
"nbl_ZA": "nr",
}[lang]
def build_jembi_json(self, registration):
""" Compile json to be sent to Jembi. """
self.log.info("Compiling Jembi Json data for PushRegistrationToJembi")
authority = self.get_authority_from_source(registration.source)
id_msisdn = None
if not registration.data.get("msisdn_registrant"):
id_msisdn = utils.get_identity_msisdn(registration.registrant_id)
json_template = {
"mha": registration.data.get("mha", 1),
"swt": self.get_software_type(registration),
"dmsisdn": registration.data.get("msisdn_device"),
"cmsisdn": registration.data.get("msisdn_registrant", id_msisdn),
"id": self.get_patient_id(
registration.data.get("id_type"),
(
registration.data.get("sa_id_no")
if registration.data.get("id_type") == "sa_id"
else registration.data.get("passport_no")
),
# passport_origin may be None if sa_id is used
registration.data.get("passport_origin"),
registration.data.get("msisdn_registrant", id_msisdn),
),
"type": self.get_subscription_type(authority),
"lang": self.transform_language_code(registration.data["language"]),
"encdate": registration.data.get(
"encdate", self.get_timestamp(registration)
),
"faccode": registration.data.get("faccode"),
"dob": (
self.get_dob(
datetime.strptime(registration.data["mom_dob"], "%Y-%m-%d")
)
if registration.data.get("mom_dob")
else None
),
"sid": str(registration.registrant_id),
"eid": str(registration.id),
}
# Self registrations on all lines should use cmsisdn as dmsisdn too
if registration.data.get("msisdn_device") is None:
json_template["dmsisdn"] = registration.data.get(
"msisdn_registrant", id_msisdn
)
if authority == "clinic":
json_template["edd"] = datetime.strptime(
registration.data["edd"], "%Y-%m-%d"
).strftime("%Y%m%d")
return json_template
push_registration_to_jembi = PushRegistrationToJembi()
class PushPmtctRegistrationToJembi(PushRegistrationToJembi, Task):
""" Task to push PMTCT registration data to Jembi
"""
name = "ndoh_hub.registrations.tasks.push_pmtct_registration_to_jembi"
URL = "pmtctSubscription"
def build_jembi_json(self, registration):
json_template = super(PushPmtctRegistrationToJembi, self).build_jembi_json(
registration
)
json_template["risk_status"] = get_risk_status(
registration.reg_type,
registration.data["mom_dob"],
registration.data["edd"],
)
if not json_template.get("faccode"):
related_reg = (
Registration.objects.filter(
validated=True,
registrant_id=registration.registrant_id,
data__has_key="faccode",
)
.exclude(
reg_type__in=(
"whatsapp_pmtct_prebirth",
"pmtct_prebirth",
"whatsapp_pmtct_postbirth",
"pmtct_postbirth",
)
)
.order_by("-created_at")
.first()
)
if related_reg:
json_template["faccode"] = related_reg.data["faccode"]
return json_template
push_pmtct_registration_to_jembi = PushPmtctRegistrationToJembi()
class PushNurseRegistrationToJembi(BasePushRegistrationToJembi, Task):
name = "ndoh_hub.registrations.tasks.push_nurse_registration_to_jembi"
log = get_task_logger(__name__)
URL = "nc/subscription"
def get_persal(self, identity):
details = identity["details"]
return details.get("nurseconnect", {}).get("persal_no")
def get_sanc(self, identity):
details = identity["details"]
return details.get("nurseconnect", {}).get("sanc_reg_no")
def get_software_type(self, registration):
""" Get the software type (swt) code Jembi expects """
if registration.data.get("swt", None):
return registration.data.get("swt")
if "whatsapp" in registration.reg_type:
registration.data["swt"] = 7 # USSD4WHATSAPP
registration.save(update_fields=("data",))
return 7
return 3 # Default 3
def build_jembi_json(self, registration):
"""
Compiles and returns a dictionary representing the JSON that should
be sent to Jembi for the given registration.
"""
self.log.info("Compiling Jembi Json data for PushNurseRegistrationToJembi")
identity = is_client.get_identity(registration.registrant_id)
json_template = {
"mha": 1,
"swt": self.get_software_type(registration),
"type": 7,
"dmsisdn": registration.data["msisdn_device"],
"cmsisdn": registration.data["msisdn_registrant"],
# NOTE: this likely needs to be updated to reflect a change
# in msisdn as `rmsisdn` stands for replacement msisdn
"rmsisdn": None,
"faccode": registration.data["faccode"],
"id": self.get_patient_id(
registration.data.get("id_type"),
(
registration.data.get("sa_id_no")
if registration.data.get("id_type") == "sa_id"
else registration.data.get("passport_no")
),
# passport_origin may be None if sa_id is used
registration.data.get("passport_origin"),
registration.data["msisdn_registrant"],
),
"dob": (
self.get_dob(
datetime.strptime(registration.data["mom_dob"], "%Y-%m-%d")
)
if registration.data.get("mom_db")
else None
),
"persal": self.get_persal(identity),
"sanc": self.get_sanc(identity),
"encdate": self.get_timestamp(registration),
"sid": str(registration.registrant_id),
"eid": str(registration.id),
}
return json_template
push_nurse_registration_to_jembi = PushNurseRegistrationToJembi()
class DeliverHook(Task):
def run(self, target, payload, instance_id=None, hook_id=None, **kwargs):
"""
target: the url to receive the payload.
payload: a python primitive data structure
instance_id: a possibly None "trigger" instance ID
hook_id: the ID of defining Hook object
"""
requests.post(
url=target,
data=json.dumps(payload),
headers={
"Content-Type": "application/json",
"Authorization": "Token %s" % settings.HOOK_AUTH_TOKEN,
},
)
def deliver_hook_wrapper(target, payload, instance, hook):
if instance is not None:
if isinstance(instance.id, uuid.UUID):
instance_id = str(instance.id)
else:
instance_id = instance.id
else:
instance_id = None
kwargs = dict(
target=target, payload=payload, instance_id=instance_id, hook_id=hook.id
)
DeliverHook.apply_async(kwargs=kwargs)
class HTTPRequestWithRetries(HTTPRetryMixin, Task):
def run(self, method, url, headers, payload):
r = requests.request(method, url, headers=headers, json=payload)
r.raise_for_status()
return r.text
http_request_with_retries = HTTPRequestWithRetries()
@app.task(
autoretry_for=(RequestException, SoftTimeLimitExceeded, HTTPError),
retry_backoff=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def get_whatsapp_contact(msisdn):
"""
Fetches the whatsapp contact ID from the API, and stores it in the database.
Args:
msisdn (str): The MSISDN to perform the lookup for.
"""
if redis.get(f"wacontact:{msisdn}"):
return
with redis.lock(f"wacontact:{msisdn}", timeout=15):
# Try to get existing
try:
contact = (
WhatsAppContact.objects.filter(
created__gt=timezone.now() - timedelta(days=7)
)
.filter(msisdn=msisdn)
.latest("created")
)
return contact.api_format
except WhatsAppContact.DoesNotExist:
pass
# If no existing, fetch status from API and create
try:
whatsapp_id = utils.wab_client.get_address(msisdn)
except AddressException:
whatsapp_id = ""
contact = WhatsAppContact.objects.create(msisdn=msisdn, whatsapp_id=whatsapp_id)
return contact.api_format
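# Example (hypothetical) call site; a successful lookup is cached in the
# database for 7 days per MSISDN, guarded by a redis lock:
#
#   get_whatsapp_contact.delay("+27820001001")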
@app.task(
autoretry_for=(RequestException, HTTPServiceError, SoftTimeLimitExceeded),
retry_backoff=True,
retry_jitter=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def get_or_create_identity_from_msisdn(context, field):
"""
    Fetches the identity from the identity store using the MSISDN found at
    `field` in the context, and adds it to the context as `{field}_identity`.
    Creates the identity if it doesn't exist.
Args:
context (dict): The context to find the msisdn and add the ID in
field (str): The field in the context that contains the MSISDN
"""
msisdn = phonenumbers.parse(context[field], "ZA")
msisdn = phonenumbers.format_number(msisdn, phonenumbers.PhoneNumberFormat.E164)
try:
identity = next(
utils.is_client.get_identity_by_address("msisdn", msisdn)["results"]
)
except StopIteration:
identity = utils.is_client.create_identity(
{
"details": {
"default_addr_type": "msisdn",
"addresses": {"msisdn": {msisdn: {"default": True}}},
}
}
)
context["{}_identity".format(field)] = identity
return context
@app.task(
autoretry_for=(RequestException, HTTPServiceError, SoftTimeLimitExceeded),
retry_backoff=True,
retry_jitter=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def update_identity_from_rapidpro_clinic_registration(context):
"""
Updates the identity's details from the registration details
"""
identity = context["mom_msisdn_identity"]
identity["details"]["lang_code"] = context["mom_lang"]
identity["details"]["consent"] = True
identity["details"]["last_mc_reg_on"] = "clinic"
if context["mom_id_type"] == "sa_id":
identity["details"]["sa_id_no"] = context["mom_sa_id_no"]
identity["details"]["mom_dob"] = datetime.strptime(
context["mom_sa_id_no"][:6], "%y%m%d"
).strftime("%Y-%m-%d")
elif context["mom_id_type"] == "passport":
identity["details"]["passport_no"] = context["mom_passport_no"]
identity["details"]["passport_origin"] = context["mom_passport_origin"]
else: # mom_id_type == none
identity["details"]["mom_dob"] = context["mom_dob"]
if context["registration_type"] == "prebirth":
identity["details"]["last_edd"] = context["mom_edd"]
else: # registration_type == postbirth
identity["details"]["last_baby_dob"] = context["baby_dob"]
context["mom_msisdn_identity"] = utils.is_client.update_identity(
identity["id"], {"details": identity["details"]}
)
return context
@app.task(
autoretry_for=(SoftTimeLimitExceeded,),
retry_backoff=True,
retry_jitter=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def _create_rapidpro_clinic_registration(context):
"""
Creates the registration from the registration details
"""
user = User.objects.get(id=context["user_id"])
source = Source.objects.get(user=user)
reg_type = {
("prebirth", "WhatsApp"): "whatsapp_prebirth",
("prebirth", "SMS"): "momconnect_prebirth",
("postbirth", "WhatsApp"): "whatsapp_postbirth",
("postbirth", "SMS"): "momconnect_postbirth",
}.get((context["registration_type"], context["channel"]))
data = {
"operator_id": context["device_msisdn_identity"]["id"],
"msisdn_registrant": context["mom_msisdn"],
"msisdn_device": context["device_msisdn"],
"id_type": context["mom_id_type"],
"language": context["mom_lang"],
"faccode": context["clinic_code"],
"consent": True,
"mha": 6,
}
if data["id_type"] == "sa_id":
data["sa_id_no"] = context["mom_sa_id_no"]
data["mom_dob"] = datetime.strptime(
context["mom_sa_id_no"][:6], "%y%m%d"
).strftime("%Y-%m-%d")
elif data["id_type"] == "passport":
data["passport_no"] = context["mom_passport_no"]
data["passport_origin"] = context["mom_passport_origin"]
else: # id_type = None
data["mom_dob"] = context["mom_dob"]
if context["registration_type"] == "prebirth":
data["edd"] = context["mom_edd"]
else: # registration_type = postbirth
data["baby_dob"] = context["baby_dob"]
Registration.objects.create(
reg_type=reg_type,
registrant_id=context["mom_msisdn_identity"]["id"],
source=source,
created_by=user,
updated_by=user,
data=data,
)
create_rapidpro_clinic_registration = (
get_or_create_identity_from_msisdn.s("mom_msisdn")
| update_identity_from_rapidpro_clinic_registration.s()
| get_or_create_identity_from_msisdn.s("device_msisdn")
| _create_rapidpro_clinic_registration.s()
)
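# The chain above is started with the registration context dict as its first
# argument; hypothetical example values:
#
#   create_rapidpro_clinic_registration.delay({
#       "user_id": 1,
#       "mom_msisdn": "0820001001",
#       "device_msisdn": "0820001002",
#       "mom_id_type": "none",
#       "mom_dob": "1990-01-01",
#       "mom_lang": "eng_ZA",
#       "mom_edd": "2019-01-01",
#       "registration_type": "prebirth",
#       "channel": "WhatsApp",
#       "clinic_code": "123456",
#   })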
@app.task(
autoretry_for=(RequestException, HTTPServiceError, SoftTimeLimitExceeded),
retry_backoff=True,
retry_jitter=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def update_identity_from_rapidpro_public_registration(context):
"""
Updates the identity's details from the registration details
"""
identity = context["mom_msisdn_identity"]
identity["details"]["lang_code"] = context["mom_lang"]
identity["details"]["consent"] = True
identity["details"]["last_mc_reg_on"] = "public"
context["mom_msisdn_identity"] = utils.is_client.update_identity(
identity["id"], {"details": identity["details"]}
)
return context
@app.task(
autoretry_for=(SoftTimeLimitExceeded,),
retry_backoff=True,
retry_jitter=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def _create_rapidpro_public_registration(context):
user = User.objects.get(id=context["user_id"])
source = Source.objects.get(user=user)
data = {
"operator_id": context["mom_msisdn_identity"]["id"],
"msisdn_registrant": context["mom_msisdn"],
"msisdn_device": context["mom_msisdn"],
"language": context["mom_lang"],
"consent": True,
"registered_on_whatsapp": True,
"mha": 6,
}
Registration.objects.create(
reg_type="whatsapp_prebirth",
registrant_id=context["mom_msisdn_identity"]["id"],
source=source,
created_by=user,
updated_by=user,
data=data,
)
create_rapidpro_public_registration = (
get_or_create_identity_from_msisdn.s("mom_msisdn")
| update_identity_from_rapidpro_public_registration.s()
| _create_rapidpro_public_registration.s()
)
@app.task
def store_jembi_request(url, json_doc):
sub = JembiSubmission.objects.create(path=url, request_data=json_doc)
return sub.id, url, json_doc
@app.task(
autoretry_for=(RequestException, SoftTimeLimitExceeded),
retry_backoff=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def push_to_jembi_api(args):
if not settings.ENABLE_JEMBI_EVENTS:
return
db_id, url, json_doc = args
r = requests.post(
url=urljoin(settings.JEMBI_BASE_URL, url),
headers={"Content-Type": "application/json"},
data=json.dumps(json_doc),
auth=(settings.JEMBI_USERNAME, settings.JEMBI_PASSWORD),
verify=False,
)
r.raise_for_status()
JembiSubmission.objects.filter(pk=db_id).update(
submitted=True,
response_status_code=r.status_code,
response_headers=dict(r.headers),
response_body=r.text,
)
if settings.ENABLE_JEMBI_EVENTS:
request_to_jembi_api = store_jembi_request.s() | push_to_jembi_api.s()
else:
request_to_jembi_api = store_jembi_request.s()
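# Used elsewhere in this module (see BasePushRegistrationToJembi.run), e.g.:
#
#   request_to_jembi_api.delay("subscription", json_doc)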
@app.task
def delete_jembi_pii(msisdn):
JembiSubmission.objects.filter(request_data__cmsisdn=msisdn).delete()
@app.task(
autoretry_for=(RequestException, SoftTimeLimitExceeded),
retry_backoff=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def opt_in_identity(identity_id, address, source_id):
"""
Opts in an identity if previously opted out
"""
identity = is_client.get_identity(identity_id)
address_details = (
identity.get("details", {})
.get("addresses", {})
.get("msisdn", {})
.get(address, {})
)
if not address_details.get("optedout"):
return
source = Source.objects.get(id=source_id)
optin = {
"identity": identity_id,
"address_type": "msisdn",
"address": address,
"request_source": source.name,
"requestor_source_id": source.id,
}
return is_client.create_optin(optin)
@app.task(
autoretry_for=(RequestException, SoftTimeLimitExceeded),
retry_backoff=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def send_welcome_message(language, channel, msisdn, identity_id):
"""
Sends the welcome message to the user in the user's language using the
message sender
"""
# Transform to django language code
language = language.lower().replace("_", "-")
with translation.override(language):
translation_context = {
"popi_ussd": settings.POPI_USSD_CODE,
"optout_ussd": settings.OPTOUT_USSD_CODE,
}
if channel == "WHATSAPP":
text = (
translation.ugettext(
"Welcome! MomConnect will send helpful WhatsApp msgs. To stop "
"dial %(optout_ussd)s (Free). To get msgs via SMS instead, "
'reply "SMS" (std rates apply).'
)
% translation_context
)
else:
text = (
translation.ugettext(
"Congratulations on your pregnancy! MomConnect will send you "
"helpful SMS msgs. To stop dial %(optout_ussd)s, for more dial "
"%(popi_ussd)s (Free)."
)
% translation_context
)
utils.ms_client.create_outbound(
{
"to_addr": msisdn,
"to_identity": identity_id,
"content": text,
"channel": "JUNE_TEXT",
"metadata": {},
}
)
@app.task(
autoretry_for=(RequestException, SoftTimeLimitExceeded, TembaHttpError),
retry_backoff=True,
max_retries=15,
acks_late=True,
soft_time_limit=10,
time_limit=15,
)
def submit_third_party_registration_to_rapidpro(username, data):
registration = {
"registered_by": data["hcw_msisdn"],
"language": data["mom_lang"],
"timestamp": data["encdate"],
"source": username,
}
if data.get("mha"):
registration["mha"] = data["mha"]
if data.get("swt"):
registration["swt"] = data["swt"]
if data["authority"] in ("chw", "clinic"):
id_type = registration["id_type"] = data["mom_id_type"]
if id_type == "sa_id":
registration["sa_id_number"] = data["mom_id_no"]
registration["dob"] = data["mom_dob"]
elif id_type == "passport":
registration["passport_origin"] = data["mom_passport_origin"]
registration["passport_number"] = data["mom_id_no"]
elif id_type == "none":
registration["dob"] = data["mom_dob"]
if data["authority"] == "patient":
rapidpro.create_flow_start(
settings.RAPIDPRO_PUBLIC_REGISTRATION_FLOW,
urns=[f"whatsapp:{data['mom_msisdn'].strip('+')}"],
extra=registration,
)
elif data["authority"] == "chw":
rapidpro.create_flow_start(
settings.RAPIDPRO_CHW_REGISTRATION_FLOW,
urns=[f"whatsapp:{data['mom_msisdn'].strip('+')}"],
extra=registration,
)
elif data["authority"] == "clinic":
registration["edd"] = data["mom_edd"]
registration["clinic_code"] = data["clinic_code"]
rapidpro.create_flow_start(
settings.RAPIDPRO_CLINIC_REGISTRATION_FLOW,
urns=[f"whatsapp:{data['mom_msisdn'].strip('+')}"],
extra=registration,
)
| bsd-3-clause | 4,513,856,714,026,675,000 | 34.297454 | 88 | 0.592665 | false |
indexofire/gork | src/gork/application/article/mixins.py | 1 | 7231 | # -*- coding: utf-8 -*-
from django.http import Http404
from django.template import Template
from django.utils.datastructures import SortedDict
from django.views import generic
from django.views.generic.base import TemplateResponseMixin
from feincms import settings
class ContentModelMixin(object):
"""
    Mixin for ``feincms.models.Base`` subclasses which need some degree of
additional control over the request-response cycle.
"""
#: Collection of request processors
request_processors = None
#: Collection of response processors
response_processors = None
def setup_request(self, request):
import warnings
warnings.warn(
'%s.setup_request does nothing anymore, and will be removed in'
' FeinCMS v1.8',
DeprecationWarning, stacklevel=2)
@classmethod
def register_request_processor(cls, fn, key=None):
"""
Registers the passed callable as request processor. A request processor
always receives two arguments, the current object and the request.
"""
if cls.request_processors is None:
cls.request_processors = SortedDict()
cls.request_processors[fn if key is None else key] = fn
@classmethod
def register_response_processor(cls, fn, key=None):
"""
Registers the passed callable as response processor. A response
processor always receives three arguments, the current object, the
request and the response.
"""
if cls.response_processors is None:
cls.response_processors = SortedDict()
cls.response_processors[fn if key is None else key] = fn
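    # Usage sketch (hypothetical page model): a processor can short-circuit the
    # request by returning an HttpResponse, as run_request_processors shows below.
    # def require_login(page, request):
    #     if not request.user.is_authenticated():
    #         return HttpResponseRedirect('/accounts/login/')
    # MyPage.register_request_processor(require_login, key='auth')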
class ContentObjectMixin(TemplateResponseMixin):
"""
Mixin for Django's class based views which knows how to handle
``ContentModelMixin`` detail pages.
This is a mixture of Django's ``SingleObjectMixin`` and
``TemplateResponseMixin`` conceptually to support FeinCMS'
``ApplicationContent`` inheritance. It does not inherit
``SingleObjectMixin`` however, because that would set a
    precedence for the way detail objects are determined
    (and would, e.g., make the page and blog module implementation
harder).
"""
context_object_name = None
def handler(self, request, *args, **kwargs):
if not hasattr(self.request, '_feincms_extra_context'):
self.request._feincms_extra_context = {}
r = self.run_request_processors()
if r:
return r
r = self.process_content_types()
if r:
return r
response = self.render_to_response(self.get_context_data())
r = self.finalize_content_types(response)
if r:
return r
r = self.run_response_processors(response)
if r:
return r
return response
def get_template_names(self):
# According to the documentation this method is supposed to return
# a list. However, we can also return a Template instance...
if isinstance(self.template_name, (Template, list, tuple)):
return self.template_name
if self.template_name:
return [self.template_name]
self.object._needs_templates()
if self.object.template.path:
return [self.object.template.path]
# Hopefully someone else has a usable get_template_names()
# implementation...
return super(ContentObjectMixin, self).get_template_names()
def get_context_data(self, **kwargs):
context = self.request._feincms_extra_context
context[self.context_object_name or 'feincms_object'] = self.object
context.update(kwargs)
return super(ContentObjectMixin, self).get_context_data(**context)
@property
def __name__(self):
"""
Dummy property to make this handler behave like a normal function.
This property is used by django-debug-toolbar
"""
return self.__class__.__name__
def run_request_processors(self):
"""
Before rendering an object, run all registered request processors. A
request processor may peruse and modify the page or the request. It can
also return a ``HttpResponse`` for shortcutting the rendering and
returning that response immediately to the client.
"""
if self.object.request_processors is None:
return
for fn in reversed(self.object.request_processors.values()):
r = fn(self.object, self.request)
if r:
return r
def run_response_processors(self, response):
"""
After rendering an object to a response, the registered response
processors are called to modify the response, eg. for setting cache or
expiration headers, keeping statistics, etc.
"""
if self.object.response_processors is None:
return
for fn in self.object.response_processors.values():
r = fn(self.object, self.request, response)
if r:
return r
def process_content_types(self):
"""
Run the ``process`` method of all content types sporting one
"""
# store eventual Http404 exceptions for re-raising,
# if no content type wants to handle the current self.request
http404 = None
# did any content type successfully end processing?
successful = False
for content in self.object.content.all_of_type(tuple(
self.object._feincms_content_types_with_process)):
try:
r = content.process(self.request, view=self)
if r in (True, False):
successful = r
elif r:
return r
except Http404, e:
http404 = e
if not successful:
if http404:
# re-raise stored Http404 exception
raise http404
extra_context = self.request._feincms_extra_context
if (not settings.FEINCMS_ALLOW_EXTRA_PATH
and extra_context.get('extra_path', ['/'])[-1] != '/'):
raise Http404('Not found (extra_path %r on %r)' % (
extra_context.get('extra_path'),
self.object,
))
def finalize_content_types(self, response):
"""
Runs finalize() on content types having such a method, adds headers and
returns the final response.
"""
for content in self.object.content.all_of_type(tuple(
self.object._feincms_content_types_with_finalize)):
r = content.finalize(self.request, response)
if r:
return r
class ContentView(ContentObjectMixin, generic.DetailView):
def dispatch(self, request, *args, **kwargs):
if request.method.lower() not in self.http_method_names:
return self.http_method_not_allowed(request, *args, **kwargs)
self.request = request
self.args = args
self.kwargs = kwargs
self.object = self.get_object()
return self.handler(request, *args, **kwargs)
| mit | -7,984,136,424,656,516,000 | 33.598086 | 79 | 0.617342 | false |
evrom/genitag | controllers/config/profile/contactme.py | 1 | 2027 | from bottle import request
from sqlalchemy import exc
from libraries.database import engine as db
from libraries.template import view
from libraries.status import Status
from libraries.authentication import login_required
from libraries.forms import ContactMe as Form
from libraries.forms import Blank as BlankForm
from libraries.insert import contactme as description_insert
from libraries.select import contactme as description_select
from libraries.delete import contactme as description_delete
from libraries.session import open_session
from libraries.csrf import csrf
@view('config/profile/description.html')
@login_required
@csrf
def contactme():
status = Status()
form = Form(request.forms)
username = open_session()['u']
if request.method == 'POST' and\
request.query['action'] == 'update':
if form.validate():
try:
conn = db.engine.connect()
conn.execute(description_insert,
description=form.description.data,
username=username)
conn.close()
status.success = "Updated description"
except exc.SQLAlchemyError as message:
status.danger = message
if request.method == 'POST' and\
request.query['action'] == 'delete':
blank_form = BlankForm(request.forms)
if blank_form.validate():
try:
conn = db.engine.connect()
conn.execute(description_delete,
username=username)
conn.close()
status.success = "Deleted description"
except exc.SQLAlchemyError as message:
status.danger = message
conn = db.engine.connect()
result = conn.execute(description_select,
username=username)
conn.close()
    row = result.fetchone()
    # guard against accounts that have no stored description yet
    if row:
        form.description.data = row['description']
return dict(status=status, form=form, description_type="contactme")
| gpl-2.0 | 3,004,933,397,199,181,000 | 37.245283 | 71 | 0.635915 | false |
SouthForkResearch/CHaMP_Metrics | lib/topoproject.py | 1 | 7281 | from os import path
from xml.etree import ElementTree as ET
from exception import DataException, MissingException
from loghelper import Logger
from lib.util import getAbsInsensitivePath
# TODO: This shares a lot in common with riverscapes.py. Let's look at refactoring
class TopoProject():
# Dictionary with layer { layernname : layerxpath }
LAYERS = {
"DEM": "./Realizations/Topography/TIN[@active='true']/DEM/Path",
"DetrendedDEM": "./Realizations/Topography/TIN[@active='true']/Detrended/Path",
"WaterDepth": "./Realizations/Topography/TIN[@active='true']/WaterDepth/Path",
"ErrorSurface": "./Realizations/Topography/TIN[@active='true']/AssocSurfaces/ErrSurface/Path",
"WaterSurfaceDEM": "./Realizations/Topography/TIN[@active='true']/WaterSurfaceDEM/Path",
"AssocPointQuality": "./Realizations/Topography/TIN[@active='true']/AssocSurfaces/PointQuality3D/Path",
"AssocSlope": "./Realizations/Topography/TIN[@active='true']/AssocSurfaces/Slope/Path",
"AssocRough": "./Realizations/Topography/TIN[@active='true']/AssocSurfaces/Roughness/Path",
"AssocPointDensity": "./Realizations/Topography/TIN[@active='true']/AssocSurfaces/PointDensity/Path",
"AssocInterpolationError": "./Realizations/Topography/TIN[@active='true']/AssocSurfaces/InterpolationError/Path",
"Topo_Points": "./Realizations/SurveyData[@projected='true']/Vector[@id='topo_points']/Path",
"StreamFeatures": "./Realizations/SurveyData[@projected='true']/Vector[@id='stream_features']/Path",
"EdgeofWater_Points": "./Realizations/SurveyData[@projected='true']/Vector[@id='eow_points']/Path",
"Control_Points": "./Realizations/SurveyData[@projected='true']/Vector[@id='control_points']/Path",
"Error_Points": "./Realizations/SurveyData[@projected='true']/Vector[@id='error_points']/Path",
"Breaklines": "./Realizations/SurveyData[@projected='true']/Vector[@id='breaklines']/Path",
"WaterExtent": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='wetted'][@type='extent']/Path",
"BankfullExtent": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='bankfull'][@type='extent']/Path",
"WettedIslands": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='wetted'][@type='islands']/Path",
"BankfullIslands": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='bankfull'][@type='islands']/Path",
"ChannelUnits": "./Realizations/Topography/TIN[@active='true']/ChannelUnits/Path",
"Thalweg": "./Realizations/Topography/TIN[@active='true']/Thalweg/Path",
"WettedCenterline": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='wetted'][@type='centerline']/Path",
"BankfullCenterline": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='bankfull'][@type='centerline']/Path",
"WettedCrossSections": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='wetted'][@type='crosssections']/Path",
"BankfullCrossSections": "./Realizations/Topography/TIN[@active='true']/Stages/Vector[@stage='bankfull'][@type='crosssections']/Path",
"SurveyExtent": "./Realizations/SurveyData/SurveyExtents/Vector[@active='true']/Path", #MR?
"ControlPoints": "./Realizations/SurveyData/Vector[@id='control_points']/Path",
"TopoTin": "./Realizations/Topography/TIN[@active='true']/Path",
"Survey_Extent": "./Realizations/SurveyData[@projected='true']/SurveyExtents/Vector[@id='survey_extent']/Path"} #KMW
def __init__(self, sProjPath):
"""
:param sProjPath: Either the folder containing the project.rs.xml or the filepath of the actual project.rs.xml
"""
log = Logger('TopoProject')
try:
if path.isfile(sProjPath):
self.projpath = path.dirname(sProjPath)
self.projpathxml = sProjPath
elif path.isdir(sProjPath):
self.projpath = sProjPath
self.projpathxml = path.join(sProjPath, "project.rs.xml")
else:
raise MissingException("No project file or directory with the name could be found: {}".format(sProjPath))
except Exception, e:
raise MissingException("No project file or directory with the name could be found: {}".format(sProjPath))
self.isrsproject = False
if path.isfile(self.projpathxml):
log.info("Attempting to load project file: {}".format(self.projpathxml))
self.isrsproject = True
try:
self.domtree = ET.parse(self.projpathxml)
except ET.ParseError, e:
raise DataException("project.rs.xml exists but could not be parsed.")
self.domroot = self.domtree.getroot()
log.info("XML Project file loaded")
def getdir(self, layername):
return path.dirname(self.getpath(layername))
def getpath(self, layername):
"""
Turn a relative path into an absolute one.
:param project_path:
:param root:
:param xpath:
:return:
"""
if layername not in TopoProject.LAYERS:
raise DataException("'{}' is not a valid layer name".format(layername))
try:
node = self.domroot.find(TopoProject.LAYERS[layername]).text.replace("\\", path.sep).replace("/", path.sep)
except Exception, e:
raise DataException("Error retrieving layer '{}' from project file.".format(layername))
if node is not None:
finalpath = path.join(self.projpath, node)
if not path.isfile(finalpath) and not path.isdir(finalpath):
# One last, desparate call to see if there's a case error. This is expensive and should not be run
# as default
finalpath = getAbsInsensitivePath(finalpath, ignoreAbsent=True)
return finalpath
else:
raise DataException("Could not find layer '{}' with xpath '{}'".format(layername, TopoProject.LAYERS[layername]))
def getMeta(self, metaname):
"""
Retrieve Meta tags from the project.rs.xml file
:param metaname:
:return:
"""
try:
return self.domroot.find('./MetaData/Meta[@name="{}"]'.format(metaname)).text
except Exception, e:
raise DataException("Error retrieving metadata with name '{}' from project file.".format(metaname, self.projpathxml))
def get_guid(self, layername):
"""
Get the guid from a given layer
:param layername:
:return:
"""
if layername not in TopoProject.LAYERS:
raise DataException("'{}' is not a valid layer name".format(layername))
        # Trim the trailing "/Path" step to select the layer element itself
        # (str.rstrip strips a character set, not a suffix, so it is wrong here).
        node = self.domroot.find(TopoProject.LAYERS[layername][:-len("/Path")])
if node is not None:
return node.get("guid")
else:
raise DataException("Could not find layer '{}' with xpath '{}'".format(layername, TopoProject.LAYERS[layername]))
def layer_exists(self, layername):
node = self.domroot.find(TopoProject.LAYERS[layername])
return True if node is not None else False
| gpl-3.0 | -8,287,736,121,320,830,000 | 52.536765 | 142 | 0.648675 | false |
siliconchris1973/picbrick | stuff/picView_standalone.py | 1 | 9662 | #!/usr/bin/env python
import os
import pygame
from pygame.locals import *
#define some colors
#color R G B
white = (255, 255, 255)
red = (255, 0, 0)
green = ( 0, 255, 0)
blue = ( 0, 0, 255)
black = ( 0, 0, 0)
cyan = ( 0, 255, 255)
btnCycle_col = white
btnPrev_col = white
btnNext_col = white
btnF1_col = cyan
btnF2_col = blue
btnF3_col = red
btnF4_col = green
btnF5_col = cyan
# directory structure
core_data = 'data'
image_dir = 'images'
video_dir = 'videos'
initial_image = 'HAL900_320x240.png'
"""
Screen layout:
|------------- 320 -------------|
C y c l e
+-------------------------------+ ---
20 |### ####################### ###| 20 |
|### ###| |
|### ###| |
P |### ###| N |
R |### ###| E 240
E |### ###| X |
V |### ###| T |
|### ###| |
|### ###| |
|### ### ### ### ### ### ### ###| |
+-------------------------------+ ---
40 F1 F2 F3 F4 F5 F6 40
"""
#screen size
width = 320
height = 240
size = (width, height)
# button definitions
# pressed button 21, 219
number_of_x_buttons = 5
number_of_y_buttons = 2
btn_width = 40
btn_height = 40
safetyMargin = 2
# evenly distribute function buttons
btnDistance_x = ((width - 2 * btn_width) - (number_of_x_buttons * btn_width)) / (number_of_x_buttons + 1)
btnDistance_y = ((height - btn_height) - (number_of_y_buttons * btn_height)) / (number_of_y_buttons + 1)
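# Worked example with the defaults above (Python 2 integer division):
# btnDistance_x = ((320 - 2*40) - 5*40) / (5 + 1) = 40 / 6 = 6 pixels
# btnDistance_y = ((240 - 40) - 2*40) / (2 + 1) = 120 / 3 = 40 pixels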
# these are the two big area to "scroll" left and right
btnPrev_x = 0
btnPrev_y = safetyMargin
btnPrev_width = btn_width
btnPrev_height = height - safetyMargin
btnNext_x = width - btn_width
btnNext_y = safetyMargin
btnNext_width = btn_width
btnNext_height = height - safetyMargin
btnCycle_x = 0 + (btn_width + safetyMargin)
btnCycle_y = 0
btnCycle_width = width - (2 * btn_width + 2 * safetyMargin)
btnCycle_height = btn_height
btnF1_x = 0 + (btn_width + safetyMargin)
btnF1_y = height - btn_height
btnF1_width = btn_width
btnF1_height = btn_height
btnF2_x = btnF1_x + btnDistance_x
btnF2_y = height - btn_height
btnF2_width = btn_width
btnF2_height = btn_height / 2
btnF3_x = btnF2_x + btnDistance_x
btnF3_y = height - btn_height
btnF3_width = btn_width
btnF3_height = btn_height / 2
btnF4_x = btnF3_x + btnDistance_x
btnF4_y = height - btn_height
btnF4_width = btn_width
btnF4_height = btn_height / 2
btnF5_x = btnF4_x + btnDistance_x
btnF5_y = height - btn_height
btnF5_width = btn_width
btnF5_height = btn_height / 2
# initialize pyGame and the screen
pygame.init()
screen = pygame.display.set_mode(size)
screen.fill((black))
touch_buttons = {
'btnPrev.png':(btnPrev_x, btnPrev_y, btnPrev_width, btnPrev_height) # Previous image button
,'btnNext.png':(btnNext_x,btnNext_y,btnNext_width, btnNext_height) # Next image button
,'btnCycle.png':(btnCycle_x,btnCycle_y,btnCycle_width, btnCycle_height) # Cycle screen button
,'btnF1.png':(btnF1_x,btnF1_y,btnF1_width, btnF1_height) # function 1 button
    ,'btnF2.png':(btnF2_x,btnF2_y,btnF2_width, btnF2_height) # function 2 button
    ,'btnF3.png':(btnF3_x,btnF3_y,btnF3_width, btnF3_height) # function 3 button
    ,'btnF4.png':(btnF4_x,btnF4_y,btnF4_width, btnF4_height) # function 4 button
,'btnF5.png':(btnF5_x,btnF5_y,btnF5_width, btnF5_height) # function 5 button
}
# functions
def prev_picture():
print 'prev picture called'
def next_picture():
print 'next picture called'
def cycle_function():
print 'cycle function called'
def display_image(directory, filename):
try:
# load from subfolder 'data'
img = pygame.image.load(os.path.join(directory,filename))
except:
raise UserWarning, "Unable to find the images in the folder 'data' :-( "
screen.blit(img,(0,0))
# This function takes the name of an image to load.
# It also optionally takes an argument it can use to set a colorkey for the image.
# A colorkey is used in graphics to represent a color of the image that is transparent.
# we also use this this function to initialize filenav.py -- see modules
def load_image(name, colorkey=None):
fullname = os.path.join('data', name)
try:
image = pygame.image.load(fullname)
except pygame.error, message:
print 'Cannot load image:', name
raise SystemExit, message
image = image.convert()
if colorkey is not None:
        if colorkey == -1:
colorkey = image.get_at((0,0))
image.set_colorkey(colorkey, RLEACCEL)
return image, image.get_rect()
def show_controls():
# Draw a rectangle outline
pygame.draw.rect(screen, btnPrev_col, [btnPrev_x, btnPrev_y, btnPrev_width, btnPrev_height], 2)
#pygame.blit(source, dest, area=None, special_flags = 0) -> Rect
pygame.draw.rect(screen, btnNext_col, [btnNext_x, btnNext_y, btnNext_width, btnNext_height], 2)
pygame.draw.rect(screen, btnCycle_col, [btnCycle_x, btnCycle_y, btnCycle_width, btnCycle_height], 2)
#pygame.draw.rect(screen, btnF1_col, [btnF1_x, btnF1_y, btnF1_width, btnF1_height], 2)
#pygame.draw.rect(screen, btnF2_col, [btnF2_x, btnF2_y, btnF2_width, btnF2_height], 2)
#pygame.draw.rect(screen, btnF3_col, [btnF3_x, btnF3_y, btnF3_width, btnF3_height], 2)
#pygame.draw.rect(screen, btnF4_col, [btnF4_x, btnF4_y, btnF4_width, btnF4_height], 2)
#pygame.draw.rect(screen, btnF5_col, [btnF5_x, btnF5_y, btnF5_width, btnF5_height], 2)
"""
for i,v in touch_buttons.items():
btn_image = pygame.image.load(os.path.join('data', i))
# X Y W H
rect = btn_image.set_rect(v[0], v[1], v[2], v[3])
screen.blit(btn_image, rect)
"""
# Go ahead and update the screen with what we've drawn.
# This MUST happen after all the other drawing commands.
pygame.display.flip()
def hide_controls():
display_image(core_data, current_image)
# Go ahead and update the screen with what we've drawn.
# This MUST happen after all the other drawing commands.
pygame.display.flip()
def get_display():
disp_no = os.getenv('DISPLAY')
if disp_no:
print "I'm running under X display = {0}".format(disp_no)
pygame.mouse.set_visible(True)
else:
drivers = ['directfb', 'fbcon', 'svgalib']
found = False
for driver in drivers:
if not os.getenv('SDL_VIDEODRIVER'):
os.putenv('SDL_VIDEODRIVER', driver)
try:
pygame.display.init()
except pygame.error:
print 'Driver: {0} failed.'.format(driver)
continue
found = True
print "I'm running on the framebuffer using driver " + str(driver)
pygame.mouse.set_visible(False)
break
if not found:
raise Exception('No suitable video driver found!')
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDEV"] = "/dev/input/touchscreen"
os.environ["SDL_MOUSEDRV"] = "TSLIB"
def run(done, toggle_controls):
display_image(core_data, current_image)
show_controls()
while not done:
# This limits the while loop to a max of 10 times per second.
# Leave this out and we will use all CPU we can.
clock.tick(10)
# Scan touchscreen events
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
if(event.type is MOUSEBUTTONDOWN):
# get list of images in picture folder
pos = pygame.mouse.get_pos()
# Find which quarter of the screen we're in
x,y = pos
print 'pos is ' + str(pos)
# check which button was pressed
if btnPrev_x <= x <= btnPrev_x + btnPrev_width and btnPrev_y <= y <= btnPrev_y + btnPrev_height:
prev_picture()
elif btnNext_x <= x <= btnNext_x + btnNext_width and btnNext_y <= y <= btnNext_y + btnNext_height:
next_picture()
elif btnCycle_x <= x <= btnCycle_x + btnCycle_width and btnCycle_y <= y <= btnCycle_y + btnCycle_height:
cycle_function()
else:
print 'event outside of control buttons'
if (toggle_controls == True):
toggle_controls = False
print 'showing controls'
show_controls()
                    else:
                        toggle_controls = True
                        print 'hiding controls'
                        hide_controls()
# Go ahead and update the screen with what we've drawn.
# This MUST happen after all the other drawing commands.
pygame.display.flip()
elif(event.type is MOUSEBUTTONUP):
pos = pygame.mouse.get_pos()
if __name__ == "__main__":
done = False
toggle_controls = True
get_display()
#define font
font = pygame.font.Font(None, 20)
font_big = pygame.font.Font(None, 50)
pygame.display.update()
current_image = initial_image
clock = pygame.time.Clock()
#background = pygame.Surface(screen.get_size())
#background = background.convert()
#background.fill((black))
run(done, toggle_controls)
# Be IDLE friendly
pygame.quit() | apache-2.0 | 2,926,098,843,309,572,600 | 31.10299 | 120 | 0.575968 | false |
masschallenge/impact-api | web/impact/impact/tests/test_jwt_cookie_name_view.py | 1 | 1457 | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
import json
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework.test import APIClient
from impact.tests.api_test_case import APITestCase
from impact.tests.factories import UserFactory
from impact.views import JWTCookieNameView
User = get_user_model() # pylint: disable=invalid-name
class TestJWTCookieNameView(APITestCase):
client_class = APIClient
user_factory = UserFactory
url = reverse(JWTCookieNameView.view_name)
def test_logged_in_user_gets_response_with_cookie_name(self):
cookie_name = 'test-jwt-name'
with self.settings(JWT_AUTH={'JWT_AUTH_COOKIE': cookie_name}):
with self.login(email=self.basic_user().email):
response = self.client.get(self.url)
response_data = json.loads(response.content)
self.assertTrue('name' in response_data.keys())
self.assertTrue(cookie_name in response_data['name'])
def test_unauthenticated_user_is_denied(self):
cookie_name = 'test-jwt-name'
with self.settings(JWT_AUTH={'JWT_AUTH_COOKIE': cookie_name}):
response = self.client.get(self.url)
response_data = json.loads(response.content)
self.assertTrue(
response_data['detail'] == 'Authentication credentials '
'were not provided.')
| mit | 7,622,803,628,918,328,000 | 38.378378 | 72 | 0.660947 | false |
mathiasertl/django-ca | ca/django_ca/tests/tests_querysets.py | 1 | 17267 | # This file is part of django-ca (https://github.com/mathiasertl/django-ca).
#
# django-ca is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# django-ca is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with django-ca. If not,
# see <http://www.gnu.org/licenses/>.
"""Test querysets."""
import typing
from contextlib import contextmanager
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from django.db import models
from django.test import TestCase
from django.test import TransactionTestCase
from freezegun import freeze_time
from .. import ca_settings
from ..extensions import BasicConstraints
from ..extensions import KeyUsage
from ..models import AcmeAccount
from ..models import AcmeAuthorization
from ..models import AcmeCertificate
from ..models import AcmeChallenge
from ..models import AcmeOrder
from ..models import Certificate
from ..models import CertificateAuthority
from ..subject import Subject
from .base import override_settings
from .base import override_tmpcadir
from .base import timestamps
from .base.mixins import AcmeValuesMixin
from .base.mixins import TestCaseMixin
class QuerySetTestCaseMixin(TestCaseMixin):
"""Mixin for QuerySet test cases."""
def assertQuerySet( # pylint: disable=invalid-name; unittest standard
self, qs: "models.QuerySet[models.Model]", *items: models.Model
) -> None:
"""Minor shortcut to test querysets."""
self.assertCountEqual(qs, items)
@contextmanager
def attr(self, obj: models.Model, attr: str, value: typing.Any) -> typing.Iterator[None]:
"""Context manager to temporarily set an attribute for an object."""
original = getattr(obj, attr)
try:
setattr(obj, attr, value)
obj.save()
yield
finally:
setattr(obj, attr, original)
obj.save()
@override_settings(CA_MIN_KEY_SIZE=1024)
class CertificateAuthorityQuerySetTestCase(TestCaseMixin, TestCase):
"""Test cases for :py:class:`~django_ca.querysets.CertificateAuthorityQuerySet`."""
load_cas = ("root", "child")
@override_tmpcadir()
def test_basic(self) -> None:
"""Basic test for init()."""
key_size = ca_settings.CA_MIN_KEY_SIZE
ca = CertificateAuthority.objects.init(
name="Root CA",
key_size=key_size,
key_type="RSA",
algorithm=hashes.SHA256(),
expires=self.expires(720),
parent=None,
pathlen=0,
subject=Subject([("CN", "ca.example.com")]),
)
self.assertEqual(ca.name, "Root CA")
# verify private key properties
self.assertEqual(ca.key(None).key_size, 1024)
self.assertIsInstance(ca.key(None).public_key(), RSAPublicKey)
        # verify public key properties
self.assertBasic(ca.pub.loaded)
self.assertEqual(ca.subject, Subject({"CN": "ca.example.com"}))
# verify X509 properties
self.assertEqual(
ca.basic_constraints, BasicConstraints({"critical": True, "value": {"ca": True, "pathlen": 0}})
)
self.assertEqual(ca.key_usage, KeyUsage({"critical": True, "value": ["cRLSign", "keyCertSign"]}))
self.assertIsNone(ca.subject_alternative_name, None)
self.assertIsNone(ca.extended_key_usage)
self.assertIsNone(ca.tls_feature)
self.assertIsNone(ca.issuer_alternative_name)
@override_tmpcadir()
def test_pathlen(self) -> None:
"""Test pathlen parameter in manager."""
key_size = ca_settings.CA_MIN_KEY_SIZE
kwargs = dict(
key_size=key_size,
key_type="RSA",
algorithm=hashes.SHA256(),
expires=self.expires(720),
parent=None,
subject=Subject([("CN", "ca.example.com")]),
)
ca = CertificateAuthority.objects.init(name="1", **kwargs)
self.assertEqual(ca.basic_constraints, BasicConstraints({"critical": True, "value": {"ca": True}}))
ca = CertificateAuthority.objects.init(pathlen=0, name="2", **kwargs)
self.assertEqual(
ca.basic_constraints, BasicConstraints({"critical": True, "value": {"ca": True, "pathlen": 0}})
)
ca = CertificateAuthority.objects.init(pathlen=2, name="3", **kwargs)
self.assertEqual(
ca.basic_constraints, BasicConstraints({"critical": True, "value": {"ca": True, "pathlen": 2}})
)
@override_tmpcadir()
def test_parent(self) -> None:
"""Test parent parameter in manager."""
key_size = ca_settings.CA_MIN_KEY_SIZE
kwargs = dict(
key_size=key_size,
key_type="RSA",
algorithm=hashes.SHA256(),
expires=self.expires(720),
subject=Subject([("CN", "ca.example.com")]),
)
parent = CertificateAuthority.objects.init(name="Root", parent=None, pathlen=1, **kwargs)
child = CertificateAuthority.objects.init(name="Child", parent=parent, pathlen=0, **kwargs)
self.assertAuthorityKeyIdentifier(parent, child)
@override_tmpcadir()
def test_key_size(self) -> None:
"""Test key size validation in manager."""
kwargs = dict(
name="Root CA",
key_type="RSA",
algorithm="sha256",
expires=self.expires(720),
parent=None,
pathlen=0,
subject={
"CN": "ca.example.com",
},
)
key_size = ca_settings.CA_MIN_KEY_SIZE
# type ignores because kwargs is Dict[str, Any]
with self.assertRaisesRegex(ValueError, r"^3072: Key size must be a power of two$"):
CertificateAuthority.objects.init(key_size=key_size * 3, **kwargs) # type: ignore[arg-type]
with self.assertRaisesRegex(ValueError, r"^1025: Key size must be a power of two$"):
CertificateAuthority.objects.init(key_size=key_size + 1, **kwargs) # type: ignore[arg-type]
with self.assertRaisesRegex(ValueError, r"^512: Key size must be least 1024 bits$"):
CertificateAuthority.objects.init(key_size=int(key_size / 2), **kwargs) # type: ignore[arg-type]
with self.assertRaisesRegex(ValueError, r"^256: Key size must be least 1024 bits$"):
CertificateAuthority.objects.init(key_size=int(key_size / 4), **kwargs) # type: ignore[arg-type]
def test_enabled_disabled(self) -> None:
"""Test enabled/disabled filter."""
self.load_named_cas("__usable__")
self.assertCountEqual(CertificateAuthority.objects.enabled(), self.cas.values())
self.assertCountEqual(CertificateAuthority.objects.disabled(), [])
self.ca.enabled = False
self.ca.save()
self.assertCountEqual(
CertificateAuthority.objects.enabled(),
[c for c in self.cas.values() if c.name != self.ca.name],
)
self.assertCountEqual(CertificateAuthority.objects.disabled(), [self.ca])
def test_valid(self) -> None:
"""Test valid/usable/invalid filters."""
self.load_named_cas("__usable__")
with freeze_time(timestamps["before_cas"]):
self.assertCountEqual(CertificateAuthority.objects.valid(), [])
self.assertCountEqual(CertificateAuthority.objects.usable(), [])
self.assertCountEqual(CertificateAuthority.objects.invalid(), self.cas.values())
with freeze_time(timestamps["before_child"]):
valid = [c for c in self.cas.values() if c.name != "child"]
self.assertCountEqual(CertificateAuthority.objects.valid(), valid)
self.assertCountEqual(CertificateAuthority.objects.usable(), valid)
self.assertCountEqual(CertificateAuthority.objects.invalid(), [self.cas["child"]])
with freeze_time(timestamps["after_child"]):
self.assertCountEqual(CertificateAuthority.objects.valid(), self.cas.values())
self.assertCountEqual(CertificateAuthority.objects.usable(), self.cas.values())
self.assertCountEqual(CertificateAuthority.objects.invalid(), [])
with freeze_time(timestamps["cas_expired"]):
self.assertCountEqual(CertificateAuthority.objects.valid(), [])
self.assertCountEqual(CertificateAuthority.objects.usable(), [])
self.assertCountEqual(CertificateAuthority.objects.invalid(), self.cas.values())
class CertificateQuerysetTestCase(QuerySetTestCaseMixin, TestCase):
"""Test cases for :py:class:`~django_ca.querysets.CertificateQuerySet`."""
load_cas = "__usable__"
load_certs = "__usable__"
def test_validity(self) -> None:
"""Test validity filter."""
with freeze_time(timestamps["everything_valid"]):
self.assertQuerySet(Certificate.objects.expired())
self.assertQuerySet(Certificate.objects.not_yet_valid())
self.assertQuerySet(Certificate.objects.valid(), *self.certs.values())
with freeze_time(timestamps["everything_expired"]):
self.assertQuerySet(Certificate.objects.expired(), *self.certs.values())
self.assertQuerySet(Certificate.objects.not_yet_valid())
self.assertQuerySet(Certificate.objects.valid())
with freeze_time(timestamps["before_everything"]):
self.assertQuerySet(Certificate.objects.expired())
self.assertQuerySet(Certificate.objects.not_yet_valid(), *self.certs.values())
self.assertQuerySet(Certificate.objects.valid())
expired = [
self.certs["root-cert"],
self.certs["child-cert"],
self.certs["ecc-cert"],
self.certs["dsa-cert"],
self.certs["pwd-cert"],
]
valid = [c for c in self.certs.values() if c not in expired]
with freeze_time(timestamps["ca_certs_expired"]):
self.assertQuerySet(Certificate.objects.expired(), *expired)
self.assertQuerySet(Certificate.objects.not_yet_valid())
self.assertQuerySet(Certificate.objects.valid(), *valid)
class AcmeQuerySetTestCase( # pylint: disable=too-many-instance-attributes
QuerySetTestCaseMixin, AcmeValuesMixin, TransactionTestCase
):
"""Base class for ACME querysets (creates different instances)."""
load_cas = "__usable__"
def setUp(self) -> None:
super().setUp()
self.ca.acme_enabled = True
self.ca.save()
self.ca2 = self.cas["root"]
self.ca2.acme_enabled = True
self.ca2.save()
self.kid = self.absolute_uri(":acme-account", serial=self.ca.serial, slug=self.ACME_SLUG_1)
self.account = AcmeAccount.objects.create(
ca=self.ca,
contact="[email protected]",
terms_of_service_agreed=True,
status=AcmeAccount.STATUS_VALID,
pem=self.ACME_PEM_1,
thumbprint=self.ACME_THUMBPRINT_1,
slug=self.ACME_SLUG_1,
kid=self.kid,
)
self.kid2 = self.absolute_uri(":acme-account", serial=self.ca2.serial, slug=self.ACME_SLUG_2)
self.account2 = AcmeAccount.objects.create(
ca=self.ca2,
contact="[email protected]",
terms_of_service_agreed=True,
status=AcmeAccount.STATUS_VALID,
pem=self.ACME_PEM_2,
thumbprint=self.ACME_THUMBPRINT_2,
slug=self.ACME_SLUG_2,
kid=self.kid2,
)
self.order = AcmeOrder.objects.create(account=self.account)
self.auth = AcmeAuthorization.objects.create(order=self.order, value="example.com")
self.chall = AcmeChallenge.objects.create(auth=self.auth, type=AcmeChallenge.TYPE_HTTP_01)
self.cert = AcmeCertificate.objects.create(order=self.order)
class AcmeAccountQuerySetTestCase(AcmeQuerySetTestCase):
"""Test cases for :py:class:`~django_ca.querysets.AcmeAccountQuerySet`."""
@freeze_time(timestamps["everything_valid"])
def test_viewable(self) -> None:
"""Test the viewable() method."""
self.assertQuerySet(AcmeAccount.objects.viewable(), self.account, self.account2)
with self.attr(self.account, "status", AcmeAccount.STATUS_REVOKED):
self.assertQuerySet(AcmeAccount.objects.viewable(), self.account, self.account2)
with self.attr(self.ca, "enabled", False):
self.assertQuerySet(AcmeAccount.objects.viewable(), self.account2)
with self.attr(self.ca, "acme_enabled", False):
self.assertQuerySet(AcmeAccount.objects.viewable(), self.account2)
# Test that we're back to the original state
self.assertQuerySet(AcmeAccount.objects.viewable(), self.account, self.account2)
with freeze_time(timestamps["everything_expired"]):
self.assertQuerySet(AcmeAccount.objects.viewable())
class AcmeOrderQuerysetTestCase(AcmeQuerySetTestCase):
"""Test cases for :py:class:`~django_ca.querysets.AcmeOrderQuerySet`."""
def test_account(self) -> None:
"""Test the account filter."""
self.assertQuerySet(AcmeOrder.objects.account(self.account), self.order)
self.assertQuerySet(AcmeOrder.objects.account(self.account2))
@freeze_time(timestamps["everything_valid"])
def test_viewable(self) -> None:
"""Test the viewable() method."""
self.assertQuerySet(AcmeOrder.objects.viewable(), self.order)
with self.attr(self.order.account, "status", AcmeAccount.STATUS_REVOKED):
self.assertQuerySet(AcmeOrder.objects.viewable())
with freeze_time(timestamps["everything_expired"]):
self.assertQuerySet(AcmeOrder.objects.viewable())
class AcmeAuthorizationQuerysetTestCase(AcmeQuerySetTestCase):
"""Test cases for :py:class:`~django_ca.querysets.AcmeAuthorizationQuerySet`."""
def test_account(self) -> None:
"""Test the account filter."""
self.assertQuerySet(AcmeAuthorization.objects.account(self.account), self.auth)
self.assertQuerySet(AcmeAuthorization.objects.account(self.account2))
@freeze_time(timestamps["everything_valid"])
def test_url(self) -> None:
"""Test the url filter."""
# pylint: disable=expression-not-assigned
with self.assertNumQueries(1):
AcmeAuthorization.objects.url().get(pk=self.auth.pk).acme_url
@freeze_time(timestamps["everything_valid"])
def test_viewable(self) -> None:
"""Test the viewable() method."""
self.assertQuerySet(AcmeAuthorization.objects.viewable(), self.auth)
with self.attr(self.order.account, "status", AcmeAccount.STATUS_REVOKED):
self.assertQuerySet(AcmeAuthorization.objects.viewable())
class AcmeChallengeQuerysetTestCase(AcmeQuerySetTestCase):
"""Test cases for :py:class:`~django_ca.querysets.AcmeChallengeQuerySet`."""
def test_account(self) -> None:
"""Test the account filter."""
self.assertQuerySet(AcmeChallenge.objects.account(self.account), self.chall)
self.assertQuerySet(AcmeChallenge.objects.account(self.account2))
@freeze_time(timestamps["everything_valid"])
def test_url(self) -> None:
"""Test the url filter."""
# pylint: disable=expression-not-assigned
with self.assertNumQueries(1):
AcmeChallenge.objects.url().get(pk=self.chall.pk).acme_url
@freeze_time(timestamps["everything_valid"])
def test_viewable(self) -> None:
"""Test the viewable() method."""
self.assertQuerySet(AcmeChallenge.objects.viewable(), self.chall)
with self.attr(self.order.account, "status", AcmeAccount.STATUS_REVOKED):
self.assertQuerySet(AcmeChallenge.objects.viewable())
class AcmeCertificateQuerysetTestCase(AcmeQuerySetTestCase):
"""Test cases for :py:class:`~django_ca.querysets.AcmeCertificateQuerySet`."""
def test_account(self) -> None:
"""Test the account filter."""
self.assertQuerySet(AcmeCertificate.objects.account(self.account), self.cert)
self.assertQuerySet(AcmeCertificate.objects.account(self.account2))
@freeze_time(timestamps["everything_valid"])
def test_url(self) -> None:
"""Test the url filter."""
# pylint: disable=expression-not-assigned
with self.assertNumQueries(1):
AcmeCertificate.objects.url().get(pk=self.cert.pk).acme_url
@freeze_time(timestamps["everything_valid"])
def test_viewable(self) -> None:
"""Test the viewable() method."""
# none by default because we need a valid order and cert
self.assertQuerySet(AcmeCertificate.objects.viewable())
with self.attr(self.order.account, "status", AcmeAccount.STATUS_REVOKED):
self.assertQuerySet(AcmeCertificate.objects.viewable())
| gpl-3.0 | 5,701,134,692,565,751,000 | 39.532864 | 109 | 0.655412 | false |
lellolandi/Tagger | python/mp3_m4a.py | 1 | 1680 | from os.path import splitext
from mutagen.mp4 import MP4
from mutagen.id3 import ID3
class TagObject:
def __init__(self,filename):
self.path = filename
self.track = self.total = self.year = 0
self.coverpath = self.artist = self.album = self.title = self.cover = ""
ext = splitext(filename)[1]
if ext == ".mp3":
tagfile = ID3(filename)
for key in tagfile:
if key == "TIT2": self.title = tagfile[key].text[0]
elif key == "TALB": self.album = tagfile[key].text[0]
elif key == "TPE1": self.artist = tagfile[key].text[0]
elif key == "TDRC":
try: self.year = int(str(tagfile[key].text[0]))
except ValueError: self.year = int(str(tagfile[key].text[0]).split("-")[0])
except OverflowError: pass
elif key == "TRCK":
val = tagfile[key].text[0].split("/")
try: self.track = int(val[0])
except (ValueError,OverflowError): pass
try: self.total = int(val[1])
except (ValueError,OverflowError,IndexError): pass
elif "APIC" in key: self.cover = tagfile[key].data
else:
tagfile = MP4(filename)
for key in tagfile:
if key == "\xa9nam": self.title = tagfile[key][0]
elif key == "\xa9alb": self.album = tagfile[key][0]
elif key == "\xa9ART": self.artist = tagfile[key][0]
elif key == "\xa9day":
try: self.year = int(tagfile[key][0])
except ValueError: self.year = int(tagfile[key][0].split("-")[0])
except OverflowError: pass
elif key == "covr": self.cover = tagfile[key][0]
elif key == "trkn":
try: self.track = tagfile[key][0][0]
except (OverflowError,IndexError): pass
try: self.total = tagfile[key][0][1]
except (OverflowError,IndexError): pass
| gpl-3.0 | 7,655,653,335,960,738,000 | 36.333333 | 80 | 0.629762 | false |
abonaca/gary | gary/dynamics/tests/test_plot.py | 1 | 2943 | # coding: utf-8
""" Test dynamics plotting """
from __future__ import division, print_function
__author__ = "adrn <[email protected]>"
# Standard library
import os, sys
import logging
# Third-party
import matplotlib.pyplot as plt
import numpy as np
from astropy import log as logger
# Project
from ..plot import *
logger.setLevel(logging.DEBUG)
plot_path = "plots/tests/dynamics/plot"
if not os.path.exists(plot_path):
os.makedirs(plot_path)
def test_orbits():
# generate an "orbit"
n = 8
t = np.linspace(0, 100, 1000).reshape(1000,1)
x = np.cos(np.random.uniform(1.,8.,size=(1,n))*t).T[None]
y = np.cos(np.random.uniform(1.,8.,size=(1,n))*t).T[None]
z = np.cos(np.random.uniform(1.,8.,size=(1,n))*t).T[None]
w = np.vstack((x,y,z)).T
fig = plot_orbits(w, linestyle='none', marker='.', alpha=0.25)
fig.savefig(os.path.join(plot_path, "all_orbits.png"))
fig = plot_orbits(w, ix=0, linestyle='none', marker='.', alpha=0.25)
fig.savefig(os.path.join(plot_path, "one_orbit.png"))
fig = plot_orbits(w, ix=0, linestyle='none', marker='.', alpha=0.25,
labels=("herp","derp","merp"))
fig.savefig(os.path.join(plot_path, "one_orbit_labels.png"))
fig = plot_orbits(w, triangle=True, linestyle='-', marker=None)
fig.savefig(os.path.join(plot_path, "all_orbits_triangle.png"))
fig = plot_orbits(w, ix=0, triangle=True, linestyle='-', marker=None)
fig.savefig(os.path.join(plot_path, "one_orbit_triangle.png"))
fig = plot_orbits(w, ix=0, triangle=True, linestyle='-', marker=None,
labels=("herp","derp","merp"))
fig.savefig(os.path.join(plot_path, "one_orbit_triangle_labels.png"))
def test_three_panel():
q = np.random.uniform(0.,10.,size=(1000,3))
q0 = np.array([5,5,5])
fig = three_panel(q)
fig.savefig(os.path.join(plot_path, "three-panel-random.png"))
fig = three_panel(q, triangle=True)
fig.savefig(os.path.join(plot_path, "three-panel-random_triangle.png"))
fig = three_panel(q, relative_to=q0, symbol=r'\Omega')
fig.savefig(os.path.join(plot_path, "three-panel-random-relative.png"))
fig = three_panel(q, relative_to=q0, triangle=True, symbol=r'\Omega')
fig.savefig(os.path.join(plot_path, "three-panel-random-relative_triangle.png"))
def test_1d():
t = np.linspace(0,100.,1000)
q = np.cos(2*np.pi*t/10.)
q = q[:,np.newaxis]
fig = plot_orbits(q, labels=(r"$\theta$",))
fig.savefig(os.path.join(plot_path, "1d-orbit-labels.png"))
fig = plot_orbits(q, t=t, labels=(r"$\theta$",))
fig.savefig(os.path.join(plot_path, "1d-orbit-labels-time.png"))
def test_2d():
t = np.linspace(0,100.,1000)
q = np.zeros((len(t),1,2))
q[:,0,0] = np.cos(2*np.pi*t/10.)
q[:,0,1] = np.sin(2*np.pi*t/5.5)
fig = plot_orbits(q, labels=(r"$\theta$",r"$\omega$"))
fig.savefig(os.path.join(plot_path, "2d-orbit-labels.png"))
| mit | 5,296,330,896,054,207,000 | 29.65625 | 84 | 0.627591 | false |
ecrespo/django_kanban-agile | kanban/django_kanban_agile/django_kanban_agile/settings.py | 1 | 2913 | """
Django settings for django_kanban_agile project.
Generated by 'django-admin startproject' using Django 1.8.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm_#*ocyst)mcc8z*84%j2e2o2+9qo17isuf6$f-p^nf*+kdvt-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'material',
'material.admin',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'apps.kanban',
'apps.backlog',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'django_kanban_agile.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__),'templates'),],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_kanban_agile.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'es-ve'
TIME_ZONE = 'America/Caracas'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
STATIC_URL = '/static/'
| mit | 1,358,321,658,298,448,100 | 24.552632 | 71 | 0.687951 | false |
opena11y/fae2 | fae2/fae-util/save_markup_information.py | 1 | 2728 | """
Copyright 2014-2016 University of Illinois
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
file: fae-util/save_markup_information.py
Author: Jon Gunderson
"""
from __future__ import absolute_import
import sys
import os
import string
import glob
import optparse
import subprocess
import shlex
import time
import getopt
import shutil
import json
import csv
import urllib
# sys.path.append(os.path.abspath('..'))
from django.utils.encoding import iri_to_uri
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'fae20.settings')
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.models import User
from django.db import connection, transaction
from ruleCategories.models import RuleCategory
from rules.models import Rule
DEBUG=False
INFO=True
ERROR=True
class PageMarkupInformation:
def __init__(self, mi, log):
        self.markup_information = mi
        self.log = log
    def debug(self, s):
        if DEBUG and self.log:
            self.log.write("[SAVE MARKUP][DEBUG]: " + str(s) + "\n")
    def info(self, s):
        if INFO and self.log:
            self.log.write("[SAVE MARKUP][INFO]: " + str(s) + "\n")
    def error(self, s):
        if ERROR and self.log:
            self.log.write("[SAVE MARKUP][ERROR]: " + str(s) + "\n")
def saveMarkupGroup(self, page_result, group, cursor):
insert_str = "INSERT INTO \"markupInfo_mipage" + str(group) + "\" ( "
insert_str += "page_result_id"
value_str = ") VALUES ( "
value_str += str(page_result.id)
for item in self.markup_information[group]:
insert_str += ", " + str(item)
value_str += ", " + str(self.markup_information[group][item])
insert_str = insert_str + value_str + ")"
# debug("[PageMarkupInformation][saveMarkupGroup] " + insert_str)
try:
# Data insertion operation - commit required
cursor.execute(insert_str, [])
except:
self.error("[PageMarkupInformation][saveMarkupGroup] SQL insert error ")
def saveToDjango(self, page_result):
try:
cursor = connection.cursor()
for group in self.markup_information:
self.saveMarkupGroup(page_result, group, cursor)
except:
self.error("[PageMarkupInformation][saveToDango] SQL insert error ")
| apache-2.0 | -170,159,004,764,348,670 | 24.027523 | 78 | 0.690249 | false |
chunlaw/GeoNews | models/applespider.py | 1 | 5145 | from html.parser import HTMLParser
from urllib.request import urlopen
from urllib import parse
from bs4 import BeautifulSoup
# We are going to create a class called LinkParser that inherits some
# methods from HTMLParser which is why it is passed into the definition
class LinkParser(HTMLParser):
# This is a function that HTMLParser normally has
# but we are adding some functionality to it
def handle_starttag(self, tag, attrs):
# We are looking for the begining of a link. Links normally look
# like <a href="www.someurl.com"></a>
if tag == 'a':
for (key, value) in attrs:
if key == 'href':
# We are grabbing the new URL. We are also adding the
# base URL to it. For example:
# www.netinstructions.com is the base and
# somepage.html is the new URL (a relative URL)
#
# We combine a relative URL with the base URL to create
# an absolute URL like:
# www.netinstructions.com/somepage.html
newUrl = parse.urljoin(self.baseUrl, value)
# And add it to our colection of links:
if self.rules is not None and self.rules.get('link_prefix') is not None:
found = False
for rule in self.rules.get('link_prefix'):
found = found or newUrl.startswith( parse.urljoin(self.baseUrl, rule ) )
if not found:
break
self.links = self.links + [newUrl]
# This is a new function that we are creating to get links
# that our spider() function will call
def getLinks(self, url, rules=None):
self.links = []
self.rules = rules
# Remember the base URL which will be important when creating
# absolute URLs
self.baseUrl = url
# Use the urlopen function from the standard Python 3 library
response = urlopen(url)
# Make sure that we are looking at HTML and not other things that
# are floating around on the internet (such as
# JavaScript files, CSS, or .PDFs for example)
if response.getheader('Content-Type')=='text/html':
htmlBytes = response.read()
# Note that feed() handles Strings well, but not bytes
# (A change from Python 2.x to Python 3.x)
htmlString = htmlBytes.decode("utf-8")
self.feed(htmlString)
return htmlString, self.links
else:
return "",[]
class AppleSpider:
def __init__(self, baseUrl=None, rules=None, callback=None):
self.baseUrl = baseUrl or [('http://hkm.appledaily.com/list.php?category_guid=10829391&category=instant', 0)]
self.rules = rules or {'link_prefix': ['http://hkm.appledaily.com/detail.php']}
self.callback = callback
def setCallback(self,callback):
self.callback = callback
def extractContent(self, html, url):
soup = BeautifulSoup(html, 'html.parser')
content = ''
lastUpdateTime = None
title = ''
if soup.select('.lastupdate'):
lastUpdateTime = soup.select('.lastupdate')[0].text
if soup.select('#content-article h1'):
title = soup.select('#content-article h1')[0].text
paragraphs = soup.select('#content-article p')
for paragraph in paragraphs:
if paragraph.get('class') is None or ( paragraph.get('class') not in [ ['video-caption'], ['next'] ] ):
if not paragraph.text.startswith('【'):
content += paragraph.text
if self.callback is not None and lastUpdateTime is not None:
self.callback(title, content, url, lastUpdateTime)
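    # Usage sketch: print every parsed article; the callback signature matches
    # the self.callback(...) call in extractContent above.
    # def on_article(title, content, url, last_update):
    #     print(last_update, title, url)
    # AppleSpider(callback=on_article).crawl(maxLevel=1)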
# And finally here is our spider. It takes in an URL, a word to find,
# and the number of pages to search through before giving up
def crawl(self, maxLevel=1):
pagesToVisit = self.baseUrl
levelVisited = 0
# The main loop. Create a LinkParser and get all the links on the page.
# Also search the page for the word or string
# In our getLinks function we return the web page
# (this is useful for searching for the word)
# and we return a set of links from that web page
# (this is useful for where to go next)
while pagesToVisit != []:
# Start from the beginning of our collection of pages to visit:
url, levelVisited = pagesToVisit[0]
if levelVisited > maxLevel:
break
pagesToVisit = pagesToVisit[1:]
print(levelVisited, "Visiting:", url)
parser = LinkParser()
data, links = parser.getLinks(url, self.rules)
self.extractContent(data,url)
# Add the pages that we visited to the end of our collection
# of pages to visit:
links = [(link, levelVisited+1) for link in links ]
pagesToVisit = pagesToVisit + links
| mit | -8,124,131,442,782,550,000 | 45.333333 | 117 | 0.588761 | false |
Hearen/OnceServer | pool_management/bn-xend-core/xend/XendLocalStorageRepo.py | 1 | 14839 | #!/usr/bin/python
#============================================================================
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Copyright (C) 2007 XenSource Ltd.
#============================================================================
#
# A pseudo-StorageRepository to provide a representation for the images
# that can be specified by xm.
#
import commands
import logging
import os
import stat
import threading
import re
import sys
import struct
import subprocess
from xen.util import mkdir
import uuid
from XendError import XendError
from XendVDI import *
from XendTask import XendTask
from XendStorageRepository import XendStorageRepository
from XendStateStore import XendStateStore
from XendOptions import instance as xendoptions
from XendPBD import XendPBD
from XendNode import XendNode
from xen.util.xpopen import xPopen3
KB = 1024
MB = 1024 * 1024
BYTE = 1024 * 1024 * 1024
STORAGE_LOCATION = "/home"
FILE_EXT = ".vhd"
VDI_TYPE = "file:"
IMG_FILE_PATH = "/home/os/"
VG_BINARY = "/sbin/vgs"
LV_CREATE_BINARY = "/sbin/lvcreate"
log = logging.getLogger("xend.XendLocalStorageRepo")
file_h = logging.FileHandler("/var/log/xen/local_sr.log")
log.addHandler(file_h)
log.setLevel(logging.DEBUG)
#log = logging.getLogger("xend.XendLocalStorageRepo")
def storage_max(location=None):
storage_max = 0
if not location:
location = STORAGE_LOCATION
cmd = "df -PT %s | awk \'END{print $3}\' | awk \'{if ($0) print}\'" %location
# cmd = [DF_COMMAND, '-Tl', '%s' %location,"|awk \'NR>1{print $2}\'|awk \'{if ($1!=null) print}\'"]
(rc, stdout, stderr) = doexec(cmd)
if rc != 0:
err = stderr.read();
out = stdout.read();
stdout.close();
stderr.close();
log.error('Failed to get %s storage_max.%s' %(location, err))
return storage_max
storage_max = stdout.read()
stdout.close()
stderr.close()
return storage_max
def storage_util(location=None):
storage_util = 0
if not location:
location = STORAGE_LOCATION
cmd = "df -PT %s | awk \'END{print $4}\' | awk \'{if ($0) print}\'" %location
# cmd = [DF_COMMAND, '-Tl', '%s' %location,"|awk \'NR>1{print $3}\'|awk \'{if ($1!=null) print}\'"]
(rc, stdout, stderr) = doexec(cmd)
if rc != 0:
err = stderr.read();
out = stdout.read();
stdout.close();
stderr.close();
log.error('Failed to get %s storage_util.%s' %(location, err))
return storage_util
storage_util = stdout.read()
stdout.close()
stderr.close()
return storage_util
def dir_util(location):
dir_util = 0
cmd = "du -c %s | awk \'/total/{print $1}\'" %location
(rc, stdout, stderr) = doexec(cmd)
if rc != 0:
err = stderr.read();
out = stdout.read();
stdout.close();
stderr.close();
log.error('Failed to get %s dir_util.%s' %(location, err))
return dir_util
dir_util = stdout.read()
stdout.close()
stderr.close()
return dir_util
def vhd_files(filepath):
    # os.walk() already descends into subdirectories, so one pass over the
    # files of each visited root is enough. (The previous dirs/files split
    # paired subdirectory names with the current root's file names and, as a
    # side effect, skipped .vhd files sitting next to a subdirectory.)
    result = {}
    for root, dirs, files in os.walk(filepath):
        for name in files:
            full_path = os.path.join(root, name)
            if os.path.isfile(full_path) and os.path.splitext(name)[1] == FILE_EXT:
                result[full_path] = os.path.getsize(full_path)
    return result
def doexec(args, inputtext=None):
"""Execute a subprocess, then return its return code, stdout and stderr"""
proc = xPopen3(args, True)
if inputtext != None:
proc.tochild.write(inputtext)
stdout = proc.fromchild
stderr = proc.childerr
rc = proc.wait()
return (rc, stdout, stderr)
def mytrim(zstr):
    # strip() already removes whitespace from both ends; the previous
    # lstrip/rstrip/strip sequence was redundant.
    return zstr.strip()
class XendLocalStorageRepo(XendStorageRepository):
"""A backwards compatibility storage repository so that
traditional file:/dir/file.img and phy:/dev/hdxx images can
still be represented in terms of the Xen API.
"""
def __init__(self, sr_uuid, sr_type='local',
name_label='local',
name_description='Traditional Local Storage Repo',
other_config={'location':'/home/local_sr', 'auto-scan':'False'},
content_type='vhd',
shared=False,
sm_config={}):
"""
@ivar images: mapping of all the images.
@type images: dictionary by image uuid.
@ivar lock: lock to provide thread safety.
"""
XendStorageRepository.__init__(self, sr_uuid, sr_type,
name_label, name_description)
self.type = sr_type
self.name_label = name_label
self.name_description = name_description
self.other_config = other_config
self.content_type = content_type
self.shared = shared
self.sm_config = sm_config
self.local_sr_dir = self.other_config.get('location')
self.location = self.local_sr_dir
# self.local_sr_dir = os.path.join(self.location, self.uuid)
if not os.path.exists(self.local_sr_dir):
os.makedirs(self.local_sr_dir)
# s_max = storage_max(self.local_sr_dir)
# if s_max:
# self.physical_size = int(s_max)*KB
# else:
# self.physical_size = 0
# s_util = storage_util(self.local_sr_dir)
# if s_util:
# self.physical_utilisation = int(s_util)*KB
# else:
# self.physical_utilisation = 0
# d_util = dir_util(self.local_sr_dir)
# if d_util:
# self.virtual_allocation = int(d_util)*KB
# else:
# self.virtual_allocation = 0
self.state = XendStateStore(xendoptions().get_xend_state_path()
+ '/local_sr/%s' % self.uuid)
stored_images = self.state.load_state('vdi')
if stored_images:
for image_uuid, image in stored_images.items():
self.images[image_uuid] = XendLocalVDI(image)
def update(self, auto=True):
stored_images = self.state.load_state('vdi')
images_path = []
if stored_images:
for image_uuid, image in stored_images.items():
images_path.append(image['location'])
self.images[image_uuid] = XendLocalVDI(image)
def get_record(self, transient = True):
retval = {'uuid': self.uuid,
'name_label': self.name_label,
'name_description': self.name_description,
'resident_on' : XendNode.instance().uuid,
'virtual_allocation': 0,
'physical_utilisation': self.get_physical_utilisation(),
'physical_size': self.get_physical_size(),
'type': self.type,
'content_type': self.content_type,
'VDIs': self.images.keys(),
'PBDs': XendPBD.get_by_SR(self.uuid),
'other_config': self.other_config,
'shared': self.shared,
'sm_config': self.sm_config,
}
return retval
def get_physical_utilisation(self):
s_util = storage_util(self.local_sr_dir)
if s_util:
self.physical_utilisation = int(s_util)*KB
else:
self.physical_utilisation = 0
return self.physical_utilisation
def get_physical_size(self):
s_max = storage_max(self.local_sr_dir)
if s_max:
self.physical_size = int(s_max)*KB
else:
self.physical_size = 0
return self.physical_size
def get_vdi_physical_utilisation(self, vdi_ref):
vdi = self.images.get(vdi_ref)
return vdi.get_physical_utilisation()
def get_vdi_virtual_size(self, vdi_ref):
vdi = self.images.get(vdi_ref)
return vdi.get_virtual_size()
def create_vdi(self, vdi_struct, transient = False, create_file=True):
""" Creates a fake VDI image for a traditional image string.
The image uri is stored in the attribute 'uri'
"""
if not vdi_struct.get('uuid') or vdi_struct.get('uuid') == '':
vdi_struct['uuid'] = uuid.createString()
vdi_struct['SR'] = self.uuid
# vdi_struct['name_label'] = vdi_struct['uuid']
if vdi_struct.get('type') == 'user' and create_file:
self.create_img_file(vdi_struct)
vdi_struct['physical_utilisation'] = int(vdi_struct['virtual_size']) * BYTE
new_image = XendLocalVDI(vdi_struct)
# self.create_logical_volume(vdi_struct)
self.images[new_image.uuid] = new_image
self.save_state(transient)
return new_image.uuid
def copy_vdi(self, vdi_struct, p_vdi_uuid, transient = False, copy_file = False):
""" Creates a fake VDI image for a traditional image string.
The image uri is stored in the attribute 'uri'
"""
if not vdi_struct.get('uuid') or vdi_struct.get('uuid') == '':
vdi_struct['uuid'] = uuid.createString()
vdi_struct['SR'] = self.uuid
new_image = XendLocalVDI(vdi_struct)
self.images[new_image.uuid] = new_image
self.save_state(transient)
if vdi_struct.get('type') == 'user' and copy_file:
self.copy_img_file(vdi_struct, p_vdi_uuid)
# self.create_logical_volume(vdi_struct)
return new_image.uuid
def create_img_file(self, vdi_struct, path=None, size=None):
# path = IMG_FILE_PATH + vdi_struct.get('uuid') + '.img'
path = self.local_sr_dir
file = '%s/%s.vhd' %(path, vdi_struct.get('uuid'))
size = int(vdi_struct.get('virtual_size')) * KB
if not os.path.exists(path):
os.makedirs(path)
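        # count=0 with seek=<size> makes dd just extend the file: a sparse
        # image of `size` MiB is allocated without actually writing zeros.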
if not os.path.exists(file):
# subprocess.Popen("vhd-util create -n %s -s %d" % (file, size), shell=True,
# stdout=subprocess.PIPE)
p = subprocess.Popen("dd if=/dev/zero of=%s bs=1M count=0 seek=%d" % (file, size), shell=True,
stdout=subprocess.PIPE)
def copy_img_file(self, vdi_struct, p_vdi_uuid, path=None, size=None):
# location = self.other_config['location']
# local = location.split(':')[1]
# path = '%s/%s' %(local,self.uuid)
path = self.local_sr_dir
file = '%s/%s.vhd' %(path, vdi_struct.get('uuid'))
p_file = '%s/%s.vhd' %(path, p_vdi_uuid)
cmd = 'cp %s %s' %(p_file, file)
log.debug("copy img file: %s" % cmd)
(rc, stdout, stderr) = doexec(cmd)
if rc != 0:
err = stderr.read();
out = stdout.read();
stdout.close();
stderr.close();
raise Exception, 'Failed to execute cp %s %s.%s' %(p_file, file, err);
stdout.close();
stderr.close();
time_out = 20
i = 0
while True:
i += 1
if os.path.exists(file):
break
            elif i > time_out:
raise Exception, 'Clone file %s, timeout!' % file;
else:
time.sleep(1)
log.debug("Clone finished, cost: %i s." % i)
def get_vg_name(self):
cmd = [VG_BINARY, '--noheadings', '--nosuffix', '--options=vg_name']
(rc, stdout, stderr) = doexec(cmd)
if rc != 0:
err = stderr.read();
out = stdout.read();
stdout.close();
stderr.close();
raise Exception, 'Failed to get VG name. Check that lvm installed in dom0.';
vg_name = stdout.read()
stdout.close()
stderr.close()
return vg_name
def create_logical_volume(self, vdi_struct, lv_name=None, size=None, vg_name=None):
lv_name = 'VHD-' + vdi_struct.get('uuid')
size = int(vdi_struct.get('virtual_size')) * 1024
vg_name = mytrim(self.get_vg_name())
cmd = [LV_CREATE_BINARY, '%s' %vg_name, '-L', '%dM' %size, '-n', '%s' %lv_name]
(rc, stdout, stderr) = doexec(cmd)
if rc != 0:
err = stderr.read();
out = stdout.read();
stdout.close();
stderr.close();
raise Exception, 'Failed to create logical volume: %s, lv_size: %d on VG: %s.\n%s' %(lv_name, size, vg_name, err);
stdout.close()
stderr.close()
def del_img_file(self, vdi_uuid):
# location = self.other_config['location']
# local = location.split(':')[1]
# path = '%s/%s' %(local,self.uuid)
path = self.local_sr_dir
file = '%s/%s.vhd' %(path, vdi_uuid)
cmd = 'rm -f %s' % file
(rc, stdout, stderr) = doexec(cmd)
if rc != 0:
err = stderr.read();
out = stdout.read();
stdout.close();
stderr.close();
raise Exception, 'Failed to execute rm -f %s.%s' %(file, err);
stdout.close();
stderr.close();
def save_state(self, transient=False):
vdi_records = dict([(k, v.get_record(transient))
for k, v in self.images.items()])
self.state.save_state('vdi', vdi_records)
def destroy_vdi(self, vdi_uuid, del_file=True, transient = False):
if vdi_uuid in self.images:
if del_file:
log.debug('destroy_vdi')
log.debug(self.images[vdi_uuid])
self.del_img_file(vdi_uuid)
del self.images[vdi_uuid]
self.save_state(transient)
XendNode.instance().save_local_SRs()
| mit | -7,252,698,364,943,811,000 | 36.662437 | 126 | 0.554754 | false |
RedFantom/ttkwidgets | examples/example_timeline.py | 1 | 1282 | # -*- coding: utf-8 -*-
# Copyright (c) RedFantom 2017
# For license see LICENSE
import tkinter as tk
from ttkwidgets import TimeLine
window = tk.Tk()
timeline = TimeLine(
window,
categories={str(key): {"text": "Category {}".format(key)} for key in range(0, 5)},
height=100, extend=True
)
menu = tk.Menu(window, tearoff=False)
menu.add_command(label="Some Action", command=lambda: print("Command Executed"))
timeline.tag_configure("1", right_callback=lambda *args: print(args), menu=menu, foreground="green",
active_background="yellow", hover_border=2, move_callback=lambda *args: print(args))
timeline.create_marker("1", 1.0, 2.0, background="white", text="Change Color", tags=("1",), iid="1")
timeline.create_marker("2", 2.0, 3.0, background="green", text="Change Category", foreground="white", iid="2",
change_category=True)
timeline.create_marker("3", 1.0, 2.0, text="Show Menu", tags=("1",))
timeline.create_marker("4", 4.0, 5.0, text="Do nothing", move=False)
timeline.draw_timeline()
timeline.grid()
window.after(2500, lambda: timeline.configure(marker_background="cyan"))
window.after(5000, lambda: timeline.update_marker("1", background="red"))
window.after(5000, lambda: print(timeline.time))
window.mainloop()
| gpl-3.0 | -8,101,244,742,804,365,000 | 43.206897 | 110 | 0.684087 | false |
tundish/addisonarches | addisonarches/test/test_script.py | 1 | 2656 | #!/usr/bin/env python3.4
# encoding: UTF-8
# This file is part of turberfield.
#
# Turberfield is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Turberfield is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with turberfield. If not, see <http://www.gnu.org/licenses/>.
import unittest
from addisonarches.script import phrases
from addisonarches.script import Trigger
from addisonarches.script import Reply
from addisonarches.script import Script
class FuzzTests(unittest.TestCase):
def test_i_am(self):
script = Script()
im = Trigger("I am", ("i am", "i'm"))
imnot = Trigger("I am not", ("i am not", "i'm not"))
right = Reply("Correct reply", None)
wrong = Reply("Incorrect reply", None)
script.register(im, right)
script.register(imnot, wrong)
rv, score = next(script.prompt("I'm"))
self.assertIs(right, rv)
class NewPlayerTests(unittest.TestCase):
def test_null_reply(self):
script = Script()
r = Reply("Correct reply", None)
script.register(phrases["Hello"], r)
rv, score = next(script.prompt("Goodbye"))
self.assertLessEqual(score, 50)
def test_say_hello(self):
script = Script()
script.register(
phrases["Hello"],
Reply("And hello to you", None))
rv, score = next(script.prompt("Hi"))
self.assertEqual("and hello to you", rv.text.lower())
def test_want_food(self):
script = Script()
r = Reply("Correct reply", None)
w = Reply("Incorrect reply", None)
script.register(phrases["want food"], r)
script.register(phrases["Hello"], w)
self.assertIs(r, next(script.prompt("I want food"))[0])
self.assertIs(r, next(script.prompt("I'm hungry"))[0])
self.assertIs(r, next(script.prompt("I am hungry"))[0])
self.assertIs(r, next(script.prompt("Have you got any food?"))[0])
# Unexpected consequences...
rv, score = next(script.prompt("Do you want any food?"))
self.assertGreaterEqual(score, 75)
rv, score = next(script.prompt("Are you hungry?"))
self.assertGreaterEqual(score, 70)
| agpl-3.0 | 8,386,826,899,400,529,000 | 36.408451 | 74 | 0.649849 | false |
nathan-osman/StackIRC | setup.py | 1 | 1469 | #!/usr/bin/env python
'''
Copyright (c) 2012 Nathan Osman
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
from distutils.core import setup
setup(name='stackirc',
version='0.1',
description='A bot for posting Stack Exchange questions to IRC channels.',
author='Nathan Osman',
author_email='[email protected]',
url='https://github.com/nathan-osman/StackIRC',
license='MIT',
packages=['stackirc',],
scripts=['scripts/stackirc',])
| mit | 5,386,130,225,626,811,000 | 43.515152 | 80 | 0.761743 | false |
danac/xpensemate | doc/intro/debt_graph.py | 1 | 1722 | import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
import pylab
G1 = nx.DiGraph()
G2 = nx.DiGraph()
for G in [G1, G2]:
G.add_node('A', pos=(0.55,0.5))
G.add_node('B', pos=(0.95,0.6))
G.add_node('C', pos=(0,0.7))
G.add_node('D', pos=(0.9,1.2))
G.add_node('E', pos=(0.35,1.1))
G1.add_edges_from([('A', 'B')], weight=1)
G1.add_edges_from([('A', 'C')], weight=2)
G1.add_edges_from([('D', 'B')], weight=1.5)
G1.add_edges_from([('D', 'C')], weight=5)
G1.add_edges_from([('A', 'D')], weight=1)
G1.add_edges_from([('C', 'B')], weight=1.5)
G1.add_edges_from([('E', 'C')], weight=1)
G2.add_edges_from([('A', 'B')], weight=4)
G2.add_edges_from([('E', 'C')], weight=1)
G2.add_edges_from([('D', 'C')], weight=5.5)
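# G1 is the full debt graph; G2 expresses the same net balances per node with
# fewer transfers. They are rendered as the "full" and "simple" figures below.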
names = ["full", "simple"]
i=0
for G in [G1, G2]:
f=plt.figure()
edge_labels=dict([((u,v,),d['weight'])
for u,v,d in G.edges(data=True)])
#red_edges = [('C','D'),('D','A')]
edge_colors = ['black' for edge in G.edges()] #['black' if not edge in red_edges else 'red' for edge in G.edges()]
    pos = nx.get_node_attributes(G, 'pos')  # fixed positions; the spring_layout result was discarded
# Draw nodes
nx.draw_networkx_nodes(G,pos,node_size=700, node_color='orange')
nx.draw_networkx_edge_labels(G,pos,edge_labels=edge_labels, font_size=16)
nx.draw_networkx_edges(G,pos,edgelist=G.edges(data=True), edge_color='k')
nx.draw_networkx_labels(G,pos,font_size=16,font_family='sans-serif')
#nx.draw(G,pos, node_color = 'orange', node_size=1500,edge_color=edge_colors,edge_cmap=plt.cm.Reds)
plt.axis('off')
plt.savefig("../static/debt_graph_"+names[i]+".png", format='png', transparent=True)
i+=1
pylab.show()
| agpl-3.0 | -3,601,441,419,496,271,000 | 33.44 | 118 | 0.585366 | false |
SiLab-Bonn/pyBAR_mimosa26_interpreter | pymosa_mimosa26_interpreter/testing/test_interpreter.py | 1 | 9948 | ''' Script to check the correctness of the interpretation. Files with _orig.h5 suffix are files interpreted with Tokos
original code. The new interpretation is checked against the old implementation of the interpreter.
'''
import os
import unittest
import tables as tb
import numpy as np
from pymosa_mimosa26_interpreter import data_interpreter
from pymosa_mimosa26_interpreter import raw_data_interpreter
# from pymosa_mimosa26_interpreter.testing.tools.test_tools import compare_h5_files
testing_path = os.path.dirname(__file__) # Get file path
tests_data_folder = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(testing_path)) + r'/testing/')) # Set test data path
def create_tlu_word(trigger_number, time_stamp):
return ((time_stamp << 16) & (0x7FFF0000)) | (trigger_number & 0x0000FFFF) | (1 << 31 & 0x80000000)
def create_m26_header(plane, data_loss=False):
return (0x20 << 24 & 0xFF000000) | (plane << 20 & 0x00F00000)
def create_frame_header_low(plane, m26_timestamp):
return create_m26_header(plane=plane) | (m26_timestamp & 0x0000FFFF) | (1 << 16 & 0x00010000)
def create_frame_header_high(plane, m26_timestamp):
return create_m26_header(plane=plane) | (((m26_timestamp & 0xFFFF0000) >> 16) & 0x0000FFFF)
def create_frame_id_low(plane, m26_frame_number):
return create_m26_header(plane=plane) | (m26_frame_number & 0x0000FFFF)
def create_frame_id_high(plane, m26_frame_number):
return create_m26_header(plane=plane) | (((m26_frame_number & 0xFFFF0000) >> 16) & 0x0000FFFF)
def create_frame_length(plane, frame_length):
return create_m26_header(plane=plane) | (frame_length & 0x0000FFFF)
def create_row_data_word(plane, row, n_words):
return create_m26_header(plane=plane) | (row << 4 & 0x00007FF0) | (n_words & 0x0000000F)
def create_column_data_word(plane, column, n_hits):
return create_m26_header(plane=plane) | (column << 2 & 0x00001FFC) | (n_hits & 0x00000003)
def create_frame_trailer0(plane):
return create_m26_header(plane=plane) | (0xaa50 & 0x0000FFFF)
def create_frame_trailer1(plane):
return create_m26_header(plane=plane) | (((0xaa50 | plane)) & 0x0000FFFF)
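# Bit layout implied by the masks above: a TLU word sets bit 31 and packs a
# 15-bit time stamp into bits 30..16 and the trigger number into bits 15..0;
# a Mimosa26 word carries 0x20 in bits 31..24, the plane id in bits 23..20
# and the payload (time stamp, frame id/length, row/column data) in the low
# 16 bits.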
class TestInterpreter(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls): # Remove created files
pass
# os.remove(os.path.join(tests_data_folder, 'anemone_generated_raw_data.h5'))
# os.remove(os.path.join(tests_data_folder, 'anemone_generated_raw_data_interpreted.h5'))
# os.remove(os.path.join(tests_data_folder, 'anemone_interpreted.h5'))
# os.remove(os.path.join(tests_data_folder, 'anemone_interpreted.pdf'))
@unittest.skip("bug in create_raw_data")
def test_interpretation(self):
result_dtype = raw_data_interpreter.hits_dtype
FRAME_UNIT_CYCLE = raw_data_interpreter.FRAME_UNIT_CYCLE
ROW_UNIT_CYCLE = raw_data_interpreter.ROW_UNIT_CYCLE
generated_raw_data_file = os.path.join(tests_data_folder, 'anemone_generated_raw_data.h5')
generated_raw_data_interpreted_file = os.path.join(tests_data_folder, 'anemone_generated_raw_data_interpreted.h5')
interpreted_file = os.path.join(tests_data_folder, 'anemone_interpreted.h5')
# TODO: add multi-hits events, add events with multiple trigger, add events with out of range trigger ts
def create_raw_data(n_events=1000, plane=0, delta_trigger_ts=8000, n_hits_per_events=1, n_events_trigger_hit=0.6, n_events_trigger_no_hit=0.3, n_events_no_trigger_hit=0.1):
# shuffle event type: 1: event with hit and trigger; 2: event with trigger but no hit; 3: event with hit but no trigger
event_type = np.random.choice([1, 2, 3], size=(n_events,), p=[n_events_trigger_hit, n_events_trigger_no_hit, n_events_no_trigger_hit])
print('Generated %i events. %i events have no hit, %i events have no trigger (or no matching trigger).' % (n_events, np.sum([event_type == 2]), np.sum([event_type == 3])))
result_array = np.zeros(shape=(np.sum([event_type == 1]),), dtype=result_dtype)
# create random trigger time stamps
trigger_time_stamps = np.linspace(start=14103, stop=14103 + n_events * delta_trigger_ts, num=n_events, dtype=np.int)
hit_i = 0
event_number = -1 # event number starts at 0
event_status = 0
for index in range(n_events):
# generate row and column
row, column = [np.random.randint(low=0, high=566), np.random.randint(low=0, high=1151)]
# generate m26 time stamp based on event type
row_time_stamp = np.random.randint(low=trigger_time_stamps[index] - raw_data_interpreter.FRAME_UNIT_CYCLE - raw_data_interpreter.ROW_UNIT_CYCLE, high=trigger_time_stamps[index])
if event_type[index] != 3:
raw_data.append(create_tlu_word(trigger_number=index, time_stamp=trigger_time_stamps[index]))
event_number += 1
if event_type[index - 1] == 3 and index != 0:
# if event before was event without trigger, set current event status as trigger increase error
event_status |= raw_data_interpreter.TRIGGER_NUMBER_ERROR
raw_data.append(create_frame_header_low(plane=plane, m26_timestamp=row_time_stamp + 2 * FRAME_UNIT_CYCLE - ROW_UNIT_CYCLE * row + raw_data_interpreter.TIMING_OFFSET))
raw_data.append(create_frame_header_high(plane=plane, m26_timestamp=row_time_stamp + 2 * FRAME_UNIT_CYCLE - ROW_UNIT_CYCLE * row + raw_data_interpreter.TIMING_OFFSET))
raw_data.append(create_frame_id_low(plane=plane, m26_frame_number=index))
raw_data.append(create_frame_id_high(plane=plane, m26_frame_number=index))
raw_data.append(create_frame_length(plane=plane, frame_length=n_hits_per_events)) # number of data record words
raw_data.append(create_frame_length(plane=plane, frame_length=n_hits_per_events)) # number of data record words
if event_type[index] != 2: # only create hit words if event with hit
raw_data.append(create_row_data_word(plane=plane, row=row, n_words=n_hits_per_events))
raw_data.append(create_column_data_word(plane=plane, column=column, n_hits=n_hits_per_events - 1)) # only one hit
raw_data.append(create_frame_trailer0(plane=plane))
raw_data.append(create_frame_trailer1(plane=plane))
# write to result array
if event_type[index] == 1: # only write trigger hits to data file
result_array['plane'][hit_i] = plane
result_array['event_status'][hit_i] = event_status
result_array['event_number'][hit_i] = event_number
result_array['trigger_number'][hit_i] = index
result_array['trigger_time_stamp'][hit_i] = trigger_time_stamps[index]
result_array['frame_id'][hit_i] = index
result_array['column'][hit_i] = column
result_array['row'][hit_i] = row
result_array['row_time_stamp'][hit_i] = row_time_stamp
hit_i += 1
event_status = 0
return raw_data, result_array
        # generate the raw data words in memory (nothing is read from a file here)
# TODO: do raw data word creation automatically
raw_data = []
raw_data, result_array = create_raw_data()
# write generated raw data to file
filter_raw_data = tb.Filters(complib='blosc', complevel=5, fletcher32=False)
with tb.open_file(generated_raw_data_file, 'w') as out_file_h5:
raw_data_earray = out_file_h5.create_earray(
where=out_file_h5.root,
name='raw_data',
atom=tb.UIntAtom(),
shape=(0,), title='raw_data',
filters=filter_raw_data)
raw_data_earray.append(raw_data)
# write generated interpreted file
with tb.open_file(generated_raw_data_interpreted_file, 'w') as out_file_h5:
hit_table = out_file_h5.create_table(
where=out_file_h5.root,
name='Hits',
description=result_dtype,
title='hit_data',
filters=tb.Filters(complib='blosc', complevel=5, fletcher32=False))
hit_table.append(result_array)
with data_interpreter.DataInterpreter(raw_data_file=generated_raw_data_file, analyzed_data_file=interpreted_file, trigger_data_format=2, analyze_m26_header_ids=[0], create_pdf=True, chunk_size=1000000) as raw_data_analysis:
raw_data_analysis.create_occupancy_hist = True
raw_data_analysis.create_error_hist = True
raw_data_analysis.create_hit_table = True
raw_data_analysis.interpret_word_table()
# Open result and interpreter file in order to compare them. Compare only Hit fields
with tb.open_file(generated_raw_data_interpreted_file, 'r') as in_file_h5:
data_generated = in_file_h5.root.Hits[:]
with tb.open_file(interpreted_file, 'r') as in_file_h5:
data_interpreted = in_file_h5.root.Hits[:]
# Compare with result
for key in data_generated.dtype.names:
if key == 'event_status':
continue # skip event status
np.testing.assert_array_equal(data_generated[key], data_interpreted[key], err_msg='Column %s mismatch' % key)
# checks_passed, error_msg = compare_h5_files(first_file=generated_raw_data_interpreted_file, second_file=interpreted_file)
# self.assertTrue(checks_passed, msg=error_msg)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestInterpreter)
unittest.TextTestRunner(verbosity=2).run(suite)
| bsd-3-clause | 1,335,327,771,051,795,500 | 51.634921 | 231 | 0.643245 | false |
krzysztofszymanski/appengine-bugs | issue_handler.py | 1 | 2028 | from google.appengine.api import users
from lib import BaseRequest
from service import *
from models import Issue
from google.appengine.ext.webapp import template
template.register_template_library('tags.br')
class IssueHandler(BaseRequest):
def get(self, project_slug, issue_slug):
if self.request.path[-1] != "/":
self.redirect("%s/" % self.request.path, True)
return
user = users.get_current_user()
if not user:
self.redirect('/')
try:
issue = Issue.all().filter('internal_url =', "/%s/%s/" % (
project_slug, issue_slug)).fetch(1)[0]
issues = Issue.all().filter('project =', issue.project).filter(
'fixed =', False).fetch(10)
except IndexError:
self.render_404()
return
on_list = False
try:
if user.email() in issue.project.other_users:
on_list = True
except:
pass
if issue.project.user == user or users.is_current_user_admin() or on_list:
owner = True
else:
owner = False
context = {
'issue': issue,
'issues': issues,
'owner': owner,
}
output = self.render("issue.html", context)
self.response.out.write(output)
def post(self, project_slug, issue_slug):
# if we don't have a user then throw
# an unauthorised error
user = users.get_current_user()
if not user:
self.render_403()
return
issue = Issue.all().filter('internal_url =', "/%s/%s/" % (
project_slug, issue_slug)).fetch(1)[0]
Service.update_issue_with_request_values(issue, self.request)
issue.put()
service = Service()
if issue.fixed:
service.send_fixed_email(issue)
else:
service.send_issue_updated_email(issue)
self.redirect("/projects{}".format(issue.internal_url))
| mit | 1,482,959,902,427,885,000 | 28.391304 | 82 | 0.54783 | false |
EdinburghGenomics/clarity_scripts | prodscripts/AssignWorkflow.py | 1 | 2969 | #!/usr/bin/env python
import getopt
import sys
from genologics.entities import Process
from genologics.lims import Lims
HOSTNAME = ""
VERSION = ""
BASE_URI = ""
api = None
args = None
def get_workflow_stage(lims, workflow_name, stage_name=None):
workflows = [w for w in lims.get_workflows() if w.name == workflow_name]
if len(workflows) != 1:
return
if not stage_name:
return workflows[0].stages[0]
stages = [s for s in workflows[0].stages if s.name == stage_name]
if len(stages) != 1:
return
return stages[0]
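# Note: get_workflow_stage() silently falls through (returning None) when the
# workflow or stage name is missing or ambiguous, so callers get stage=None
# rather than an exception.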
def get_parent_process_id(art):
return art.parent_process.id
def assignWorkflow():
LIMSID = args["limsid"]
usernameargs = args["username"]
passwordargs = args["password"]
stepURI = args["stepURI"]
apiLocation = stepURI.find('/api')
BASE_URI = stepURI[0:apiLocation]
l = Lims(baseuri=BASE_URI, username=usernameargs, password=passwordargs)
p = Process(l, id=LIMSID)
artifacts = p.all_inputs()
for art in artifacts:
sample = art.samples[0]
submitted_art = sample.artifact
if art.samples[0].udf.get("Proceed To SeqLab") and not art.samples[0].udf.get("2D Barcode"): #checks to see if sample is in plate or fluidX tube
stage = get_workflow_stage(l, "PreSeqLab EG 6.0", "Sequencing Plate Preparation EG 2.0")
l.route_artifacts([submitted_art], stage_uri=stage.uri)
elif art.samples[0].udf.get("Proceed To SeqLab") and art.samples[0].udf.get("2D Barcode"): #if is a fluidX tube will need to find the derived artifact created by the FluidX Transfer step
fluidX_artifacts = l.get_artifacts(process_type="FluidX Transfer From Rack Into Plate EG 1.0 ST", sample_name=art.samples[0].name, type='Analyte')
        if len(fluidX_artifacts) > 1:  # it's possible that the FluidX Transfer has occurred more than once, so find the most recent occurrence of that step
fluidX_artifacts.sort(key=get_parent_process_id, reverse=True) #sorts the artifacts returned to place the most recent artifact at position 0 in list
fluidX_artifact=fluidX_artifacts[0]
else:
fluidX_artifact=fluidX_artifacts[0]
stage = get_workflow_stage(l, "PreSeqLab EG 6.0", "Sequencing Plate Preparation EG 2.0")
l.route_artifacts([fluidX_artifact], stage_uri=stage.uri)
def main():
global api
global args
args = {}
opts, extraparams = getopt.getopt(sys.argv[1:], "l:s:u:p:")
for o, p in opts:
if o == '-l':
args["limsid"] = p
elif o == '-s':
args["stepURI"] = p
elif o == '-u':
args["username"] = p
elif o == '-p':
args["password"] = p
## at this point, we have the parameters the EPP plugin passed, and we have network plumbing
## so let's get this show on the road!
assignWorkflow()
if __name__ == "__main__":
main()
| mit | -7,272,142,704,908,751,000 | 33.126437 | 194 | 0.633547 | false |
Dzess/ALFIRT | alfirt.runner/src/generator/scene/tests/SceneInjecterX3DTests.py | 1 | 3811 | '''
Created on Aug 20, 2011
@author: Piotr
'''
import unittest
from generator.scene.SceneInjecterX3D import SceneInjecterX3D
from generator.data.SceneDescription import SceneDescription
from generator.data.ObjectPose import ObjectPose
from lxml import etree
from lxml import objectify
class TagWriterX3DTests(unittest.TestCase):
def setUp(self):
self.injecter = SceneInjecterX3D()
# Setting up the X3D string with ALFIRT namespace tags
self.x3dString = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE X3D PUBLIC "ISO//Web3D//DTD X3D 3.2//EN" "http://www.web3d.org/specifications/x3d-3.2.dtd">
<X3D profile="Interchange" version="3.2"
xmlns:xsd="http://www.w3.org/2001/XMLSchema-instance"
xmlns:alfirt="ALFIRT"
xsd:noNamespaceSchemaLocation=" http://www.web3d.org/specifications/x3d-3.2.xsd ">
<Scene>
<Viewpoint description='Rear View' orientation='0 1 0 3.14159' position='0 0 -10'/>
<Shape alfirt:anchor_translate="0 1 2" alfirt:anchor_rotate="0.4 0.2 0.3">
<IndexedFaceSet coordIndex="0 1 2">
<Coordinate point="0 0 0 1 0 0 0.5 1 0"/>
</IndexedFaceSet>
</Shape>
</Scene>
</X3D>
"""
camera = ObjectPose([0, 0, 0], [0, 0, 0])
anchor = ObjectPose([1, 2, 3], [4, 5, 6])
self.scene = SceneDescription(camera, anchor)
self.expected_x3dString = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE X3D PUBLIC "ISO//Web3D//DTD X3D 3.2//EN" "http://www.web3d.org/specifications/x3d-3.2.dtd">
<X3D profile="Interchange" version="3.2"
xmlns:xsd="http://www.w3.org/2001/XMLSchema-instance"
xmlns:alfirt="ALFIRT"
xsd:noNamespaceSchemaLocation=" http://www.web3d.org/specifications/x3d-3.2.xsd ">
<Scene>
<Viewpoint description='Rear View' orientation='-0.9999999403953552 0.0 0.0 1.5707963705062866' position='0.0 0.0 0.0'/>
<Shape alfirt:anchor_translate="0 1 2" alfirt:anchor_rotate="0.4 0.2 0.3">
<IndexedFaceSet coordIndex="0 1 2">
<Coordinate point="0 0 0 1 0 0 0.5 1 0"/>
</IndexedFaceSet>
</Shape>
</Scene>
</X3D>
"""
def test_writing_proper_values(self):
result = self.injecter.injectScene(data=self.x3dString, scene=self.scene)
print(result)
# get the whitespace trimmed
expected_tree = objectify.fromstring(self.expected_x3dString.encode(encoding='ascii', errors='ignore'))
result_tree = objectify.fromstring(result.encode(encoding='utf_8', errors='strict'))
expected_string = etree.tostring(expected_tree)
result_string = etree.tostring(result_tree)
print(expected_string)
print(result_string)
self.assertEqual(result_string, expected_string, "The values were not injected")
def test_writing_nones_values(self):
with self.assertRaises(TypeError):
self.injecter.injectScene(None, None)
def test_writing_wrong_values(self):
with self.assertRaises(TypeError):
self.injecter.injectScene(3, "scene")
if __name__ == "__main__":
unittest.main()
| mit | 8,723,458,854,731,689,000 | 41.820225 | 151 | 0.535817 | false |
fedora-infra/fedmsg_meta_fedora_infrastructure | fedmsg_meta_fedora_infrastructure/fasshim.py | 1 | 9107 | import collections
import logging
import os
import socket
import string
import threading
from hashlib import sha256, md5
_fas_cache = {}
_fas_cache_lock = threading.Lock()
log = logging.getLogger("moksha.hub")
try:
from six.moves.urllib import parse
except ImportError:
# Really really old 'six' doesn't have this move.. so we fall back to
# python-2 only usage. If we're on an old 'six', then we can assume that
# we must also be on an old Python.
import urllib as parse
def _ordered_query_params(params):
# if OrderedDict is available, preserver order of params
# to make this easily testable on PY3
if hasattr(collections, 'OrderedDict'):
retval = collections.OrderedDict(params)
else:
retval = dict(params)
return retval
# https://github.com/fedora-infra/fedmsg_meta_fedora_infrastructure/issues/320
hardcoded_avatars = {
'bodhi': 'https://apps.fedoraproject.org/img/icons/bodhi-{size}.png',
'koschei': 'https://apps.fedoraproject.org/img/icons/koschei-{size}.png',
# Taskotron may have a new logo at some point. Check this out:
# https://mashaleonova.wordpress.com/2015/08/18/a-logo-for-taskotron/
# Ask tflink before actually putting this in place though. we need
# a nice small square version. It'll look great!
# In the meantime, we can use this temporary logo.
'taskotron': (
'https://apps.fedoraproject.org/img/icons/taskotron-{size}.png'
)
}
def avatar_url(username, size=64, default='retro'):
if username in hardcoded_avatars:
return hardcoded_avatars[username].format(size=size)
openid = "http://%s.id.fedoraproject.org/" % username
return avatar_url_from_openid(openid, size, default)
def avatar_url_from_openid(openid, size=64, default='retro', dns=False):
"""
Our own implementation since fas doesn't support this nicely yet.
"""
if dns:
# This makes an extra DNS SRV query, which can slow down our webapps.
# It is necessary for libravatar federation, though.
import libravatar
return libravatar.libravatar_url(
openid=openid,
size=size,
default=default,
)
else:
params = _ordered_query_params([('s', size), ('d', default)])
query = parse.urlencode(params)
hash = sha256(openid.encode('utf-8')).hexdigest()
return "https://seccdn.libravatar.org/avatar/%s?%s" % (hash, query)
def avatar_url_from_email(email, size=64, default='retro', dns=False):
"""
Our own implementation since fas doesn't support this nicely yet.
"""
if dns:
# This makes an extra DNS SRV query, which can slow down our webapps.
# It is necessary for libravatar federation, though.
import libravatar
return libravatar.libravatar_url(
email=email,
size=size,
default=default,
)
else:
params = _ordered_query_params([('s', size), ('d', default)])
query = parse.urlencode(params)
hash = md5(email.encode('utf-8')).hexdigest()
return "https://seccdn.libravatar.org/avatar/%s?%s" % (hash, query)
def make_fasjson_cache(**config):
global _fas_cache
if _fas_cache:
return _fas_cache
log.warn("No previous fasjson cache found. Looking to rebuild.")
creds = config['fasjson_credentials']
krb5_principal = creds.get("krb5_principal")
krb5_client_ktname = creds.get("krb5_client_ktname")
gss_use_proxy = creds.get("gss_use_proxy")
if krb5_client_ktname:
os.environ["KRB5_CLIENT_KTNAME"] = krb5_client_ktname
if gss_use_proxy:
os.environ["GSS_USE_PROXY"] = "yes"
# the base URL shouldn't contain the API version, the fasjson client takes
# care of it
default_url = 'https://fasjson.fedoraproject.org/'
base_url = creds.get('base_url', default_url)
try:
import fasjson_client
except ImportError:
fasjson_client = None
log.warn(
"No fasjson-client installed. Falling back to querying directly."
)
if fasjson_client:
try:
client = fasjson_client.Client(
url=base_url, principal=krb5_principal
)
except fasjson_client.errors.ClientSetupError as e:
log.error(
"Error while setting up fasjson client: %s" % e
)
return {}
APIError = fasjson_client.errors.APIError
else:
import requests
import requests.exceptions
from requests.compat import urlencode, urljoin
from requests_gssapi import HTTPSPNEGOAuth
# shim inside a shim
class Client(object):
def __init__(self, url, principal=None):
self.url = url.rstrip("/") + "/v1/"
self.principal = principal
gssapi_auth = HTTPSPNEGOAuth(
opportunistic_auth=True, mutual_authentication="OPTIONAL"
)
self.session = requests.Session()
self.session.auth = gssapi_auth
def list_all_entities(self, ent_name):
if not ent_name.endswith("/"):
# avoid redirection round trip
ent_name += "/"
endpoint = urljoin(self.url, ent_name)
# yay, pagination
next_page_url = endpoint + "?" + urlencode({"page_number": 1})
while next_page_url:
                res = self.session.get(next_page_url).json()  # decode JSON; a raw Response object is not subscriptable
for item in res["result"]:
yield item
next_page_url = res.get("page", {}).get("next_page")
client = Client(url=base_url, principal=krb5_principal)
APIError = requests.exceptions.RequestException
try:
_add_to_cache(list(client.list_all_entities("users")))
except APIError as e:
log.error("Something went wrong building cache with error: %s" % e)
return {}
return _fas_cache
def _add_to_cache(users):
global _fas_cache
for user in users:
nicks = user.get('ircnicks', [])
for nick in nicks:
_fas_cache[nick] = user['username']
emails = user.get('emails', [])
for email in emails:
_fas_cache[email] = user['username']
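# After _add_to_cache(), _fas_cache maps both IRC nicks and e-mail addresses
# to FAS usernames, e.g. {'pingou': 'pingou', '[email protected]': 'bob'}
# (illustrative entries, not real data).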
def make_fas_cache(**config):
global _fas_cache
if _fas_cache:
return _fas_cache
log.warn("No previous fas cache found. Looking to rebuild.")
try:
import fedora.client
import fedora.client.fas2
except ImportError:
log.warn("No python-fedora installed. Not caching fas.")
return {}
if 'fas_credentials' not in config:
log.warn("No fas_credentials found. Not caching fas.")
return {}
creds = config['fas_credentials']
default_url = 'https://admin.fedoraproject.org/accounts/'
fasclient = fedora.client.fas2.AccountSystem(
base_url=creds.get('base_url', default_url),
username=creds['username'],
password=creds['password'],
)
timeout = socket.getdefaulttimeout()
for key in string.ascii_lowercase:
socket.setdefaulttimeout(600)
try:
log.info("Downloading FAS cache for %s*" % key)
response = fasclient.send_request(
'/user/list',
req_params={'search': '%s*' % key},
auth=True)
except fedora.client.ServerError as e:
log.warning("Failed to download fas cache for %s %r" % (key, e))
continue
finally:
socket.setdefaulttimeout(timeout)
log.info("Caching necessary user data for %s*" % key)
for user in response['people']:
nick = user['ircnick']
if nick:
_fas_cache[nick] = user['username']
email = user['email']
if email:
_fas_cache[email] = user['username']
del response
del fasclient
del fedora.client.fas2
return _fas_cache
def nick2fas(nickname, **config):
log.debug("Acquiring _fas_cache_lock for nicknames.")
with _fas_cache_lock:
log.debug("Got _fas_cache_lock for nicknames.")
fasjson = config.get('fasjson')
if fasjson:
fas_cache = make_fasjson_cache(**config)
else:
fas_cache = make_fas_cache(**config)
result = fas_cache.get(nickname, nickname)
log.debug("Released _fas_cache_lock for nicknames.")
return result
def email2fas(email, **config):
if email.endswith('@fedoraproject.org'):
return email.rsplit('@', 1)[0]
log.debug("Acquiring _fas_cache_lock for emails.")
with _fas_cache_lock:
log.debug("Got _fas_cache_lock for emails.")
fasjson = config.get('fasjson')
if fasjson:
fas_cache = make_fasjson_cache(**config)
else:
fas_cache = make_fas_cache(**config)
result = fas_cache.get(email, email)
log.debug("Released _fas_cache_lock for emails.")
return result
| lgpl-2.1 | 3,938,818,083,284,466,000 | 30.842657 | 78 | 0.599868 | false |
philgyford/django-ditto | tests/core/test_utils.py | 1 | 9888 | # coding: utf-8
import datetime
import pytz
from django.test import TestCase
from freezegun import freeze_time
import responses
from requests.exceptions import HTTPError
from ditto.core.utils import datetime_now, datetime_from_str, truncate_string
from ditto.core.utils.downloader import DownloadException, filedownloader
class DatetimeNowTestCase(TestCase):
@freeze_time("2015-08-14 12:00:00", tz_offset=-8)
def test_datetime_now(self):
self.assertEqual(
datetime_now(), datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
)
class DatetimeFromStrTestCase(TestCase):
def test_datetime_from_str(self):
s = "2015-08-12 12:00:00"
self.assertEqual(
datetime_from_str(s),
datetime.datetime.strptime(s, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.utc),
)
class TruncateStringTestCase(TestCase):
def test_truncate_string_strip_html(self):
"By default, strips HTML"
self.assertEqual(
truncate_string(
(
'<p>Some text. <a href="http://www.example.com/"><b>A link'
"</b></a>. And more."
)
),
u"Some text. A link. And more.",
)
def test_truncate_string_strip_html_false(self):
"Can be told not to strip HTML"
self.assertEqual(
truncate_string(
(
'<p>Some text. <a href="http://www.example.com/"><b>A link'
"</b></a>. And more."
),
strip_html=False,
),
(
u'<p>Some text. <a href="http://www.example.com/"><b>A link'
"</b></a>. And more."
),
)
def test_truncate_string_default_chars(self):
"By default, trims to 255 characters"
self.assertEqual(
truncate_string(
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec eget "
"odio eget odio porttitor accumsan in eget elit. Integer gravida "
"egestas nunc. Mauris at tortor ornare, blandit eros quis, auctor "
"lacus. Fusce ullamcorper nunc vitae tincidunt sodales. Vestibulum sit "
"amet lacus at sem porta porta."
),
(
u"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec eget "
"odio eget odio porttitor accumsan in eget elit. Integer gravida "
"egestas nunc. Mauris at tortor ornare, blandit eros quis, auctor "
"lacus. Fusce ullamcorper nunc vitae tincidunt sodales. Ve…"
),
)
def test_truncate_string_custom_chars(self):
"Can be told to truncate to other lengths"
self.assertEqual(
truncate_string(
(
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec "
"eget odio eget odio porttitor accumsan in eget elit. Integer "
"gravida egestas nunc. Mauris at tortor ornare, blandit eros quis, "
"auctor lacus. Fusce ullamcorper nunc vitae tincidunt sodales. "
"Vestibulum sit amet lacus at sem porta porta."
),
chars=100,
),
(
u"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec "
"eget odio eget odio porttitor accums…"
)
)
def test_truncate_string_truncate(self):
"Can be given a custom 'truncate' string"
self.assertEqual(
truncate_string(
(
"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec "
"eget odio eget odio porttitor accumsan in eget elit. Integer "
"gravida egestas nunc. Mauris at tortor ornare, blandit eros quis, "
"auctor lacus. Fusce ullamcorper nunc vitae tincidunt sodales. "
"Vestibulum sit amet lacus at sem porta porta."
),
truncate=" (cont.)"
),
(
u"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec "
"eget odio eget odio porttitor accumsan in eget elit. Integer "
"gravida egestas nunc. Mauris at tortor ornare, blandit eros quis, "
"auctor lacus. Fusce ullamcorper nunc vitae tincidunt soda (cont.)"
)
)
def test_at_word_boundary(self):
"Will break at word boundaries."
self.assertEqual(
truncate_string(
(
u"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec "
"eget odio eget odio porttitor accumsan in eget elit. Integer "
"gravida egestas nunc. Mauris at tortor ornare, blandit eros quis, "
"auctor lacus. Fusce ullamcorper nunc vitae tincidunt sodales. "
"Vestibulum sit amet lacus at sem porta porta."
),
at_word_boundary=True
),
(
u"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec "
"eget odio eget odio porttitor accumsan in eget elit. Integer "
"gravida egestas nunc. Mauris at tortor ornare, blandit eros quis, "
"auctor lacus. Fusce ullamcorper nunc vitae tincidunt sodales.…"
)
)
def test_no_truncation(self):
"""Too short to be truncated."""
self.assertEqual(truncate_string(u"This is my string."), u"This is my string.")
def test_no_truncation_at_word_boundary(self):
"""Too short to be truncated."""
self.assertEqual(
truncate_string(u"This is my string.", at_word_boundary=True),
u"This is my string.",
)
def test_truncate_string_all(self):
"""Will strip HTML, truncate to specified length, at a word boundary,
and add custom string.
"""
self.assertEqual(
truncate_string(
"""<p>Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec eget odio eget odio porttitor accumsan in eget elit. Integer gravida egestas nunc. Mauris at tortor ornare, blandit eros quis, auctorlacus.</p>
<p>Fusce ullamcorper nunc vitae tincidunt sodales. Vestibulum sit amet lacus at sem porta porta. Donec fringilla laoreet orci eu porta. Aenean non lacus hendrerit, semper odio a, feugiat orci. Suspendisse potenti.</p>""", # noqa: E501
strip_html=True,
chars=200,
truncate="...",
at_word_boundary=True,
),
u"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec eget odio eget odio porttitor accumsan in eget elit. Integer gravida egestas nunc. Mauris at tortor ornare, blandit eros quis,...", # noqa: E501
)
class FileDownloaderTestCase(TestCase):
def setUp(self):
self.url = "https://c2.staticflickr.com/8/7019/27006033235_caa438b3b8_o.jpg"
def do_download(self, status=200, content_type="image/jpeg"):
"Mocks requests and calls filedownloader.download()"
# Open the image we're going to pretend we're fetching from the URL:
with open("tests/core/fixtures/images/marmite.jpg", "rb") as img1:
responses.add(
responses.GET,
self.url,
body=img1.read(),
status=status,
content_type=content_type,
adding_headers={"Transfer-Encoding": "chunked"},
)
return filedownloader.download(self.url, ["image/jpeg"])
# @responses.activate
# @patch.object(filedownloader, 'make_filename')
# def test_downloads_file(self, make_filename):
# """
# Streams a jpg, saves it to /tmp/, returns the path,
# calls _make_filename_for_download().
# """"
# make_filename.return_value = 'marmite.jpg'
# filepath = self.do_download()
# self.assertTrue(os.path.isfile(filepath))
# self.assertEqual(len(responses.calls), 1)
# self.assertEqual(filepath, '/tmp/marmite.jpg')
# make_filename.assert_called_once_with(self.url,
# {'Content-Type': 'image/jpeg', 'Transfer-Encoding': 'chunked'})
@responses.activate
def test_raises_error_on_get_failure(self):
"If the requests.get() call raises an error."
responses.add(responses.GET, self.url, body=HTTPError("Something went wrong"))
with self.assertRaises(DownloadException):
filedownloader.download(self.url, ["image/jpeg"])
@responses.activate
def test_raises_error_on_bad_status_code(self):
with self.assertRaises(DownloadException):
self.do_download(status=500)
@responses.activate
def test_raises_error_with_invalid_content_type(self):
"If downloaded file has content type different to what we ask for."
with self.assertRaises(DownloadException):
self.do_download(content_type="text/html")
def test_make_filename_from_url(self):
"Should use the URL's filename."
filename = filedownloader.make_filename(
"https://c2.staticflickr.com/8/7019/27006033235_caa438b3b8_o.jpg", {}
)
self.assertEqual(filename, "27006033235_caa438b3b8_o.jpg")
def test_make_filename_from_content_disposition(self):
"If URL has no filename, should use the Content-Disposition filename."
filename = filedownloader.make_filename(
"https://www.flickr.com/photos/philgyford/26348530105/play/orig/2b5f3e0919/", # noqa: E501
{"Content-Disposition": "attachment; filename=26348530105.mov"},
)
self.assertEqual(filename, "26348530105.mov")
| mit | -1,946,705,440,464,163,000 | 40.696203 | 243 | 0.590366 | false |
jjaviergalvez/CarND-Term3-Quizzes | search/first-search-program.py | 1 | 2602 | # ----------
# User Instructions:
#
# Define a function, search() that returns a list
# in the form of [optimal path length, row, col]. For
# the grid shown below, your function should output
# [11, 4, 5].
#
# If there is no valid path from the start point
# to the goal, your function should return the string
# 'fail'
# ----------
# Grid format:
# 0 = Navigable space
# 1 = Occupied space
import sys
grid = [[0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 1, 1, 1, 0],
[0, 0, 0, 0, 1, 0]]
init = [0, 0]
goal = [len(grid)-1, len(grid[0])-1]
cost = 1
delta = [[-1, 0], # go up
[ 0,-1], # go left
[ 1, 0], # go down
[ 0, 1]] # go right
delta_name = ['^', '<', 'v', '>']
def search(grid,init,goal,cost):
# ----------------------------------------
# insert code here
# ----------------------------------------
def smallest(open_list):
# This function recive an open list and return the index of the
# element with the lowest g-value
lowest_value = 100000
i_lowest_value = -1
i = 0
for element in open_list:
if element[0] < lowest_value:
lowest_value = element[0]
i_lowest_value = i
i += 1
if i_lowest_value != -1:
return i_lowest_value
else:
print("fail")
sys.exit(0)
def expand(pos):
# This function recieve an elemet pos and return the neighbors that are
# not yet used. Each of the elemt used are set to -1 on the grid
n_row = len(grid)
n_col = len(grid[0])
x = pos[1]
y = pos[2]
g_val = pos[0] + 1
expand_list = []
for i in delta:
x_ = i[0] + x
y_ = i[1] + y
#if are between the bounds of the map
if (x_>=0 and x_<n_row and y_>=0 and y_<n_col):
value = grid[x_][y_]
if (value != 1 and value != -1):
expand_list.append([g_val, x_, y_])
grid[x_][y_] = -1 #mark as value already taken
return expand_list
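    # smallest() + expand() together implement uniform-cost search: the open
    # list is scanned for the lowest g-value (a heap would give the same
    # result faster) and cells marked -1 in the grid act as the closed set.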
    # initialization
grid[init[0]][init[1]] = -1
open_list = init[:]
open_list.insert(0,0)
open_list = [open_list]
#print("initial open list:")
#print(open_list)
list_item = [0,0,0]
    while list_item[1:] != goal:
#print("----")
#print("take list item")
index = smallest(open_list)
list_item = open_list.pop(index)
#print(list_item)
#print("new open list")
open_list += expand(list_item)
#print(open_list)
return list_item
print(search(grid, init, goal, cost)) | mit | -6,260,567,240,804,987,000 | 23.327103 | 76 | 0.506533 | false |
robjwells/adventofcode-solutions | 2015/python/2015-19.py | 1 | 5504 | #!/usr/bin/env python3
"""Advent of Code 2015, Day 19: Medicine for Rudolph"""
import pathlib
import re
input_file = pathlib.Path('../input/2015-19.txt')
def parse_input(text: str) -> (list, str):
"""Return a list of replacement pairs and the molecule string"""
replacement_block, molecule = text.rstrip().split('\n\n')
replacement_pairs = [tuple(line.split(' => '))
for line in replacement_block.splitlines()]
return replacement_pairs, molecule
def generate_replacements(molecule: str, replacements: list) -> set:
"""Return set of permutations for the given molecule
replacements should be a list of (str, str) tuples, with
the first item being the string to be replaced and
the second the replacement string.
"""
generated = set()
# This is quadratic!
for find_str, replace_str in replacements:
for match in re.finditer(find_str, molecule):
substring_start, substring_end = match.span()
new_molecule = (molecule[:substring_start] +
replace_str +
molecule[substring_end:])
generated.add(new_molecule)
return generated
def reverse_reps(replacements):
"""Map from replacement to source and reverse each string also
The string reverse is needed because the steps_to_molecule
reverses the molecule string itself.
"""
return {b[::-1]: a[::-1] for a, b in replacements}
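# For example, the pair ('H', 'HO') becomes the entry {'OH': 'H'}: both sides
# are reversed so they can be matched while scanning the reversed molecule.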
def steps_to_molecule(molecule: str, replacements: list):
"""Return the minimum number of replacements needed to make molecule
This is based off askalski’s solution on Reddit:
https://www.reddit.com/r/adventofcode/comments/
3xflz8/day_19_solutions/cy4etju
This solution processes the molecule in reverse, matches the (reversed)
replacement elements with their source element and retraces the steps
back to the original element (which is e).
The reversal is necessary to avoid backtracking to match sequences
that end in Ar.
"""
reps = reverse_reps(replacements)
# Reverse the molecule so we can consume *Ar sequences
# without the regex engine backtracking
molecule = molecule[::-1]
count = 0
# e is the original molecule we're trying to reach
while molecule != 'e':
# Replace one molecule at a time, using the reps dictionary
# to find the replacement string
molecule = re.sub(
'|'.join(reps.keys()),
lambda m: reps[m.group()],
molecule,
count=1
)
count += 1
return count
def test_replacements():
test_molecule = 'HOH'
test_replacements = [
('H', 'HO'),
('H', 'OH'),
('O', 'HH'),
]
result = generate_replacements(molecule=test_molecule,
replacements=test_replacements)
expected = {'HOOH', 'HOHO', 'OHOH', 'HHHH'}
assert result == expected
def count(molecule, replacements):
"""This uses a modified version of askalski’s formula to count the steps
Note that in the following expression we don’t have an exact copy
of askalski’s formula, which is:
t - p - 2 * c - 1
This is because in the above function we’re left over with a
single token (which doesn’t get reduced by the pattern) matching,
which correlates with having 'e' left over if you do the step
by step reduction.
Having that left over, it doesn’t get added to our totals and
so we don’t have to subtract 1 from the rest of the calculation
for the total number of steps.
(At least, I’m pretty sure that’s how this works :)
I’ve adapted this solution from one in F# by Yan Cui:
http://theburningmonk.com/2015/12/advent-of-code-f-day-19/
"""
# Create a set of all the 'source' elements, with the strings reversed
reps = {a[::-1] for a, b in replacements}
def loop(molecule, tokens=0, parens=0, commas=0):
# Minimum length of the molecule list is 1.
if len(molecule) == 1:
return (tokens, parens, commas)
first, second, *rest = molecule
if first in ('(', ')'):
return loop(molecule[1:], tokens + 1, parens + 1, commas)
elif first == ',':
return loop(molecule[1:], tokens + 1, parens, commas + 1)
elif first in reps:
return loop(molecule[1:], tokens + 1, parens, commas)
elif first + second in reps:
return loop(rest, tokens + 1, parens, commas)
# This looks so gross in Python
molecule = molecule.replace(
'Rn', '(').replace(
'Ar', ')').replace(
'Y', ',')
molecule = molecule[::-1]
tokens, parens, commas = loop(molecule)
return tokens - parens - 2 * commas
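# Illustrative trace (assumes H, Al and Mg appear as rule sources): counting
# "HRnAlYMgAr" rewrites it to "H(Al,Mg)" and reverses it to ")gM,lA(H"; the
# loop counts 5 tokens (2 of them parens, 1 a comma) and leaves the final 'H'
# uncounted, giving 5 - 2 - 2*1 = 1 step, i.e. one rule application.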
def main():
replacement_pairs, molecule = parse_input(input_file.read_text())
generated_molecules = generate_replacements(
molecule=molecule,
replacements=replacement_pairs)
num_generated = len(generated_molecules)
print(f'Part one, number of molecules generated: {num_generated}')
min_steps_to_molecule = steps_to_molecule(molecule, replacement_pairs)
print(f'Part two, minimum steps to molecule: {min_steps_to_molecule}'
' (iter)')
min_steps_by_count = count(molecule, replacement_pairs)
print(f'Part two, minimum steps to molecule: {min_steps_by_count}'
' (count)')
if __name__ == '__main__':
main()
| mit | 1,421,638,115,674,974,700 | 32.024096 | 76 | 0.628968 | false |
afolmert/mentor | src/views.py | 1 | 17557 | #!/usr/bin/env python
# -*- coding: iso-8859-2 -*-
#
# Copyright (C) 2007 Adam Folmert <[email protected]>
#
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
#
#
"""This is the module for views used in Mentor GUI"""
import release
__author__ = '%s <%s>' % \
( release.authors['afolmert'][0], release.authors['afolmert'][1])
__license__ = release.license
__version__ = release.version
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from utils_qt import tr
from utils import log
# main gui parts
# card widget is a sort of delegate and it should behave as one
# it currently has card model assigned
# or is it like more like listwidget
# helper widget classes
# TODO move this to views module
class MyTextEdit(QTextEdit):
"""Overriden to emit focusLost signals."""
# TODO maybe better is to subclass the event procedure?
def __init__(self, parent=None):
QTextEdit.__init__(self, parent)
def keyPressEvent(self, event):
if event.key() == Qt.Key_Tab:
self.emit(SIGNAL('tabPressed()'))
event.accept()
else:
QTextEdit.keyPressEvent(self, event)
def focusOutEvent(self, event):
QTextEdit.focusOutEvent(self, event)
self.emit(SIGNAL('focusLost()'))
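# CardMainView below wires these custom signals up, e.g.:
#   self.connect(self.txtAnswer, SIGNAL('focusLost()'), self.saveChanges)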
class AbstractCardView(QAbstractItemView):
"""Base abstract class for card widgets."""
# current index is stored in selection model
# it is updated by connecting changing of current index in other views with
# currentChanged slots in here
def __init__(self, parent=None):
QAbstractItemView.__init__(self, parent)
self._dirty = False
# self.setSelectionModel(QAbstractItemView.SingleSelection)
# these control what it looks for
def currentChanged(self, current, previous):
# TODO how to check if two indexes are equal/inequal?
if current != self.getCurrentIndex():
# save pending changes
self.saveChanges()
self.setCurrentIndex(current)
self._updateView(self.model(), current)
def setModel(self, model):
QAbstractItemView.setModel(self, model)
self.connect(model, SIGNAL('modelAboutToBeReset()'), self.saveChanges)
def dataChanged(self, index):
# TODO do this only if index is the one as currently used
# TODO how to check whether this is the model
if index == self.getCurrentIndex():
self._updateView(self.model(), index)
def dirty(self):
return self._dirty
def setDirty(self, dirty):
self._dirty = dirty
def saveChanges(self):
if self.dirty():
self._updateModel(self.model(), self.getCurrentIndex())
self.setDirty(False)
def reset(self):
        # what belongs in here?
        # pending changes are not saved automatically;
        # the external app must call saveChanges() itself
self._updateView(self.model(), self.getCurrentIndex())
def _updateModel(self, model, index):
# to be overridden
pass
def _updateView(self, model, index):
# to be overridden
pass
def getCurrentIndex(self):
"""Returns currently selected item"""
selection = self.selectionModel()
# get current selection
selectedIndex = selection.selectedIndexes()
if len(selectedIndex) > 0:
return selectedIndex[0]
else:
return None
def setCurrentIndex(self, index):
"""Returns currenly selected item from the model"""
selection = self.selectionModel()
selection.select(index, QItemSelectionModel.Select | QItemSelectionModel.Current)
# must override pure virtual functions
# perhaps I should abandon the idea of having this as abstractitemview?
def verticalOffset(self):
return 1
def horizontalOffset(self):
return 1
def visualRegionForSelection(self, selection):
return QRegion(0, 0, 1, 1)
def visualRect(self):
return QRect(0, 0, 1, 1)
class CardMainView(AbstractCardView):
"""Widget for displaying current card.
    May later be subclassed to display all kinds of cards:
    RTF, graphical, simple etc.
"""
def __init__(self, parent=None):
AbstractCardView.__init__(self, parent)
self._updatingView = False
self._updatingModel = False
self.lblQuestion = QLabel("&Question:")
self.txtQuestion = MyTextEdit()
self.txtQuestion.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.txtQuestion.setFont(QFont("Courier New", 13, QFont.Bold))
self.txtQuestion.setText("")
self.txtQuestion.setMinimumHeight(100)
self.lblQuestion.setBuddy(self.txtQuestion)
self.splitter = QSplitter(Qt.Vertical)
self.splitter.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.lblAnswer = QLabel("&Answer:")
self.txtAnswer = MyTextEdit()
self.txtAnswer.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.txtAnswer.setFont(QFont("Courier New", 13, QFont.Bold))
self.txtAnswer.setText("")
self.txtAnswer.setMinimumHeight(100)
self.lblAnswer.setBuddy(self.txtAnswer)
self.connect(self.txtAnswer, SIGNAL('tabPressed()'), self.on_txtAnswer_tabPressed)
self.connect(self.txtQuestion, SIGNAL('tabPressed()'), self.on_txtQuestion_tabPressed)
self.connect(self.txtAnswer, SIGNAL('textChanged()'), self.txtAnswer_textChanged)
self.connect(self.txtQuestion, SIGNAL('textChanged()'), self.txtQuestion_textChanged)
self.connect(self.txtAnswer, SIGNAL('focusLost()'), self.saveChanges)
self.connect(self.txtQuestion, SIGNAL('focusLost()'), self.saveChanges)
self.splitter.addWidget(self.txtQuestion)
self.splitter.addWidget(self.txtAnswer)
self.splitter.setSizes([200, 100])
# FIXME how to block splitter from hiding one window completely ??
layout = QHBoxLayout()
layout.setMargin(2)
layout.setSpacing(2)
layout.addWidget(self.splitter)
self.setLayout(layout)
def _updateModel(self, model, index):
self._updatingModel = True
if index:
model.updateCard(index, \
self.txtQuestion.toPlainText(), self.txtAnswer.toPlainText())
self._updatingModel = False
def _updateView(self, model, index):
self._updatingView = True
try:
            assert index and index.isValid(), "Invalid card model index"
card = model.data(index, Qt.UserRole)
self.txtQuestion.setText(card.question)
self.txtAnswer.setText(card.answer)
self.txtQuestion.setEnabled(True)
self.txtAnswer.setEnabled(True)
# TODO narrow it to No data found exception !
except:
self.txtQuestion.setText("")
self.txtQuestion.setEnabled(False)
self.txtAnswer.setText("")
self.txtAnswer.setEnabled(False)
self._updatingView = False
def on_txtAnswer_tabPressed(self):
self.txtQuestion.setFocus(Qt.TabFocusReason)
def on_txtQuestion_tabPressed(self):
self.txtAnswer.setFocus(Qt.TabFocusReason)
def txtAnswer_focusLost(self):
if self._dirty:
self._updateModel(self.model(), self.getCurrentIndex())
def txtQuestion_focusLost(self):
if self._dirty:
self._updateModel(self.model(), self.getCurrentIndex())
def txtAnswer_textChanged(self):
if not self._updatingView:
self.setDirty(True)
def txtQuestion_textChanged(self):
if not self._updatingView:
self.setDirty(True)
# FIXME
# these functions are not really connected with the model/view thing
# the question is : should this be connected with a model and be descended
# from QAbstractItemView or just be a standalone control for displaying
# cards?
def displayCard(self, card, readonly=True, showAnswer=True):
self.txtQuestion.setEnabled(not readonly)
self.txtAnswer.setEnabled(not readonly)
self.txtQuestion.setText(card.question)
if showAnswer:
self.txtAnswer.setText(card.answer)
else:
self.txtAnswer.setText("")
def switchAnswer(self):
self.txtAnswer.setVisible(not self.txtAnswer.isVisible())
class CardDetailView(AbstractCardView):
"""Widget for displaying card details (score, hints, review dates etc.)"""
def __init__(self, parent=None):
AbstractCardView.__init__(self, parent)
self._updatingView = False
self.setFont(QFont("vt100", 8))
self.lblId = QLabel("Id:")
self.edId = QLabel("edId")
self.lblScore = QLabel("Score:")
self.edScore = QLabel("edScore")
self.lblDesc = QLabel("Description:")
self.edDesc = QLabel("edDescription")
self.lblRepetitions = QLabel("Repetitions:")
self.edRepetitions = QLabel("edRepetitions")
self.lblInterval = QLabel("Interval:")
self.edInterval = QLabel("edInterval")
self.lblLastRepetition = QLabel("Last repetition:")
self.edLastRepetition = QLabel("edLast repetition")
self.lblNextRepetition = QLabel("Next repetition:")
self.edNextRepetition = QLabel("edNext repetition")
self.lblAFactor = QLabel("A-Factor:")
self.edAFactor = QLabel("edA-Factor")
self.lblUFactor = QLabel("U-Factor:")
self.edUFactor = QLabel("edU-Factor")
self.lblForgettingIndex = QLabel("Forgetting index:")
self.edForgettingIndex = QLabel("edForgetting index")
        self.lblFutureRep = QLabel("Future repetition:")
        self.edFutureRep = QLabel("edFuture repetition")
self.lblOrdinal = QLabel("Ordinal:")
self.edOrdinal = QLabel("edOrdinal")
self.lblDifficulty = QLabel("Difficulty:")
self.edDifficulty = QLabel("edDifficulty")
self.lblFirstGrade = QLabel("First grade:")
self.edFirstGrade = QLabel("edFirst grade")
self.lblType = QLabel("Type:")
self.edType = QLabel("edType")
layout = QGridLayout()
layout.addWidget(self.lblId , 0, 0)
layout.addWidget(self.edId , 0, 1)
layout.addWidget(self.lblScore , 1, 0)
layout.addWidget(self.edScore , 1, 1)
layout.addWidget(self.lblDesc , 2, 0)
layout.addWidget(self.edDesc , 2, 1)
layout.addWidget(self.lblRepetitions , 3, 0)
layout.addWidget(self.edRepetitions , 3, 1)
layout.addWidget(self.lblInterval , 4, 0)
layout.addWidget(self.edInterval , 4, 1)
layout.addWidget(self.lblLastRepetition, 5, 0)
layout.addWidget(self.edLastRepetition , 5, 1)
layout.addWidget(self.lblNextRepetition, 6, 0)
layout.addWidget(self.edNextRepetition , 6, 1)
layout.addWidget(self.lblAFactor , 7, 0)
layout.addWidget(self.edAFactor , 7, 1)
layout.addWidget(self.lblUFactor , 8, 0)
layout.addWidget(self.edUFactor , 8, 1)
layout.addWidget(self.lblForgettingIndex , 9, 0)
layout.addWidget(self.edForgettingIndex , 9, 1)
layout.addWidget(self.lblFutureRep , 10, 0)
layout.addWidget(self.edFutureRep , 10, 1)
layout.addWidget(self.lblOrdinal , 11, 0)
layout.addWidget(self.edOrdinal , 11, 1)
layout.addWidget(self.lblDifficulty , 12, 0)
layout.addWidget(self.edDifficulty , 12, 1)
layout.addWidget(self.lblFirstGrade , 13, 0)
layout.addWidget(self.edFirstGrade , 13, 1)
layout.addWidget(self.lblType , 14, 0)
layout.addWidget(self.edType , 14, 1)
layout.setMargin(1)
layout.setSpacing(1)
self.setLayout(layout)
def _updateView(self, model, index):
# display information from the current cardModel and cardModelIndex
self._updatingView = True
try:
assert index and index.isValid(), "Invalid cardModel index!"
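            # NOTE: all three fields currently display the same truncated
            # model data; per-field roles are presumably wired up later.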
self.edId.setText(model.data(index).toString()[:10])
self.edScore.setText(model.data(index).toString()[:10])
self.edDesc.setText(model.data(index).toString()[:10])
except:
self.edId.setText("")
self.edScore.setText("")
self.edDesc.setText("")
self._updatingView = False
class CardSourceView(AbstractCardView):
"""Widget for displaying XML source for card"""
def __init__(self, parent=None):
AbstractCardView.__init__(self, parent)
self._updatingView = False
#self.lblSource = QLabel("&Source:")
self.txtSource = MyTextEdit()
self.setFont(QFont("vt100", 8))
#self.lblSource.setBuddy(self.txtSource)
layout = QVBoxLayout(self)
layout.setMargin(2)
layout.setSpacing(2)
#layout.addWidget(self.lblSource)
layout.addWidget(self.txtSource)
self.setLayout(layout)
def _updateView(self, model, index):
self._updatingView = True
try:
assert index and index.isValid(), "Invalid card model index!"
self.txtSource.setText(model.data(index).toString())
self.txtSource.setEnabled(True)
except:
self.txtSource.setText("")
self.txtSource.setEnabled(False)
self._updatingView = False
class CardGridView(QTableView):
def __init__(self, parent=None):
QTableView.__init__(self, parent)
self.setSortingEnabled(False)
self.setShowGrid(False)
self.setSelectionMode(QAbstractItemView.NoSelection)
self.setSelectionBehavior(QAbstractItemView.SelectRows)
self.setAlternatingRowColors(True)
self.setFont(QFont("vt100", 8))
class CardSidesView(QListWidget):
"""This is view for card sides """
def __init__(self, parent=None):
QListWidget.__init__(self, parent)
self.addItem('Side 1')
self.addItem('Side 2')
self.addItem('Side 3')
self.setMaximumWidth(50)
self.setMaximumHeight(50)
class CardContentView(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
# cardView
self.cardView = QWidget(self)
self.cardMainView2 = CardMainView(self)
self.cardMainView2.setEnabled(False)
self.cardDetailView = CardDetailView(self)
self.cardSidesView = CardSidesView(self)
topLayout = QHBoxLayout()
topLayout.addWidget(self.cardSidesView)
topLayout.addWidget(QLabel(r"""This is a preview of the given side of the card.
Select a specific side on the left in order to see it.
Click on the 'Show/hide' button to switch between answer and question view.""", self))
self.btnSwitch = QPushButton("Show/hide answer", self)
self.connect(self.btnSwitch, SIGNAL('clicked()'), self.cardMainView2.switchAnswer)
topLayout.addWidget(self.btnSwitch)
layout = QVBoxLayout()
layout.addLayout(topLayout)
layout.addWidget(self.cardMainView2)
layout.addWidget(self.cardDetailView)
self.cardView.setLayout(layout)
# cardEditView
self.cardEditView = QWidget(self)
self.cardMainView = CardMainView(self)
layout = QVBoxLayout()
layout.addWidget(self.cardMainView)
self.cardEditView.setLayout(layout)
# cardSourceView
self.cardSourceView = CardSourceView(self)
tab = QTabWidget(self)
tab.addTab(self.cardView, "Card")
tab.addTab(self.cardEditView, "Edit card")
tab.addTab(self.cardSourceView, "Edit card source")
tab.addTab(QLabel("Here will go template graphical editor (ala SuperMemo designing mode or color scheme editor)", self), "Edit template")
tab.addTab(QLabel("Here will go template source editor (XSL)", self), "Edit template source")
layout = QVBoxLayout()
layout.setMargin(0)
layout.setSpacing(0)
layout.addWidget(tab)
self.setLayout(layout)
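    # The container is not itself an item view, so model assignment and
    # selection changes are fanned out manually to each child card view.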
def setModel(self, model):
self.cardMainView.setModel(model)
self.cardMainView2.setModel(model)
self.cardSourceView.setModel(model)
self.cardDetailView.setModel(model)
def currentChanged(self, current, previous):
self.cardMainView.currentChanged(current, previous)
self.cardMainView2.currentChanged(current, previous)
self.cardSourceView.currentChanged(current, previous)
self.cardDetailView.currentChanged(current, previous)
| gpl-2.0 | -3,008,446,038,544,591,000 | 34.612576 | 145 | 0.638264 | false |
archesproject/arches | arches/app/models/migrations/5613_notification_type.py | 1 | 3420 | # Generated by Django 2.2.6 on 2019-12-03 14:15
from django.conf import settings
from django.db import migrations, models
import django.contrib.postgres.fields.jsonb
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("models", "5475_update_geom_mv"),
]
operations = [
migrations.CreateModel(
name="NotificationType",
fields=[
("typeid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("name", models.TextField(blank=True, null=True)),
("emailtemplate", models.TextField(blank=True, null=True)),
("emailnotify", models.BooleanField(default=False)),
("webnotify", models.BooleanField(default=False)),
],
options={"db_table": "notification_types", "managed": True},
),
migrations.CreateModel(
name="UserXNotificationType",
fields=[
("id", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("emailnotify", models.BooleanField(default=False)),
("webnotify", models.BooleanField(default=False)),
("notiftype", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="models.NotificationType")),
("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={"db_table": "user_x_notification_types", "managed": True},
),
migrations.AddField(
model_name="notification",
name="notiftype",
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="models.NotificationType"),
),
migrations.RenameField(model_name="userxtask", old_name="date_done", new_name="datedone"),
migrations.RenameField(model_name="userxtask", old_name="date_start", new_name="datestart"),
migrations.CreateModel(
name="UserXNotification",
fields=[
("id", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)),
("isread", models.BooleanField(default=False)),
("notif", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="models.Notification")),
("recipient", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={"db_table": "user_x_notifications", "managed": True},
),
migrations.AddField(
model_name="notification",
name="context",
field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, default=dict, null=True),
),
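        # Seed the built-in notification type; the paired reverse SQL removes
        # it again so the migration stays reversible.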
migrations.RunSQL(
"""
INSERT INTO notification_types (typeid, name, emailtemplate, emailnotify, webnotify)
VALUES (
'441e6ed4-188d-11ea-a35b-784f435179ea',
'Search Export Download Ready',
'email/download_ready_email_notification.htm',
true,
true
);
""",
"""
DELETE FROM notification_types
WHERE typeid in ('441e6ed4-188d-11ea-a35b-784f435179ea');
""",
),
]
| agpl-3.0 | 8,205,261,245,665,000,000 | 43.415584 | 124 | 0.58655 | false |
butla/PyDAS | tests/unit/test_resources.py | 1 | 7392 | import copy
import json
import os
from unittest.mock import MagicMock, call
from bravado.client import SwaggerClient
import bravado.exception
from bravado_falcon import FalconHttpClient
import falcon
import pytest
import pytest_falcon.plugin
import responses
import yaml
from data_acquisition.acquisition_request import AcquisitionRequest, RequestNotFoundError
from data_acquisition.consts import (ACQUISITION_PATH, DOWNLOAD_CALLBACK_PATH,
METADATA_PARSER_CALLBACK_PATH, GET_REQUEST_PATH)
from data_acquisition.resources import (get_download_callback_url, get_metadata_callback_url,
AcquisitionResource)
import tests
from tests.consts import (TEST_DOWNLOAD_REQUEST, TEST_DOWNLOAD_CALLBACK, TEST_ACQUISITION_REQ,
TEST_ACQUISITION_REQ_JSON)
FAKE_TIME = 234.25
FAKE_TIMESTAMP = 234
@pytest.fixture(scope='function')
def client(falcon_api):
client = pytest_falcon.plugin.Client(falcon_api)
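    # Wrap the default post helper so tests can pass plain dicts; they are
    # serialized to JSON with the matching Content-Type header.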
client.post = (lambda path, data, post=client.post:
post(path, json.dumps(data), headers={'Content-Type': 'application/json'}))
return client
@pytest.fixture(scope='session')
def swagger_spec():
spec_file_path = os.path.join(tests.__path__[0], '../api_doc.yaml')
with open(spec_file_path) as spec_file:
        return yaml.safe_load(spec_file)
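# A client whose request validation is disabled, so deliberately broken
# payloads reach the API instead of failing client-side.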
@pytest.fixture(scope='function')
def client_no_req_validation(falcon_api, swagger_spec):
return SwaggerClient.from_spec(swagger_spec,
http_client=FalconHttpClient(falcon_api),
config={'validate_requests': False})
@pytest.fixture(scope='function')
def client_swagger(falcon_api, swagger_spec):
return SwaggerClient.from_spec(swagger_spec,
http_client=FalconHttpClient(falcon_api))
@pytest.fixture(scope='function')
def acquisition_requests_resource(das_config, mock_executor, mock_req_store, fake_time):
return AcquisitionResource(mock_req_store, mock_executor, das_config)
@pytest.fixture(scope='function')
def req_store_get(mock_req_store):
mock_req_store.get.return_value = copy.deepcopy(TEST_ACQUISITION_REQ)
return mock_req_store.get
@pytest.fixture(scope='function')
def fake_time(monkeypatch):
monkeypatch.setattr('time.time', lambda: FAKE_TIME)
def test_get_download_callback_url():
callback_url = get_download_callback_url('https://some-test-das-url', 'some-test-id')
assert callback_url == 'https://some-test-das-url/v1/das/callback/downloader/some-test-id'
def test_get_metadata_callback_url():
callback_url = get_metadata_callback_url('https://some-test-das-url', 'some-test-id')
assert callback_url == 'https://some-test-das-url/v1/das/callback/metadata/some-test-id'
@responses.activate
def test_external_service_call_not_ok(acquisition_requests_resource):
test_url = 'https://some-fake-url/'
responses.add(responses.POST, test_url, status=404)
assert not acquisition_requests_resource._external_service_call(
url=test_url, data={'a': 'b'}, token='bearer fake-token', request_id='some-fake-id')
def test_processing_acquisition_request_for_hdfs(acquisition_requests_resource, mock_req_store):
# arrange
mock_enqueue_metadata_req = MagicMock()
acquisition_requests_resource._enqueue_metadata_request = mock_enqueue_metadata_req
hdfs_acquisition_req = copy.deepcopy(TEST_ACQUISITION_REQ)
hdfs_acquisition_req.source = TEST_ACQUISITION_REQ.source.replace('http://', 'hdfs://')
proper_saved_request = copy.deepcopy(hdfs_acquisition_req)
proper_saved_request.set_downloaded()
fake_token = 'bearer asdasdasdasd'
# act
acquisition_requests_resource._process_acquisition_request(hdfs_acquisition_req, fake_token)
# assert
mock_enqueue_metadata_req.assert_called_with(proper_saved_request, None, fake_token)
mock_req_store.put.assert_called_with(proper_saved_request)
def test_acquisition_bad_request(client_no_req_validation):
broken_request = dict(TEST_DOWNLOAD_REQUEST)
del broken_request['category']
with pytest.raises(bravado.exception.HTTPError):
client_no_req_validation.rest.submitAcquisitionRequest(body=broken_request).result()
def test_downloader_callback_failed(client, fake_time, mock_req_store, req_store_get):
failed_callback_req = dict(TEST_DOWNLOAD_CALLBACK)
failed_callback_req['state'] = 'ERROR'
response = client.post(
path=DOWNLOAD_CALLBACK_PATH.format(req_id=TEST_ACQUISITION_REQ.id),
data=failed_callback_req)
assert response.status == falcon.HTTP_200
updated_request = AcquisitionRequest(**TEST_ACQUISITION_REQ_JSON)
updated_request.state = 'ERROR'
updated_request.timestamps['ERROR'] = FAKE_TIMESTAMP
mock_req_store.put.assert_called_with(updated_request)
def test_metadata_callback_failed(client, fake_time, mock_req_store, req_store_get):
response = client.post(
path=METADATA_PARSER_CALLBACK_PATH.format(req_id=TEST_ACQUISITION_REQ.id),
data={'state': 'FAILED'})
assert response.status == falcon.HTTP_200
updated_request = AcquisitionRequest(**TEST_ACQUISITION_REQ_JSON)
updated_request.state = 'ERROR'
updated_request.timestamps['ERROR'] = FAKE_TIMESTAMP
mock_req_store.put.assert_called_with(updated_request)
def test_get_request(das_api, client_swagger, req_store_get):
das_api.request_management_res._org_checker = MagicMock()
acquisition_request = client_swagger.rest.getRequest(req_id=TEST_ACQUISITION_REQ.id).result()
assert AcquisitionRequest(**acquisition_request.__dict__) == TEST_ACQUISITION_REQ
def test_get_request_not_found(client, mock_req_store):
mock_req_store.get.side_effect = RequestNotFoundError()
response = client.get(GET_REQUEST_PATH.format(req_id='some-fake-id'))
assert response.status == falcon.HTTP_404
def test_delete_request(das_api, client, mock_req_store, req_store_get):
das_api.request_management_res._org_checker = MagicMock()
response = client.delete(GET_REQUEST_PATH.format(req_id=TEST_ACQUISITION_REQ.id))
assert response.status == falcon.HTTP_200
mock_req_store.delete.assert_called_with(TEST_ACQUISITION_REQ)
def test_delete_request_not_found(client, mock_req_store):
mock_req_store.get.side_effect = RequestNotFoundError()
response = client.delete(GET_REQUEST_PATH.format(req_id='fake-id'))
assert response.status == falcon.HTTP_404
@pytest.mark.parametrize('org_ids', [
['id-1'],
['id-1', 'id-2'],
['id-1', 'id-2', 'id-3'],
])
@pytest.mark.parametrize('acquisition_requests', [
[TEST_ACQUISITION_REQ],
[TEST_ACQUISITION_REQ, TEST_ACQUISITION_REQ]
])
def test_get_requests_for_org(org_ids, acquisition_requests,
das_api, client, mock_req_store):
das_api.acquisition_res._org_checker = MagicMock()
mock_req_store.get_for_org.return_value = acquisition_requests
response = client.get(path=ACQUISITION_PATH,
query_string='orgs=' + ','.join(org_ids))
returned_requests = [AcquisitionRequest(**req_json) for req_json in response.json]
assert response.status == falcon.HTTP_200
assert returned_requests == acquisition_requests * len(org_ids)
assert mock_req_store.get_for_org.call_args_list == [call(id) for id in org_ids]
| mit | -5,892,326,582,538,794,000 | 37.300518 | 97 | 0.70901 | false |