repo_name (stringlengths 5–100) | path (stringlengths 4–375) | copies (stringclasses, 991 values) | size (stringlengths 4–7) | content (stringlengths 666–1M) | license (stringclasses, 15 values)
---|---|---|---|---|---
minhphung171093/GreenERP | openerp/addons/l10n_be_invoice_bba/partner.py | 47 | 1364 | # -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
from openerp.osv import fields, osv
import time
from openerp.tools.translate import _
class res_partner(osv.osv):
""" add field to indicate default 'Communication Type' on customer invoices """
_inherit = 'res.partner'
def _get_comm_type(self, cr, uid, context=None):
res = self.pool.get('account.invoice')._get_reference_type(cr, uid,context=context)
return res
_columns = {
'out_inv_comm_type': fields.selection(_get_comm_type, 'Communication Type', change_default=True,
help='Select Default Communication Type for Outgoing Invoices.' ),
'out_inv_comm_algorithm': fields.selection([
('random','Random'),
('date','Date'),
('partner_ref','Customer Reference'),
], 'Communication Algorithm',
help='Select Algorithm to generate the Structured Communication on Outgoing Invoices.' ),
}
def _commercial_fields(self, cr, uid, context=None):
return super(res_partner, self)._commercial_fields(cr, uid, context=context) + \
['out_inv_comm_type', 'out_inv_comm_algorithm']
_defaults = {
'out_inv_comm_type': 'none',
}
| gpl-3.0 |
pbrazdil/phantomjs | src/qt/qtwebkit/Source/ThirdParty/gtest/test/gtest_break_on_failure_unittest.py | 1050 | 7214 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's break-on-failure mode.
A user can ask Google Test to seg-fault when an assertion fails, using
either the GTEST_BREAK_ON_FAILURE environment variable or the
--gtest_break_on_failure flag. This script tests such functionality
by invoking gtest_break_on_failure_unittest_ (a program written with
Google Test) with different environments and command line flags.
"""
__author__ = '[email protected] (Zhanyong Wan)'
import gtest_test_utils
import os
import sys
# Constants.
IS_WINDOWS = os.name == 'nt'
# The environment variable for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'
# The command line flag for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'
# The environment variable for enabling/disabling the throw-on-failure mode.
THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'
# The environment variable for enabling/disabling the catch-exceptions mode.
CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'
# Path to the gtest_break_on_failure_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_break_on_failure_unittest_')
# Utilities.
environ = os.environ.copy()
def SetEnvVar(env_var, value):
"""Sets an environment variable to a given value; unsets it when the
given value is None.
"""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def Run(command):
"""Runs a command; returns 1 if it was killed by a signal, or 0 otherwise."""
p = gtest_test_utils.Subprocess(command, env=environ)
if p.terminated_by_signal:
return 1
else:
return 0
# The tests.
class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
"""Tests using the GTEST_BREAK_ON_FAILURE environment variable or
the --gtest_break_on_failure flag to turn assertion failures into
segmentation faults.
"""
def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
"""Runs gtest_break_on_failure_unittest_ and verifies that it does
(or does not) have a seg-fault.
Args:
env_var_value: value of the GTEST_BREAK_ON_FAILURE environment
variable; None if the variable should be unset.
flag_value: value of the --gtest_break_on_failure flag;
None if the flag should not be present.
expect_seg_fault: 1 if the program is expected to generate a seg-fault;
0 otherwise.
"""
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)
if env_var_value is None:
env_var_value_msg = ' is not set'
else:
env_var_value_msg = '=' + env_var_value
if flag_value is None:
flag = ''
elif flag_value == '0':
flag = '--%s=0' % BREAK_ON_FAILURE_FLAG
else:
flag = '--%s' % BREAK_ON_FAILURE_FLAG
command = [EXE_PATH]
if flag:
command.append(flag)
if expect_seg_fault:
should_or_not = 'should'
else:
should_or_not = 'should not'
has_seg_fault = Run(command)
SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)
msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
(BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
should_or_not))
self.assert_(has_seg_fault == expect_seg_fault, msg)
def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(env_var_value=None,
flag_value=None,
expect_seg_fault=0)
def testEnvVar(self):
"""Tests using the GTEST_BREAK_ON_FAILURE environment variable."""
self.RunAndVerify(env_var_value='0',
flag_value=None,
expect_seg_fault=0)
self.RunAndVerify(env_var_value='1',
flag_value=None,
expect_seg_fault=1)
def testFlag(self):
"""Tests using the --gtest_break_on_failure flag."""
self.RunAndVerify(env_var_value=None,
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value=None,
flag_value='1',
expect_seg_fault=1)
def testFlagOverridesEnvVar(self):
"""Tests that the flag overrides the environment variable."""
self.RunAndVerify(env_var_value='0',
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value='0',
flag_value='1',
expect_seg_fault=1)
self.RunAndVerify(env_var_value='1',
flag_value='0',
expect_seg_fault=0)
self.RunAndVerify(env_var_value='1',
flag_value='1',
expect_seg_fault=1)
def testBreakOnFailureOverridesThrowOnFailure(self):
"""Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
try:
self.RunAndVerify(env_var_value=None,
flag_value='1',
expect_seg_fault=1)
finally:
SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)
if IS_WINDOWS:
def testCatchExceptionsDoesNotInterfere(self):
"""Tests that gtest_catch_exceptions doesn't interfere."""
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
try:
self.RunAndVerify(env_var_value='1',
flag_value='1',
expect_seg_fault=1)
finally:
SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause |
teamtuga4/teamtuga4ever.repository | plugin.video.pancas/resources/lib/sources/directdl_tv.py | 20 | 4246 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json,base64
from resources.lib.libraries import control
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://directdownload.tv'
self.search_link = 'L2FwaT9rZXk9NEIwQkI4NjJGMjRDOEEyOSZxdWFsaXR5W109SERUViZxdWFsaXR5W109RFZEUklQJnF1YWxpdHlbXT03MjBQJnF1YWxpdHlbXT1XRUJETCZxdWFsaXR5W109V0VCREwxMDgwUCZsaW1pdD0yMCZrZXl3b3JkPQ=='
def get_show(self, imdb, tvdb, tvshowtitle, year):
try:
url = tvshowtitle
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
def get_episode(self, url, imdb, tvdb, title, date, season, episode):
try:
if url == None: return
url = '%s S%02dE%02d' % (url, int(season), int(episode))
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
return url
except:
return
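# For illustration only (not in the original source): the value built by
# get_show()/get_episode() is simply the show title plus a zero-padded
# season/episode tag, e.g. 'Some Show' with season=1, episode=2 becomes
# 'Some Show S01E02' (assuming client.replaceHTMLCodes passes a plain ASCII
# title through unchanged). get_sources() later splits it back apart with
# the regex '(.+?) (S\d*E\d*)$'.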
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
sources = []
if url == None: return sources
if (control.setting('realdedrid_user') == '' and control.setting('premiumize_user') == ''): raise Exception()
query = base64.urlsafe_b64decode(self.search_link) + urllib.quote_plus(url)
query = urlparse.urljoin(self.base_link, query)
result = client.source(query)
result = json.loads(result)
title, hdlr = re.compile('(.+?) (S\d*E\d*)$').findall(url)[0]
title = cleantitle.tv(title)
hdlr = [hdlr]
links = []
for i in result:
try:
t = i['showName']
t = client.replaceHTMLCodes(t)
t = cleantitle.tv(t)
if not t == title: raise Exception()
y = i['release']
y = re.compile('[\.|\(|\[|\s](\d{4}|S\d*E\d*)[\.|\)|\]|\s]').findall(y)[-1]
y = y.upper()
if not any(x == y for x in hdlr): raise Exception()
quality = i['quality']
if quality == 'WEBDL1080P': quality = '1080p'
elif quality in ['720P', 'WEBDL']: quality = 'HD'
else: quality = 'SD'
size = i['size']
size = float(size)/1024
info = '%.2f GB' % size
url = i['links']
for x in url.keys(): links.append({'url': url[x], 'quality': quality, 'info': info})
except:
pass
for i in links:
try:
url = i['url']
if len(url) > 1: raise Exception()
url = url[0]
host = (urlparse.urlparse(url).netloc).replace('www.', '').rsplit('.', 1)[0].lower()
if not host in hosthdDict: raise Exception()
sources.append({'source': host, 'quality': i['quality'], 'provider': 'DirectDL', 'url': url, 'info': i['info']})
except:
pass
return sources
except:
return sources
def resolve(self, url):
try:
url = resolvers.request(url)
return url
except:
return
| gpl-2.0 |
ZhenxingWu/luigi | test/s3_test.py | 42 | 12104 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Mortar Data
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
from __future__ import print_function
import os
import sys
import tempfile
from target_test import FileSystemTargetTestMixin
from helpers import with_config, unittest
from boto.exception import S3ResponseError
from boto.s3 import key
from moto import mock_s3
from luigi import configuration
from luigi.s3 import FileNotFoundException, InvalidDeleteException, S3Client, S3Target
from luigi.target import MissingParentDirectory
if (3, 4, 0) <= sys.version_info[:3] < (3, 4, 3):
# spulec/moto#308
raise unittest.SkipTest('moto mock doesn\'t work with python3.4')
AWS_ACCESS_KEY = "XXXXXXXXXXXXXXXXXXXX"
AWS_SECRET_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
class TestS3Target(unittest.TestCase, FileSystemTargetTestMixin):
def setUp(self):
f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
self.tempFileContents = (
b"I'm a temporary file for testing\nAnd this is the second line\n"
b"This is the third.")
self.tempFilePath = f.name
f.write(self.tempFileContents)
f.close()
self.addCleanup(os.remove, self.tempFilePath)
self.mock_s3 = mock_s3()
self.mock_s3.start()
self.addCleanup(self.mock_s3.stop)
def create_target(self, format=None):
client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
client.s3.create_bucket('mybucket')
return S3Target('s3://mybucket/test_file', client=client, format=format)
def test_read(self):
client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
client.s3.create_bucket('mybucket')
client.put(self.tempFilePath, 's3://mybucket/tempfile')
t = S3Target('s3://mybucket/tempfile', client=client)
read_file = t.open()
file_str = read_file.read()
self.assertEqual(self.tempFileContents, file_str.encode('utf-8'))
def test_read_no_file(self):
t = self.create_target()
self.assertRaises(FileNotFoundException, t.open)
def test_read_iterator_long(self):
# write a file that is 5X the boto buffersize
# to test line buffering
old_buffer = key.Key.BufferSize
key.Key.BufferSize = 2
try:
tempf = tempfile.NamedTemporaryFile(mode='wb', delete=False)
temppath = tempf.name
firstline = ''.zfill(key.Key.BufferSize * 5) + os.linesep
contents = firstline + 'line two' + os.linesep + 'line three'
tempf.write(contents.encode('utf-8'))
tempf.close()
client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
client.s3.create_bucket('mybucket')
client.put(temppath, 's3://mybucket/largetempfile')
t = S3Target('s3://mybucket/largetempfile', client=client)
with t.open() as read_file:
lines = [line for line in read_file]
finally:
key.Key.BufferSize = old_buffer
self.assertEqual(3, len(lines))
self.assertEqual(firstline, lines[0])
self.assertEqual("line two" + os.linesep, lines[1])
self.assertEqual("line three", lines[2])
class TestS3Client(unittest.TestCase):
def setUp(self):
f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
self.tempFilePath = f.name
f.write(b"I'm a temporary file for testing\n")
f.close()
self.addCleanup(os.remove, self.tempFilePath)
self.mock_s3 = mock_s3()
self.mock_s3.start()
self.addCleanup(self.mock_s3.stop)
def test_init_with_environment_variables(self):
os.environ['AWS_ACCESS_KEY_ID'] = 'foo'
os.environ['AWS_SECRET_ACCESS_KEY'] = 'bar'
# Don't read any existing config
old_config_paths = configuration.LuigiConfigParser._config_paths
configuration.LuigiConfigParser._config_paths = [tempfile.mktemp()]
s3_client = S3Client()
configuration.LuigiConfigParser._config_paths = old_config_paths
self.assertEqual(s3_client.s3.gs_access_key_id, 'foo')
self.assertEqual(s3_client.s3.gs_secret_access_key, 'bar')
@with_config({'s3': {'aws_access_key_id': 'foo', 'aws_secret_access_key': 'bar'}})
def test_init_with_config(self):
s3_client = S3Client()
self.assertEqual(s3_client.s3.access_key, 'foo')
self.assertEqual(s3_client.s3.secret_key, 'bar')
def test_put(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
s3_client.put(self.tempFilePath, 's3://mybucket/putMe')
self.assertTrue(s3_client.exists('s3://mybucket/putMe'))
def test_put_string(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
s3_client.put_string("SOMESTRING", 's3://mybucket/putString')
self.assertTrue(s3_client.exists('s3://mybucket/putString'))
def test_put_multipart_multiple_parts_non_exact_fit(self):
"""
Test a multipart put with two parts, where the parts are not exactly the split size.
"""
# 5MB is minimum part size
part_size = (1024 ** 2) * 5
file_size = (part_size * 2) - 5000
self._run_multipart_test(part_size, file_size)
def test_put_multipart_multiple_parts_exact_fit(self):
"""
Test a multipart put with multiple parts, where the parts are exactly the split size.
"""
# 5MB is minimum part size
part_size = (1024 ** 2) * 5
file_size = part_size * 2
self._run_multipart_test(part_size, file_size)
def test_put_multipart_less_than_split_size(self):
"""
Test a multipart put with a file smaller than split size; should revert to regular put.
"""
# 5MB is minimum part size
part_size = (1024 ** 2) * 5
file_size = 5000
self._run_multipart_test(part_size, file_size)
def test_put_multipart_empty_file(self):
"""
Test a multipart put with an empty file.
"""
# 5MB is minimum part size
part_size = (1024 ** 2) * 5
file_size = 0
self._run_multipart_test(part_size, file_size)
def test_exists(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
self.assertTrue(s3_client.exists('s3://mybucket/'))
self.assertTrue(s3_client.exists('s3://mybucket'))
self.assertFalse(s3_client.exists('s3://mybucket/nope'))
self.assertFalse(s3_client.exists('s3://mybucket/nope/'))
s3_client.put(self.tempFilePath, 's3://mybucket/tempfile')
self.assertTrue(s3_client.exists('s3://mybucket/tempfile'))
self.assertFalse(s3_client.exists('s3://mybucket/temp'))
s3_client.put(self.tempFilePath, 's3://mybucket/tempdir0_$folder$')
self.assertTrue(s3_client.exists('s3://mybucket/tempdir0'))
s3_client.put(self.tempFilePath, 's3://mybucket/tempdir1/')
self.assertTrue(s3_client.exists('s3://mybucket/tempdir1'))
s3_client.put(self.tempFilePath, 's3://mybucket/tempdir2/subdir')
self.assertTrue(s3_client.exists('s3://mybucket/tempdir2'))
self.assertFalse(s3_client.exists('s3://mybucket/tempdir'))
def test_get_key(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
s3_client.put(self.tempFilePath, 's3://mybucket/key_to_find')
self.assertTrue(s3_client.get_key('s3://mybucket/key_to_find'))
self.assertFalse(s3_client.get_key('s3://mybucket/does_not_exist'))
def test_isdir(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
self.assertTrue(s3_client.isdir('s3://mybucket'))
s3_client.put(self.tempFilePath, 's3://mybucket/tempdir0_$folder$')
self.assertTrue(s3_client.isdir('s3://mybucket/tempdir0'))
s3_client.put(self.tempFilePath, 's3://mybucket/tempdir1/')
self.assertTrue(s3_client.isdir('s3://mybucket/tempdir1'))
s3_client.put(self.tempFilePath, 's3://mybucket/key')
self.assertFalse(s3_client.isdir('s3://mybucket/key'))
def test_mkdir(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
self.assertTrue(s3_client.isdir('s3://mybucket'))
s3_client.mkdir('s3://mybucket')
s3_client.mkdir('s3://mybucket/dir')
self.assertTrue(s3_client.isdir('s3://mybucket/dir'))
self.assertRaises(MissingParentDirectory,
s3_client.mkdir, 's3://mybucket/dir/foo/bar', parents=False)
self.assertFalse(s3_client.isdir('s3://mybucket/dir/foo/bar'))
def test_listdir(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
s3_client.put_string("", 's3://mybucket/hello/frank')
s3_client.put_string("", 's3://mybucket/hello/world')
self.assertEquals(['s3://mybucket/hello/frank', 's3://mybucket/hello/world'],
list(s3_client.listdir('s3://mybucket/hello')))
self.assertEquals(['s3://mybucket/hello/frank', 's3://mybucket/hello/world'],
list(s3_client.listdir('s3://mybucket/hello/')))
self.assertEquals(['frank', 'world'],
list(s3_client.list('s3://mybucket/hello')))
self.assertEquals(['frank', 'world'],
list(s3_client.list('s3://mybucket/hello/')))
def test_remove(self):
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
self.assertRaises(
S3ResponseError,
lambda: s3_client.remove('s3://bucketdoesnotexist/file')
)
self.assertFalse(s3_client.remove('s3://mybucket/doesNotExist'))
s3_client.put(self.tempFilePath, 's3://mybucket/existingFile0')
self.assertTrue(s3_client.remove('s3://mybucket/existingFile0'))
self.assertFalse(s3_client.exists('s3://mybucket/existingFile0'))
self.assertRaises(
InvalidDeleteException,
lambda: s3_client.remove('s3://mybucket/')
)
self.assertRaises(
InvalidDeleteException,
lambda: s3_client.remove('s3://mybucket')
)
s3_client.put(self.tempFilePath, 's3://mybucket/removemedir/file')
self.assertRaises(
InvalidDeleteException,
lambda: s3_client.remove('s3://mybucket/removemedir', recursive=False)
)
def _run_multipart_test(self, part_size, file_size):
file_contents = b"a" * file_size
s3_path = 's3://mybucket/putMe'
tmp_file = tempfile.NamedTemporaryFile(mode='wb', delete=True)
tmp_file_path = tmp_file.name
tmp_file.write(file_contents)
tmp_file.flush()
s3_client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
s3_client.s3.create_bucket('mybucket')
s3_client.put_multipart(tmp_file_path, s3_path, part_size=part_size)
self.assertTrue(s3_client.exists(s3_path))
# b/c of https://github.com/spulec/moto/issues/131 have to
# get contents to check size
key_contents = s3_client.get_key(s3_path).get_contents_as_string()
self.assertEqual(len(file_contents), len(key_contents))
tmp_file.close()
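# Rough usage sketch (not part of the original tests); the bucket, key and local
# path are made up, and an active moto mock is assumed as in the tests above:
#
#   client = S3Client(AWS_ACCESS_KEY, AWS_SECRET_KEY)
#   client.s3.create_bucket('mybucket')
#   # upload in 5 MB parts; files smaller than one part fall back to a plain put
#   client.put_multipart('/tmp/bigfile', 's3://mybucket/bigfile',
#                        part_size=(1024 ** 2) * 5)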
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
mgracer48/panda3d | direct/src/gui/OnscreenImage.py | 9 | 5704 | """OnscreenImage module: contains the OnscreenImage class"""
__all__ = ['OnscreenImage']
from panda3d.core import *
from direct.showbase.DirectObject import DirectObject
import types
class OnscreenImage(DirectObject, NodePath):
def __init__(self, image = None,
pos = None,
hpr = None,
scale = None,
color = None,
parent = None,
sort = 0):
"""
Make an image node from a string or a NodePath,
put it into the 2d scene graph and set it up with all the indicated parameters.
The parameters are as follows:
image: the actual geometry to display or a file name.
This may be omitted and specified later via setImage()
if you don't have it available.
pos: the x, y, z position of the geometry on the screen.
This may be a 3-tuple of floats or a vector.
y should be zero
hpr: the h, p, r of the geometry on the screen.
This may be a 3-tuple of floats or a vector.
scale: the size of the geometry. This may either be a single
float, a 3-tuple of floats, or a vector, specifying a
different x, y, z scale. y should be 1
color: the (r, g, b, a) color of the geometry. This is
normally a 4-tuple of floats or ints.
parent: the NodePath to parent the geometry to initially.
"""
# We ARE a node path. Initially, we're an empty node path.
NodePath.__init__(self)
if parent == None:
parent = aspect2d
self.setImage(image, parent = parent, sort = sort)
# Adjust pose
# Set pos
if (isinstance(pos, types.TupleType) or
isinstance(pos, types.ListType)):
apply(self.setPos, pos)
elif isinstance(pos, VBase3):
self.setPos(pos)
# Hpr
if (isinstance(hpr, types.TupleType) or
isinstance(hpr, types.ListType)):
apply(self.setHpr, hpr)
elif isinstance(hpr, VBase3):
self.setHpr(hpr)
# Scale
if (isinstance(scale, types.TupleType) or
isinstance(scale, types.ListType)):
apply(self.setScale, scale)
elif isinstance(scale, VBase3):
self.setScale(scale)
elif (isinstance(scale, types.FloatType) or
isinstance(scale, types.IntType)):
self.setScale(scale)
# Set color
if color:
# Set color, if specified
self.setColor(color[0], color[1], color[2], color[3])
def setImage(self, image,
parent = NodePath(),
transform = None,
sort = 0):
# Get the original parent, transform, and sort, if any, so we can
# preserve them across this call.
if not self.isEmpty():
parent = self.getParent()
if transform == None:
# If we're replacing a previous image, we throw away
# the new image's transform in favor of the original
# image's transform.
transform = self.getTransform()
sort = self.getSort()
self.removeNode()
# Assign geometry
if isinstance(image, NodePath):
self.assign(image.copyTo(parent, sort))
elif isinstance(image, types.StringTypes) or \
isinstance(image, Texture):
if isinstance(image, Texture):
# It's a Texture
tex = image
else:
# It's a Texture file name
tex = loader.loadTexture(image)
cm = CardMaker('OnscreenImage')
cm.setFrame(-1, 1, -1, 1)
self.assign(parent.attachNewNode(cm.generate(), sort))
self.setTexture(tex)
elif type(image) == type(()):
# Assume its a file+node name, extract texture from node
model = loader.loadModel(image[0])
if model:
node = model.find(image[1])
if node:
self.assign(node.copyTo(parent, sort))
else:
print 'OnscreenImage: node %s not found' % image[1]
else:
print 'OnscreenImage: model %s not found' % image[0]
if transform and not self.isEmpty():
self.setTransform(transform)
def getImage(self):
return self
def configure(self, option=None, **kw):
for option, value in kw.items():
# Use option string to access setter function
try:
setter = getattr(self, 'set' + option[0].upper() + option[1:])
if (((setter == self.setPos) or
(setter == self.setHpr) or
(setter == self.setScale)) and
(isinstance(value, types.TupleType) or
isinstance(value, types.ListType))):
apply(setter, value)
else:
setter(value)
except AttributeError:
print 'OnscreenImage.configure: invalid option:', option
# Allow index style references
def __setitem__(self, key, value):
apply(self.configure, (), {key: value})
def cget(self, option):
# Get current configuration setting.
# This is for compatibility with DirectGui functions
getter = getattr(self, 'get' + option[0].upper() + option[1:])
return getter()
# Allow index style references
__getitem__ = cget
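# Illustrative only (not part of the original source): configure(), __setitem__
# and cget()/__getitem__ above make these forms equivalent, assuming 'img' is an
# OnscreenImage instance:
#
#   img.configure(pos = (0, 0, 0.5))
#   img['pos'] = (0, 0, 0.5)
#
#   current_scale = img['scale']   # same as img.cget('scale') / img.getScale()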
def destroy(self):
self.removeNode()
| bsd-3-clause |
Maistho/CouchPotatoServer | couchpotato/core/media/_base/search/main.py | 80 | 2211 | from couchpotato.api import addApiView
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.variable import mergeDicts, getImdb
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
log = CPLog(__name__)
class Search(Plugin):
def __init__(self):
addApiView('search', self.search, docs = {
'desc': 'Search the info in providers for a movie',
'params': {
'q': {'desc': 'The (partial) movie name you want to search for'},
'type': {'desc': 'Search for a specific media type. Leave empty to search all.'},
},
'return': {'type': 'object', 'example': """{
'success': True,
'movies': array,
'show': array,
etc
}"""}
})
addEvent('app.load', self.addSingleSearches)
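# Example request shape for the views registered above (illustrative only; the
# host, port and API key are deployment specific, and per-type routes such as
# 'movie.search' are added by addSingleSearches() below):
#
#   GET http://localhost:5050/api/<api_key>/search/?q=some+movie
#   GET http://localhost:5050/api/<api_key>/movie.search/?q=some+movie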
def search(self, q = '', types = None, **kwargs):
# Make sure types is the correct instance
if isinstance(types, (str, unicode)):
types = [types]
elif isinstance(types, (list, tuple, set)):
types = list(types)
imdb_identifier = getImdb(q)
if not types:
if imdb_identifier:
result = fireEvent('movie.info', identifier = imdb_identifier, merge = True)
result = {result['type']: [result]}
else:
result = fireEvent('info.search', q = q, merge = True)
else:
result = {}
for media_type in types:
if imdb_identifier:
result[media_type] = fireEvent('%s.info' % media_type, identifier = imdb_identifier)
else:
result[media_type] = fireEvent('%s.search' % media_type, q = q)
return mergeDicts({
'success': True,
}, result)
def createSingleSearch(self, media_type):
def singleSearch(q, **kwargs):
return self.search(q, type = media_type, **kwargs)
return singleSearch
def addSingleSearches(self):
for media_type in fireEvent('media.types', merge = True):
addApiView('%s.search' % media_type, self.createSingleSearch(media_type))
| gpl-3.0 |
Beauhurst/django | django/utils/translation/trans_real.py | 17 | 18459 | """Translation helper functions."""
import functools
import gettext as gettext_module
import os
import re
import sys
import warnings
from collections import OrderedDict
from threading import local
from django.apps import apps
from django.conf import settings
from django.conf.locale import LANG_INFO
from django.core.exceptions import AppRegistryNotReady
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils.safestring import SafeData, mark_safe
from django.utils.translation import LANGUAGE_SESSION_KEY
# Translations are cached in a dictionary for every language.
# The active translations are stored by threadid to make them thread local.
_translations = {}
_active = local()
# The default translation is based on the settings file.
_default = None
# magic gettext number to separate context from message
CONTEXT_SEPARATOR = "\x04"
# Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9
# and RFC 3066, section 2.1
accept_language_re = re.compile(r'''
([A-Za-z]{1,8}(?:-[A-Za-z0-9]{1,8})*|\*) # "en", "en-au", "x-y-z", "es-419", "*"
(?:\s*;\s*q=(0(?:\.\d{,3})?|1(?:\.0{,3})?))? # Optional "q=1.00", "q=0.8"
(?:\s*,\s*|$) # Multiple accepts per header.
''', re.VERBOSE)
language_code_re = re.compile(
r'^[a-z]{1,8}(?:-[a-z0-9]{1,8})*(?:@[a-z0-9]{1,20})?$',
re.IGNORECASE
)
language_code_prefix_re = re.compile(r'^/(\w+([@-]\w+)?)(/|$)')
@receiver(setting_changed)
def reset_cache(**kwargs):
"""
Reset global state when LANGUAGES setting has been changed, as some
languages should no longer be accepted.
"""
if kwargs['setting'] in ('LANGUAGES', 'LANGUAGE_CODE'):
check_for_language.cache_clear()
get_languages.cache_clear()
get_supported_language_variant.cache_clear()
def to_locale(language, to_lower=False):
"""
Turn a language name (en-us) into a locale name (en_US). If 'to_lower' is
True, the last component is lower-cased (en_us).
"""
p = language.find('-')
if p >= 0:
if to_lower:
return language[:p].lower() + '_' + language[p + 1:].lower()
else:
# Get correct locale for sr-latn
if len(language[p + 1:]) > 2:
return language[:p].lower() + '_' + language[p + 1].upper() + language[p + 2:].lower()
return language[:p].lower() + '_' + language[p + 1:].upper()
else:
return language.lower()
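# Expected behaviour, for illustration (these examples are not part of the
# original module):
#
#   >>> to_locale('en-us')
#   'en_US'
#   >>> to_locale('sr-latn')
#   'sr_Latn'
#   >>> to_locale('en-us', to_lower=True)
#   'en_us'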
def to_language(locale):
"""Turn a locale name (en_US) into a language name (en-us)."""
p = locale.find('_')
if p >= 0:
return locale[:p].lower() + '-' + locale[p + 1:].lower()
else:
return locale.lower()
class DjangoTranslation(gettext_module.GNUTranslations):
"""
Set up the GNUTranslations context with regard to output charset.
This translation object will be constructed out of multiple GNUTranslations
objects by merging their catalogs. It will construct an object for the
requested language and add a fallback to the default language, if it's
different from the requested language.
"""
domain = 'django'
def __init__(self, language, domain=None, localedirs=None):
"""Create a GNUTranslations() using many locale directories"""
gettext_module.GNUTranslations.__init__(self)
if domain is not None:
self.domain = domain
self.__language = language
self.__to_language = to_language(language)
self.__locale = to_locale(language)
self._catalog = None
# If a language doesn't have a catalog, use the Germanic default for
# pluralization: anything except one is pluralized.
self.plural = lambda n: int(n != 1)
if self.domain == 'django':
if localedirs is not None:
# A module-level cache is used for caching 'django' translations
warnings.warn("localedirs is ignored when domain is 'django'.", RuntimeWarning)
localedirs = None
self._init_translation_catalog()
if localedirs:
for localedir in localedirs:
translation = self._new_gnu_trans(localedir)
self.merge(translation)
else:
self._add_installed_apps_translations()
self._add_local_translations()
if self.__language == settings.LANGUAGE_CODE and self.domain == 'django' and self._catalog is None:
# default lang should have at least one translation file available.
raise IOError("No translation files found for default language %s." % settings.LANGUAGE_CODE)
self._add_fallback(localedirs)
if self._catalog is None:
# No catalogs found for this language, set an empty catalog.
self._catalog = {}
def __repr__(self):
return "<DjangoTranslation lang:%s>" % self.__language
def _new_gnu_trans(self, localedir, use_null_fallback=True):
"""
Return a mergeable gettext.GNUTranslations instance.
A convenience wrapper. By default gettext uses 'fallback=False'.
Using param `use_null_fallback` to avoid confusion with any other
references to 'fallback'.
"""
return gettext_module.translation(
domain=self.domain,
localedir=localedir,
languages=[self.__locale],
codeset='utf-8',
fallback=use_null_fallback)
def _init_translation_catalog(self):
"""Create a base catalog using global django translations."""
settingsfile = sys.modules[settings.__module__].__file__
localedir = os.path.join(os.path.dirname(settingsfile), 'locale')
translation = self._new_gnu_trans(localedir)
self.merge(translation)
def _add_installed_apps_translations(self):
"""Merge translations from each installed app."""
try:
app_configs = reversed(list(apps.get_app_configs()))
except AppRegistryNotReady:
raise AppRegistryNotReady(
"The translation infrastructure cannot be initialized before the "
"apps registry is ready. Check that you don't make non-lazy "
"gettext calls at import time.")
for app_config in app_configs:
localedir = os.path.join(app_config.path, 'locale')
if os.path.exists(localedir):
translation = self._new_gnu_trans(localedir)
self.merge(translation)
def _add_local_translations(self):
"""Merge translations defined in LOCALE_PATHS."""
for localedir in reversed(settings.LOCALE_PATHS):
translation = self._new_gnu_trans(localedir)
self.merge(translation)
def _add_fallback(self, localedirs=None):
"""Set the GNUTranslations() fallback with the default language."""
# Don't set a fallback for the default language or any English variant
# (as it's empty, so it'll ALWAYS fall back to the default language)
if self.__language == settings.LANGUAGE_CODE or self.__language.startswith('en'):
return
if self.domain == 'django':
# Get from cache
default_translation = translation(settings.LANGUAGE_CODE)
else:
default_translation = DjangoTranslation(
settings.LANGUAGE_CODE, domain=self.domain, localedirs=localedirs
)
self.add_fallback(default_translation)
def merge(self, other):
"""Merge another translation into this catalog."""
if not getattr(other, '_catalog', None):
return # NullTranslations() has no _catalog
if self._catalog is None:
# Take plural and _info from first catalog found (generally Django's).
self.plural = other.plural
self._info = other._info.copy()
self._catalog = other._catalog.copy()
else:
self._catalog.update(other._catalog)
def language(self):
"""Return the translation language."""
return self.__language
def to_language(self):
"""Return the translation language name."""
return self.__to_language
def translation(language):
"""
Return a translation object in the default 'django' domain.
"""
global _translations
if language not in _translations:
_translations[language] = DjangoTranslation(language)
return _translations[language]
def activate(language):
"""
Fetch the translation object for a given language and install it as the
current translation object for the current thread.
"""
if not language:
return
_active.value = translation(language)
def deactivate():
"""
Uninstall the active translation object so that further _() calls resolve
to the default translation object.
"""
if hasattr(_active, "value"):
del _active.value
def deactivate_all():
"""
Make the active translation object a NullTranslations() instance. This is
useful when we want delayed translations to appear as the original string
for some reason.
"""
_active.value = gettext_module.NullTranslations()
_active.value.to_language = lambda *args: None
def get_language():
"""Return the currently selected language."""
t = getattr(_active, "value", None)
if t is not None:
try:
return t.to_language()
except AttributeError:
pass
# If we don't have a real translation object, assume it's the default language.
return settings.LANGUAGE_CODE
def get_language_bidi():
"""
Return selected language's BiDi layout.
* False = left-to-right layout
* True = right-to-left layout
"""
lang = get_language()
if lang is None:
return False
else:
base_lang = get_language().split('-')[0]
return base_lang in settings.LANGUAGES_BIDI
def catalog():
"""
Return the current active catalog for further processing.
This can be used if you need to modify the catalog or want to access the
whole message catalog instead of just translating one string.
"""
global _default
t = getattr(_active, "value", None)
if t is not None:
return t
if _default is None:
_default = translation(settings.LANGUAGE_CODE)
return _default
def gettext(message):
"""
Translate the 'message' string. It uses the current thread to find the
translation object to use. If no current translation is activated, the
message will be run through the default translation object.
"""
global _default
eol_message = message.replace('\r\n', '\n').replace('\r', '\n')
if len(eol_message) == 0:
# Return an empty value of the corresponding type if an empty message
# is given, instead of metadata, which is the default gettext behavior.
result = type(message)("")
else:
_default = _default or translation(settings.LANGUAGE_CODE)
translation_object = getattr(_active, "value", _default)
result = translation_object.gettext(eol_message)
if isinstance(message, SafeData):
return mark_safe(result)
return result
def pgettext(context, message):
msg_with_ctxt = "%s%s%s" % (context, CONTEXT_SEPARATOR, message)
result = gettext(msg_with_ctxt)
if CONTEXT_SEPARATOR in result:
# Translation not found
result = message
return result
def gettext_noop(message):
"""
Mark strings for translation but don't translate them now. This can be
used to store strings in global variables that should stay in the base
language (because they might be used externally) and will be translated
later.
"""
return message
def do_ntranslate(singular, plural, number, translation_function):
global _default
t = getattr(_active, "value", None)
if t is not None:
return getattr(t, translation_function)(singular, plural, number)
if _default is None:
_default = translation(settings.LANGUAGE_CODE)
return getattr(_default, translation_function)(singular, plural, number)
def ngettext(singular, plural, number):
"""
Return a string of the translation of either the singular or plural,
based on the number.
"""
return do_ntranslate(singular, plural, number, 'ngettext')
def npgettext(context, singular, plural, number):
msgs_with_ctxt = ("%s%s%s" % (context, CONTEXT_SEPARATOR, singular),
"%s%s%s" % (context, CONTEXT_SEPARATOR, plural),
number)
result = ngettext(*msgs_with_ctxt)
if CONTEXT_SEPARATOR in result:
# Translation not found
result = ngettext(singular, plural, number)
return result
def all_locale_paths():
"""
Return a list of paths to user-provided language files.
"""
globalpath = os.path.join(
os.path.dirname(sys.modules[settings.__module__].__file__), 'locale')
return [globalpath] + list(settings.LOCALE_PATHS)
@functools.lru_cache(maxsize=1000)
def check_for_language(lang_code):
"""
Check whether there is a global language file for the given language
code. This is used to decide whether a user-provided language is
available.
lru_cache should have a maxsize to prevent from memory exhaustion attacks,
as the provided language codes are taken from the HTTP request. See also
<https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
"""
# First, a quick check to make sure lang_code is well-formed (#21458)
if lang_code is None or not language_code_re.search(lang_code):
return False
for path in all_locale_paths():
if gettext_module.find('django', path, [to_locale(lang_code)]) is not None:
return True
return False
@functools.lru_cache()
def get_languages():
"""
Cache of settings.LANGUAGES in an OrderedDict for easy lookups by key.
"""
return OrderedDict(settings.LANGUAGES)
@functools.lru_cache(maxsize=1000)
def get_supported_language_variant(lang_code, strict=False):
"""
Return the language-code that's listed in supported languages, possibly
selecting a more generic variant. Raise LookupError if nothing is found.
If `strict` is False (the default), look for an alternative
country-specific variant when the one currently being checked is not found.
lru_cache should have a maxsize to prevent from memory exhaustion attacks,
as the provided language codes are taken from the HTTP request. See also
<https://www.djangoproject.com/weblog/2007/oct/26/security-fix/>.
"""
if lang_code:
# If 'fr-ca' is not supported, try special fallback or language-only 'fr'.
possible_lang_codes = [lang_code]
try:
possible_lang_codes.extend(LANG_INFO[lang_code]['fallback'])
except KeyError:
pass
generic_lang_code = lang_code.split('-')[0]
possible_lang_codes.append(generic_lang_code)
supported_lang_codes = get_languages()
for code in possible_lang_codes:
if code in supported_lang_codes and check_for_language(code):
return code
if not strict:
# if fr-fr is not supported, try fr-ca.
for supported_code in supported_lang_codes:
if supported_code.startswith(generic_lang_code + '-'):
return supported_code
raise LookupError(lang_code)
def get_language_from_path(path, strict=False):
"""
Return the language-code if there is a valid language-code
found in the `path`.
If `strict` is False (the default), the function will look for an alternative
country-specific variant when the one currently being checked is not found.
"""
regex_match = language_code_prefix_re.match(path)
if not regex_match:
return None
lang_code = regex_match.group(1)
try:
return get_supported_language_variant(lang_code, strict=strict)
except LookupError:
return None
def get_language_from_request(request, check_path=False):
"""
Analyze the request to find what language the user wants the system to
show. Only languages listed in settings.LANGUAGES are taken into account.
If the user requests a sublanguage where we have a main language, we send
out the main language.
If check_path is True, the URL path prefix will be checked for a language
code, otherwise this is skipped for backwards compatibility.
"""
if check_path:
lang_code = get_language_from_path(request.path_info)
if lang_code is not None:
return lang_code
supported_lang_codes = get_languages()
if hasattr(request, 'session'):
lang_code = request.session.get(LANGUAGE_SESSION_KEY)
if lang_code in supported_lang_codes and lang_code is not None and check_for_language(lang_code):
return lang_code
lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME)
try:
return get_supported_language_variant(lang_code)
except LookupError:
pass
accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
for accept_lang, unused in parse_accept_lang_header(accept):
if accept_lang == '*':
break
if not language_code_re.search(accept_lang):
continue
try:
return get_supported_language_variant(accept_lang)
except LookupError:
continue
try:
return get_supported_language_variant(settings.LANGUAGE_CODE)
except LookupError:
return settings.LANGUAGE_CODE
def parse_accept_lang_header(lang_string):
"""
Parse the lang_string, which is the body of an HTTP Accept-Language
header, and return a list of (lang, q-value), ordered by 'q' values.
Return an empty list if there are any format errors in lang_string.
"""
result = []
pieces = accept_language_re.split(lang_string.lower())
if pieces[-1]:
return []
for i in range(0, len(pieces) - 1, 3):
first, lang, priority = pieces[i:i + 3]
if first:
return []
if priority:
priority = float(priority)
else:
priority = 1.0
result.append((lang, priority))
result.sort(key=lambda k: k[1], reverse=True)
return result
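# For illustration (not in the original module): parsing the classic
# Accept-Language example from RFC 2616 yields (lang, q) pairs ordered by q:
#
#   >>> parse_accept_lang_header('da, en-gb;q=0.8, en;q=0.7')
#   [('da', 1.0), ('en-gb', 0.8), ('en', 0.7)]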
| bsd-3-clause |
Rhaseven7h/hellanzb | Hellanzb/NZBLeecher/NZBModel.py | 4 | 35912 | """
NZBModel - Representations of the NZB file format in memory
(c) Copyright 2005 Philip Jenvey
[See end of file]
"""
import os, re, Hellanzb
try:
set
except NameError:
from sets import Set as set
from shutil import move
from threading import Lock, RLock
from Hellanzb.Log import *
from Hellanzb.NZBQueue import writeStateXML
from Hellanzb.Util import IDPool, UnicodeList, archiveName, getFileExtension, getMsgId, \
hellaRename, isHellaTemp, nuke, toUnicode
from Hellanzb.NZBLeecher.ArticleDecoder import parseArticleData, setRealFileName, tryAssemble
from Hellanzb.NZBLeecher.DupeHandler import handleDupeNZBFileNeedsDownload
from Hellanzb.NZBLeecher.NZBLeecherUtil import validWorkingFile
from Hellanzb.PostProcessorUtil import Archive, getParEnum, getParName
from Hellanzb.SmartPar import identifyPar, logSkippedPars, smartDequeue, smartRequeue
__id__ = '$Id$'
class NZB(Archive):
""" Representation of an nzb file -- the root <nzb> tag """
def __init__(self, nzbFileName, id = None, rarPassword = None, archiveDir = None,
category = ''):
Archive.__init__(self, archiveDir, id, None, rarPassword)
## NZB file general information
self.nzbFileName = nzbFileName
self.archiveName = archiveName(self.nzbFileName) # pretty name
self.msgid = None
filename = os.path.basename(nzbFileName)
self.msgid = getMsgId(filename)
if self.msgid:
self.msgid = int(self.msgid)
self.nzbFiles = []
self.skippedParFiles = []
self.category = category
## Where the nzb files will be downloaded
self.destDir = Hellanzb.WORKING_DIR
## A cancelled NZB is marked for death. ArticleDecoder will dispose of any
## recently downloaded data that might have been downloading during the time the
## cancel call was made (after the fact cleanup)
self.canceled = False
self.canceledLock = Lock()
## Acquired during assembly of an NZBFile
self.assembleLock = Lock()
## Total bytes this NZB represents
self.totalBytes = 0
## Whether the total byte count of this NZB is still being calculated
self.calculatingBytes = True
## How many bytes were skipped for downloading
self.totalSkippedBytes = 0
## How many bytes have been downloaded for this NZB
self.totalReadBytes = 0
## How many bytes of encoded article data is currently cached to memory
self.cachedArticleDataBytes = 0
## Time this NZB began downloading
self.downloadStartTime = None
## Amount of time taken to download the NZB
self.downloadTime = 0
## Amount of time taken to download AND decode the NZB
self.downloadAndDecodeTime = 0
## Whether or not we should redownload NZBFile and NZBSegment files on disk that
## are 0 bytes in size
self.overwriteZeroByteFiles = True
# All segment0001s are downloaded first. Every time we successfully decode a
# segment0001, we add to this number
self.firstSegmentsDownloaded = 0
## Whether or not this NZB is downloading in par recovery mode
self.isParRecovery = False
## Whether or not this is an NZB that contains all par files
self.allParsMode = False
## Skipped par file's subjects are kept here, in a list, during post
## processing. This list is arranged by the file's size
self.skippedParSubjects = None
## The number of par blocks (or par files for par1 mode), queued to download
## recovery blocks, the par version, and the par prefix for the current par
## recovery download
self.neededBlocks = 0
self.queuedBlocks = 0
self.parType = None
self.parPrefix = None
def isCanceled(self):
""" Whether or not this NZB was cancelled """
# FIXME: this doesn't need locks
self.canceledLock.acquire()
c = self.canceled
self.canceledLock.release()
return c
def cancel(self):
""" Mark this NZB as having been cancelled """
# FIXME: this doesn't need locks
self.canceledLock.acquire()
self.canceled = True
self.canceledLock.release()
def postpone(self):
""" Postpone an active NZB """
assert self in Hellanzb.queue.currentNZBs(), \
'Attempting to postpone an NZB not actively being downloaded: %s' % self.archiveName
postponed = os.path.join(Hellanzb.POSTPONED_DIR, self.archiveName)
hellaRename(postponed)
os.mkdir(postponed)
self.assembleLock.acquire()
try:
self.destDir = postponed
move(self.nzbFileName, os.path.join(Hellanzb.QUEUE_DIR,
os.path.basename(self.nzbFileName)))
self.nzbFileName = os.path.join(Hellanzb.QUEUE_DIR,
os.path.basename(self.nzbFileName))
Hellanzb.nzbQueue.insert(0, self)
writeStateXML()
# Move the postponed files to the new postponed dir
for file in os.listdir(Hellanzb.WORKING_DIR):
move(os.path.join(Hellanzb.WORKING_DIR, file), os.path.join(postponed, file))
finally:
self.assembleLock.release()
def isAllPars(self):
""" Determine whether or not all nzbFiles in this archive are par files. An NZB only
containing par files needs to be specially handled (all its nzbFiles should be
downloaded, instead of skipped) -- otherwise, no downloading would occur. This
situation isn't applicable to isParRecovery downloads
All nzbFiles in this NZB should have their real filename for the results of this
function to be accurate
newzbin.com will always add the .nfo file to an NZB if it exists (even if you
didn't select it for download) -- this function attempts to take that into account
"""
if self.isParRecovery:
return False
skippedLen = len(self.skippedParFiles)
nzbFilesLen = len(self.nzbFiles)
if skippedLen == nzbFilesLen:
return True
if (skippedLen > 0 and skippedLen == nzbFilesLen - 1) or \
(skippedLen > 1 and skippedLen == nzbFilesLen - 2):
# We only queued 1 or 2 files for download. If both are either a main par file
# or a .nfo file, this is an all par archive
queuedFiles = [nzbFile for nzbFile in self.nzbFiles if nzbFile not \
in self.skippedParFiles]
for queuedFile in queuedFiles[:]:
if queuedFile.filename.lower().endswith('.nfo') or queuedFile.isPar:
queuedFiles.remove(queuedFile)
return not len(queuedFiles)
return False
def cleanStats(self):
""" Reset downlaod statistics """
self.allParsMode = False
self.totalBytes = 0
self.totalSkippedBytes = 0
self.totalReadBytes = 0
self.cachedArticleDataBytes = 0
self.firstSegmentsDownloaded = 0
##self.neededBlocks = 0 # ?
self.queuedBlocks = 0
for nzbFile in self.nzbFiles:
nzbFile.totalSkippedBytes = 0
nzbFile.totalReadBytes = 0
nzbFile.downloadPercentage = 0
nzbFile.speed = 0
def finalize(self, justClean = False):
""" Delete any potential cyclic references existing in this NZB, then garbage
collect. justClean will only clean/delete specific things, to prep the NZB for
another download """
# nzbFiles aren't needed for another download
for nzbFile in self.nzbFiles:
# The following two sets used to be del'd. This was changed in r961
# for the associated ticket; but the fix is really a bandaid. If
# the root cause is mitigated, go back to del
if justClean:
nzbFile.todoNzbSegments.clear()
nzbFile.dequeuedSegments.clear()
else:
del nzbFile.todoNzbSegments
del nzbFile.dequeuedSegments
del nzbFile.nzb
del nzbFile
if justClean:
self.nzbFiles = []
self.skippedParFiles = []
self.postProcessor = None
self.cleanStats()
else:
del self.nzbFiles
del self.skippedParFiles
del self.postProcessor
def getSkippedParSubjects(self):
""" Return a list of skipped par file's subjects, sorted by the size of the par """
unsorted = []
for nzbFile in self.nzbFiles:
if nzbFile.isSkippedPar:
unsorted.append((nzbFile.totalBytes, nzbFile.subject))
# Ensure the list of pars is sorted by the par's number of bytes (so we pick off
# the smallest ones first when doing a par recovery download)
unsorted.sort()
sorted = UnicodeList()
for bytes, subject in unsorted:
sorted.append(subject)
return sorted
def isSkippedParSubject(self, subject):
""" Determine whether the specified subject is that of a known skipped par file """
if self.skippedParSubjects is None:
return False
return toUnicode(subject) in self.skippedParSubjects
def getName(self):
return os.path.basename(self.archiveName)
def getPercentDownloaded(self):
""" Return the percentage of this NZB that has already been downloaded """
if self.totalBytes == 0:
return 0
else:
# FIXME: there are two ways of getting this value, either from the NZB
# statistics or from the queue statistics. There should really only be one way..?
return int((float(self.totalReadBytes + self.totalSkippedBytes) / \
float(self.totalBytes)) * 100)
def getETA(self):
""" Return the amount of time needed to finish downloadling this NZB at the current rate
"""
# FIXME: this isn't used anywhere. could be broken
currentRate = Hellanzb.getCurrentRate()
if self.totalBytes == 0 or currentRate == 0:
return 0
else:
return int(((self.totalBytes - self.totalReadBytes - self.totalSkippedBytes) \
/ 1024) / currentRate * 1024)
def getStateAttribs(self):
""" Return attributes to be written out to the """
attribs = Archive.getStateAttribs(self)
# NZBs in isParRecovery mode need the par recovery state written
if self.isParRecovery:
attribs['isParRecovery'] = 'True'
for attrib in ('neededBlocks', 'parPrefix'):
val = getattr(self, attrib)
if isinstance(val, int):
val = str(val)
attribs[attrib] = toUnicode(val)
attribs['parType'] = getParName(self.parType)
if self.downloadTime:
attribs['downloadTime'] = str(self.downloadTime)
if not self.calculatingBytes and self.totalBytes > 0:
attribs['totalBytes'] = str(self.totalBytes)
if self.category:
attribs['category'] = self.category
return attribs
def toStateXML(self, xmlWriter):
""" Write a brief version of this object to an elementtree.SimpleXMLWriter.XMLWriter """
attribs = self.getStateAttribs()
if self in Hellanzb.queue.currentNZBs():
type = 'downloading'
elif self.postProcessor is not None and \
self.postProcessor in Hellanzb.postProcessors:
type = 'processing'
attribs['nzbFileName'] = os.path.basename(self.nzbFileName)
elif self in Hellanzb.nzbQueue:
type = 'queued'
else:
return
xmlWriter.start(type, attribs)
if type != 'downloading' or self.isParRecovery:
# Write 'skippedPar' tags describing the known skipped par files that haven't
# been downloaded
if self.skippedParSubjects is not None:
for nzbFileName in self.skippedParSubjects:
xmlWriter.element('skippedPar', nzbFileName)
else:
for skippedParFileSubject in self.getSkippedParSubjects():
xmlWriter.element('skippedPar', skippedParFileSubject)
xmlWriter.end(type)
def fromStateXML(type, target):
""" Factory method, returns a new NZB object for the specified target, and recovers
the NZB state from the RecoveredState object if the target exists there for
the specified type (such as 'processing', 'downloading') """
if type == 'processing':
recoveredDict = Hellanzb.recoveredState.getRecoveredDict(type, target)
archiveDir = os.path.join(Hellanzb.PROCESSING_DIR, target)
if recoveredDict and recoveredDict.get('nzbFileName') is not None:
target = recoveredDict.get('nzbFileName')
else:
# If this is a processing recovery request, and we didn't recover any
# state information, we'll consider this a basic Archive object (it has no
# accompanying .NZB file to keep track of)
return Archive.fromStateXML(archiveDir, recoveredDict)
else:
recoveredDict = Hellanzb.recoveredState.getRecoveredDict(type,
archiveName(target))
# Pass the id in with the constructor (instead of setting it after the fact) --
# otherwise the constructor would unnecessarily incremenet the IDPool
nzbId = None
if recoveredDict:
nzbId = recoveredDict['id']
nzb = NZB(target, nzbId)
if type == 'processing':
nzb.archiveDir = archiveDir
if recoveredDict:
for key, value in recoveredDict.iteritems():
if key == 'id' or key == 'order':
continue
if key == 'neededBlocks':
value = int(value)
if key == 'totalBytes':
nzb.calculatingBytes = False
value = int(value)
if key == 'downloadTime':
value = float(value)
if key == 'parType':
value = getParEnum(value)
setattr(nzb, key, value)
return nzb
fromStateXML = staticmethod(fromStateXML)
def smartRequeue(self):
""" Shortcut to the SmartPar function of the same name """
smartRequeue(self)
def logSkippedPars(self):
""" Shortcut to the SmartPar function of the same name """
logSkippedPars(self)
class NZBFile:
""" <nzb><file/><nzb> """
def __init__(self, subject, date = None, poster = None, nzb = None):
## XML attributes
self.subject = subject
self.date = date
self.poster = poster
## XML tree-collections/references
# Parent NZB
self.nzb = nzb
# FIXME: thread safety?
self.nzb.nzbFiles.append(self)
self.groups = []
self.nzbSegments = []
## TO download segments --
# we'll remove from this set everytime a segment is found completed (on the FS)
# during NZB parsing, or later written to the FS
self.todoNzbSegments = set()
## Segments that have been dequeued on the fly (during download). These are kept
## track of in the rare case that an nzb file is dequeued when all segments have
## actually been downloaded
self.dequeuedSegments = set()
## NZBFile statistics
self.number = len(self.nzb.nzbFiles)
self.totalBytes = 0
self.totalSkippedBytes = 0
self.totalReadBytes = 0
self.downloadPercentage = 0
self.speed = 0
## yEncode header keywords. Optional (not used for UUDecoded segments)
# the expected file size, as reported from yencode headers
self.ySize = None
## On Disk filenames
# The real filename, determined from the actual articleData's yDecode/UUDecode
# headers
self.filename = None
# The filename used temporarily until the real filename is determined
self.tempFilename = None
## Optimizations
# LAME: maintain a cached file name displayed in the scrolling UI, and whether or
# not the cached name might be stale (might be stale = a temporary name).
self.showFilename = None
self.showFilenameIsTemp = False
# Whether or not the filename was forcefully changed from the original by the
# DupeHandler
self.forcedChangedFilename = False
# direct pointer to the first segment of this file, when we have a tempFilename we
# look at this segment frequently until we find the real file name
# FIXME: this most likely doesn't optimize for shit.
self.firstSegment = None
# LAME: re-entrant lock for maintaing temp filenames/renaming temp -> real file
# names in separate threads. FIXME: This is a lot of RLock() construction, it
# should be removed eventually
self.tempFileNameLock = RLock() # this isn't used right
# filename could be modified/accessed concurrently (getDestination called by the
# downloader doesnt lock).
# NOTE: maybe just change nzbFile.filename via the reactor (callFromThread), and
# remove the lock entirely?
# Toggled to True when this nzbFile's assembly was interrupted during an
# OutOfDiskSpace exception
self.interruptedAssembly = False
## Whether or not this is a par file, an extra par file
## (e.g. archiveA.vol02+01.par2), and has been skipped by the downloader
self.isPar = False
self.isExtraPar = False
self.isSkippedPar = False
## Download statistics
# The current download rate for this NZBFile
self.rate = 0
# Tally of the amount of data read within the last second
self.readThisSecond = 0
def getDestination(self):
""" Return the full pathname of where this NZBFile should be written to on disk """
return os.path.join(self.nzb.destDir, self.getFilename())
def getFilename(self):
""" Return the filename of where this NZBFile will reside on the filesystem, within the
WORKING_DIR (not a full path)
The filename information is grabbed from the first segment's articleData
(uuencode's fault -- yencode includes the filename in every segment's
        articleData). In the case where we need this file's filename, and that
first segment doesn't have articleData (hasn't been downloaded yet), a temp
filename will be returned
Downloading segments out of order often occurs in hellanzb, thus the need for
temporary file names """
if self.filename is not None:
# We've determined the real filename (or the last filename we're ever going to
# get)
return self.filename
elif self.firstSegment is not None and self.firstSegment.articleData is not None:
# No real filename yet, but we should be able to determine it from the first
# segment's article data
try:
# getFilenameFromArticleData will either set our self.filename when
# successful, or raise a FatalError
self.firstSegment.getFilenameFromArticleData()
except Exception, e:
debug('getFilename: Unable to getFilenameFromArticleData: file number: %i: %s' % \
(self.number, str(e)))
if self.filename is None:
# We only check the first segment for a real filename (FIXME: looking at
# any yDecode segment for the real filename would be nice). If we had
# trouble finding it there -- force this file to use the temp filename
# throughout its lifetime
self.filename = self.getTempFileName()
return self.filename
elif self.tempFilename is not None:
# We can't get the real filename yet -- use the already cached tempFilename
# for now (NOTE: caching this is really unnecessary)
return self.tempFilename
# We can't get the real filename yet, cache the temp filename and use it for now
self.tempFilename = self.getTempFileName()
return self.tempFilename
def needsDownload(self, workingDirListing, workingDirDupeMap):
""" Whether or not this NZBFile needs to be downloaded (isn't on the file system). You may
specify the optional workingDirListing so this function does not need to prune
this directory listing every time it is called (i.e. prune directory
names). workingDirListing should be a list of only filenames (basename, not
including dirname) of files lying in Hellanzb.WORKING_DIR """
if os.path.isfile(self.getDestination()):
# This block only handles matching temporary file names
self.nzb.firstSegmentsDownloaded += 1
return False
elif self.filename is None:
# First, check if this is one of the dupe files on disk
isDupe, dupeNeedsDl = handleDupeNZBFileNeedsDownload(self, workingDirDupeMap)
if isDupe:
# NOTE: We should know this is a par, but probably don't care if it is.
# If there is a par file fully assembled on disk, we don't care about
# skipping it
if self.filename is not None:
identifyPar(self)
if not dupeNeedsDl:
self.nzb.firstSegmentsDownloaded += 1
return dupeNeedsDl
# We only know about the temp filename. In that case, fall back to matching
# filenames in our subject line
for file in workingDirListing:
# Whole file match
if self.subject.find(file) > -1:
# No need for setRealFileName(self, file)'s extra work here
self.filename = file
# Prevent matching of this file multiple times
workingDirListing.remove(file)
if Hellanzb.SMART_PAR:
identifyPar(self)
if self.isPar:
debug('needsDownload: Found par on disk: %s isExtraPar: %s' % \
(file, str(self.isExtraPar)))
self.nzb.firstSegmentsDownloaded += 1
return False
return True
def getTempFileName(self):
""" Generate a temporary filename for this file, for when we don't have it's actual file
name on hand """
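        # e.g. 'hellanzb-tmp-SomeArchive.file0004' -- the zero padded file number keeps the
        # generated names unique within the archive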
return 'hellanzb-tmp-' + self.nzb.archiveName + '.file' + str(self.number).zfill(4)
def isAllSegmentsDecoded(self):
""" Determine whether all these file's segments have been decoded (nzbFile is ready to be
assembled) """
if self.isSkippedPar:
return not len(self.dequeuedSegments) and not len(self.todoNzbSegments)
return not len(self.todoNzbSegments)
def getCurrentRate(self):
""" Returns the current download rate for ONLY this NZBFile """
return self.rate / 1024
def tryAssemble(self):
""" Call the ArticleDecoder function of the same name """
tryAssemble(self)
#def __repr__(self):
# msg = 'nzbFile: ' + os.path.basename(self.getDestination())
# if self.filename is not None:
# msg += ' tempFileName: ' + self.getTempFileName()
# msg += ' number: ' + str(self.number) + ' subject: ' + \
# self.subject
# return msg
class NZBSegment:
""" <file><segment/></file> """
def __init__(self, bytes, number, messageId, nzbFile):
## XML attributes
self.bytes = bytes
self.number = number
self.messageId = messageId
## XML tree-collections/references
# Reference to the parent NZBFile this segment belongs to
self.nzbFile = nzbFile
# This segment belongs to the parent nzbFile
self.nzbFile.nzbSegments.append(self)
self.nzbFile.todoNzbSegments.add(self)
self.nzbFile.totalBytes += self.bytes
self.nzbFile.nzb.totalBytes += self.bytes
## Downloaded article data is written to this list immediately as it's received
## from the other end
self.encodedDataList = None
## To-be a file object. Downloaded article data will be written to this file
## instead of encodedDataList when cachedToDisk is True
self.encodedDataFile = None
# Whether or not decoded data for this segment is written to disk, or
# kept in memory
self.cachedToDisk = False
## Downloaded article data stored as an array of lines whose CRLFs are stripped
## NOTE: encodedDataList and encodedDataFile are splitlines() into this list for
## use by the ArticleDecoder. In the future we should avoid this translation,
## somehow. Also note that NZBFile.getFilename relies on this variable being None
## until the entire segment has been downloaded and put here.
self.articleData = None
## Number of bytes downloaded
self.readBytes = 0
## yEncoder header keywords used for validation. Optional, obviously not used for
## UUDecoded segments
self.yCrc = None # Not the original crc (upper()'d and lpadded with 0s)
self.yBegin = None
self.yEnd = None
self.ySize = None
## A copy of the priority level of this segment, as set in the NZBQueue
self.priority = None
## Any server pools that failed to download this file
self.failedServerPools = []
# This flag is set when we want to trash the NZB and prevent the leechers from
# trying to requeue it
self.dontRequeue = False
# The NZBSegmentQueue this segment was last pulled from
self.fromQueue = None
# The NZBLeecherFactory this segment was last downloaded from
self.fromServer = None
def getDestination(self):
""" Where this decoded segment will reside on the fs """
return self.nzbFile.getDestination() + '.segment' + str(self.number).zfill(4)
def getTempFileName(self):
""" """
return self.nzbFile.getTempFileName() + '.segment' + str(self.number).zfill(4)
def getFilenameFromArticleData(self):
""" Determine the segment's filename via the articleData """
parseArticleData(self, justExtractFilename = True)
if self.nzbFile.filename is None and self.nzbFile.tempFilename is None:
raise FatalError('Could not getFilenameFromArticleData, file:' + str(self.nzbFile) +
' segment: ' + str(self))
def loadArticleData(self):
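        """ Load this segment's downloaded article data into articleData, either from memory
        or from the on disk cache, depending on where it was stored """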
if self.cachedToDisk:
self.loadArticleDataFromDisk()
else:
self.loadArticleDataFromMem()
def loadArticleDataFromMem(self):
articleData = ''.join(self.encodedDataList)
self.articleData = articleData.splitlines()
self.encodedDataList = None
def loadArticleDataFromDisk(self):
""" Load the previously downloaded article BODY from disk, as a list to the .articleData
variable. Removes the on disk version upon loading """
# downloaded encodedDataFile was written to disk by NZBLeecher
encodedDataFile = open(os.path.join(Hellanzb.DOWNLOAD_TEMP_DIR,
self.getTempFileName() + '_ENC'), 'rb')
# remove crlfs. FIXME: might be quicker to do this during a later loop
self.articleData = [line[:-2] for line in encodedDataFile]
encodedDataFile.close()
# Delete the copy on disk ASAP
nuke(os.path.join(Hellanzb.DOWNLOAD_TEMP_DIR, self.getTempFileName() + '_ENC'))
def isFirstSegment(self):
""" Determine whether or not this is the first segment """
return self is self.nzbFile.firstSegment
def smartDequeue(self, readOnlyQueue = False):
""" Shortcut to the SmartPar function of the same name """
smartDequeue(self, readOnlyQueue)
#def __repr__(self):
# return 'segment: ' + os.path.basename(self.getDestination()) + ' number: ' + \
# str(self.number) + ' subject: ' + self.nzbFile.subject
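# Matches the extension of decoded segment files on disk, e.g. 'segment0042'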
segmentEndRe = re.compile(r'^segment\d{4}$')
def segmentsNeedDownload(segmentList, overwriteZeroByteSegments = False):
""" Faster version of needsDownload for multiple segments that do not have their real file
name (for use by the Queue).
When an NZB is loaded and parsed, NZB<file>s not found on disk at the time of parsing
are marked as needing to be downloaded. (An easy first pass of figuring out exactly
what needs to be downloaded).
    This function is the second pass. It takes the child NZBSegments of all those
    NZBFiles that need to be downloaded and scans the disk, detecting which segments are
    already on disk and can be skipped
"""
# Arrange all WORKING_DIR segment's filenames in a list. Key this list by segment
# number in a map. Loop through the specified segmentList, doing a subject.find for
# each segment filename with a matching segment number
onDiskSegmentsByNumber = {}
needDlFiles = set() # for speed while iterating
needDlSegments = []
onDiskSegments = []
# Cache all WORKING_DIR segment filenames in a map of lists
for file in os.listdir(Hellanzb.WORKING_DIR):
if not validWorkingFile(os.path.join(Hellanzb.WORKING_DIR, file),
overwriteZeroByteSegments):
continue
ext = getFileExtension(file)
if ext is not None and segmentEndRe.match(ext):
segmentNumber = int(ext[-4:])
if onDiskSegmentsByNumber.has_key(segmentNumber):
segmentFileNames = onDiskSegmentsByNumber[segmentNumber]
else:
segmentFileNames = []
onDiskSegmentsByNumber[segmentNumber] = segmentFileNames
# cut off .segmentXXXX
fileNoExt = file[:-12]
segmentFileNames.append(fileNoExt)
# Determine if each segment needs to be downloaded
for segment in segmentList:
if not onDiskSegmentsByNumber.has_key(segment.number):
# No matching segment numbers, obviously needs to be downloaded
needDlSegments.append(segment)
needDlFiles.add(segment.nzbFile)
continue
segmentFileNames = onDiskSegmentsByNumber[segment.number]
foundFileName = None
for segmentFileName in segmentFileNames:
# We've matched to our on disk segment if we:
# a) find that on disk segment's file name in our potential segment's subject
# b) match that on disk segment's file name to our potential segment's temp
# file name (w/ .segmentXXXX cutoff)
if segment.nzbFile.subject.find(segmentFileName) > -1 or \
segment.getTempFileName()[:-12] == segmentFileName:
foundFileName = segmentFileName
break
if not foundFileName:
needDlSegments.append(segment)
needDlFiles.add(segment.nzbFile)
else:
if segment.isFirstSegment() and not isHellaTemp(foundFileName) and \
segment.nzbFile.filename is None:
                # HACK: filename is None, so we only have the temporary name in
                # memory. Since we didn't see the temporary name on the filesystem, but we
                # found a subject match, that means we have the real name on the
                # filesystem. In the case where this happens we've figured out the real
                # filename (hopefully!). Set it if it hasn't already been set
setRealFileName(segment.nzbFile, foundFileName,
settingSegmentNumber = segment.number)
if Hellanzb.SMART_PAR:
# smartDequeue won't actually 'dequeue' any of this segment's
# nzbFile's segments (because there are no segments in the queue at
# this point). It will identifyPar the segment AND more importantly it
# will mark nzbFiles as isSkippedPar (taken into account later during
# parseNZB) and print a 'Skipping par' message for those isSkippedPar
# nzbFiles
segment.smartDequeue(readOnlyQueue = True)
onDiskSegments.append(segment)
# Originally the main reason to call segmentDone here is to update the queue's
# onDiskSegments (so isBeingDownloaded can safely detect things on disk during
# Dupe renaming). However it's correct to call this here, it's as if hellanzb
# just finished downloading and decoding the segment. The only incorrect part
# about the call is the queue's totalQueuedBytes is decremented. That total is
# reset to zero just before it is recalculated at the end of parseNZB, however
Hellanzb.queue.segmentDone(segment)
# This segment was matched. Remove it from the list to avoid matching it again
# later (dupes)
segmentFileNames.remove(foundFileName)
#else:
# debug('SKIPPING SEGMENT: ' + segment.getTempFileName() + ' subject: ' + \
# segment.nzbFile.subject)
return needDlFiles, needDlSegments, onDiskSegments
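# A typical caller (the NZB parser, for example -- assuming it holds the complete list of
# parsed NZBSegments) would use the results along these lines:
#     needDlFiles, needDlSegments, onDiskSegments = segmentsNeedDownload(allSegments)
# and then queue only needDlSegments for download, treating onDiskSegments as already done.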
"""
Copyright (c) 2005 Philip Jenvey <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author or contributors may not be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
$Id$
"""
| bsd-3-clause |
mx3L/enigma2 | lib/python/Screens/LanguageSelection.py | 37 | 2948 | import gettext
from Screen import Screen
from Components.ActionMap import ActionMap
from Components.Language import language
from Components.config import config
from Components.Sources.List import List
from Components.Label import Label
from Components.Pixmap import Pixmap
from Screens.InfoBar import InfoBar
from Screens.Rc import Rc
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN, SCOPE_LANGUAGE
from Tools.LoadPixmap import LoadPixmap
import enigma
def LanguageEntryComponent(file, name, index):
png = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "countries/" + index + ".png"))
if png == None:
png = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "countries/" + file + ".png"))
if png == None:
png = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "countries/missing.png"))
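	# Each entry is an (index, display name, flag pixmap) 3-tuple, consumed by the
	# "languages" List source in LanguageSelection.updateList below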
res = (index, name, png)
return res
class LanguageSelection(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.oldActiveLanguage = language.getActiveLanguage()
self.list = []
self["languages"] = List(self.list)
self.updateList()
self.onLayoutFinish.append(self.selectActiveLanguage)
self["actions"] = ActionMap(["OkCancelActions"],
{
"ok": self.save,
"cancel": self.cancel,
}, -1)
def selectActiveLanguage(self):
self.setTitle(_("Language selection"))
pos = 0
for pos, x in enumerate(self.list):
if x[0] == self.oldActiveLanguage:
self["languages"].index = pos
break
def save(self):
self.commit(self.run())
if InfoBar.instance and self.oldActiveLanguage != config.osd.language.value:
self.close(True)
else:
self.close()
def cancel(self):
language.activateLanguage(self.oldActiveLanguage)
self.close()
def run(self):
print "updating language..."
lang = self["languages"].getCurrent()[0]
if lang != config.osd.language.value:
config.osd.language.value = lang
config.osd.language.save()
return lang
def commit(self, lang):
print "commit language"
language.activateLanguage(lang)
config.misc.languageselected.value = 0
config.misc.languageselected.save()
def updateList(self):
languageList = language.getLanguageList()
if not languageList: # no language available => display only english
list = [ LanguageEntryComponent("en", "English", "en_EN") ]
else:
list = [ LanguageEntryComponent(file = x[1][2].lower(), name = x[1][0], index = x[0]) for x in languageList]
self.list = list
self["languages"].list = list
class LanguageWizard(LanguageSelection, Rc):
def __init__(self, session):
LanguageSelection.__init__(self, session)
Rc.__init__(self)
self.onLayoutFinish.append(self.selectKeys)
self["wizard"] = Pixmap()
self["text"] = Label()
self.setText()
def selectKeys(self):
self.clearSelectedKeys()
self.selectKey("UP")
self.selectKey("DOWN")
def setText(self):
self["text"].setText(_("Please use the UP and DOWN keys to select your language. Afterwards press the OK button."))
| gpl-2.0 |
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python2.7/bsddb/test/test_dbenv.py | 68 | 19274 | import unittest
import os, glob
from test_all import db, test_support, get_new_environment_path, \
get_new_database_path
#----------------------------------------------------------------------
class DBEnv(unittest.TestCase):
def setUp(self):
self.homeDir = get_new_environment_path()
self.env = db.DBEnv()
def tearDown(self):
self.env.close()
del self.env
test_support.rmtree(self.homeDir)
class DBEnv_general(DBEnv) :
def test_get_open_flags(self) :
flags = db.DB_CREATE | db.DB_INIT_MPOOL
self.env.open(self.homeDir, flags)
self.assertEqual(flags, self.env.get_open_flags())
def test_get_open_flags2(self) :
flags = db.DB_CREATE | db.DB_INIT_MPOOL | \
db.DB_INIT_LOCK | db.DB_THREAD
self.env.open(self.homeDir, flags)
self.assertEqual(flags, self.env.get_open_flags())
if db.version() >= (4, 7) :
def test_lk_partitions(self) :
for i in [10, 20, 40] :
self.env.set_lk_partitions(i)
self.assertEqual(i, self.env.get_lk_partitions())
def test_getset_intermediate_dir_mode(self) :
self.assertEqual(None, self.env.get_intermediate_dir_mode())
for mode in ["rwx------", "rw-rw-rw-", "rw-r--r--"] :
self.env.set_intermediate_dir_mode(mode)
self.assertEqual(mode, self.env.get_intermediate_dir_mode())
self.assertRaises(db.DBInvalidArgError,
self.env.set_intermediate_dir_mode, "abcde")
if db.version() >= (4, 6) :
def test_thread(self) :
for i in [16, 100, 1000] :
self.env.set_thread_count(i)
self.assertEqual(i, self.env.get_thread_count())
def test_cache_max(self) :
for size in [64, 128] :
size = size*1024*1024 # Megabytes
self.env.set_cache_max(0, size)
size2 = self.env.get_cache_max()
self.assertEqual(0, size2[0])
self.assertTrue(size <= size2[1])
self.assertTrue(2*size > size2[1])
if db.version() >= (4, 4) :
def test_mutex_stat(self) :
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
db.DB_INIT_LOCK)
stat = self.env.mutex_stat()
self.assertTrue("mutex_inuse_max" in stat)
def test_lg_filemode(self) :
for i in [0600, 0660, 0666] :
self.env.set_lg_filemode(i)
self.assertEqual(i, self.env.get_lg_filemode())
def test_mp_max_openfd(self) :
for i in [17, 31, 42] :
self.env.set_mp_max_openfd(i)
self.assertEqual(i, self.env.get_mp_max_openfd())
def test_mp_max_write(self) :
for i in [100, 200, 300] :
for j in [1, 2, 3] :
j *= 1000000
self.env.set_mp_max_write(i, j)
v=self.env.get_mp_max_write()
self.assertEqual((i, j), v)
def test_invalid_txn(self) :
# This environment doesn't support transactions
self.assertRaises(db.DBInvalidArgError, self.env.txn_begin)
def test_mp_mmapsize(self) :
for i in [16, 32, 64] :
i *= 1024*1024
self.env.set_mp_mmapsize(i)
self.assertEqual(i, self.env.get_mp_mmapsize())
def test_tmp_dir(self) :
for i in ["a", "bb", "ccc"] :
self.env.set_tmp_dir(i)
self.assertEqual(i, self.env.get_tmp_dir())
def test_flags(self) :
self.env.set_flags(db.DB_AUTO_COMMIT, 1)
self.assertEqual(db.DB_AUTO_COMMIT, self.env.get_flags())
self.env.set_flags(db.DB_TXN_NOSYNC, 1)
self.assertEqual(db.DB_AUTO_COMMIT | db.DB_TXN_NOSYNC,
self.env.get_flags())
self.env.set_flags(db.DB_AUTO_COMMIT, 0)
self.assertEqual(db.DB_TXN_NOSYNC, self.env.get_flags())
self.env.set_flags(db.DB_TXN_NOSYNC, 0)
self.assertEqual(0, self.env.get_flags())
def test_lk_max_objects(self) :
for i in [1000, 2000, 3000] :
self.env.set_lk_max_objects(i)
self.assertEqual(i, self.env.get_lk_max_objects())
def test_lk_max_locks(self) :
for i in [1000, 2000, 3000] :
self.env.set_lk_max_locks(i)
self.assertEqual(i, self.env.get_lk_max_locks())
def test_lk_max_lockers(self) :
for i in [1000, 2000, 3000] :
self.env.set_lk_max_lockers(i)
self.assertEqual(i, self.env.get_lk_max_lockers())
def test_lg_regionmax(self) :
for i in [128, 256, 1000] :
i = i*1024*1024
self.env.set_lg_regionmax(i)
j = self.env.get_lg_regionmax()
self.assertTrue(i <= j)
self.assertTrue(2*i > j)
def test_lk_detect(self) :
flags= [db.DB_LOCK_DEFAULT, db.DB_LOCK_EXPIRE, db.DB_LOCK_MAXLOCKS,
db.DB_LOCK_MINLOCKS, db.DB_LOCK_MINWRITE,
db.DB_LOCK_OLDEST, db.DB_LOCK_RANDOM, db.DB_LOCK_YOUNGEST]
flags.append(db.DB_LOCK_MAXWRITE)
for i in flags :
self.env.set_lk_detect(i)
self.assertEqual(i, self.env.get_lk_detect())
def test_lg_dir(self) :
for i in ["a", "bb", "ccc", "dddd"] :
self.env.set_lg_dir(i)
self.assertEqual(i, self.env.get_lg_dir())
def test_lg_bsize(self) :
log_size = 70*1024
self.env.set_lg_bsize(log_size)
self.assertTrue(self.env.get_lg_bsize() >= log_size)
self.assertTrue(self.env.get_lg_bsize() < 4*log_size)
self.env.set_lg_bsize(4*log_size)
self.assertTrue(self.env.get_lg_bsize() >= 4*log_size)
def test_setget_data_dirs(self) :
dirs = ("a", "b", "c", "d")
for i in dirs :
self.env.set_data_dir(i)
self.assertEqual(dirs, self.env.get_data_dirs())
def test_setget_cachesize(self) :
cachesize = (0, 512*1024*1024, 3)
self.env.set_cachesize(*cachesize)
self.assertEqual(cachesize, self.env.get_cachesize())
cachesize = (0, 1*1024*1024, 5)
self.env.set_cachesize(*cachesize)
cachesize2 = self.env.get_cachesize()
self.assertEqual(cachesize[0], cachesize2[0])
self.assertEqual(cachesize[2], cachesize2[2])
        # Berkeley DB expands the cache by 25% to account for overhead,
        # if the cache is small.
self.assertEqual(125, int(100.0*cachesize2[1]/cachesize[1]))
# You can not change configuration after opening
# the environment.
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
cachesize = (0, 2*1024*1024, 1)
self.assertRaises(db.DBInvalidArgError,
self.env.set_cachesize, *cachesize)
cachesize3 = self.env.get_cachesize()
self.assertEqual(cachesize2[0], cachesize3[0])
self.assertEqual(cachesize2[2], cachesize3[2])
# In Berkeley DB 5.1, the cachesize can change when opening the Env
self.assertTrue(cachesize2[1] <= cachesize3[1])
def test_set_cachesize_dbenv_db(self) :
# You can not configure the cachesize using
# the database handle, if you are using an environment.
d = db.DB(self.env)
self.assertRaises(db.DBInvalidArgError,
d.set_cachesize, 0, 1024*1024, 1)
def test_setget_shm_key(self) :
shm_key=137
self.env.set_shm_key(shm_key)
self.assertEqual(shm_key, self.env.get_shm_key())
self.env.set_shm_key(shm_key+1)
self.assertEqual(shm_key+1, self.env.get_shm_key())
# You can not change configuration after opening
# the environment.
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
# If we try to reconfigure cache after opening the
# environment, core dump.
self.assertRaises(db.DBInvalidArgError,
self.env.set_shm_key, shm_key)
self.assertEqual(shm_key+1, self.env.get_shm_key())
if db.version() >= (4, 4) :
def test_mutex_setget_max(self) :
v = self.env.mutex_get_max()
v2 = v*2+1
self.env.mutex_set_max(v2)
self.assertEqual(v2, self.env.mutex_get_max())
self.env.mutex_set_max(v)
self.assertEqual(v, self.env.mutex_get_max())
# You can not change configuration after opening
# the environment.
self.env.open(self.homeDir, db.DB_CREATE)
self.assertRaises(db.DBInvalidArgError,
self.env.mutex_set_max, v2)
def test_mutex_setget_increment(self) :
v = self.env.mutex_get_increment()
v2 = 127
self.env.mutex_set_increment(v2)
self.assertEqual(v2, self.env.mutex_get_increment())
self.env.mutex_set_increment(v)
self.assertEqual(v, self.env.mutex_get_increment())
# You can not change configuration after opening
# the environment.
self.env.open(self.homeDir, db.DB_CREATE)
self.assertRaises(db.DBInvalidArgError,
self.env.mutex_set_increment, v2)
def test_mutex_setget_tas_spins(self) :
self.env.mutex_set_tas_spins(0) # Default = BDB decides
v = self.env.mutex_get_tas_spins()
v2 = v*2+1
self.env.mutex_set_tas_spins(v2)
self.assertEqual(v2, self.env.mutex_get_tas_spins())
self.env.mutex_set_tas_spins(v)
self.assertEqual(v, self.env.mutex_get_tas_spins())
# In this case, you can change configuration
# after opening the environment.
self.env.open(self.homeDir, db.DB_CREATE)
self.env.mutex_set_tas_spins(v2)
def test_mutex_setget_align(self) :
v = self.env.mutex_get_align()
v2 = 64
if v == 64 :
v2 = 128
self.env.mutex_set_align(v2)
self.assertEqual(v2, self.env.mutex_get_align())
# Requires a nonzero power of two
self.assertRaises(db.DBInvalidArgError,
self.env.mutex_set_align, 0)
self.assertRaises(db.DBInvalidArgError,
self.env.mutex_set_align, 17)
self.env.mutex_set_align(2*v2)
self.assertEqual(2*v2, self.env.mutex_get_align())
# You can not change configuration after opening
# the environment.
self.env.open(self.homeDir, db.DB_CREATE)
self.assertRaises(db.DBInvalidArgError,
self.env.mutex_set_align, v2)
class DBEnv_log(DBEnv) :
def setUp(self):
DBEnv.setUp(self)
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOG)
def test_log_file(self) :
log_file = self.env.log_file((1, 1))
self.assertEqual("log.0000000001", log_file[-14:])
if db.version() >= (4, 4) :
# The version with transactions is checked in other test object
def test_log_printf(self) :
msg = "This is a test..."
self.env.log_printf(msg)
logc = self.env.log_cursor()
self.assertTrue(msg in (logc.last()[1]))
if db.version() >= (4, 7) :
def test_log_config(self) :
self.env.log_set_config(db.DB_LOG_DSYNC | db.DB_LOG_ZERO, 1)
self.assertTrue(self.env.log_get_config(db.DB_LOG_DSYNC))
self.assertTrue(self.env.log_get_config(db.DB_LOG_ZERO))
self.env.log_set_config(db.DB_LOG_ZERO, 0)
self.assertTrue(self.env.log_get_config(db.DB_LOG_DSYNC))
self.assertFalse(self.env.log_get_config(db.DB_LOG_ZERO))
class DBEnv_log_txn(DBEnv) :
def setUp(self):
DBEnv.setUp(self)
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
db.DB_INIT_LOG | db.DB_INIT_TXN)
if (db.version() >= (4, 5)) and (db.version() < (5, 2)) :
def test_tx_max(self) :
txns=[]
def tx() :
for i in xrange(self.env.get_tx_max()) :
txns.append(self.env.txn_begin())
tx()
self.assertRaises(MemoryError, tx)
# Abort the transactions before garbage collection,
# to avoid "warnings".
for i in txns :
i.abort()
if db.version() >= (4, 4) :
# The version without transactions is checked in other test object
def test_log_printf(self) :
msg = "This is a test..."
txn = self.env.txn_begin()
self.env.log_printf(msg, txn=txn)
txn.commit()
logc = self.env.log_cursor()
logc.last() # Skip the commit
self.assertTrue(msg in (logc.prev()[1]))
msg = "This is another test..."
txn = self.env.txn_begin()
self.env.log_printf(msg, txn=txn)
txn.abort() # Do not store the new message
logc.last() # Skip the abort
self.assertTrue(msg not in (logc.prev()[1]))
msg = "This is a third test..."
txn = self.env.txn_begin()
self.env.log_printf(msg, txn=txn)
txn.commit() # Do not store the new message
logc.last() # Skip the commit
self.assertTrue(msg in (logc.prev()[1]))
class DBEnv_memp(DBEnv):
def setUp(self):
DBEnv.setUp(self)
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOG)
self.db = db.DB(self.env)
self.db.open("test", db.DB_HASH, db.DB_CREATE, 0660)
def tearDown(self):
self.db.close()
del self.db
DBEnv.tearDown(self)
def test_memp_1_trickle(self) :
self.db.put("hi", "bye")
self.assertTrue(self.env.memp_trickle(100) > 0)
# Preserve the order, do "memp_trickle" test first
def test_memp_2_sync(self) :
self.db.put("hi", "bye")
self.env.memp_sync() # Full flush
# Nothing to do...
self.assertTrue(self.env.memp_trickle(100) == 0)
self.db.put("hi", "bye2")
self.env.memp_sync((1, 0)) # NOP, probably
# Something to do... or not
self.assertTrue(self.env.memp_trickle(100) >= 0)
self.db.put("hi", "bye3")
self.env.memp_sync((123, 99)) # Full flush
# Nothing to do...
self.assertTrue(self.env.memp_trickle(100) == 0)
def test_memp_stat_1(self) :
stats = self.env.memp_stat() # No param
self.assertTrue(len(stats)==2)
self.assertTrue("cache_miss" in stats[0])
stats = self.env.memp_stat(db.DB_STAT_CLEAR) # Positional param
self.assertTrue("cache_miss" in stats[0])
stats = self.env.memp_stat(flags=0) # Keyword param
self.assertTrue("cache_miss" in stats[0])
def test_memp_stat_2(self) :
stats=self.env.memp_stat()[1]
        self.assertTrue(len(stats) == 1)
self.assertTrue("test" in stats)
self.assertTrue("page_in" in stats["test"])
class DBEnv_logcursor(DBEnv):
def setUp(self):
DBEnv.setUp(self)
self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
db.DB_INIT_LOG | db.DB_INIT_TXN)
txn = self.env.txn_begin()
self.db = db.DB(self.env)
self.db.open("test", db.DB_HASH, db.DB_CREATE, 0660, txn=txn)
txn.commit()
for i in ["2", "8", "20"] :
txn = self.env.txn_begin()
self.db.put(key = i, data = i*int(i), txn=txn)
txn.commit()
def tearDown(self):
self.db.close()
del self.db
DBEnv.tearDown(self)
def _check_return(self, value) :
self.assertTrue(isinstance(value, tuple))
self.assertEqual(len(value), 2)
self.assertTrue(isinstance(value[0], tuple))
self.assertEqual(len(value[0]), 2)
self.assertTrue(isinstance(value[0][0], int))
self.assertTrue(isinstance(value[0][1], int))
self.assertTrue(isinstance(value[1], str))
# Preserve test order
def test_1_first(self) :
logc = self.env.log_cursor()
v = logc.first()
self._check_return(v)
self.assertTrue((1, 1) < v[0])
self.assertTrue(len(v[1])>0)
def test_2_last(self) :
logc = self.env.log_cursor()
lsn_first = logc.first()[0]
v = logc.last()
self._check_return(v)
self.assertTrue(lsn_first < v[0])
def test_3_next(self) :
logc = self.env.log_cursor()
lsn_last = logc.last()[0]
self.assertEqual(logc.next(), None)
lsn_first = logc.first()[0]
v = logc.next()
self._check_return(v)
self.assertTrue(lsn_first < v[0])
self.assertTrue(lsn_last > v[0])
v2 = logc.next()
self.assertTrue(v2[0] > v[0])
self.assertTrue(lsn_last > v2[0])
v3 = logc.next()
self.assertTrue(v3[0] > v2[0])
self.assertTrue(lsn_last > v3[0])
def test_4_prev(self) :
logc = self.env.log_cursor()
lsn_first = logc.first()[0]
self.assertEqual(logc.prev(), None)
lsn_last = logc.last()[0]
v = logc.prev()
self._check_return(v)
self.assertTrue(lsn_first < v[0])
self.assertTrue(lsn_last > v[0])
v2 = logc.prev()
self.assertTrue(v2[0] < v[0])
self.assertTrue(lsn_first < v2[0])
v3 = logc.prev()
self.assertTrue(v3[0] < v2[0])
self.assertTrue(lsn_first < v3[0])
def test_5_current(self) :
logc = self.env.log_cursor()
logc.first()
v = logc.next()
self.assertEqual(v, logc.current())
def test_6_set(self) :
logc = self.env.log_cursor()
logc.first()
v = logc.next()
self.assertNotEqual(v, logc.next())
self.assertNotEqual(v, logc.next())
self.assertEqual(v, logc.set(v[0]))
def test_explicit_close(self) :
logc = self.env.log_cursor()
logc.close()
self.assertRaises(db.DBCursorClosedError, logc.next)
def test_implicit_close(self) :
logc = [self.env.log_cursor() for i in xrange(10)]
        self.env.close() # This close should also close everything in its tree (the cursors)
for i in logc :
self.assertRaises(db.DBCursorClosedError, i.next)
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DBEnv_general))
suite.addTest(unittest.makeSuite(DBEnv_memp))
suite.addTest(unittest.makeSuite(DBEnv_logcursor))
suite.addTest(unittest.makeSuite(DBEnv_log))
suite.addTest(unittest.makeSuite(DBEnv_log_txn))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| gpl-2.0 |
collex100/odoo | addons/edi/models/edi.py | 277 | 31944 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2014 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import hashlib
import simplejson as json
import logging
import re
import time
import urllib2
import openerp
import openerp.release as release
from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
_logger = logging.getLogger(__name__)
EXTERNAL_ID_PATTERN = re.compile(r'^([^.:]+)(?::([^.]+))?\.(\S+)$')
EDI_VIEW_WEB_URL = '%s/edi/view?db=%s&token=%s'
EDI_PROTOCOL_VERSION = 1 # arbitrary ever-increasing version number
EDI_GENERATOR = 'Odoo' + release.major_version
EDI_GENERATOR_VERSION = release.version_info
def split_external_id(ext_id):
match = EXTERNAL_ID_PATTERN.match(ext_id)
assert match, \
_("'%s' is an invalid external ID") % (ext_id)
return {'module': match.group(1),
'db_uuid': match.group(2),
'id': match.group(3),
'full': match.group(0)}
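# e.g. split_external_id('sale:cc2c8fc8.res_partner-AbCd1234') (illustrative values) returns
# {'module': 'sale', 'db_uuid': 'cc2c8fc8', 'id': 'res_partner-AbCd1234',
#  'full': 'sale:cc2c8fc8.res_partner-AbCd1234'}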
def safe_unique_id(database_id, model, record_id):
"""Generate a unique string to represent a (database_uuid,model,record_id) pair
without being too long, and with a very low probability of collisions.
"""
msg = "%s-%s-%s-%s" % (time.time(), database_id, model, record_id)
digest = hashlib.sha1(msg).digest()
# fold the sha1 20 bytes digest to 9 bytes
digest = ''.join(chr(ord(x) ^ ord(y)) for (x,y) in zip(digest[:9], digest[9:-2]))
# b64-encode the 9-bytes folded digest to a reasonable 12 chars ASCII ID
digest = base64.urlsafe_b64encode(digest)
return '%s-%s' % (model.replace('.','_'), digest)
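# e.g. safe_unique_id(db_uuid, 'res.partner', 42) -> 'res_partner-XXXXXXXXXXXX', where the
# suffix is a 12 character url-safe base64 digest that hides the local database ID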
def last_update_for(record):
"""Returns the last update timestamp for the given record,
if available, otherwise False
"""
if record._log_access:
record_log = record.get_metadata()[0]
return record_log.get('write_date') or record_log.get('create_date') or False
return False
class edi(osv.AbstractModel):
_name = 'edi.edi'
_description = 'EDI Subsystem'
def new_edi_token(self, cr, uid, record):
"""Return a new, random unique token to identify this model record,
and to be used as token when exporting it as an EDI document.
:param browse_record record: model record for which a token is needed
"""
db_uuid = self.pool.get('ir.config_parameter').get_param(cr, uid, 'database.uuid')
edi_token = hashlib.sha256('%s-%s-%s-%s' % (time.time(), db_uuid, record._name, record.id)).hexdigest()
return edi_token
def serialize(self, edi_documents):
"""Serialize the given EDI document structures (Python dicts holding EDI data),
using JSON serialization.
:param [dict] edi_documents: list of EDI document structures to serialize
:return: UTF-8 encoded string containing the serialized document
"""
serialized_list = json.dumps(edi_documents)
return serialized_list
def generate_edi(self, cr, uid, records, context=None):
"""Generates a final EDI document containing the EDI serialization
of the given records, which should all be instances of a Model
that has the :meth:`~.edi` mixin. The document is not saved in the
database.
:param list(browse_record) records: records to export as EDI
:return: UTF-8 encoded string containing the serialized records
"""
edi_list = []
for record in records:
record_model = record._model
edi_list += record_model.edi_export(cr, uid, [record], context=context)
return self.serialize(edi_list)
def load_edi(self, cr, uid, edi_documents, context=None):
"""Import the given EDI document structures into the system, using
:meth:`~.import_edi`.
:param edi_documents: list of Python dicts containing the deserialized
version of EDI documents
:return: list of (model, id, action) tuple containing the model and database ID
of all records that were imported in the system, plus a suggested
action definition dict for displaying each document.
"""
ir_module = self.pool.get('ir.module.module')
res = []
for edi_document in edi_documents:
module = edi_document.get('__import_module') or edi_document.get('__module')
assert module, 'a `__module` or `__import_module` attribute is required in each EDI document.'
if module != 'base' and not ir_module.search(cr, uid, [('name','=',module),('state','=','installed')]):
raise osv.except_osv(_('Missing Application.'),
_("The document you are trying to import requires the Odoo `%s` application. "
"You can install it by connecting as the administrator and opening the configuration assistant.")%(module,))
model = edi_document.get('__import_model') or edi_document.get('__model')
assert model, 'a `__model` or `__import_model` attribute is required in each EDI document.'
assert model in self.pool, 'model `%s` cannot be found, despite module `%s` being available - '\
'this EDI document seems invalid or unsupported.' % (model,module)
model_obj = self.pool[model]
record_id = model_obj.edi_import(cr, uid, edi_document, context=context)
record_action = model_obj._edi_record_display_action(cr, uid, record_id, context=context)
res.append((model, record_id, record_action))
return res
def deserialize(self, edi_documents_string):
"""Return deserialized version of the given EDI Document string.
:param str|unicode edi_documents_string: UTF-8 string (or unicode) containing
JSON-serialized EDI document(s)
:return: Python object representing the EDI document(s) (usually a list of dicts)
"""
return json.loads(edi_documents_string)
def import_edi(self, cr, uid, edi_document=None, edi_url=None, context=None):
"""Import a JSON serialized EDI Document string into the system, first retrieving it
from the given ``edi_url`` if provided.
        :param str|unicode edi_document: UTF-8 string or unicode containing JSON-serialized
EDI Document to import. Must not be provided if
``edi_url`` is given.
:param str|unicode edi_url: URL where the EDI document (same format as ``edi``)
may be retrieved, without authentication.
"""
if edi_url:
assert not edi_document, 'edi must not be provided if edi_url is given.'
edi_document = urllib2.urlopen(edi_url).read()
assert edi_document, 'EDI Document is empty!'
edi_documents = self.deserialize(edi_document)
return self.load_edi(cr, uid, edi_documents, context=context)
class EDIMixin(object):
"""Mixin class for Model objects that want be exposed as EDI documents.
Classes that inherit from this mixin class should override the
``edi_import()`` and ``edi_export()`` methods to implement their
specific behavior, based on the primitives provided by this mixin."""
def _edi_requires_attributes(self, attributes, edi):
model_name = edi.get('__imported_model') or edi.get('__model') or self._name
for attribute in attributes:
assert edi.get(attribute),\
'Attribute `%s` is required in %s EDI documents.' % (attribute, model_name)
# private method, not RPC-exposed as it creates ir.model.data entries as
# SUPERUSER based on its parameters
def _edi_external_id(self, cr, uid, record, existing_id=None, existing_module=None,
context=None):
"""Generate/Retrieve unique external ID for ``record``.
Each EDI record and each relationship attribute in it is identified by a
unique external ID, which includes the database's UUID, as a way to
refer to any record within any Odoo instance, without conflict.
For Odoo records that have an existing "External ID" (i.e. an entry in
ir.model.data), the EDI unique identifier for this record will be made of
"%s:%s:%s" % (module, database UUID, ir.model.data ID). The database's
        UUID MUST NOT contain a colon character (this is guaranteed by the
UUID algorithm).
For records that have no existing ir.model.data entry, a new one will be
created during the EDI export. It is recommended that the generated external ID
contains a readable reference to the record model, plus a unique value that
hides the database ID. If ``existing_id`` is provided (because it came from
an import), it will be used instead of generating a new one.
If ``existing_module`` is provided (because it came from
an import), it will be used instead of using local values.
:param browse_record record: any browse_record needing an EDI external ID
:param string existing_id: optional existing external ID value, usually coming
from a just-imported EDI record, to be used instead
of generating a new one
:param string existing_module: optional existing module name, usually in the
format ``module:db_uuid`` and coming from a
just-imported EDI record, to be used instead
of local values
:return: the full unique External ID to use for record
"""
ir_model_data = self.pool.get('ir.model.data')
db_uuid = self.pool.get('ir.config_parameter').get_param(cr, uid, 'database.uuid')
ext_id = record.get_external_id()[record.id]
if not ext_id:
ext_id = existing_id or safe_unique_id(db_uuid, record._name, record.id)
# ID is unique cross-db thanks to db_uuid (already included in existing_module)
module = existing_module or "%s:%s" % (record._original_module, db_uuid)
_logger.debug("%s: Generating new external ID `%s.%s` for %r.", self._name,
module, ext_id, record)
ir_model_data.create(cr, openerp.SUPERUSER_ID,
{'name': ext_id,
'model': record._name,
'module': module,
'res_id': record.id})
else:
module, ext_id = ext_id.split('.')
if not ':' in module:
# this record was not previously EDI-imported
if not module == record._original_module:
# this could happen for data records defined in a module that depends
# on the module that owns the model, e.g. purchase defines
# product.pricelist records.
_logger.debug('Mismatching module: expected %s, got %s, for %s.',
module, record._original_module, record)
# ID is unique cross-db thanks to db_uuid
module = "%s:%s" % (module, db_uuid)
return '%s.%s' % (module, ext_id)
def _edi_record_display_action(self, cr, uid, id, context=None):
"""Returns an appropriate action definition dict for displaying
the record with ID ``rec_id``.
:param int id: database ID of record to display
:return: action definition dict
"""
return {'type': 'ir.actions.act_window',
'view_mode': 'form,tree',
'view_type': 'form',
'res_model': self._name,
'res_id': id}
def edi_metadata(self, cr, uid, records, context=None):
"""Return a list containing the boilerplate EDI structures for
exporting ``records`` as EDI, including
the metadata fields
The metadata fields always include::
{
'__model': 'some.model', # record model
'__module': 'module', # require module
'__id': 'module:db-uuid:model.id', # unique global external ID for the record
'__last_update': '2011-01-01 10:00:00', # last update date in UTC!
'__version': 1, # EDI spec version
'__generator' : 'Odoo', # EDI generator
'__generator_version' : [6,1,0], # server version, to check compatibility.
               '__attachments': [...],                   # optional list of attachments (name, content, file_name)
}
:param list(browse_record) records: records to export
:return: list of dicts containing boilerplate EDI metadata for each record,
at the corresponding index from ``records``.
"""
ir_attachment = self.pool.get('ir.attachment')
results = []
for record in records:
ext_id = self._edi_external_id(cr, uid, record, context=context)
edi_dict = {
'__id': ext_id,
'__last_update': last_update_for(record),
'__model' : record._name,
'__module' : record._original_module,
'__version': EDI_PROTOCOL_VERSION,
'__generator': EDI_GENERATOR,
'__generator_version': EDI_GENERATOR_VERSION,
}
attachment_ids = ir_attachment.search(cr, uid, [('res_model','=', record._name), ('res_id', '=', record.id)])
if attachment_ids:
attachments = []
for attachment in ir_attachment.browse(cr, uid, attachment_ids, context=context):
attachments.append({
'name' : attachment.name,
'content': attachment.datas, # already base64 encoded!
'file_name': attachment.datas_fname,
})
edi_dict.update(__attachments=attachments)
results.append(edi_dict)
return results
def edi_m2o(self, cr, uid, record, context=None):
"""Return a m2o EDI representation for the given record.
The EDI format for a many2one is::
['unique_external_id', 'Document Name']
"""
edi_ext_id = self._edi_external_id(cr, uid, record, context=context)
relation_model = record._model
name = relation_model.name_get(cr, uid, [record.id], context=context)
name = name and name[0][1] or False
return [edi_ext_id, name]
def edi_o2m(self, cr, uid, records, edi_struct=None, context=None):
"""Return a list representing a O2M EDI relationship containing
all the given records, according to the given ``edi_struct``.
This is basically the same as exporting all the record using
:meth:`~.edi_export` with the given ``edi_struct``, and wrapping
the results in a list.
Example::
[ # O2M fields would be a list of dicts, with their
{ '__id': 'module:db-uuid.id', # own __id.
'__last_update': 'iso date', # update date
'name': 'some name',
#...
},
# ...
],
"""
result = []
for record in records:
result += record._model.edi_export(cr, uid, [record], edi_struct=edi_struct, context=context)
return result
def edi_m2m(self, cr, uid, records, context=None):
"""Return a list representing a M2M EDI relationship directed towards
all the given records.
This is basically the same as exporting all the record using
:meth:`~.edi_m2o` and wrapping the results in a list.
Example::
# M2M fields are exported as a list of pairs, like a list of M2O values
[
['module:db-uuid.id1', 'Task 01: bla bla'],
['module:db-uuid.id2', 'Task 02: bla bla']
]
"""
return [self.edi_m2o(cr, uid, r, context=context) for r in records]
def edi_export(self, cr, uid, records, edi_struct=None, context=None):
"""Returns a list of dicts representing EDI documents containing the
records, and matching the given ``edi_struct``, if provided.
:param edi_struct: if provided, edi_struct should be a dictionary
with a skeleton of the fields to export.
Basic fields can have any key as value, but o2m
values should have a sample skeleton dict as value,
to act like a recursive export.
For example, for a res.partner record::
edi_struct: {
'name': True,
'company_id': True,
'address': {
'name': True,
'street': True,
}
}
Any field not specified in the edi_struct will not
be included in the exported data. Fields with no
value (False) will be omitted in the EDI struct.
If edi_struct is omitted, no fields will be exported
"""
if edi_struct is None:
edi_struct = {}
fields_to_export = edi_struct.keys()
results = []
for record in records:
edi_dict = self.edi_metadata(cr, uid, [record], context=context)[0]
for field_name in fields_to_export:
field = self._fields[field_name]
value = getattr(record, field_name)
if not value and value not in ('', 0):
continue
elif field.type == 'many2one':
value = self.edi_m2o(cr, uid, value, context=context)
elif field.type == 'many2many':
value = self.edi_m2m(cr, uid, value, context=context)
elif field.type == 'one2many':
value = self.edi_o2m(cr, uid, value, edi_struct=edi_struct.get(field_name, {}), context=context)
edi_dict[field_name] = value
results.append(edi_dict)
return results
def _edi_get_object_by_name(self, cr, uid, name, model_name, context=None):
model = self.pool[model_name]
search_results = model.name_search(cr, uid, name, operator='=', context=context)
if len(search_results) == 1:
return model.browse(cr, uid, search_results[0][0], context=context)
return False
def _edi_generate_report_attachment(self, cr, uid, record, context=None):
"""Utility method to generate the first PDF-type report declared for the
current model with ``usage`` attribute set to ``default``.
This must be called explicitly by models that need it, usually
at the beginning of ``edi_export``, before the call to ``super()``."""
ir_actions_report = self.pool.get('ir.actions.report.xml')
matching_reports = ir_actions_report.search(cr, uid, [('model','=',self._name),
('report_type','=','pdf'),
('usage','=','default')])
if matching_reports:
report = ir_actions_report.browse(cr, uid, matching_reports[0])
result, format = openerp.report.render_report(cr, uid, [record.id], report.report_name, {'model': self._name}, context=context)
eval_context = {'time': time, 'object': record}
if not report.attachment or not eval(report.attachment, eval_context):
# no auto-saving of report as attachment, need to do it manually
result = base64.b64encode(result)
file_name = record.name_get()[0][1]
file_name = re.sub(r'[^a-zA-Z0-9_-]', '_', file_name)
file_name += ".pdf"
self.pool.get('ir.attachment').create(cr, uid,
{
'name': file_name,
'datas': result,
'datas_fname': file_name,
'res_model': self._name,
'res_id': record.id,
'type': 'binary'
},
context=context)
def _edi_import_attachments(self, cr, uid, record_id, edi, context=None):
ir_attachment = self.pool.get('ir.attachment')
for attachment in edi.get('__attachments', []):
# check attachment data is non-empty and valid
file_data = None
try:
file_data = base64.b64decode(attachment.get('content'))
except TypeError:
pass
assert file_data, 'Incorrect/Missing attachment file content.'
assert attachment.get('name'), 'Incorrect/Missing attachment name.'
assert attachment.get('file_name'), 'Incorrect/Missing attachment file name.'
ir_attachment.create(cr, uid, {'name': attachment['name'],
'datas_fname': attachment['file_name'],
'res_model': self._name,
'res_id': record_id,
# should be pure 7bit ASCII
'datas': str(attachment['content']),
}, context=context)
def _edi_get_object_by_external_id(self, cr, uid, external_id, model, context=None):
"""Returns browse_record representing object identified by the model and external_id,
or None if no record was found with this external id.
:param external_id: fully qualified external id, in the EDI form
                             ``module:db_uuid.identifier``.
:param model: model name the record belongs to.
"""
ir_model_data = self.pool.get('ir.model.data')
        # external_id is expected to have the form: ``module:db_uuid.identifier``
ext_id_members = split_external_id(external_id)
db_uuid = self.pool.get('ir.config_parameter').get_param(cr, uid, 'database.uuid')
module = ext_id_members['module']
ext_id = ext_id_members['id']
modules = []
ext_db_uuid = ext_id_members['db_uuid']
if ext_db_uuid:
modules.append('%s:%s' % (module, ext_id_members['db_uuid']))
if ext_db_uuid is None or ext_db_uuid == db_uuid:
# local records may also be registered without the db_uuid
modules.append(module)
data_ids = ir_model_data.search(cr, uid, [('model','=',model),
('name','=',ext_id),
('module','in',modules)])
if data_ids:
model = self.pool[model]
data = ir_model_data.browse(cr, uid, data_ids[0], context=context)
if model.exists(cr, uid, [data.res_id]):
return model.browse(cr, uid, data.res_id, context=context)
# stale external-id, cleanup to allow re-import, as the corresponding record is gone
ir_model_data.unlink(cr, 1, [data_ids[0]])
def edi_import_relation(self, cr, uid, model, value, external_id, context=None):
"""Imports a M2O/M2M relation EDI specification ``[external_id,value]`` for the
given model, returning the corresponding database ID:
* First, checks if the ``external_id`` is already known, in which case the corresponding
database ID is directly returned, without doing anything else;
* If the ``external_id`` is unknown, attempts to locate an existing record
with the same ``value`` via name_search(). If found, the given external_id will
be assigned to this local record (in addition to any existing one)
* If previous steps gave no result, create a new record with the given
value in the target model, assign it the given external_id, and return
the new database ID
:param str value: display name of the record to import
:param str external_id: fully-qualified external ID of the record
:return: database id of newly-imported or pre-existing record
"""
_logger.debug("%s: Importing EDI relationship [%r,%r]", model, external_id, value)
target = self._edi_get_object_by_external_id(cr, uid, external_id, model, context=context)
need_new_ext_id = False
if not target:
_logger.debug("%s: Importing EDI relationship [%r,%r] - ID not found, trying name_get.",
self._name, external_id, value)
target = self._edi_get_object_by_name(cr, uid, value, model, context=context)
need_new_ext_id = True
if not target:
_logger.debug("%s: Importing EDI relationship [%r,%r] - name not found, creating it.",
self._name, external_id, value)
# also need_new_ext_id here, but already been set above
model = self.pool[model]
res_id, _ = model.name_create(cr, uid, value, context=context)
target = model.browse(cr, uid, res_id, context=context)
else:
_logger.debug("%s: Importing EDI relationship [%r,%r] - record already exists with ID %s, using it",
self._name, external_id, value, target.id)
if need_new_ext_id:
ext_id_members = split_external_id(external_id)
# module name is never used bare when creating ir.model.data entries, in order
# to avoid being taken as part of the module's data, and cleanup up at next update
module = "%s:%s" % (ext_id_members['module'], ext_id_members['db_uuid'])
# create a new ir.model.data entry for this value
self._edi_external_id(cr, uid, target, existing_id=ext_id_members['id'], existing_module=module, context=context)
return target.id
def edi_import(self, cr, uid, edi, context=None):
"""Imports a dict representing an EDI document into the system.
:param dict edi: EDI document to import
:return: the database ID of the imported record
"""
assert self._name == edi.get('__import_model') or \
('__import_model' not in edi and self._name == edi.get('__model')), \
"EDI Document Model and current model do not match: '%s' (EDI) vs '%s' (current)." % \
(edi.get('__model'), self._name)
        # First check whether the record is already known in the database, in which case it is ignored
ext_id_members = split_external_id(edi['__id'])
existing = self._edi_get_object_by_external_id(cr, uid, ext_id_members['full'], self._name, context=context)
if existing:
_logger.info("'%s' EDI Document with ID '%s' is already known, skipping import!", self._name, ext_id_members['full'])
return existing.id
record_values = {}
o2m_todo = {} # o2m values are processed after their parent already exists
for field_name, field_value in edi.iteritems():
# skip metadata and empty fields
if field_name.startswith('__') or field_value is None or field_value is False:
continue
field = self._fields.get(field_name)
if not field:
_logger.warning('Ignoring unknown field `%s` when importing `%s` EDI document.', field_name, self._name)
continue
# skip function/related fields
if not field.store:
_logger.warning("Unexpected function field value is found in '%s' EDI document: '%s'." % (self._name, field_name))
continue
relation_model = field.comodel_name
if field.type == 'many2one':
record_values[field_name] = self.edi_import_relation(cr, uid, relation_model,
field_value[1], field_value[0],
context=context)
elif field.type == 'many2many':
record_values[field_name] = [self.edi_import_relation(cr, uid, relation_model, m2m_value[1],
m2m_value[0], context=context)
for m2m_value in field_value]
elif field.type == 'one2many':
                # must wait until the parent record is imported, as the parent relationship
# is often required in o2m child records
o2m_todo[field_name] = field_value
else:
record_values[field_name] = field_value
module_ref = "%s:%s" % (ext_id_members['module'], ext_id_members['db_uuid'])
record_id = self.pool.get('ir.model.data')._update(cr, uid, self._name, module_ref, record_values,
xml_id=ext_id_members['id'], context=context)
record_display, = self.name_get(cr, uid, [record_id], context=context)
# process o2m values, connecting them to their parent on-the-fly
for o2m_field, o2m_value in o2m_todo.iteritems():
field = self._fields[o2m_field]
dest_model = self.pool[field.comodel_name]
dest_field = field.inverse_name
for o2m_line in o2m_value:
# link to parent record: expects an (ext_id, name) pair
o2m_line[dest_field] = (ext_id_members['full'], record_display[1])
dest_model.edi_import(cr, uid, o2m_line, context=context)
# process the attachments, if any
self._edi_import_attachments(cr, uid, record_id, edi, context=context)
return record_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Werkov/PyQt4 | examples/network/threadedfortuneserver.py | 20 | 5283 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
import random
from PyQt4 import QtCore, QtGui, QtNetwork
class FortuneThread(QtCore.QThread):
error = QtCore.pyqtSignal(QtNetwork.QTcpSocket.SocketError)
def __init__(self, socketDescriptor, fortune, parent):
super(FortuneThread, self).__init__(parent)
self.socketDescriptor = socketDescriptor
self.text = fortune
def run(self):
tcpSocket = QtNetwork.QTcpSocket()
if not tcpSocket.setSocketDescriptor(self.socketDescriptor):
self.error.emit(tcpSocket.error())
return
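        # Serialize the fortune with a 16-bit length prefix: write a
        # placeholder size of 0 first, then seek back to the start and patch
        # in the real payload length (block size minus the 2 prefix bytes)
        # before sending.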
block = QtCore.QByteArray()
outstr = QtCore.QDataStream(block, QtCore.QIODevice.WriteOnly)
outstr.setVersion(QtCore.QDataStream.Qt_4_0)
outstr.writeUInt16(0)
outstr.writeString(self.text)
outstr.device().seek(0)
outstr.writeUInt16(block.count() - 2)
tcpSocket.write(block)
tcpSocket.disconnectFromHost()
tcpSocket.waitForDisconnected()
class FortuneServer(QtNetwork.QTcpServer):
def __init__(self, parent=None):
super(FortuneServer, self).__init__(parent)
self.fortunes = (
"You've been leading a dog's life. Stay off the furniture.",
"You've got to think about tomorrow.",
"You will be surprised by a loud noise.",
"You will feel hungry again in another hour.",
"You might have mail.",
"You cannot kill time without injuring eternity.",
"Computers are not intelligent. They only think they are.")
def incomingConnection(self, socketDescriptor):
fortune = self.fortunes[random.randint(0, len(self.fortunes) - 1)]
try:
# Python v3.
fortune = bytes(fortune, encoding='ascii')
except:
# Python v2.
pass
thread = FortuneThread(socketDescriptor, fortune, self)
thread.finished.connect(thread.deleteLater)
thread.start()
class Dialog(QtGui.QDialog):
def __init__(self, parent=None):
super(Dialog, self).__init__(parent)
self.server = FortuneServer()
statusLabel = QtGui.QLabel()
quitButton = QtGui.QPushButton("Quit")
quitButton.setAutoDefault(False)
if not self.server.listen():
QtGui.QMessageBox.critical(self, "Threaded Fortune Server",
"Unable to start the server: %s." % self.server.errorString())
self.close()
return
statusLabel.setText("The server is running on port %d.\nRun the "
"Fortune Client example now." % self.server.serverPort())
quitButton.clicked.connect(self.close)
buttonLayout = QtGui.QHBoxLayout()
buttonLayout.addStretch(1)
buttonLayout.addWidget(quitButton)
buttonLayout.addStretch(1)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addWidget(statusLabel)
mainLayout.addLayout(buttonLayout)
self.setLayout(mainLayout)
self.setWindowTitle("Threaded Fortune Server")
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
dialog = Dialog()
dialog.show()
sys.exit(dialog.exec_())
| gpl-2.0 |
Lyleo/nupic | examples/opf/experiments/opfrunexperiment_test/simpleOPF/hotgym_no_agg/description.py | 8 | 14990 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'~/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': {
'fields': [ (u'timestamp', 'first'),
(u'gym', 'first'),
(u'consumption', 'sum')],
'days': 0,
'hours': 0,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalMultiStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
            # at each step. 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 1,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': {
u'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (21, 1),
'type': 'DateEncoder'},
u'timestamp_dayOfWeek': { 'dayOfWeek': (21, 1),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'},
u'timestamp_weekend': { 'fieldname': u'timestamp',
'name': u'timestamp_weekend',
'type': 'DateEncoder',
'weekend': 21},
u'consumption': { 'clipInput': True,
'fieldname': u'consumption',
'n': 100,
'name': u'consumption',
'type': 'AdaptiveScalarEncoder',
'w': 21},
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
            # Valid keys are any desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : { u'days': 0, u'hours': 0},
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActiveColumnsPerInhArea': 40,
'seed': 1956,
# potentialPct
            # What percent of the column's receptive field is available
# for potential synapses. At initialization time, we will
# choose potentialPct * (2*potentialRadius+1)^2
'potentialPct': 0.5,
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nupic/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 20,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
# Classifier implementation selection.
'implementation': 'cpp',
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1,5',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
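# For example (hypothetical values), a predictAheadTime of one hour with a
# 15-minute aggregation period would yield predictionSteps = 4; this
# particular config leaves predictAheadTime as None and keeps the explicit
# 'steps': '1,5' set above.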
if config['predictAheadTime'] is not None:
predictionSteps = int(round(aggregationDivide(
config['predictAheadTime'], config['aggregationInfo'])))
assert (predictionSteps >= 1)
config['modelParams']['clParams']['steps'] = str(predictionSteps)
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'nupic',
# Input stream specification per py/nupic/cluster/database/StreamDef.json.
#
'dataset' : { 'aggregation': config['aggregationInfo'],
u'info': u'test_hotgym',
u'streams': [ { u'columns': [u'*'],
u'info': u'hotGym.csv',
u'last_record': 100,
u'source': u'file://extra/hotgym/hotgym.csv'}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
'iterationCount' : -1,
# A dictionary containing all the supplementary parameters for inference
"inferenceArgs":{u'predictedField': u'consumption', u'predictionSteps': [1, 5]},
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'consumption', metric='multiStep', inferenceElement='multiStepBestPredictions', params={'window': 1000, 'steps': [1, 5], 'errorMetric': 'aae'}),
MetricSpec(field=u'consumption', metric='multiStep', inferenceElement='multiStepBestPredictions', params={'window': 1000, 'steps': [1, 5], 'errorMetric': 'altMAPE'}),
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
'loggedMetrics': ['.*'],
}
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)
| gpl-3.0 |
israelcoin/isracoin-p2pool | p2pool/work.py | 5 | 24245 | from __future__ import division
import base64
import random
import re
import sys
import time
from twisted.internet import defer
from twisted.python import log
import bitcoin.getwork as bitcoin_getwork, bitcoin.data as bitcoin_data
from bitcoin import helper, script, worker_interface
from util import forest, jsonrpc, variable, deferral, math, pack
import p2pool, p2pool.data as p2pool_data
class WorkerBridge(worker_interface.WorkerBridge):
COINBASE_NONCE_LENGTH = 8
def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee):
worker_interface.WorkerBridge.__init__(self)
self.recent_shares_ts_work = []
self.node = node
self.my_pubkey_hash = my_pubkey_hash
self.donation_percentage = donation_percentage
self.worker_fee = worker_fee
self.net = self.node.net.PARENT
self.running = True
self.pseudoshare_received = variable.Event()
self.share_received = variable.Event()
self.local_rate_monitor = math.RateMonitor(10*60)
self.local_addr_rate_monitor = math.RateMonitor(10*60)
self.removed_unstales_var = variable.Variable((0, 0, 0))
self.removed_doa_unstales_var = variable.Variable(0)
self.last_work_shares = variable.Variable( {} )
self.my_share_hashes = set()
self.my_doa_share_hashes = set()
self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
)))
@self.node.tracker.verified.removed.watch
def _(share):
if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
self.removed_unstales_var.set((
self.removed_unstales_var.value[0] + 1,
self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
))
if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
# MERGED WORK
self.merged_work = variable.Variable({})
@defer.inlineCallbacks
def set_merged_work(merged_url, merged_userpass):
merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
while self.running:
auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
hash=int(auxblock['hash'], 16),
target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
merged_proxy=merged_proxy,
)}))
yield deferral.sleep(1)
for merged_url, merged_userpass in merged_urls:
set_merged_work(merged_url, merged_userpass)
@self.merged_work.changed.watch
def _(new_merged_work):
print 'Got new merged mining work!'
# COMBINE WORK
self.current_work = variable.Variable(None)
def compute_work():
t = self.node.bitcoind_work.value
bb = self.node.best_block_header.value
if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
print 'Skipping from block %x to block %x!' % (bb['previous_block'],
bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
t = dict(
version=bb['version'],
previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
bits=bb['bits'], # not always true
coinbaseflags='',
height=t['height'] + 1,
time=bb['timestamp'] + 600, # better way?
transactions=[],
transaction_fees=[],
merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.node.bitcoind_work.value['height']),
last_update=self.node.bitcoind_work.value['last_update'],
)
self.current_work.set(t)
self.node.bitcoind_work.changed.watch(lambda _: compute_work())
self.node.best_block_header.changed.watch(lambda _: compute_work())
compute_work()
self.new_work_event = variable.Event()
@self.current_work.transitioned.watch
def _(before, after):
# trigger LP if version/previous_block/bits changed or transactions changed from nothing
if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
self.new_work_event.happened()
self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
def stop(self):
self.running = False
def get_stale_counts(self):
'''Returns (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain)'''
my_shares = len(self.my_share_hashes)
my_doa_shares = len(self.my_doa_share_hashes)
delta = self.tracker_view.get_delta_to_last(self.node.best_share_var.value)
my_shares_in_chain = delta.my_count + self.removed_unstales_var.value[0]
my_doa_shares_in_chain = delta.my_doa_count + self.removed_doa_unstales_var.value
orphans_recorded_in_chain = delta.my_orphan_announce_count + self.removed_unstales_var.value[1]
doas_recorded_in_chain = delta.my_dead_announce_count + self.removed_unstales_var.value[2]
my_shares_not_in_chain = my_shares - my_shares_in_chain
my_doa_shares_not_in_chain = my_doa_shares - my_doa_shares_in_chain
return (my_shares_not_in_chain - my_doa_shares_not_in_chain, my_doa_shares_not_in_chain), my_shares, (orphans_recorded_in_chain, doas_recorded_in_chain)
def get_user_details(self, username):
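        # The username may carry per-miner difficulty hints, e.g.
        # "<payout address>+<pseudoshare difficulty>/<share difficulty>"
        # (illustrative format; both suffixes are optional and parsed below).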
contents = re.split('([+/])', username)
assert len(contents) % 2 == 1
user, contents2 = contents[0], contents[1:]
desired_pseudoshare_target = None
desired_share_target = None
for symbol, parameter in zip(contents2[::2], contents2[1::2]):
if symbol == '+':
try:
desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
except:
if p2pool.DEBUG:
log.err()
elif symbol == '/':
try:
desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
except:
if p2pool.DEBUG:
log.err()
if random.uniform(0, 100) < self.worker_fee:
pubkey_hash = self.my_pubkey_hash
else:
try:
pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
except: # XXX blah
pubkey_hash = self.my_pubkey_hash
return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
def preprocess_request(self, user):
if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers')
if time.time() > self.current_work.value['last_update'] + 60:
raise jsonrpc.Error_for_code(-12345)(u'lost contact with bitcoind')
user, pubkey_hash, desired_share_target, desired_pseudoshare_target = self.get_user_details(user)
return pubkey_hash, desired_share_target, desired_pseudoshare_target
def _estimate_local_hash_rate(self):
if len(self.recent_shares_ts_work) == 50:
hash_rate = sum(work for ts, work in self.recent_shares_ts_work[1:])//(self.recent_shares_ts_work[-1][0] - self.recent_shares_ts_work[0][0])
if hash_rate > 0:
return hash_rate
return None
def get_local_rates(self):
miner_hash_rates = {}
miner_dead_hash_rates = {}
datums, dt = self.local_rate_monitor.get_datums_in_last()
for datum in datums:
miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
if datum['dead']:
miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
return miner_hash_rates, miner_dead_hash_rates
def get_local_addr_rates(self):
addr_hash_rates = {}
datums, dt = self.local_addr_rate_monitor.get_datums_in_last()
for datum in datums:
addr_hash_rates[datum['pubkey_hash']] = addr_hash_rates.get(datum['pubkey_hash'], 0) + datum['work']/dt
return addr_hash_rates
def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target):
if self.node.best_share_var.value is None and self.node.net.PERSIST:
raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares')
if self.merged_work.value:
tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value)
mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)]
mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict(
merkle_root=bitcoin_data.merkle_hash(mm_hashes),
size=size,
nonce=0,
))
mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()]
else:
mm_data = ''
mm_later = []
tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']]
tx_map = dict(zip(tx_hashes, self.current_work.value['transactions']))
previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None
if previous_share is None:
share_type = p2pool_data.Share
else:
previous_share_type = type(previous_share)
if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH:
share_type = previous_share_type
else:
successor_type = previous_share_type.SUCCESSOR
counts = p2pool_data.get_desired_version_counts(self.node.tracker,
self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10)
upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues())
if upgraded > .65:
print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95)
print
# Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version
if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100:
share_type = successor_type
else:
share_type = previous_share_type
if desired_share_target is None:
desired_share_target = 2**256-1
local_hash_rate = self._estimate_local_hash_rate()
if local_hash_rate is not None:
desired_share_target = min(desired_share_target,
bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty
local_addr_rates = self.get_local_addr_rates()
lookbehind = 3600//self.node.net.SHARE_PERIOD
block_subsidy = self.node.bitcoind_work.value['subsidy']
if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind:
expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \
* block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash
if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD:
desired_share_target = min(desired_share_target,
bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy)
)
if True:
share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction(
tracker=self.node.tracker,
share_data=dict(
previous_share_hash=self.node.best_share_var.value,
coinbase=(script.create_push_script([
self.current_work.value['height'],
] + ([mm_data] if mm_data else []) + [
]) + self.current_work.value['coinbaseflags'])[:100],
nonce=random.randrange(2**32),
pubkey_hash=pubkey_hash,
subsidy=self.current_work.value['subsidy'],
donation=math.perfect_round(65535*self.donation_percentage/100),
stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain):
'orphan' if orphans > orphans_recorded_in_chain else
'doa' if doas > doas_recorded_in_chain else
None
)(*self.get_stale_counts()),
desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION,
),
block_target=self.current_work.value['bits'].target,
desired_timestamp=int(time.time() + 0.5),
desired_target=desired_share_target,
ref_merkle_link=dict(branch=[], index=0),
desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']),
net=self.node.net,
known_txs=tx_map,
base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']),
)
packed_gentx = bitcoin_data.tx_type.pack(gentx)
other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes]
mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later]
if desired_pseudoshare_target is None:
target = bitcoin_data.difficulty_to_target(float(1.0 / self.node.net.PARENT.DUMB_SCRYPT_DIFF))
local_hash_rate = self._estimate_local_hash_rate()
if local_hash_rate is not None:
target = min(target,
bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every second by modulating pseudoshare difficulty
else:
target = desired_pseudoshare_target
target = max(target, share_info['bits'].target)
for aux_work, index, hashes in mm_later:
target = max(target, aux_work['target'])
target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE)
getwork_time = time.time()
lp_count = self.new_work_event.times
merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0)
print 'New work for worker! Difficulty: %.06f Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % (
bitcoin_data.target_to_difficulty(target),
bitcoin_data.target_to_difficulty(share_info['bits'].target),
self.current_work.value['subsidy']*1e-8, self.node.net.PARENT.SYMBOL,
len(self.current_work.value['transactions']),
)
#need this for stats
self.last_work_shares.value[bitcoin_data.pubkey_hash_to_address(pubkey_hash, self.node.net.PARENT)]=share_info['bits']
ba = dict(
version=min(self.current_work.value['version'], 2),
previous_block=self.current_work.value['previous_block'],
merkle_link=merkle_link,
coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4],
coinb2=packed_gentx[-4:],
timestamp=self.current_work.value['time'],
bits=self.current_work.value['bits'],
share_target=target,
)
received_header_hashes = set()
def got_response(header, user, coinbase_nonce):
assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH
new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx
new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx
header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header))
pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header))
try:
if pow_hash <= header['bits'].target or p2pool.DEBUG:
helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net)
if pow_hash <= header['bits'].target:
print
print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash)
print
except:
log.err(None, 'Error while processing potential block:')
user, _, _, _ = self.get_user_details(user)
assert header['previous_block'] == ba['previous_block']
assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link)
assert header['bits'] == ba['bits']
on_time = self.new_work_event.times == lp_count
for aux_work, index, hashes in mm_later:
try:
if pow_hash <= aux_work['target'] or p2pool.DEBUG:
df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)(
pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'),
bitcoin_data.aux_pow_type.pack(dict(
merkle_tx=dict(
tx=new_gentx,
block_hash=header_hash,
merkle_link=merkle_link,
),
merkle_link=bitcoin_data.calculate_merkle_link(hashes, index),
parent_block_header=header,
)).encode('hex'),
)
@df.addCallback
def _(result, aux_work=aux_work):
if result != (pow_hash <= aux_work['target']):
print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target'])
else:
print 'Merged block submittal result: %s' % (result,)
@df.addErrback
def _(err):
log.err(err, 'Error submitting merged block:')
except:
log.err(None, 'Error while processing merged mining POW:')
if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes:
last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce)
share = get_share(header, last_txout_nonce)
print 'GOT SHARE! %s %s prev %s age %.2fs%s' % (
user,
p2pool_data.format_hash(share.hash),
p2pool_data.format_hash(share.previous_hash),
time.time() - getwork_time,
' DEAD ON ARRIVAL' if not on_time else '',
)
self.my_share_hashes.add(share.hash)
if not on_time:
self.my_doa_share_hashes.add(share.hash)
self.node.tracker.add(share)
self.node.set_best_share()
try:
if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None:
self.node.p2p_node.broadcast_share(share.hash)
except:
log.err(None, 'Error forwarding block solution:')
self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash)
if pow_hash > target:
print 'Worker %s submitted share with hash > target:' % (user,)
print ' Hash: %56x' % (pow_hash,)
print ' Target: %56x' % (target,)
elif header_hash in received_header_hashes:
print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,)
else:
received_header_hashes.add(header_hash)
self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user)
self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target)))
while len(self.recent_shares_ts_work) > 50:
self.recent_shares_ts_work.pop(0)
self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target))
self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash))
return on_time
return ba, got_response
| gpl-3.0 |
somenice/Pablo | web/session.py | 52 | 10767 | """
Session Management
(from web.py)
"""
import os, time, datetime, random, base64
import os.path
from copy import deepcopy
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import hashlib
sha1 = hashlib.sha1
except ImportError:
import sha
sha1 = sha.new
import utils
import webapi as web
__all__ = [
'Session', 'SessionExpired',
'Store', 'DiskStore', 'DBStore',
]
web.config.session_parameters = utils.storage({
'cookie_name': 'webpy_session_id',
'cookie_domain': None,
'cookie_path' : None,
'timeout': 86400, #24 * 60 * 60, # 24 hours in seconds
'ignore_expiry': True,
'ignore_change_ip': True,
'secret_key': 'fLjUfxqXtfNoIldA0A0J',
'expired_message': 'Session expired',
'httponly': True,
'secure': False
})
class SessionExpired(web.HTTPError):
def __init__(self, message):
web.HTTPError.__init__(self, '200 OK', {}, data=message)
class Session(object):
"""Session management for web.py
"""
__slots__ = [
"store", "_initializer", "_last_cleanup_time", "_config", "_data",
"__getitem__", "__setitem__", "__delitem__"
]
def __init__(self, app, store, initializer=None):
self.store = store
self._initializer = initializer
self._last_cleanup_time = 0
self._config = utils.storage(web.config.session_parameters)
self._data = utils.threadeddict()
self.__getitem__ = self._data.__getitem__
self.__setitem__ = self._data.__setitem__
self.__delitem__ = self._data.__delitem__
if app:
app.add_processor(self._processor)
def __contains__(self, name):
return name in self._data
def __getattr__(self, name):
return getattr(self._data, name)
def __setattr__(self, name, value):
if name in self.__slots__:
object.__setattr__(self, name, value)
else:
setattr(self._data, name, value)
def __delattr__(self, name):
delattr(self._data, name)
def _processor(self, handler):
"""Application processor to setup session for every request"""
self._cleanup()
self._load()
try:
return handler()
finally:
self._save()
def _load(self):
"""Load the session from the store, by the id from cookie"""
cookie_name = self._config.cookie_name
cookie_domain = self._config.cookie_domain
cookie_path = self._config.cookie_path
httponly = self._config.httponly
self.session_id = web.cookies().get(cookie_name)
# protection against session_id tampering
if self.session_id and not self._valid_session_id(self.session_id):
self.session_id = None
self._check_expiry()
if self.session_id:
d = self.store[self.session_id]
self.update(d)
self._validate_ip()
if not self.session_id:
self.session_id = self._generate_session_id()
if self._initializer:
if isinstance(self._initializer, dict):
self.update(deepcopy(self._initializer))
elif hasattr(self._initializer, '__call__'):
self._initializer()
self.ip = web.ctx.ip
def _check_expiry(self):
# check for expiry
if self.session_id and self.session_id not in self.store:
if self._config.ignore_expiry:
self.session_id = None
else:
return self.expired()
def _validate_ip(self):
# check for change of IP
if self.session_id and self.get('ip', None) != web.ctx.ip:
if not self._config.ignore_change_ip:
return self.expired()
def _save(self):
if not self.get('_killed'):
self._setcookie(self.session_id)
self.store[self.session_id] = dict(self._data)
else:
self._setcookie(self.session_id, expires=-1)
def _setcookie(self, session_id, expires='', **kw):
cookie_name = self._config.cookie_name
cookie_domain = self._config.cookie_domain
cookie_path = self._config.cookie_path
httponly = self._config.httponly
secure = self._config.secure
web.setcookie(cookie_name, session_id, expires=expires, domain=cookie_domain, httponly=httponly, secure=secure, path=cookie_path)
def _generate_session_id(self):
"""Generate a random id for session"""
while True:
rand = os.urandom(16)
now = time.time()
secret_key = self._config.secret_key
session_id = sha1("%s%s%s%s" %(rand, now, utils.safestr(web.ctx.ip), secret_key))
session_id = session_id.hexdigest()
if session_id not in self.store:
break
return session_id
def _valid_session_id(self, session_id):
rx = utils.re_compile('^[0-9a-fA-F]+$')
return rx.match(session_id)
def _cleanup(self):
"""Cleanup the stored sessions"""
current_time = time.time()
timeout = self._config.timeout
if current_time - self._last_cleanup_time > timeout:
self.store.cleanup(timeout)
self._last_cleanup_time = current_time
def expired(self):
"""Called when an expired session is atime"""
self._killed = True
self._save()
raise SessionExpired(self._config.expired_message)
def kill(self):
"""Kill the session, make it no longer available"""
del self.store[self.session_id]
self._killed = True
class Store:
"""Base class for session stores"""
def __contains__(self, key):
raise NotImplementedError
def __getitem__(self, key):
raise NotImplementedError
def __setitem__(self, key, value):
raise NotImplementedError
def cleanup(self, timeout):
"""removes all the expired sessions"""
raise NotImplementedError
def encode(self, session_dict):
"""encodes session dict as a string"""
pickled = pickle.dumps(session_dict)
return base64.encodestring(pickled)
def decode(self, session_data):
"""decodes the data to get back the session dict """
pickled = base64.decodestring(session_data)
return pickle.loads(pickled)
class DiskStore(Store):
"""
Store for saving a session on disk.
>>> import tempfile
>>> root = tempfile.mkdtemp()
>>> s = DiskStore(root)
>>> s['a'] = 'foo'
>>> s['a']
'foo'
>>> time.sleep(0.01)
>>> s.cleanup(0.01)
>>> s['a']
Traceback (most recent call last):
...
KeyError: 'a'
"""
def __init__(self, root):
        # if the storage root doesn't exist, create it.
if not os.path.exists(root):
os.makedirs(
os.path.abspath(root)
)
self.root = root
def _get_path(self, key):
if os.path.sep in key:
raise ValueError, "Bad key: %s" % repr(key)
return os.path.join(self.root, key)
def __contains__(self, key):
path = self._get_path(key)
return os.path.exists(path)
def __getitem__(self, key):
path = self._get_path(key)
if os.path.exists(path):
pickled = open(path).read()
return self.decode(pickled)
else:
raise KeyError, key
def __setitem__(self, key, value):
path = self._get_path(key)
pickled = self.encode(value)
try:
f = open(path, 'w')
try:
f.write(pickled)
finally:
f.close()
except IOError:
pass
def __delitem__(self, key):
path = self._get_path(key)
if os.path.exists(path):
os.remove(path)
def cleanup(self, timeout):
now = time.time()
for f in os.listdir(self.root):
path = self._get_path(f)
atime = os.stat(path).st_atime
if now - atime > timeout :
os.remove(path)
class DBStore(Store):
"""Store for saving a session in database
Needs a table with the following columns:
session_id CHAR(128) UNIQUE NOT NULL,
atime DATETIME NOT NULL default current_timestamp,
data TEXT
"""
def __init__(self, db, table_name):
self.db = db
self.table = table_name
def __contains__(self, key):
data = self.db.select(self.table, where="session_id=$key", vars=locals())
return bool(list(data))
def __getitem__(self, key):
now = datetime.datetime.now()
try:
s = self.db.select(self.table, where="session_id=$key", vars=locals())[0]
self.db.update(self.table, where="session_id=$key", atime=now, vars=locals())
except IndexError:
raise KeyError
else:
return self.decode(s.data)
def __setitem__(self, key, value):
pickled = self.encode(value)
now = datetime.datetime.now()
if key in self:
self.db.update(self.table, where="session_id=$key", data=pickled, vars=locals())
else:
self.db.insert(self.table, False, session_id=key, data=pickled )
def __delitem__(self, key):
self.db.delete(self.table, where="session_id=$key", vars=locals())
def cleanup(self, timeout):
timeout = datetime.timedelta(timeout/(24.0*60*60)) #timedelta takes numdays as arg
last_allowed_time = datetime.datetime.now() - timeout
self.db.delete(self.table, where="$last_allowed_time > atime", vars=locals())
class ShelfStore:
"""Store for saving session using `shelve` module.
import shelve
store = ShelfStore(shelve.open('session.shelf'))
XXX: is shelve thread-safe?
"""
def __init__(self, shelf):
self.shelf = shelf
def __contains__(self, key):
return key in self.shelf
def __getitem__(self, key):
atime, v = self.shelf[key]
self[key] = v # update atime
return v
def __setitem__(self, key, value):
self.shelf[key] = time.time(), value
def __delitem__(self, key):
try:
del self.shelf[key]
except KeyError:
pass
def cleanup(self, timeout):
now = time.time()
for k in self.shelf.keys():
atime, v = self.shelf[k]
if now - atime > timeout :
del self[k]
if __name__ == '__main__' :
import doctest
doctest.testmod()
| mit |
basr/Brieuwers_Kernel | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <[email protected]>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more human-readable
view of the call stack by drawing a textual but hierarchical tree of
calls. Only the function names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
	Wait some time, but not too much; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
| gpl-2.0 |
apache/bloodhound | trac/trac/admin/api.py | 3 | 7003 | # -*- coding: utf-8 -*-
#
# Copyright (C)2006-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import os.path
import sys
import traceback
from trac.core import *
from trac.util.text import levenshtein_distance
from trac.util.translation import _
console_date_format = '%Y-%m-%d'
console_datetime_format = '%Y-%m-%d %H:%M:%S'
console_date_format_hint = 'YYYY-MM-DD'
class IAdminPanelProvider(Interface):
"""Extension point interface for adding panels to the web-based
administration interface.
"""
def get_admin_panels(req):
"""Return a list of available admin panels.
The items returned by this function must be tuples of the form
`(category, category_label, page, page_label)`.
"""
def render_admin_panel(req, category, page, path_info):
"""Process a request for an admin panel.
This function should return a tuple of the form `(template, data)`,
where `template` is the name of the template to use and `data` is the
data to be passed to the template.
"""
class AdminCommandError(TracError):
"""Exception raised when an admin command cannot be executed."""
def __init__(self, msg, show_usage=False, cmd=None):
TracError.__init__(self, msg)
self.show_usage = show_usage
self.cmd = cmd
class IAdminCommandProvider(Interface):
"""Extension point interface for adding commands to the console
administration interface `trac-admin`.
"""
def get_admin_commands():
"""Return a list of available admin commands.
The items returned by this function must be tuples of the form
`(command, args, help, complete, execute)`, where `command` contains
the space-separated command and sub-command names, `args` is a string
describing the command arguments and `help` is the help text. The
first paragraph of the help text is taken as a short help, shown in the
list of commands.
`complete` is called to auto-complete the command arguments, with the
current list of arguments as its only argument. It should return a list
of relevant values for the last argument in the list.
`execute` is called to execute the command, with the command arguments
passed as positional arguments.
"""
class AdminCommandManager(Component):
"""trac-admin command manager."""
providers = ExtensionPoint(IAdminCommandProvider)
def get_command_help(self, args=[]):
"""Return help information for a set of commands."""
commands = []
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
parts = cmd[0].split()
if parts[:len(args)] == args:
commands.append(cmd[:3])
commands.sort()
return commands
def complete_command(self, args, cmd_only=False):
"""Perform auto-completion on the given arguments."""
comp = []
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
parts = cmd[0].split()
plen = min(len(parts), len(args) - 1)
if args[:plen] != parts[:plen]: # Prefix doesn't match
continue
elif len(args) <= len(parts): # Command name
comp.append(parts[len(args) - 1])
elif not cmd_only: # Arguments
if cmd[3] is None:
return []
return cmd[3](args[len(parts):]) or []
return comp
def execute_command(self, *args):
"""Execute a command given by a list of arguments."""
args = list(args)
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
parts = cmd[0].split()
if args[:len(parts)] == parts:
f = cmd[4]
fargs = args[len(parts):]
try:
return f(*fargs)
except AdminCommandError, e:
e.cmd = ' '.join(parts)
raise
except TypeError, e:
tb = traceback.extract_tb(sys.exc_info()[2])
if len(tb) == 1:
raise AdminCommandError(_("Invalid arguments"),
show_usage=True,
cmd=' '.join(parts))
raise
raise AdminCommandError(_("Command not found"), show_usage=True)
def get_similar_commands(self, arg, n=5):
if not arg:
return []
cmds = set()
for provider in self.providers:
for cmd in provider.get_admin_commands() or []:
cmds.add(cmd[0].split()[0]) # use only first token
def score(cmd, arg):
if cmd.startswith(arg):
return 0
return levenshtein_distance(cmd, arg) / float(len(cmd) + len(arg))
similars = sorted((score(cmd, arg), cmd) for cmd in cmds)
similars = [cmd for val, cmd in similars if val <= 0.5]
return similars[:n]
class PrefixList(list):
"""A list of prefixes for command argument auto-completion."""
def complete(self, text):
return list(set(a for a in self if a.startswith(text)))
def path_startswith(path, prefix):
return os.path.normcase(path).startswith(os.path.normcase(prefix))
class PathList(list):
"""A list of paths for command argument auto-completion."""
def complete(self, text):
"""Return the items in the list matching text."""
matches = list(set(a for a in self if path_startswith(a, text)))
if len(matches) == 1 and not os.path.isdir(matches[0]):
matches[0] += ' '
return matches
def get_dir_list(path, dirs_only=False):
"""Return a list of paths to filesystem entries in the same directory
as the given path."""
dname = os.path.dirname(path)
d = os.path.join(os.getcwd(), dname)
result = PathList()
try:
dlist = os.listdir(d)
except OSError:
return result
for entry in dlist:
path = os.path.normpath(os.path.join(dname, entry))
try:
if os.path.isdir(path):
result.append(os.path.join(path, ''))
elif not dirs_only:
result.append(path)
except OSError:
pass
return result
| apache-2.0 |
nkgilley/home-assistant | homeassistant/components/thinkingcleaner/sensor.py | 6 | 3836 | """Support for ThinkingCleaner sensors."""
from datetime import timedelta
import logging
from pythinkingcleaner import Discovery, ThinkingCleaner
import voluptuous as vol
from homeassistant import util
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, UNIT_PERCENTAGE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
MIN_TIME_BETWEEN_FORCED_SCANS = timedelta(milliseconds=100)
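# Sensor type -> [friendly name suffix, unit of measurement, icon].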
SENSOR_TYPES = {
"battery": ["Battery", UNIT_PERCENTAGE, "mdi:battery"],
"state": ["State", None, None],
"capacity": ["Capacity", None, None],
}
STATES = {
"st_base": "On homebase: Not Charging",
"st_base_recon": "On homebase: Reconditioning Charging",
"st_base_full": "On homebase: Full Charging",
"st_base_trickle": "On homebase: Trickle Charging",
"st_base_wait": "On homebase: Waiting",
"st_plug": "Plugged in: Not Charging",
"st_plug_recon": "Plugged in: Reconditioning Charging",
"st_plug_full": "Plugged in: Full Charging",
"st_plug_trickle": "Plugged in: Trickle Charging",
"st_plug_wait": "Plugged in: Waiting",
"st_stopped": "Stopped",
"st_clean": "Cleaning",
"st_cleanstop": "Stopped with cleaning",
"st_clean_spot": "Spot cleaning",
"st_clean_max": "Max cleaning",
"st_delayed": "Delayed cleaning will start soon",
"st_dock": "Searching Homebase",
"st_pickup": "Roomba picked up",
"st_remote": "Remote control driving",
"st_wait": "Waiting for command",
"st_off": "Off",
"st_error": "Error",
"st_locate": "Find me!",
"st_unknown": "Unknown state",
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_HOST): cv.string})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the ThinkingCleaner platform."""
host = config.get(CONF_HOST)
if host:
devices = [ThinkingCleaner(host, "unknown")]
else:
discovery = Discovery()
devices = discovery.discover()
@util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
def update_devices():
"""Update all devices."""
for device_object in devices:
device_object.update()
dev = []
for device in devices:
for type_name in SENSOR_TYPES:
dev.append(ThinkingCleanerSensor(device, type_name, update_devices))
add_entities(dev)
class ThinkingCleanerSensor(Entity):
"""Representation of a ThinkingCleaner Sensor."""
def __init__(self, tc_object, sensor_type, update_devices):
"""Initialize the ThinkingCleaner."""
self.type = sensor_type
self._tc_object = tc_object
self._update_devices = update_devices
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(self._tc_object.name, SENSOR_TYPES[self.type][0])
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][2]
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
def update(self):
"""Update the sensor."""
self._update_devices()
if self.type == "battery":
self._state = self._tc_object.battery
elif self.type == "state":
self._state = STATES[self._tc_object.status]
elif self.type == "capacity":
self._state = self._tc_object.capacity
| apache-2.0 |
nullx002/pychess | lib/pychess/widgets/pydock/ArrowButton.py | 20 | 4242 | from __future__ import absolute_import
import cairo
from gi.repository import Gtk
from gi.repository import GObject
from gi.repository import Gdk
from .OverlayWindow import OverlayWindow
from .__init__ import NORTH, EAST, SOUTH, WEST
class ArrowButton (OverlayWindow):
""" Leafs will connect to the drag-drop signal """
__gsignals__ = {
'dropped' : (GObject.SignalFlags.RUN_FIRST, None, (object,)),
'hovered' : (GObject.SignalFlags.RUN_FIRST, None, (object,)),
'left' : (GObject.SignalFlags.RUN_FIRST, None, ()),
}
def __init__ (self, parent, svgPath, position):
OverlayWindow.__init__(self, parent)
self.myparent = parent
self.myposition = position
self.svgPath = svgPath
self.connect_after("draw", self.__onExposeEvent)
#targets = [("GTK_NOTEBOOK_TAB", Gtk.TargetFlags.SAME_APP, 0xbadbeef)]
targets = [Gtk.TargetEntry.new("GTK_NOTEBOOK_TAB",Gtk.TargetFlags.SAME_APP, 0xbadbeef)]
self.drag_dest_set(Gtk.DestDefaults.DROP | Gtk.DestDefaults.MOTION,
targets, Gdk.DragAction.MOVE)
self.drag_dest_set_track_motion(True)
self.connect("drag-motion", self.__onDragMotion)
self.connect("drag-leave", self.__onDragLeave)
self.connect("drag-drop", self.__onDragDrop)
self.hovered = False
self.myparentAlloc = None
self.myparentPos = None
self.hasHole = False
def _calcSize (self):
parentAlloc = self.myparent.get_allocation()
width, height = self.getSizeOfSvg(self.svgPath)
if self.myparentAlloc == None:
self.resize(width, height)
if self.get_window() and not self.hasHole:
self.hasHole = True
self.digAHole(self.svgPath, width, height)
if self.myposition == NORTH:
x, y = parentAlloc.width/2.-width/2., 0
elif self.myposition == EAST:
x, y = parentAlloc.width-width, parentAlloc.height/2.-height/2.
elif self.myposition == SOUTH:
x, y = parentAlloc.width/2.-width/2., parentAlloc.height-height
elif self.myposition == WEST:
x, y = 0, parentAlloc.height/2.-height/2.
x, y = self.translateCoords(int(x), int(y))
if (x,y) != self.get_position():
self.move(x, y)
self.myparentAlloc = parentAlloc
self.myparentPos = self.myparent.get_window().get_position()
def __onExposeEvent (self, self_, ctx):
self._calcSize()
context = self.get_window().cairo_create()
width, height = self.getSizeOfSvg(self.svgPath)
surface = self.getSurfaceFromSvg(self.svgPath, width, height)
if self.is_composited():
context.set_operator(cairo.OPERATOR_CLEAR)
context.set_source_rgba(0.0,0.0,0.0,0.0)
context.paint()
context.set_operator(cairo.OPERATOR_OVER)
# FIXME
#mask = Gdk.Pixmap(None, width, height, 1)
#mcontext = mask.cairo_create()
#mcontext.set_source_surface(surface, 0, 0)
#mcontext.paint()
#self.window.shape_combine_mask(mask, 0, 0)
context.set_source_surface(surface, 0, 0)
context.paint()
def __containsPoint (self, x, y):
alloc = self.get_allocation()
return 0 <= x < alloc.width and 0 <= y < alloc.height
def __onDragMotion (self, arrow, context, x, y, timestamp):
if not self.hovered and self.__containsPoint(x,y):
self.hovered = True
self.emit("hovered", Gtk.drag_get_source_widget(context))
elif self.hovered and not self.__containsPoint(x,y):
self.hovered = False
self.emit("left")
def __onDragLeave (self, arrow, context, timestamp):
if self.hovered:
self.hovered = False
self.emit("left")
def __onDragDrop (self, arrow, context, x, y, timestamp):
if self.__containsPoint(x,y):
self.emit("dropped", Gtk.drag_get_source_widget(context))
context.finish(True, True, timestamp)
return True
| gpl-3.0 |
goliate/sarakha63-persomov | libs/rtorrent/tracker.py | 173 | 5212 | # Copyright (c) 2013 Chris Lucas, <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# from rtorrent.rpc import Method
import rtorrent.rpc
from rtorrent.common import safe_repr
Method = rtorrent.rpc.Method
class Tracker:
"""Represents an individual tracker within a L{Torrent} instance."""
def __init__(self, _rt_obj, info_hash, **kwargs):
self._rt_obj = _rt_obj
self.info_hash = info_hash # : info hash for the torrent using this tracker
for k in kwargs.keys():
setattr(self, k, kwargs.get(k, None))
# for clarity's sake...
self.index = self.group # : position of tracker within the torrent's tracker list
self.rpc_id = "{0}:t{1}".format(
self.info_hash, self.index) # : unique id to pass to rTorrent
def __repr__(self):
return safe_repr("Tracker(index={0}, url=\"{1}\")",
self.index, self.url)
def enable(self):
"""Alias for set_enabled("yes")"""
self.set_enabled("yes")
def disable(self):
"""Alias for set_enabled("no")"""
self.set_enabled("no")
def update(self):
"""Refresh tracker data
@note: All fields are stored as attributes to self.
@return: None
"""
multicall = rtorrent.rpc.Multicall(self)
retriever_methods = [m for m in methods
if m.is_retriever() and m.is_available(self._rt_obj)]
for method in retriever_methods:
multicall.add(method, self.rpc_id)
multicall.call()
methods = [
# RETRIEVERS
Method(Tracker, 'is_enabled', 't.is_enabled', boolean=True),
Method(Tracker, 'get_id', 't.get_id'),
Method(Tracker, 'get_scrape_incomplete', 't.get_scrape_incomplete'),
Method(Tracker, 'is_open', 't.is_open', boolean=True),
Method(Tracker, 'get_min_interval', 't.get_min_interval'),
Method(Tracker, 'get_scrape_downloaded', 't.get_scrape_downloaded'),
Method(Tracker, 'get_group', 't.get_group'),
Method(Tracker, 'get_scrape_time_last', 't.get_scrape_time_last'),
Method(Tracker, 'get_type', 't.get_type'),
Method(Tracker, 'get_normal_interval', 't.get_normal_interval'),
Method(Tracker, 'get_url', 't.get_url'),
Method(Tracker, 'get_scrape_complete', 't.get_scrape_complete',
min_version=(0, 8, 9),
),
Method(Tracker, 'get_activity_time_last', 't.activity_time_last',
min_version=(0, 8, 9),
),
Method(Tracker, 'get_activity_time_next', 't.activity_time_next',
min_version=(0, 8, 9),
),
Method(Tracker, 'get_failed_time_last', 't.failed_time_last',
min_version=(0, 8, 9),
),
Method(Tracker, 'get_failed_time_next', 't.failed_time_next',
min_version=(0, 8, 9),
),
Method(Tracker, 'get_success_time_last', 't.success_time_last',
min_version=(0, 8, 9),
),
Method(Tracker, 'get_success_time_next', 't.success_time_next',
min_version=(0, 8, 9),
),
Method(Tracker, 'can_scrape', 't.can_scrape',
min_version=(0, 9, 1),
boolean=True
),
Method(Tracker, 'get_failed_counter', 't.failed_counter',
min_version=(0, 8, 9)
),
Method(Tracker, 'get_scrape_counter', 't.scrape_counter',
min_version=(0, 8, 9)
),
Method(Tracker, 'get_success_counter', 't.success_counter',
min_version=(0, 8, 9)
),
Method(Tracker, 'is_usable', 't.is_usable',
min_version=(0, 9, 1),
boolean=True
),
Method(Tracker, 'is_busy', 't.is_busy',
min_version=(0, 9, 1),
boolean=True
),
Method(Tracker, 'is_extra_tracker', 't.is_extra_tracker',
min_version=(0, 9, 1),
boolean=True,
),
Method(Tracker, "get_latest_sum_peers", "t.latest_sum_peers",
min_version=(0, 9, 0)
),
Method(Tracker, "get_latest_new_peers", "t.latest_new_peers",
min_version=(0, 9, 0)
),
# MODIFIERS
Method(Tracker, 'set_enabled', 't.set_enabled'),
]
| gpl-3.0 |
benreynwar/plover | plover/machine/stentura.py | 1 | 22596 | # Copyright (c) 2011 Hesky Fisher
# See LICENSE.txt for details.
# Many thanks to a steno geek for help with the protocol.
# TODO: Come up with a mechanism to communicate back to the engine when there
# is a connection error.
# TODO: Address any generic exceptions still left.
"""Thread-based monitoring of a stenotype machine using the stentura protocol.
"""
"""
The stentura protocol uses packets to communicate with the machine. A
request packet is sent to the machine and a response packet is received. If
no response is received after a one second timeout then the same packet
should be sent again. The machine may hold off on responding to a READC
packet for up to 500ms if there are no new strokes.
Each request packet should have a sequence number that is one higher than
the previously sent packet modulo 256. The response packet will have the
same sequence number. Each packet consists of a header followed by an
optional data section. All multibyte fields are little endian.
The request packet header is structured as follows:
- SOH: 1 byte. Always set to ASCII SOH (0x1).
- seq: 1 byte. The sequence number of this packet.
- length: 2 bytes. The total length of the packet, including the data
section, in bytes.
- action: 2 bytes. The action requested. See actions below.
- p1: 2 bytes. Parameter 1. The values for the parameters depend on the
action.
- p2: 2 bytes. Parameter 2.
- p3: 2 bytes. Parameter 3.
- p4: 2 bytes. Parameter 4.
- p5: 2 bytes. Parameter 5.
- checksum: 2 bytes. The CRC is computed over the packet from seq through
p5. The specific CRC algorithm used is described above in the Crc class.
The request header can be followed by a data section. The meaning of the
data section depends on the action:
- data: variable length.
- crc: 2 bytes. A CRC over just the data section.
The response packet header is structured as follows:
- SOH: 1 byte. Always set to ASCII SOH (0x1).
- seq: 1 byte. The sequence number of the request packet.
- length: 2 bytes. The total length of the packet, including the data
section, in bytes.
- action: 2 bytes. The action of the request packet.
- error: 2 bytes. The error code. Zero if no error.
- p1: 2 bytes. Parameter 1. The values of the parameters depend on the
action.
- p2: 2 bytes. Parameter 2.
- checksum: 2 bytes. The CRC is computed over the packet from seq through
p2.
The response header can be followed by a data section, whose meaning is
dependent on the action. The structure is the same as in request packets.
The stentura machine has a concept of drives and files. The first (and
possibly only) drive is called A. Each file consists of a set of one or
more blocks. Each block is 512 bytes long.
In addition to regular files, there is a realtime file whose name is
'REALTIME.000'. All strokes typed are appended to this file. Subsequent
reads from the realtime file ignore positional arguments and only return
all the strokes since the last read action. However, opening the file again
and reading from the beginning will result in all the same strokes being
read again. The only reliable way to jump to the end is to do a full,
sequential, read to the end before processing any strokes. I'm told that on
some machines sending a READC without an OPEN will just read from the
realtime file.
The contents of the files are a sequence of strokes. Each stroke consists
of four bytes. Each byte has the two most significant bits set to one. The
rest of the byte is a bitmask indicating which keys were pressed during the
stroke. The format is as follows: 11^#STKP 11WHRAO* 11EUFRPB 11LGTSDZ. The
^ is something called a stenomark. I'm not sure what that is. # is the number
bar.
Note: Only OPEN and READC are needed to get strokes as they are typed from
the realtime file.
Actions and their packets:
All unmentioned parameters should be zero and unless explicitly mentioned
the packet should have no data section.
RESET (0x14):
Unknown.
DISKSTATUS (0x7):
Unknown.
p1 is set to the ASCII value corresponding to the drive letter, e.g. 'A'.
GETDOS (0x18):
Returns the DOS filenames for the files in the requested drive.
p1 is set to the ASCII value corresponding to the drive letter, e.g. 'A'.
p2 is set to one to return the name of the realtime file (which is always
'REALTIME.000').
p3 controls which page to return, with 20 filenames per page.
The return packet contains a data section that is 512 bytes long. The first
byte seems to be one. The filename for the first file starts at offset 32.
My guess would be that the other filenames would exist at a fixed offset of
24 bytes apart. So first filename is at 32, second is at 56, third at 80,
etc. There seems to be some meta data stored after the filename but I don't
know what it means.
DELETE (0x3):
Deletes the specified files. NOP on realtime file.
p1 is set to the ASCII value corresponding to the drive letter, e.g. 'A'.
The filename is specified in the data section.
OPEN (0xA):
Opens a file for reading. This action is sticky and causes this file to be
the current file for all following READC packets.
p1 is set to the ASCII value corresponding to the drive letter, e.g. 'A'.
The filename is specified in the data section.
I'm told that if there is an error opening the realtime file then no
strokes have been written yet.
TODO: Check that and implement workaround.
READC (0xB):
Reads characters from the currently opened file.
p1 is set to 1, I'm not sure why.
p3 is set to the maximum number of bytes to read but should probably be
512.
p4 is set to the block number.
p5 is set to the starting byte offset within the block.
It's possible that the machine will ignore the positional arguments to
READC when reading from the realtime file and just return successive values
for each call.
The response will have the number of bytes read in p1 (but the same is
deducible from the length). The data section will have the contents read
from the file.
CLOSE (0x2):
Closes the current file.
p1 is set to one, I don't know why.
TERM (0x15):
Unknown.
DIAG (0x19):
Unknown.
"""
import array
import itertools
import struct
from plover import log
import plover.machine.base
class _ProtocolViolationException(Exception):
"""Something has happened that is doesn't follow the protocol."""
pass
class _StopException(Exception):
"""The thread was asked to stop."""
pass
class _TimeoutException(Exception):
"""An operation has timed out."""
pass
class _ConnectionLostException(Exception):
"""Cannot communicate with the machine."""
pass
_CRC_TABLE = [
0x0000, 0xc0c1, 0xc181, 0x0140, 0xc301, 0x03c0, 0x0280, 0xc241,
0xc601, 0x06c0, 0x0780, 0xc741, 0x0500, 0xc5c1, 0xc481, 0x0440,
0xcc01, 0x0cc0, 0x0d80, 0xcd41, 0x0f00, 0xcfc1, 0xce81, 0x0e40,
0x0a00, 0xcac1, 0xcb81, 0x0b40, 0xc901, 0x09c0, 0x0880, 0xc841,
0xd801, 0x18c0, 0x1980, 0xd941, 0x1b00, 0xdbc1, 0xda81, 0x1a40,
0x1e00, 0xdec1, 0xdf81, 0x1f40, 0xdd01, 0x1dc0, 0x1c80, 0xdc41,
0x1400, 0xd4c1, 0xd581, 0x1540, 0xd701, 0x17c0, 0x1680, 0xd641,
0xd201, 0x12c0, 0x1380, 0xd341, 0x1100, 0xd1c1, 0xd081, 0x1040,
0xf001, 0x30c0, 0x3180, 0xf141, 0x3300, 0xf3c1, 0xf281, 0x3240,
0x3600, 0xf6c1, 0xf781, 0x3740, 0xf501, 0x35c0, 0x3480, 0xf441,
0x3c00, 0xfcc1, 0xfd81, 0x3d40, 0xff01, 0x3fc0, 0x3e80, 0xfe41,
0xfa01, 0x3ac0, 0x3b80, 0xfb41, 0x3900, 0xf9c1, 0xf881, 0x3840,
0x2800, 0xe8c1, 0xe981, 0x2940, 0xeb01, 0x2bc0, 0x2a80, 0xea41,
0xee01, 0x2ec0, 0x2f80, 0xef41, 0x2d00, 0xedc1, 0xec81, 0x2c40,
0xe401, 0x24c0, 0x2580, 0xe541, 0x2700, 0xe7c1, 0xe681, 0x2640,
0x2200, 0xe2c1, 0xe381, 0x2340, 0xe101, 0x21c0, 0x2080, 0xe041,
0xa001, 0x60c0, 0x6180, 0xa141, 0x6300, 0xa3c1, 0xa281, 0x6240,
0x6600, 0xa6c1, 0xa781, 0x6740, 0xa501, 0x65c0, 0x6480, 0xa441,
0x6c00, 0xacc1, 0xad81, 0x6d40, 0xaf01, 0x6fc0, 0x6e80, 0xae41,
0xaa01, 0x6ac0, 0x6b80, 0xab41, 0x6900, 0xa9c1, 0xa881, 0x6840,
0x7800, 0xb8c1, 0xb981, 0x7940, 0xbb01, 0x7bc0, 0x7a80, 0xba41,
0xbe01, 0x7ec0, 0x7f80, 0xbf41, 0x7d00, 0xbdc1, 0xbc81, 0x7c40,
0xb401, 0x74c0, 0x7580, 0xb541, 0x7700, 0xb7c1, 0xb681, 0x7640,
0x7200, 0xb2c1, 0xb381, 0x7340, 0xb101, 0x71c0, 0x7080, 0xb041,
0x5000, 0x90c1, 0x9181, 0x5140, 0x9301, 0x53c0, 0x5280, 0x9241,
0x9601, 0x56c0, 0x5780, 0x9741, 0x5500, 0x95c1, 0x9481, 0x5440,
0x9c01, 0x5cc0, 0x5d80, 0x9d41, 0x5f00, 0x9fc1, 0x9e81, 0x5e40,
0x5a00, 0x9ac1, 0x9b81, 0x5b40, 0x9901, 0x59c0, 0x5880, 0x9841,
0x8801, 0x48c0, 0x4980, 0x8941, 0x4b00, 0x8bc1, 0x8a81, 0x4a40,
0x4e00, 0x8ec1, 0x8f81, 0x4f40, 0x8d01, 0x4dc0, 0x4c80, 0x8c41,
0x4400, 0x84c1, 0x8581, 0x4540, 0x8701, 0x47c0, 0x4680, 0x8641,
0x8201, 0x42c0, 0x4380, 0x8341, 0x4100, 0x81c1, 0x8081, 0x4040
]
def _crc(data):
"""Compute the Crc algorithm used by the stentura protocol.
This algorithm is described by the Rocksoft^TM Model CRC Algorithm as
follows:
Name : "CRC-16"
Width : 16
Poly : 8005
Init : 0000
RefIn : True
RefOut : True
XorOut : 0000
Check : BB3D
Args:
- data: The data to checksum. The data should be an iterable that returns
bytes
Returns: The computed crc for the data.
"""
checksum = 0
for b in data:
if isinstance(b, str):
b = ord(b)
checksum = (_CRC_TABLE[(checksum ^ b) & 0xff] ^
((checksum >> 8) & 0xff))
return checksum
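# Illustrative sanity check (a sketch added for documentation, not part of the
# original module): with the Rocksoft parameters listed in the docstring above,
# the check value for the ASCII string "123456789" is 0xBB3D, so
#     _crc('123456789') == 0xbb3d
# can be used as a quick self-test of the table and algorithm.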
def _write_to_buffer(buf, offset, data):
"""Write data to buf at offset.
Extends the size of buf as needed.
Args:
- buf: The buffer. Should be of type array('B')
- offset. The offset at which to start writing.
- data: An iterable containing the data to write.
"""
if len(buf) < offset + len(data):
buf.extend([0] * (offset + len(data) - len(buf)))
for i, v in enumerate(data, offset):
if isinstance(v, str):
v = ord(v)
buf[i] = v
# Helper table for parsing strokes of the form:
# 11^#STKP 11WHRAO* 11EUFRPB 11LGTSDZ
_STENO_KEY_CHART = ('^', '#', 'S-', 'T-', 'K-', 'P-', # Byte #1
'W-', 'H-', 'R-', 'A-', 'O-', '*', # Byte #2
'-E', '-U', '-F', '-R', '-P', '-B', # Byte #3
'-L', '-G', '-T', '-S', '-D', '-Z') # Byte #4
def _parse_stroke(a, b, c, d):
"""Parse a stroke and return a list of keys pressed.
Args:
- a: The first byte.
- b: The second byte.
- c: The third byte.
- d: The fourth byte.
Returns: A sequence with all the keys pressed in the stroke.
e.g. ['S-', 'A-', '-T']
"""
fullstroke = (((a & 0x3f) << 18) | ((b & 0x3f) << 12) |
((c & 0x3f) << 6) | d & 0x3f)
return [_STENO_KEY_CHART[i] for i in xrange(24)
if (fullstroke & (1 << (23 - i)))]
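# Worked example (a sketch, not from the original source): the four bytes
# 0xC8 0xC4 0xC0 0xC8 all carry the mandatory 0b11 prefix; the remaining bits
# select 'S-', 'A-' and '-T', so
#     _parse_stroke(0xC8, 0xC4, 0xC0, 0xC8) == ['S-', 'A-', '-T']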
def _parse_strokes(data):
"""Parse strokes from a buffer and return a sequence of strokes.
Args:
- data: A byte buffer.
Returns: A sequence of strokes. Each stroke is a sequence of pressed keys.
Throws:
- _ProtocolViolationException if the data doesn't follow the protocol.
"""
strokes = []
if (len(data) % 4 != 0):
raise _ProtocolViolationException(
"Data size is not divisible by 4: %d" % (len(data)))
for b in data:
if (ord(b) & 0b11000000) != 0b11000000:
raise _ProtocolViolationException("Data is not stroke: 0x%X" % (b))
for a, b, c, d in itertools.izip(*([iter(data)] * 4)):
strokes.append(_parse_stroke(ord(a), ord(b), ord(c), ord(d)))
return strokes
# Actions
_CLOSE = 0x2
_DELETE = 0x3
_DIAG = 0x19
_DISKSTATUS = 0x7
_GETDOS = 0x18
_OPEN = 0xA
_READC = 0xB
_RESET = 0x14
_TERM = 0x15
# Compiled struct for writing request headers.
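# Byte layout of the 18-byte request header (summary of the description above):
# offset 0: SOH (0x01), offset 1: sequence number, offsets 2-3: packet length,
# offsets 4-5: action, offsets 6-15: p1..p5, offsets 16-17: header checksum.
# The first 16 bytes are packed by _REQUEST_STRUCT; the checksum is written
# separately in _make_request.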
_REQUEST_STRUCT = struct.Struct('<2B7H')
_SHORT_STRUCT = struct.Struct('<H')
def _make_request(buf, action, seq, p1=0, p2=0, p3=0, p4=0, p5=0, data=None):
"""Create a request packet.
Args:
- buf: The buffer used for the packet. Should be array.array('B') and will
be extended as needed.
- action: The action for the packet.
- seq: The sequence number for the packet.
- p1 - p5: Parameter N for the packet (default: 0).
- data: The data to add to the packet as a sequence of bytes, if any
(default: None).
Returns: A buffer as a slice of the passed in buf that holds the packet.
"""
length = 18
if data:
length += len(data) + 2 # +2 for the data CRC.
if len(buf) < length:
buf.extend([0] * (length - len(buf)))
_REQUEST_STRUCT.pack_into(buf, 0, 1, seq, length, action,
p1, p2, p3, p4, p5)
crc = _crc(buffer(buf, 1, 15))
_SHORT_STRUCT.pack_into(buf, 16, crc)
if data:
_write_to_buffer(buf, 18, data)
crc = _crc(data)
_SHORT_STRUCT.pack_into(buf, length - 2, crc)
return buffer(buf, 0, length)
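# Example usage (a sketch, not from the original module): a request with no
# data section is always 18 bytes long, e.g.
#     buf = array.array('B')
#     packet = _make_request(buf, _RESET, seq=5)  # len(packet) == 18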
def _make_open(buf, seq, drive, filename):
"""Make a packet with the OPEN command.
Args:
- buf: The buffer to use of type array.array('B'). Will be extended if
needed.
- seq: The sequence number of the packet.
- drive: The letter of the drive (probably 'A').
- filename: The name of the file (probably 'REALTIME.000').
Returns: A buffer as a slice of the passed in buf that holds the packet.
"""
return _make_request(buf, _OPEN, seq, p1=ord(drive), data=filename)
def _make_read(buf, seq, block, byte, length=512):
"""Make a packet with the READC command.
Args:
- buf: The buffer to use of type array.array('B'). Will be extended if
needed.
- seq: The sequence number of the packet.
- block: The index of the file block to read.
- byte: The byte offset within the block at which to start reading.
- length: The number of bytes to read, max 512 (default: 512).
Returns: A buffer as a slice of the passed in buf that holds the packet.
"""
return _make_request(buf, _READC, seq, p1=1, p3=length, p4=block, p5=byte)
def _make_reset(buf, seq):
"""Make a packet with the RESET command.
Args:
- buf: The buffer to use of type array.array('B'). Will be extended if
needed.
- seq: The sequence number of the packet.
Returns: A buffer as a slice of the passed in buf that holds the packet.
"""
return _make_request(buf, _RESET, seq)
def _validate_response(packet):
"""Validate a response packet.
Args:
- packet: The packet to validate.
Returns: True if the packet is valid, False otherwise.
"""
if len(packet) < 14:
return False
length = _SHORT_STRUCT.unpack(buffer(packet, 2, 2))[0]
if length != len(packet):
return False
if _crc(buffer(packet, 1, 13)) != 0:
return False
if length > 14:
if length < 17:
return False
if _crc(buffer(packet, 14)) != 0:
return False
return True
# Timeout is in seconds, can be a float.
def _read_data(port, stop, buf, offset, num_bytes):
"""Read data off the serial port and into port at offset.
Args:
- port: The serial port to read.
- stop: An event which, when set, causes this function to stop.
- buf: The buffer to write.
- offset: The offset into the buffer to write.
- num_bytes: The number of bytes expected
Returns: The number of bytes read.
Raises:
_StopException: If stop is set.
_TimeoutException: If the timeout is reached with no data read.
"""
bytes = port.read(num_bytes)
if stop.is_set():
raise _StopException()
if num_bytes > len(bytes):
raise _TimeoutException()
_write_to_buffer(buf, offset, bytes)
return len(bytes)
def _read_packet(port, stop, buf):
"""Read a full packet from the port.
Reads from the port until a full packet is received or the stop or timeout
conditions are met.
Args:
- port: The port to read.
- stop: Event object used to request stopping.
- buf: The buffer to write.
Returns: A buffer as a slice of buf holding the packet.
Raises:
_ProtocolViolationException: If the packet doesn't conform to the protocol.
_TimeoutException: If the packet is not read within the timeout.
_StopException: If a stop was requested.
"""
bytes_read = 0
bytes_read += _read_data(port, stop, buf, bytes_read, 4)
packet_length = _SHORT_STRUCT.unpack_from(buf, 2)[0]
bytes_read += _read_data(port, stop, buf, bytes_read,
packet_length - bytes_read)
packet = buffer(buf, 0, bytes_read)
if not _validate_response(packet):
raise _ProtocolViolationException()
return buffer(buf, 0, bytes_read)
def _write_to_port(port, data):
"""Write data to a port.
Args:
- port: The port to write.
- data: The data to write
"""
while data:
data = buffer(data, port.write(data))
def _send_receive(port, stop, packet, buf, max_tries=3):
"""Send a packet and return the response.
Send a packet and make sure there is a response and it is for the correct
request and return it, otherwise retry max_tries times.
Args:
- port: The port to read.
- stop: Event used to signal to stop.
- packet: The packet to send. May be used after buf is written so should be
distinct.
- buf: Buffer used to store response.
- max_tries: The maximum number of times to retry sending the packet and
reading the response before giving up (default: 3).
Returns: A buffer as a slice of buf holding the response packet.
Raises:
_ConnectionLostException: If we can't seem to talk to the machine.
_StopException: If a stop was requested.
_ProtocolViolationException: If the response packet violates the protocol.
"""
request_action = _SHORT_STRUCT.unpack(buffer(packet, 4, 2))[0]
for attempt in xrange(max_tries):
_write_to_port(port, packet)
try:
response = _read_packet(port, stop, buf)
if response[1] != packet[1]:
continue # Wrong sequence number.
response_action = _SHORT_STRUCT.unpack(buffer(response, 4, 2))[0]
if request_action != response_action:
raise _ProtocolViolationException()
return response
except _TimeoutException:
continue
raise _ConnectionLostException()
class _SequenceCounter(object):
"""A mod 256 counter."""
def __init__(self, seq=0):
"""Init a new counter starting at seq."""
self.seq = seq
def __call__(self):
"""Return the next value."""
cur, self.seq = self.seq, (self.seq + 1) % 256
return cur
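# Example (sketch): a counter created with _SequenceCounter(255) returns 255 on
# the first call and 0 on the next, since values wrap modulo 256.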
def _read(port, stop, seq, request_buf, response_buf, stroke_buf, block, byte):
"""Read the full contents of the current file from beginning to end.
The file should be opened first.
Args:
- port: The port to use.
- stop: The event used to request stopping.
- seq: A _SequenceCounter instance to use to track packets.
- request_buf: Buffer to use for request packet.
- response_buf: Buffer to use for response packet.
- stroke_buf: Buffer to use for strokes read from the file.
- block: The block number at which to start reading.
- byte: The byte offset within the block at which to start reading.
Returns: A tuple (block, byte, data) with the updated read position and a
buffer holding the bytes read.
Raises:
_ProtocolViolationException: If the protocol is violated.
_StopException: If a stop is requested.
_ConnectionLostException: If we can't seem to talk to the machine.
"""
bytes_read = 0
while True:
packet = _make_read(request_buf, seq(), block, byte, length=512)
response = _send_receive(port, stop, packet, response_buf)
p1 = _SHORT_STRUCT.unpack(buffer(response, 8, 2))[0]
if not ((p1 == 0 and len(response) == 14) or # No data.
(p1 == len(response) - 16)): # Data.
raise _ProtocolViolationException()
if p1 == 0:
return block, byte, buffer(stroke_buf, 0, bytes_read)
data = buffer(response, 14, p1)
_write_to_buffer(stroke_buf, bytes_read, data)
bytes_read += len(data)
byte += p1
if byte >= 512:
block += 1
byte -= 512
def _loop(port, stop, callback, ready_callback, timeout=1):
"""Enter into a loop talking to the machine and returning strokes.
Args:
- port: The port to use.
- stop: The event used to signal that it's time to stop.
- callback: A function that takes a list of pressed keys, called for each
stroke.
- ready_callback: A function that is called when the machine is ready.
- timeout: Timeout to use when waiting for a response in seconds. Should be
1 when talking to a real machine. (default: 1)
Raises:
_ProtocolViolationException: If the protocol is violated.
_StopException: If a stop is requested.
_ConnectionLostException: If we can't seem to talk to the machine.
"""
# We want to give the machine a standard timeout to finish whatever it's
# doing, but we also want to stop if asked to, so this is the safe way to
# wait.
if stop.wait(timeout):
raise _StopException()
port.flushInput()
port.flushOutput()
# Set serial port timeout to the timeout value
port.setTimeout(timeout)
request_buf, response_buf = array.array('B'), array.array('B')
stroke_buf = array.array('B')
seq = _SequenceCounter()
request = _make_open(request_buf, seq(), 'A', 'REALTIME.000')
# Any checking needed on the response packet?
_send_receive(port, stop, request, response_buf)
# Do a full read to get to the current position in the realtime file.
block, byte = 0, 0
block, byte, _ = _read(port, stop, seq, request_buf, response_buf, stroke_buf, block, byte)
ready_callback()
while True:
block, byte, data = _read(port, stop, seq, request_buf, response_buf, stroke_buf, block, byte)
strokes = _parse_strokes(data)
for stroke in strokes:
callback(stroke)
class Stenotype(plover.machine.base.SerialStenotypeBase):
"""Stentura interface.
This class implements the three methods necessary for a standard
stenotype interface: start_capture, stop_capture, and
add_callback.
"""
def __init__(self, params):
plover.machine.base.SerialStenotypeBase.__init__(self, params)
def run(self):
"""Overrides base class run method. Do not call directly."""
try:
_loop(self.serial_port, self.finished, self._notify, self._ready)
except _StopException:
pass
except Exception as e:
log.info("Failure starting Stentura: %s", str(e))
self._error()
| gpl-2.0 |
fhaoquan/kbengine | kbe/res/scripts/common/Lib/test/test_getopt.py | 173 | 6968 | # test_getopt.py
# David Goodger <[email protected]> 2000-08-19
from test.support import verbose, run_doctest, run_unittest, EnvironmentVarGuard
import unittest
import getopt
sentinel = object()
class GetoptTests(unittest.TestCase):
def setUp(self):
self.env = EnvironmentVarGuard()
if "POSIXLY_CORRECT" in self.env:
del self.env["POSIXLY_CORRECT"]
def tearDown(self):
self.env.__exit__()
del self.env
def assertError(self, *args, **kwargs):
self.assertRaises(getopt.GetoptError, *args, **kwargs)
def test_short_has_arg(self):
self.assertTrue(getopt.short_has_arg('a', 'a:'))
self.assertFalse(getopt.short_has_arg('a', 'a'))
self.assertError(getopt.short_has_arg, 'a', 'b')
def test_long_has_args(self):
has_arg, option = getopt.long_has_args('abc', ['abc='])
self.assertTrue(has_arg)
self.assertEqual(option, 'abc')
has_arg, option = getopt.long_has_args('abc', ['abc'])
self.assertFalse(has_arg)
self.assertEqual(option, 'abc')
has_arg, option = getopt.long_has_args('abc', ['abcd'])
self.assertFalse(has_arg)
self.assertEqual(option, 'abcd')
self.assertError(getopt.long_has_args, 'abc', ['def'])
self.assertError(getopt.long_has_args, 'abc', [])
self.assertError(getopt.long_has_args, 'abc', ['abcd','abcde'])
def test_do_shorts(self):
opts, args = getopt.do_shorts([], 'a', 'a', [])
self.assertEqual(opts, [('-a', '')])
self.assertEqual(args, [])
opts, args = getopt.do_shorts([], 'a1', 'a:', [])
self.assertEqual(opts, [('-a', '1')])
self.assertEqual(args, [])
#opts, args = getopt.do_shorts([], 'a=1', 'a:', [])
#self.assertEqual(opts, [('-a', '1')])
#self.assertEqual(args, [])
opts, args = getopt.do_shorts([], 'a', 'a:', ['1'])
self.assertEqual(opts, [('-a', '1')])
self.assertEqual(args, [])
opts, args = getopt.do_shorts([], 'a', 'a:', ['1', '2'])
self.assertEqual(opts, [('-a', '1')])
self.assertEqual(args, ['2'])
self.assertError(getopt.do_shorts, [], 'a1', 'a', [])
self.assertError(getopt.do_shorts, [], 'a', 'a:', [])
def test_do_longs(self):
opts, args = getopt.do_longs([], 'abc', ['abc'], [])
self.assertEqual(opts, [('--abc', '')])
self.assertEqual(args, [])
opts, args = getopt.do_longs([], 'abc=1', ['abc='], [])
self.assertEqual(opts, [('--abc', '1')])
self.assertEqual(args, [])
opts, args = getopt.do_longs([], 'abc=1', ['abcd='], [])
self.assertEqual(opts, [('--abcd', '1')])
self.assertEqual(args, [])
opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], [])
self.assertEqual(opts, [('--abc', '')])
self.assertEqual(args, [])
# Much like the preceding, except with a non-alpha character ("-") in
# option name that precedes "="; failed in
# http://python.org/sf/126863
opts, args = getopt.do_longs([], 'foo=42', ['foo-bar', 'foo=',], [])
self.assertEqual(opts, [('--foo', '42')])
self.assertEqual(args, [])
self.assertError(getopt.do_longs, [], 'abc=1', ['abc'], [])
self.assertError(getopt.do_longs, [], 'abc', ['abc='], [])
def test_getopt(self):
# note: the empty string between '-a' and '--beta' is significant:
# it simulates an empty string option argument ('-a ""') on the
# command line.
cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a',
'', '--beta', 'arg1', 'arg2']
opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta'])
self.assertEqual(opts, [('-a', '1'), ('-b', ''),
('--alpha', '2'), ('--beta', ''),
('-a', '3'), ('-a', ''), ('--beta', '')])
# Note ambiguity of ('-b', '') and ('-a', '') above. This must be
# accounted for in the code that calls getopt().
self.assertEqual(args, ['arg1', 'arg2'])
self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta'])
def test_gnu_getopt(self):
# Test handling of GNU style scanning mode.
cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2']
# GNU style
opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
self.assertEqual(args, ['arg1'])
self.assertEqual(opts, [('-a', ''), ('-b', '1'),
('--alpha', ''), ('--beta', '2')])
# recognize "-" as an argument
opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', [])
self.assertEqual(args, ['-'])
self.assertEqual(opts, [('-a', ''), ('-b', '-')])
# Posix style via +
opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta='])
self.assertEqual(opts, [('-a', '')])
self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])
# Posix style via POSIXLY_CORRECT
self.env["POSIXLY_CORRECT"] = "1"
opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta='])
self.assertEqual(opts, [('-a', '')])
self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2'])
def test_libref_examples(self):
s = """
Examples from the Library Reference: Doc/lib/libgetopt.tex
An example using only Unix style options:
>>> import getopt
>>> args = '-a -b -cfoo -d bar a1 a2'.split()
>>> args
['-a', '-b', '-cfoo', '-d', 'bar', 'a1', 'a2']
>>> optlist, args = getopt.getopt(args, 'abc:d:')
>>> optlist
[('-a', ''), ('-b', ''), ('-c', 'foo'), ('-d', 'bar')]
>>> args
['a1', 'a2']
Using long option names is equally easy:
>>> s = '--condition=foo --testing --output-file abc.def -x a1 a2'
>>> args = s.split()
>>> args
['--condition=foo', '--testing', '--output-file', 'abc.def', '-x', 'a1', 'a2']
>>> optlist, args = getopt.getopt(args, 'x', [
... 'condition=', 'output-file=', 'testing'])
>>> optlist
[('--condition', 'foo'), ('--testing', ''), ('--output-file', 'abc.def'), ('-x', '')]
>>> args
['a1', 'a2']
"""
import types
m = types.ModuleType("libreftest", s)
run_doctest(m, verbose)
def test_issue4629(self):
longopts, shortopts = getopt.getopt(['--help='], '', ['help='])
self.assertEqual(longopts, [('--help', '')])
longopts, shortopts = getopt.getopt(['--help=x'], '', ['help='])
self.assertEqual(longopts, [('--help', 'x')])
self.assertRaises(getopt.GetoptError, getopt.getopt, ['--help='], '', ['help'])
def test_main():
run_unittest(GetoptTests)
if __name__ == "__main__":
test_main()
| lgpl-3.0 |
GuillaumeSeren/alot | tests/commands/test_envelope.py | 2 | 14242 | # encoding=utf-8
# Copyright © 2017-2018 Dylan Baker
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the alot.commands.envelope module."""
import email
import os
import tempfile
import textwrap
import unittest
from unittest import mock
from alot.commands import envelope
from alot.db.envelope import Envelope
from alot.errors import GPGProblem
from alot.settings.errors import NoMatchingAccount
from alot.settings.manager import SettingsManager
from alot.account import Account
from .. import utilities
# When using an assert from a mock a TestCase method might not use self. That's
# okay.
# pylint: disable=no-self-use
class TestAttachCommand(unittest.TestCase):
"""Tests for the AttachCommaned class."""
def test_single_path(self):
"""A test for an existing single path."""
ui = utilities.make_ui()
with tempfile.TemporaryDirectory() as d:
testfile = os.path.join(d, 'foo')
with open(testfile, 'w') as f:
f.write('foo')
cmd = envelope.AttachCommand(path=testfile)
cmd.apply(ui)
ui.current_buffer.envelope.attach.assert_called_with(testfile)
def test_user(self):
"""A test for an existing single path prefaced with ~/."""
ui = utilities.make_ui()
with tempfile.TemporaryDirectory() as d:
# This mock replaces expanduser to replace "~/" with a path to the
# temporary directory. This is easier and more reliable than
# relying on changing an environment variable (like HOME), since it
# doesn't rely on CPython implementation details.
with mock.patch('alot.commands.os.path.expanduser',
lambda x: os.path.join(d, x[2:])):
testfile = os.path.join(d, 'foo')
with open(testfile, 'w') as f:
f.write('foo')
cmd = envelope.AttachCommand(path='~/foo')
cmd.apply(ui)
ui.current_buffer.envelope.attach.assert_called_with(testfile)
def test_glob(self):
"""A test using a glob."""
ui = utilities.make_ui()
with tempfile.TemporaryDirectory() as d:
testfile1 = os.path.join(d, 'foo')
testfile2 = os.path.join(d, 'far')
for t in [testfile1, testfile2]:
with open(t, 'w') as f:
f.write('foo')
cmd = envelope.AttachCommand(path=os.path.join(d, '*'))
cmd.apply(ui)
ui.current_buffer.envelope.attach.assert_has_calls(
[mock.call(testfile1), mock.call(testfile2)], any_order=True)
def test_no_match(self):
"""A test for a file that doesn't exist."""
ui = utilities.make_ui()
with tempfile.TemporaryDirectory() as d:
cmd = envelope.AttachCommand(path=os.path.join(d, 'doesnt-exist'))
cmd.apply(ui)
ui.notify.assert_called()
class TestTagCommands(unittest.TestCase):
def _test(self, tagstring, action, expected):
"""Common steps for envelope.TagCommand tests
:param tagstring: the string to pass to the TagCommand
:type tagstring: str
:param action: the action to pass to the TagCommand
:type action: str
:param expected: the expected output to assert in the test
:type expected: list(str)
"""
env = Envelope(tags=['one', 'two', 'three'])
ui = utilities.make_ui()
ui.current_buffer = mock.Mock()
ui.current_buffer.envelope = env
cmd = envelope.TagCommand(tags=tagstring, action=action)
cmd.apply(ui)
actual = env.tags
self.assertListEqual(sorted(actual), sorted(expected))
def test_add_new_tags(self):
self._test('four', 'add', ['one', 'two', 'three', 'four'])
def test_adding_existing_tags_has_no_effect(self):
self._test('one', 'add', ['one', 'two', 'three'])
def test_remove_existing_tags(self):
self._test('one', 'remove', ['two', 'three'])
def test_remove_non_existing_tags_has_no_effect(self):
self._test('four', 'remove', ['one', 'two', 'three'])
def test_set_tags(self):
self._test('a,b,c', 'set', ['a', 'b', 'c'])
def test_toggle_will_remove_existing_tags(self):
self._test('one', 'toggle', ['two', 'three'])
def test_toggle_will_add_new_tags(self):
self._test('four', 'toggle', ['one', 'two', 'three', 'four'])
def test_toggle_can_remove_and_add_in_one_run(self):
self._test('one,four', 'toggle', ['two', 'three', 'four'])
class TestSignCommand(unittest.TestCase):
"""Tests for the SignCommand class."""
@staticmethod
def _make_ui_mock():
"""Create a mock for the ui and envelope and return them."""
envelope = Envelope()
envelope['From'] = 'foo <[email protected]>'
envelope.sign = mock.sentinel.default
envelope.sign_key = mock.sentinel.default
ui = utilities.make_ui(current_buffer=mock.Mock(envelope=envelope))
return envelope, ui
@mock.patch('alot.commands.envelope.crypto.get_key',
mock.Mock(return_value=mock.sentinel.keyid))
def test_apply_keyid_success(self):
"""If there is a valid keyid then key and to sign should be set.
"""
env, ui = self._make_ui_mock()
# The actual keyid doesn't matter, since it'll be mocked anyway
cmd = envelope.SignCommand(action='sign', keyid=['a'])
cmd.apply(ui)
self.assertTrue(env.sign)
self.assertEqual(env.sign_key, mock.sentinel.keyid)
@mock.patch('alot.commands.envelope.crypto.get_key',
mock.Mock(side_effect=GPGProblem('sentinel', 0)))
def test_apply_keyid_gpgproblem(self):
"""If there is an invalid keyid then the signing key and to sign should
be set to false and default.
"""
env, ui = self._make_ui_mock()
# The actual keyid doesn't matter, since it'll be mocked anyway
cmd = envelope.SignCommand(action='sign', keyid=['a'])
cmd.apply(ui)
self.assertFalse(env.sign)
self.assertEqual(env.sign_key, mock.sentinel.default)
ui.notify.assert_called_once()
@mock.patch('alot.commands.envelope.settings.account_matching_address',
mock.Mock(side_effect=NoMatchingAccount))
def test_apply_no_keyid_nomatchingaccount(self):
"""If there is a nokeyid and no account can be found to match the From,
then the envelope should not be marked to sign.
"""
env, ui = self._make_ui_mock()
# The actual keyid doesn't matter, since it'll be mocked anyway
cmd = envelope.SignCommand(action='sign', keyid=None)
cmd.apply(ui)
self.assertFalse(env.sign)
self.assertEqual(env.sign_key, mock.sentinel.default)
ui.notify.assert_called_once()
def test_apply_no_keyid_no_gpg_key(self):
"""If there is a nokeyid and the account has no gpg key then the
signing key and to sign should be set to false and default.
"""
env, ui = self._make_ui_mock()
env.account = mock.Mock(gpg_key=None)
cmd = envelope.SignCommand(action='sign', keyid=None)
cmd.apply(ui)
self.assertFalse(env.sign)
self.assertEqual(env.sign_key, mock.sentinel.default)
ui.notify.assert_called_once()
def test_apply_no_keyid_default(self):
"""If there is no keyid and the account has a gpg key, then that should
be used.
"""
env, ui = self._make_ui_mock()
env.account = mock.Mock(gpg_key='sentinel')
cmd = envelope.SignCommand(action='sign', keyid=None)
cmd.apply(ui)
self.assertTrue(env.sign)
self.assertEqual(env.sign_key, 'sentinel')
@mock.patch('alot.commands.envelope.crypto.get_key',
mock.Mock(return_value=mock.sentinel.keyid))
def test_apply_no_sign(self):
"""If signing with a valid keyid and valid key then set sign and
sign_key.
"""
env, ui = self._make_ui_mock()
# The actual keyid doesn't matter, since it'll be mocked anyway
cmd = envelope.SignCommand(action='sign', keyid=['a'])
cmd.apply(ui)
self.assertTrue(env.sign)
self.assertEqual(env.sign_key, mock.sentinel.keyid)
@mock.patch('alot.commands.envelope.crypto.get_key',
mock.Mock(return_value=mock.sentinel.keyid))
def test_apply_unsign(self):
"""Test that settingun sign sets the sign to False if all other
conditions allow for it.
"""
env, ui = self._make_ui_mock()
env.sign = True
env.sign_key = mock.sentinel
# The actual keyid doesn't matter, since it'll be mocked anyway
cmd = envelope.SignCommand(action='unsign', keyid=['a'])
cmd.apply(ui)
self.assertFalse(env.sign)
self.assertIs(env.sign_key, None)
@mock.patch('alot.commands.envelope.crypto.get_key',
mock.Mock(return_value=mock.sentinel.keyid))
def test_apply_togglesign(self):
"""Test that toggling changes the sign and sign_key as approriate if
other condtiions allow for it
"""
env, ui = self._make_ui_mock()
env.sign = True
env.sign_key = mock.sentinel.keyid
# The actual keyid doesn't matter, since it'll be mocked anyway
# Test that toggling from true to false works
cmd = envelope.SignCommand(action='toggle', keyid=['a'])
cmd.apply(ui)
self.assertFalse(env.sign)
self.assertIs(env.sign_key, None)
# Test that toggling back to True works
cmd.apply(ui)
self.assertTrue(env.sign)
self.assertIs(env.sign_key, mock.sentinel.keyid)
def _make_local_settings(self):
config = textwrap.dedent("""\
[accounts]
[[default]]
realname = foo
address = [email protected]
sendmail_command = /bin/true
""")
# Allow settings.reload to work by not deleting the file until the end
with tempfile.NamedTemporaryFile(mode='w+', delete=False) as f:
f.write(config)
self.addCleanup(os.unlink, f.name)
# Set the gpg_key separately to avoid validation failures
manager = SettingsManager()
manager.read_config(f.name)
manager.get_accounts()[0].gpg_key = mock.sentinel.gpg_key
return manager
def test_apply_from_email_only(self):
"""Test that a key can be derived using a 'From' header that contains
only an email.
If the from header is in the form "[email protected]" and a key exists it
should be used.
"""
manager = self._make_local_settings()
env, ui = self._make_ui_mock()
env.headers = {'From': ['[email protected]']}
cmd = envelope.SignCommand(action='sign')
with mock.patch('alot.commands.envelope.settings', manager):
cmd.apply(ui)
self.assertTrue(env.sign)
self.assertIs(env.sign_key, mock.sentinel.gpg_key)
def test_apply_from_user_and_email(self):
"""This tests that a gpg key can be derived using a 'From' header that
contains a realname-email combo.
If the header is in the form "Foo <[email protected]>", a key should be
derived.
See issue #1113
"""
manager = self._make_local_settings()
env, ui = self._make_ui_mock()
cmd = envelope.SignCommand(action='sign')
with mock.patch('alot.commands.envelope.settings', manager):
cmd.apply(ui)
self.assertTrue(env.sign)
self.assertIs(env.sign_key, mock.sentinel.gpg_key)
class TestSendCommand(unittest.TestCase):
"""Tests for the SendCommand class."""
mail = textwrap.dedent("""\
From: [email protected]
To: [email protected]
Subject: FooBar
Foo Bar Baz
""")
class MockedAccount(Account):
def __init__(self):
super().__init__('[email protected]')
async def send_mail(self, mail):
pass
@utilities.async_test
async def test_account_matching_address_with_str(self):
cmd = envelope.SendCommand(mail=self.mail)
account = mock.Mock(wraps=self.MockedAccount())
with mock.patch(
'alot.commands.envelope.settings.account_matching_address',
mock.Mock(return_value=account)) as account_matching_address:
await cmd.apply(mock.Mock())
account_matching_address.assert_called_once_with('[email protected]',
return_default=True)
# check that the apply did run through till the end.
account.send_mail.assert_called_once_with(self.mail)
@utilities.async_test
async def test_account_matching_address_with_email_message(self):
mail = email.message_from_string(self.mail)
cmd = envelope.SendCommand(mail=mail)
account = mock.Mock(wraps=self.MockedAccount())
with mock.patch(
'alot.commands.envelope.settings.account_matching_address',
mock.Mock(return_value=account)) as account_matching_address:
await cmd.apply(mock.Mock())
account_matching_address.assert_called_once_with('[email protected]',
return_default=True)
# check that the apply did run through till the end.
account.send_mail.assert_called_once_with(mail)
| gpl-3.0 |
operepo/ope | client_tools/svc/build_mgmt.py | 1 | 1171 | import os
import sys
import shutil
project_name = "mgmt"
main_file = "mgmt.py"
# If you get corrupted errors, use this
clean = " " # " --clean "
remove_spec_file = False
spec_file = project_name + ".spec"
# Don't wan't old spec files right now.
if remove_spec_file and os.path.exists(spec_file):
os.unlink(spec_file)
# options = [ ('v', None, 'OPTION'), ('W ignore', None, 'OPTION') ]
# Put after exe=exe(...a.scripts, options)
#--noconsole
data_files = " --add-data logo_icon.ico;. --add-data rc;rc --add-data mgmt.version;. " + \
" --add-data install_service.cmd;. "
hidden_imports = "--hidden-import sip --hidden-import win32timezone"
build_params = "python -m PyInstaller " + clean + \
hidden_imports + \
" --noupx " + \
data_files + " --noconfirm --icon logo_icon.ico "
# == Build the app for windows using pyinstaller ==
if os.path.exists(spec_file):
# Build using the existing spec file
print("Building w existing spec file...")
os.system(build_params + " {0}.spec".format(project_name))
else:
print("Building fresh copy...")
os.system(build_params + " --name {0} {1}".format(project_name, main_file))
print("Done!") | mit |
ysekky/chainer | chainer/functions/math/sum.py | 3 | 2814 | from chainer import cuda
from chainer import function
from chainer.utils import type_check
class Sum(function.Function):
"""Sum of array elements over a given axis."""
keepdims = False
def __init__(self, axis=None, keepdims=False):
if axis is None:
self.axis = None
elif isinstance(axis, int):
self.axis = (axis,)
elif isinstance(axis, tuple) and all(isinstance(a, int) for a in axis):
if len(set(axis)) != len(axis):
raise ValueError('duplicate value in axis: ({})'.format(
', '.join(map(str, axis))))
self.axis = axis
else:
raise TypeError('None, int or tuple of int are required')
self.keepdims = keepdims
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
in_types[0].dtype.kind == 'f',
)
if self.axis is not None:
for axis in self.axis:
if axis >= 0:
type_check.expect(
axis < in_types[0].ndim,
)
else:
type_check.expect(
-axis - 1 < in_types[0].ndim,
)
def forward(self, x):
self.retain_inputs(())
self._in_shape = x[0].shape
self._in_dtype = x[0].dtype
self._xp = cuda.get_array_module(*x)
return self._xp.asarray(
x[0].sum(axis=self.axis, keepdims=self.keepdims)),
def backward(self, x, gy):
xp = self._xp
gy = gy[0]
if not (len(self._in_shape) == 0 or
self.axis is None or self.keepdims):
actual_axis = []
for axis in self.axis:
if axis < 0:
axis += len(self._in_shape)
actual_axis.append(axis)
for axis in sorted(actual_axis):
gy = xp.expand_dims(gy, axis=axis)
if hasattr(xp, 'broadcast_to'):
gx = xp.broadcast_to(gy, self._in_shape)
else:
# NumPy 1.9 does not support broadcast_to.
dummy_x = xp.empty(self._in_shape, 'b')
gx, _ = xp.broadcast_arrays(gy, dummy_x)
return gx,
def sum(x, axis=None, keepdims=False):
"""Sum of array elements over a given axis.
Args:
x (~chainer.Variable): Elements to sum.
axis (None, int, or tuple of int): Axis which a sum is performed.
The default (axis = None) is perform a sum over all the dimensions
of the input array.
keepdims (bool): If ``True``, the specified axes are remained as axes
of length one.
Returns:
~chainer.Variable: Output variable.
"""
return Sum(axis, keepdims)(x)
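# Example usage (an illustrative sketch, not part of the original file; assumes
# numpy is imported as np and this module is used via chainer.functions as F):
#     x = chainer.Variable(np.arange(6, dtype=np.float32).reshape(2, 3))
#     y = F.sum(x, axis=0)   # y.shape == (3,)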
| mit |
heplesser/nest-simulator | pynest/nest/tests/test_connect_pairwise_bernoulli.py | 10 | 3533 | # -*- coding: utf-8 -*-
#
# test_connect_pairwise_bernoulli.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import unittest
import scipy.stats
from . import test_connect_helpers as hf
from .test_connect_parameters import TestParams
class TestPairwiseBernoulli(TestParams):
# specify connection pattern and specific params
rule = 'pairwise_bernoulli'
p = 0.5
conn_dict = {'rule': rule, 'p': p}
# sizes of source-, target-population and connection probability for
# statistical test
N_s = 50
N_t = 50
# Critical values and number of iterations of two level test
stat_dict = {'alpha2': 0.05, 'n_runs': 20}
def testStatistics(self):
for fan in ['in', 'out']:
expected = hf.get_expected_degrees_bernoulli(
self.p, fan, self.N_s, self.N_t)
pvalues = []
for i in range(self.stat_dict['n_runs']):
hf.reset_seed(i+1, self.nr_threads)
self.setUpNetwork(conn_dict=self.conn_dict,
N1=self.N_s, N2=self.N_t)
degrees = hf.get_degrees(fan, self.pop1, self.pop2)
degrees = hf.gather_data(degrees)
# degrees = self.comm.gather(degrees, root=0)
# if self.rank == 0:
if degrees is not None:
chi, p = hf.chi_squared_check(degrees, expected, self.rule)
pvalues.append(p)
hf.mpi_barrier()
if degrees is not None:
ks, p = scipy.stats.kstest(pvalues, 'uniform')
self.assertTrue(p > self.stat_dict['alpha2'])
def testAutapsesTrue(self):
conn_params = self.conn_dict.copy()
N = 10
conn_params['allow_multapses'] = False
# test that autapses exist
conn_params['p'] = 1.
conn_params['allow_autapses'] = True
pop = hf.nest.Create('iaf_psc_alpha', N)
hf.nest.Connect(pop, pop, conn_params)
# make sure all connections do exist
M = hf.get_connectivity_matrix(pop, pop)
hf.mpi_assert(np.diag(M), np.ones(N), self)
def testAutapsesFalse(self):
conn_params = self.conn_dict.copy()
N = 10
conn_params['allow_multapses'] = False
# test that autapses were excluded
conn_params['p'] = 1.
conn_params['allow_autapses'] = False
pop = hf.nest.Create('iaf_psc_alpha', N)
hf.nest.Connect(pop, pop, conn_params)
# make sure no autapses were created
M = hf.get_connectivity_matrix(pop, pop)
hf.mpi_assert(np.diag(M), np.zeros(N), self)
def suite():
suite = unittest.TestLoader().loadTestsFromTestCase(TestPairwiseBernoulli)
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
if __name__ == '__main__':
run()
| gpl-2.0 |
smishenk/blink-crosswalk | Tools/Scripts/webkitpy/tool/commands/prettydiff.py | 186 | 1857 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.tool.commands.abstractsequencedcommand import AbstractSequencedCommand
from webkitpy.tool import steps
class PrettyDiff(AbstractSequencedCommand):
name = "pretty-diff"
help_text = "Shows the pretty diff in the default browser"
show_in_main_help = True
steps = [
steps.ConfirmDiff,
]
| bsd-3-clause |
zentralopensource/zentral | zentral/contrib/mdm/views/mdm.py | 1 | 12922 | import logging
import plistlib
from django.db import transaction
from django.http import HttpResponse
from django.views.generic import View
from zentral.contrib.inventory.models import MetaBusinessUnit
from zentral.contrib.inventory.utils import commit_machine_snapshot_and_trigger_events
from zentral.contrib.mdm.commands import queue_account_configuration_command_if_needed
from zentral.contrib.mdm.events import MDMRequestEvent
from zentral.contrib.mdm.models import (EnrolledDevice, EnrolledUser,
DEPEnrollmentSession, OTAEnrollmentSession,
PushCertificate)
from zentral.contrib.mdm.tasks import send_enrolled_device_notification
from zentral.utils.certificates import parse_dn
from .base import PostEventMixin
from .utils import (build_application_download_response, build_application_manifest_response,
get_next_device_command_response,
process_result_payload, tree_from_payload)
logger = logging.getLogger('zentral.contrib.mdm.views.mdm')
class MDMView(PostEventMixin, View):
event_class = MDMRequestEvent
push_certificate = None
enrollment_session = None
def post_event(self, *args, **kwargs):
view_name = self.request.resolver_match.view_name
if view_name:
kwargs["view_name"] = view_name.split(":")[-1]
if self.enrollment_session:
kwargs.update(self.enrollment_session.serialize_for_event())
super().post_event(*args, **kwargs)
def put(self, request, *args, **kwargs):
# DN => serial_number + meta_business_unit
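# Illustrative example of the DN this view expects (hypothetical values):
#   CN=MDM$OTA$<enrollment secret>, serialNumber=<device serial>, O=MBU$<mbu pk>
# The CN carries the enrollment type and secret, serialNumber the device
# serial number, and O the meta business unit primary key; parse_dn() below
# turns the DN string into the dict that is validated here.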
dn = request.META.get("HTTP_X_SSL_CLIENT_S_DN")
if not dn:
self.abort("missing DN in request headers")
dn_d = parse_dn(dn)
cn = dn_d.get("CN")
try:
cn_prefix, enrollment_type, enrollment_secret_secret = cn.split("$")
except (AttributeError, ValueError):
self.abort("missing or bad CN in client certificate DN")
# verify prefix
if cn_prefix != "MDM":
self.abort("bad CN prefix in client certificate")
# verify enrollment
if enrollment_type == "OTA":
try:
self.enrollment_session = (
OTAEnrollmentSession.objects
.select_for_update()
.get(enrollment_secret__secret=enrollment_secret_secret)
)
except OTAEnrollmentSession.DoesNotExist:
self.abort("Bad OTA enrollment session secret in client certificate CN")
elif enrollment_type == "DEP":
try:
self.enrollment_session = (
DEPEnrollmentSession.objects
.select_for_update()
.get(enrollment_secret__secret=enrollment_secret_secret)
)
except DEPEnrollmentSession.DoesNotExist:
self.abort("Bad DEP enrollment session secret in client certificate CN")
else:
self.abort("unknown MDM enrollment type {}".format(enrollment_type))
# verify serial number
self.serial_number = dn_d.get("serialNumber")
if not self.serial_number:
self.abort("empty serial number in client certificate CN")
# verify meta business unit
o = dn_d.get("O")
if not o or not o.startswith("MBU$"):
self.abort("missing or bad O in client certificate DN")
else:
try:
mbu_pk = int(o[4:])
self.meta_business_unit = MetaBusinessUnit.objects.get(pk=mbu_pk)
except (MetaBusinessUnit.DoesNotExist, ValueError):
self.abort("unknown meta business unit in client certificate DN")
# read payload
self.payload = plistlib.loads(self.request.read())
self.udid = self.payload.get("UDID")
return self.do_put()
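    # Illustrative example (values invented, not part of the original source):
    # with a client certificate whose subject DN looks like
    #
    #   CN=MDM$OTA$9f3e2c...,serialNumber=C02ABC123DEF,O=MBU$1
    #
    # the method above resolves an OTAEnrollmentSession from the secret after the
    # second "$", takes the device serial number from the serialNumber attribute,
    # and loads MetaBusinessUnit pk=1 from the "MBU$<pk>" O component before
    # parsing the plist request body.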
class CheckinView(MDMView):
message_type = None
first_device_notification_delay = 5 # in seconds, TODO: empirical!!!
def post_event(self, *args, **kwargs):
if self.message_type:
kwargs["message_type"] = self.message_type
if self.push_certificate:
kwargs["push_certificate"] = {"pk": self.push_certificate.pk,
"topic": self.push_certificate.topic}
super().post_event(*args, **kwargs)
def do_authenticate(self):
# commit machine infos
self.commit_tree()
# save the enrolled device (NOT YET ENROLLED!)
enrolled_device_defaults = {"enrollment_id": self.payload.get("EnrollmentID"),
"awaiting_configuration": None,
"serial_number": self.serial_number,
"push_certificate": self.push_certificate,
"token": None,
"push_magic": None,
"unlock_token": None,
"checkout_at": None}
enrolled_device, created = EnrolledDevice.objects.update_or_create(udid=self.udid,
defaults=enrolled_device_defaults)
# purge the installed artifacts and sent commands, to start from scratch
enrolled_device.purge_state()
# update enrollment session
self.enrollment_session.set_authenticated_status(enrolled_device)
# post events
if created:
self.post_event("success", reenrollment=False)
else:
self.post_event("success", reenrollment=True)
def do_token_update(self):
# TODO: do something with AwaitingConfiguration. Part of the DEP setup.
awaiting_configuration = self.payload.get("AwaitingConfiguration", False)
enrolled_device_defaults = {"enrollment_id": self.payload.get("EnrollmentID"),
"awaiting_configuration": awaiting_configuration,
"serial_number": self.serial_number,
"push_certificate": self.push_certificate,
"push_magic": self.payload.get("PushMagic"),
"unlock_token": self.payload.get("UnlockToken"),
"checkout_at": None}
payload_token = self.payload.get("Token")
user_id = self.payload.get("UserID")
if not user_id:
# payload token is the enrolled device token
enrolled_device_defaults["token"] = payload_token
# enrolled device
enrolled_device, device_created = EnrolledDevice.objects.update_or_create(
udid=self.udid,
defaults=enrolled_device_defaults
)
# accounts creation
if awaiting_configuration:
dep_profile = getattr(self.enrollment_session, "dep_profile", None)
if dep_profile:
queue_account_configuration_command_if_needed(
enrolled_device,
dep_profile,
self.enrollment_session.realm_user
)
else:
# should never happen. AwaitingConfiguration is only used during DEP enrollments
logger.error("AwaitingConfiguration but not a DEP enrollment session ???")
# send first push notifications
if not user_id and enrolled_device.can_be_poked():
transaction.on_commit(lambda: send_enrolled_device_notification(
enrolled_device,
delay=self.first_device_notification_delay
))
transaction.on_commit(lambda: send_enrolled_device_notification(
enrolled_device,
delay=2 * self.first_device_notification_delay
))
# Update enrollment session
if enrolled_device.token and not self.enrollment_session.is_completed():
self.enrollment_session.set_completed_status(enrolled_device)
# enrolled user
user_created = False
if user_id and user_id.upper() != "FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF":
# user channel and no shared ipad
# see https://developer.apple.com/documentation/devicemanagement/tokenupdaterequest
enrolled_user_defaults = {"enrolled_device": enrolled_device,
"enrollment_id": self.payload.get("EnrollmentUserID"),
"long_name": self.payload.get("UserLongName"),
"short_name": self.payload.get("UserShortName"),
"token": payload_token}
enrolled_user, user_created = EnrolledUser.objects.update_or_create(
user_id=user_id,
defaults=enrolled_user_defaults
)
self.post_event("success",
token_type="user" if user_id else "device",
user_id=user_id,
device_created=device_created,
user_created=user_created)
def do_checkout(self):
try:
enrolled_device = EnrolledDevice.objects.get(push_certificate=self.push_certificate,
udid=self.udid)
except EnrolledDevice.DoesNotExist:
self.abort("Could not do checkout. Unknown enrolled device",
push_certificate_topic=self.push_certificate.topic,
device_udid=self.udid)
else:
enrolled_device.do_checkout()
self.post_event("success")
def commit_tree(self):
commit_machine_snapshot_and_trigger_events(tree_from_payload(self.udid,
self.serial_number,
self.meta_business_unit,
self.payload))
def do_put(self):
self.message_type = self.payload.get("MessageType")
self.push_certificate = None
# get push certificate
topic = self.payload.get("Topic")
try:
self.push_certificate = PushCertificate.objects.get(topic=topic)
except PushCertificate.DoesNotExist:
self.abort("unknown topic", topic=topic)
# route the payload
if self.message_type == "Authenticate":
self.do_authenticate()
elif self.message_type == "UserAutenticate":
# TODO: network / mobile user management
self.post_event("warning", user_id=self.payload.get("UserID"))
            return HttpResponse(status=410)
elif self.message_type == "TokenUpdate":
self.do_token_update()
elif self.message_type == "CheckOut":
self.do_checkout()
else:
self.abort("unknown message type")
return HttpResponse()
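    # Rough shape of a TokenUpdate check-in payload as routed above (keys taken
    # from the accesses in do_token_update(); the values are invented):
    #
    #   {"MessageType": "TokenUpdate",
    #    "Topic": "com.apple.mgmt.External.00000000-0000-0000-0000-000000000000",
    #    "UDID": "11111111-2222-3333-4444-555555555555",
    #    "Token": b"...",               # APNs device token
    #    "PushMagic": "6ADE...",
    #    "AwaitingConfiguration": False}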
class ConnectView(MDMView):
@staticmethod
def get_success(payload_status):
if payload_status in ["Error", "CommandFormatError"]:
return "failure"
else:
return "success"
def do_put(self):
command_uuid = self.payload.get("CommandUUID", None)
payload_status = self.payload["Status"]
user_id = self.payload.get("UserID")
self.post_event(self.get_success(payload_status),
command_uuid=command_uuid,
payload_status=payload_status,
user_id=user_id)
enrolled_device = self.enrollment_session.enrolled_device
# result
if payload_status != "Idle":
process_result_payload(self.meta_business_unit, enrolled_device,
command_uuid, payload_status,
self.payload)
# response
if user_id:
# TODO: do something!!!
return HttpResponse()
elif payload_status in ["Idle", "Acknowledged", "Error", "CommandFormatError"]:
# we can send another command
return get_next_device_command_response(self.meta_business_unit, enrolled_device)
elif payload_status in ["NotNow"]:
# we let the device contact us again
return HttpResponse()
else:
self.abort("unknown payload status {}".format(payload_status))
class InstallApplicationManifestView(View):
def get(self, response, *args, **kwargs):
return build_application_manifest_response(kwargs["uuid"])
class InstallApplicationDownloadView(View):
def get(self, response, *args, **kwargs):
return build_application_download_response(kwargs["uuid"])
| apache-2.0 |
google-research/pddm | pddm/utils/convert_to_parser_args.py | 1 | 4673 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import argparse
def convert_to_parser_args(args_source=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('--seed', type=int, default=0)
#######################
### experiment info
#######################
parser.add_argument('--env_name', type=str)
parser.add_argument('--rollout_length', type=int)
parser.add_argument('--num_iters', type=int, default=1)
parser.add_argument('--num_trajectories_per_iter', type=int, default=2)
# -1 means start from scratch... any other number says which iter to restore & continue from
parser.add_argument('--continue_run', type=int, default=-1)
parser.add_argument('--continue_run_filepath', type=str, default='')
# have controller use true dynamics for planning (instead of learned model)
parser.add_argument('--use_ground_truth_dynamics', action="store_true")
# other T/F
parser.add_argument('--visualize_MPC_rollout', action="store_true")
parser.add_argument('--print_minimal', action="store_true")
# noise options
parser.add_argument('--make_aggregated_dataset_noisy', action="store_true")
parser.add_argument('--make_training_dataset_noisy', action="store_true")
parser.add_argument('--rollouts_noise_actions', action="store_true")
parser.add_argument('--rollouts_document_noised_actions', action="store_true")
###########################
### random data collection
###########################
# collect random rollouts
parser.add_argument('--load_existing_random_data', action="store_true")
parser.add_argument('--num_rand_rollouts_train', type=int, default=100)
parser.add_argument('--num_rand_rollouts_val', type=int, default=50)
parser.add_argument('--rand_rollout_length', type=int, default=30)
parser.add_argument('--use_threading', action="store_true")
# sample random velocities vs. positions
parser.add_argument('--rand_policy_sample_velocities', action="store_true")
parser.add_argument('--rand_policy_vel_min', type=float, default=0)
parser.add_argument('--rand_policy_vel_max', type=float, default=0)
parser.add_argument('--rand_policy_hold_action', type=int, default=1)
#######################
### dynamics model
#######################
# arch
parser.add_argument('--num_fc_layers', type=int, default=2)
parser.add_argument('--depth_fc_layers', type=int, default=64)
parser.add_argument('--ensemble_size', type=int, default=1) #ensemble size
parser.add_argument('--K', type=int, default=1) #number of past states for input to model
# False to start model training from SCRATCH at each iteration
parser.add_argument('--warmstart_training', action="store_true")
# model training
parser.add_argument('--always_use_savedModel', action="store_true") #use saved model instead of training one
parser.add_argument('--batchsize', type=int, default=500) #batchsize for each gradient step
parser.add_argument('--lr', type=float, default=0.001) #learning rate
parser.add_argument('--nEpoch', type=int, default=40) #epochs of training
parser.add_argument('--nEpoch_init', type=int, default=40) #epochs of training for 1st iter
#######################
### controller
#######################
# MPC
parser.add_argument('--horizon', type=int, default=7) #planning horizon
parser.add_argument('--num_control_samples', type=int, default=700) #number of candidate ac sequences
parser.add_argument('--controller_type', type=str, default='mppi') #rand, cem, mppi
# cem
parser.add_argument('--cem_max_iters', type=int, default=3) #number of iters
parser.add_argument('--cem_num_elites', type=int, default=20) #elites for refitting sampling dist
# mppi
parser.add_argument('--mppi_kappa', type=float, default=1.0) #reward weighting
parser.add_argument('--mppi_mag_noise', type=float, default=0.9) #magnitude of sampled noise
parser.add_argument('--mppi_beta', type=float, default=0.8) #smoothing
args = parser.parse_args(args_source)
return args
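# Example invocation (flag values are hypothetical; only flags defined above are used):
#
#   >>> args = convert_to_parser_args(['--env_name', 'cheetah', '--rollout_length', '100',
#   ...                                '--controller_type', 'mppi', '--horizon', '20'])
#   >>> args.horizon, args.seed, args.controller_type
#   (20, 0, 'mppi')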
| apache-2.0 |
AlanLaughter/huehuehue | ver2/find_words.py | 2 | 1377 | #found this solution on http://stackoverflow.com/questions/8870261/how-to-split-text-without-spaces-into-list-of-words
def find_words(instring, prefix = '', words = None):
    if not instring:
        if prefix:
            # Leftover characters that never formed a word: no valid segmentation.
            raise ValueError('no solution')
        return []
if words is None:
words = set()
with open('/usr/share/dict/words') as f:
for line in f:
words.add(line.strip())
if (not prefix) and (instring in words):
return [instring]
prefix, suffix = prefix + instring[0], instring[1:]
solutions = []
# Case 1: prefix in solution
if prefix in words:
try:
solutions.append([prefix] + find_words(suffix, '', words))
except ValueError:
pass
# Case 2: prefix not in solution
try:
solutions.append(find_words(suffix, prefix, words))
except ValueError:
pass
if solutions:
return sorted(solutions,
key = lambda solution: [len(word) for word in solution],
reverse = True)[0]
else:
raise ValueError('no solution')
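# Illustrative use, assuming "apple" and "tree" appear in /usr/share/dict/words
# (the exact result depends on the local dictionary file):
#
#   >>> find_words("appletree")
#   ['apple', 'tree']
#
# A string with no dictionary segmentation raises ValueError('no solution').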
def buildbracemap(code):
temp_bracestack, bracemap = [], {}
for position, command in enumerate(code):
if command == "HUe": temp_bracestack.append(position)
if command == "HUE":
start = temp_bracestack.pop()
bracemap[start] = position
bracemap[position] = start
return bracemap
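# Sketch of the pairing produced above, assuming `code` is a sequence of tokens:
#
#   >>> buildbracemap(["HUe", "hue", "HUE"])
#   {0: 2, 2: 0}
#
# i.e. the opening "HUe" at index 0 and the closing "HUE" at index 2 point at
# each other, like matching brackets.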
| gpl-3.0 |
Callwoola/tornado | tornado/escape.py | 120 | 14441 | #!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Escaping/unescaping methods for HTML, JSON, URLs, and others.
Also includes a few other miscellaneous string manipulation functions that
have crept in over time.
"""
from __future__ import absolute_import, division, print_function, with_statement
import re
import sys
from tornado.util import unicode_type, basestring_type, u
try:
from urllib.parse import parse_qs as _parse_qs # py3
except ImportError:
from urlparse import parse_qs as _parse_qs # Python 2.6+
try:
import htmlentitydefs # py2
except ImportError:
import html.entities as htmlentitydefs # py3
try:
import urllib.parse as urllib_parse # py3
except ImportError:
import urllib as urllib_parse # py2
import json
try:
unichr
except NameError:
unichr = chr
_XHTML_ESCAPE_RE = re.compile('[&<>"\']')
_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;',
                      '\'': '&#39;'}
def xhtml_escape(value):
"""Escapes a string so it is valid within HTML or XML.
Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``.
When used in attribute values the escaped strings must be enclosed
in quotes.
.. versionchanged:: 3.2
Added the single quote to the list of escaped characters.
"""
return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
to_basestring(value))
def xhtml_unescape(value):
"""Un-escapes an XML-escaped string."""
return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value))
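# A couple of illustrative round trips (doctest style):
#
#   >>> xhtml_escape('<a href="x">')
#   '&lt;a href=&quot;x&quot;&gt;'
#   >>> xhtml_unescape('&lt;b&gt; &amp; &#39;')
#   "<b> & '"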
# The fact that json_encode wraps json.dumps is an implementation detail.
# Please see https://github.com/tornadoweb/tornado/pull/706
# before sending a pull request that adds **kwargs to this function.
def json_encode(value):
"""JSON-encodes the given Python object."""
# JSON permits but does not require forward slashes to be escaped.
# This is useful when json data is emitted in a <script> tag
# in HTML, as it prevents </script> tags from prematurely terminating
# the javascript. Some json libraries do this escaping by default,
# although python's standard library does not, so we do it here.
# http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
return json.dumps(value).replace("</", "<\\/")
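# For instance (illustrative): json_encode({"x": "</script>"}) returns the
# string {"x": "<\/script>"}; the "</" sequence is emitted as "<\/" so the
# JSON can be embedded inside a <script> tag without closing it early.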
def json_decode(value):
"""Returns Python objects for the given JSON string."""
return json.loads(to_basestring(value))
def squeeze(value):
"""Replace all sequences of whitespace chars with a single space."""
return re.sub(r"[\x00-\x20]+", " ", value).strip()
def url_escape(value, plus=True):
"""Returns a URL-encoded version of the given value.
If ``plus`` is true (the default), spaces will be represented
as "+" instead of "%20". This is appropriate for query strings
but not for the path component of a URL. Note that this default
is the reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
quote = urllib_parse.quote_plus if plus else urllib_parse.quote
return quote(utf8(value))
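# e.g. (illustrative): url_escape("a b/c") == 'a+b%2Fc', while
# url_escape("a b/c", plus=False) == 'a%20b/c', the form suited to URL paths.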
# python 3 changed things around enough that we need two separate
# implementations of url_unescape. We also need our own implementation
# of parse_qs since python 3's version insists on decoding everything.
if sys.version_info[0] < 3:
def url_unescape(value, encoding='utf-8', plus=True):
"""Decodes the given value from a URL.
The argument may be either a byte or unicode string.
If encoding is None, the result will be a byte string. Otherwise,
the result is a unicode string in the specified encoding.
If ``plus`` is true (the default), plus signs will be interpreted
as spaces (literal plus signs must be represented as "%2B"). This
is appropriate for query strings and form-encoded values but not
for the path component of a URL. Note that this default is the
reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
unquote = (urllib_parse.unquote_plus if plus else urllib_parse.unquote)
if encoding is None:
return unquote(utf8(value))
else:
return unicode_type(unquote(utf8(value)), encoding)
parse_qs_bytes = _parse_qs
else:
def url_unescape(value, encoding='utf-8', plus=True):
"""Decodes the given value from a URL.
The argument may be either a byte or unicode string.
If encoding is None, the result will be a byte string. Otherwise,
the result is a unicode string in the specified encoding.
If ``plus`` is true (the default), plus signs will be interpreted
as spaces (literal plus signs must be represented as "%2B"). This
is appropriate for query strings and form-encoded values but not
for the path component of a URL. Note that this default is the
reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
if encoding is None:
if plus:
# unquote_to_bytes doesn't have a _plus variant
value = to_basestring(value).replace('+', ' ')
return urllib_parse.unquote_to_bytes(value)
else:
unquote = (urllib_parse.unquote_plus if plus
else urllib_parse.unquote)
return unquote(to_basestring(value), encoding=encoding)
def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False):
"""Parses a query string like urlparse.parse_qs, but returns the
values as byte strings.
Keys still become type str (interpreted as latin1 in python3!)
because it's too painful to keep them as byte strings in
python3 and in practice they're nearly always ascii anyway.
"""
# This is gross, but python3 doesn't give us another way.
# Latin1 is the universal donor of character encodings.
result = _parse_qs(qs, keep_blank_values, strict_parsing,
encoding='latin1', errors='strict')
encoded = {}
for k, v in result.items():
encoded[k] = [i.encode('latin1') for i in v]
return encoded
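    # Illustrative (Python 3 branch): parse_qs_bytes('a=1&a=2&b=%20') returns
    # {'a': [b'1', b'2'], 'b': [b' ']}; keys are str, values stay byte strings.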
_UTF8_TYPES = (bytes, type(None))
def utf8(value):
"""Converts a string argument to a byte string.
If the argument is already a byte string or None, it is returned unchanged.
Otherwise it must be a unicode string and is encoded as utf8.
"""
if isinstance(value, _UTF8_TYPES):
return value
if not isinstance(value, unicode_type):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.encode("utf-8")
_TO_UNICODE_TYPES = (unicode_type, type(None))
def to_unicode(value):
"""Converts a string argument to a unicode string.
If the argument is already a unicode string or None, it is returned
unchanged. Otherwise it must be a byte string and is decoded as utf8.
"""
if isinstance(value, _TO_UNICODE_TYPES):
return value
if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.decode("utf-8")
# to_unicode was previously named _unicode not because it was private,
# but to avoid conflicts with the built-in unicode() function/type
_unicode = to_unicode
# When dealing with the standard library across python 2 and 3 it is
# sometimes useful to have a direct conversion to the native string type
if str is unicode_type:
native_str = to_unicode
else:
native_str = utf8
_BASESTRING_TYPES = (basestring_type, type(None))
def to_basestring(value):
"""Converts a string argument to a subclass of basestring.
In python2, byte and unicode strings are mostly interchangeable,
so functions that deal with a user-supplied argument in combination
with ascii string constants can use either and should return the type
the user supplied. In python3, the two types are not interchangeable,
so this method is needed to convert byte strings to unicode.
"""
if isinstance(value, _BASESTRING_TYPES):
return value
if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.decode("utf-8")
def recursive_unicode(obj):
"""Walks a simple data structure, converting byte strings to unicode.
Supports lists, tuples, and dictionaries.
"""
if isinstance(obj, dict):
return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items())
elif isinstance(obj, list):
return list(recursive_unicode(i) for i in obj)
elif isinstance(obj, tuple):
return tuple(recursive_unicode(i) for i in obj)
elif isinstance(obj, bytes):
return to_unicode(obj)
else:
return obj
# I originally used the regex from
# http://daringfireball.net/2010/07/improved_regex_for_matching_urls
# but it gets all exponential on certain patterns (such as too many trailing
# dots), causing the regex matcher to never return.
# This regex should avoid those problems.
# Use to_unicode instead of tornado.util.u - we don't want backslashes getting
# processed as escapes.
_URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""))
def linkify(text, shorten=False, extra_params="",
require_protocol=False, permitted_protocols=["http", "https"]):
"""Converts plain text into HTML with links.
For example: ``linkify("Hello http://tornadoweb.org!")`` would return
``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!``
Parameters:
* ``shorten``: Long urls will be shortened for display.
* ``extra_params``: Extra text to include in the link tag, or a callable
taking the link as an argument and returning the extra text
e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``,
or::
def extra_params_cb(url):
if url.startswith("http://example.com"):
return 'class="internal"'
else:
return 'class="external" rel="nofollow"'
linkify(text, extra_params=extra_params_cb)
* ``require_protocol``: Only linkify urls which include a protocol. If
this is False, urls such as www.facebook.com will also be linkified.
* ``permitted_protocols``: List (or set) of protocols which should be
linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp",
"mailto"])``. It is very unsafe to include protocols such as
``javascript``.
"""
if extra_params and not callable(extra_params):
extra_params = " " + extra_params.strip()
def make_link(m):
url = m.group(1)
proto = m.group(2)
if require_protocol and not proto:
return url # not protocol, no linkify
if proto and proto not in permitted_protocols:
return url # bad protocol, no linkify
href = m.group(1)
if not proto:
href = "http://" + href # no proto specified, use http
if callable(extra_params):
params = " " + extra_params(href).strip()
else:
params = extra_params
# clip long urls. max_len is just an approximation
max_len = 30
if shorten and len(url) > max_len:
before_clip = url
if proto:
proto_len = len(proto) + 1 + len(m.group(3) or "") # +1 for :
else:
proto_len = 0
parts = url[proto_len:].split("/")
if len(parts) > 1:
# Grab the whole host part plus the first bit of the path
# The path is usually not that interesting once shortened
# (no more slug, etc), so it really just provides a little
# extra indication of shortening.
url = url[:proto_len] + parts[0] + "/" + \
parts[1][:8].split('?')[0].split('.')[0]
if len(url) > max_len * 1.5: # still too long
url = url[:max_len]
if url != before_clip:
amp = url.rfind('&')
# avoid splitting html char entities
if amp > max_len - 5:
url = url[:amp]
url += "..."
if len(url) >= len(before_clip):
url = before_clip
else:
# full url is visible on mouse-over (for those who don't
# have a status bar, such as Safari by default)
params += ' title="%s"' % href
return u('<a href="%s"%s>%s</a>') % (href, params, url)
# First HTML-escape so that our strings are all safe.
    # The regex is modified to avoid character entities other than &amp; so
    # that we won't pick up &quot;, etc.
text = _unicode(xhtml_escape(text))
return _URL_RE.sub(make_link, text)
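# A minimal illustration (matches the docstring example above):
#
#   >>> linkify("Hello http://tornadoweb.org!")
#   'Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!'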
def _convert_entity(m):
if m.group(1) == "#":
try:
if m.group(2)[:1].lower() == 'x':
return unichr(int(m.group(2)[1:], 16))
else:
return unichr(int(m.group(2)))
except ValueError:
return "&#%s;" % m.group(2)
try:
return _HTML_UNICODE_MAP[m.group(2)]
except KeyError:
return "&%s;" % m.group(2)
def _build_unicode_map():
unicode_map = {}
for name, value in htmlentitydefs.name2codepoint.items():
unicode_map[name] = unichr(value)
return unicode_map
_HTML_UNICODE_MAP = _build_unicode_map()
| apache-2.0 |
erlang/otp | lib/asn1/test/asn1_SUITE_data/Constraints.py | 92 | 4620 | Constraints DEFINITIONS ::=
BEGIN
-- Single Value
SingleValue ::= INTEGER (1)
SingleValue2 ::= INTEGER (1..20)
predefined INTEGER ::= 1
SingleValue3 ::= INTEGER (predefined | 5 | 10)
Range2to19 ::= INTEGER (1<..<20)
Range10to20 ::= INTEGER (10..20)
ContainedSubtype ::= INTEGER (INCLUDES Range10to20)
-- Some ranges for additional constrained number testing.
LongLong ::= INTEGER (0..18446744073709551615)
Range256to65536 ::= INTEGER (256..65536)
SemiConstrained ::= INTEGER (100..MAX)
NegSemiConstrained ::= INTEGER (-128..MAX)
SemiConstrainedExt ::= INTEGER (42..MAX, ...)
NegSemiConstrainedExt ::= INTEGER (-128..MAX, ...)
SemiNamed ::= INTEGER {a(100), b(200)} (100..MAX)
-- Extensions --
LongLongExt ::= INTEGER (0..18446744073709551615, ..., -5000..-1)
Range256to65536Ext ::= INTEGER (256..65536, ..., 1000000..9000000)
-- Union of single values
Sv1 ::= INTEGER (2|3|17)
Sv2 ::= INTEGER (2|3|17, ...)
Sv3 ::= INTEGER {a(2),b(3),z(17)} (2|3|17, ...)
-- Other constraints
FixedSize ::= OCTET STRING (SIZE(10))
FixedSize2 ::= OCTET STRING (SIZE(10|20))
VariableSize ::= OCTET STRING (SIZE(1..10))
PemittedAlphabet ::= PrintableString (FROM ("a"|"yx"))
AliasAddress ::=CHOICE
{
e164 IA5String (SIZE (1..128) ^ FROM ("0123456789#*,")),
h323-ID BMPString (SIZE (1..256)),
...
}
Obj ::= OBJECT IDENTIFIER
-- OTP-4559: a referenced type that has a permitted alphabet constraint
-- Example from H323-MESSAGES ver (11/2000)
TBCD-STRING ::= IA5String (FROM ("0123456789#*abc"))
ANSI-41-UIM ::= SEQUENCE {
imsi [0] TBCD-STRING(SIZE (3..16)) OPTIONAL,
esn [1] TBCD-STRING(SIZE (16)) OPTIONAL
}
-- OTP-4869: a BIT STRING constrained by SIZE(C) was encoded wrong
-- when C was larger than 16. There was also an error when encoding
-- in compact_bit_string mode.
IP ::= SEQUENCE {
perm SEQUENCE OF INTEGER (0..15),
key BIT STRING (SIZE (128)),
bool BOOLEAN OPTIONAL
}
-- add for OTP-3558 and OTP-4917
Day ::= ENUMERATED{monday(0),tuesday(1),wednesday(2),thursday(3),friday(4),saturday(5),sunday(6)}
Wednesday ::= Day(wednesday)
Thing ::= INTEGER {fred (0),fred2 (1),fred3 (2)}
AnotherThing ::= Thing (fred | fred2)
OneMoreThing ::= INTEGER {wilma(0), fred(1), betty(3), barney(2)}
OneMoreThing-1 ::= OneMoreThing (wilma | fred)
OneMoreThing-2 ::= OneMoreThing (fred | barney)
I ::= INTEGER (0|15..269) -- OTP-5457
X1 ::= INTEGER (1..4 | 8 | 10 | 20) -- OTP-9946
-- OTP-5511
maxNrOfCellPortionsPerCell-1 INTEGER ::= 35
CellPortionID ::= INTEGER (0..maxNrOfCellPortionsPerCell-1,...)
-- OTP-6763
T ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3)) -- Dubuisson 268
T2 ::= IA5String (SIZE (1|2, ..., 3)) -- equal with T
-- OTP-8046
DateAndTime ::= VisibleString (PATTERN "\d#2/\d#2/\d#4-\d#2:\d#2")
-- DD/MM/YYYY-HH:MM
-- OTP-6828
HandoverCommand-r8-IEs ::= SEQUENCE {
handoverCommandMessage OCTET STRING (CONTAINING MyType),
...
}
MoreCompact ::= OCTET STRING (CONTAINING MyType ENCODED BY {joint-iso-itu-t asn1 packed-encoding(3) basic(0) unaligned(1)})
MyType ::= SEQUENCE {a INTEGER, b INTEGER}
Document ::= OCTET STRING (ENCODED BY pdf)
pdf OBJECT IDENTIFIER ::= {1,2,3,4,5}
ShorterExt ::= IA5String (SIZE (5, ...))
SeqOverlapping ::= SEQUENCE {
v Overlapping
}
SeqNonOverlapping ::= SEQUENCE {
v NonOverlapping
}
Overlapping ::= INTEGER (7280..7560 |
7580..7680 |
7910..8210 |
8600..8940 |
9250..9600 |
14759..15109 |
15250..15590 |
18050..18800 |
19300..19950 |
21100..21700 |
26200..26900 |
18500..19900 |
20100..20250 |
21100..21700 |
23000..24000 |
24960..26900)
-- The same intervals, but merged and sorted --
NonOverlapping ::= INTEGER (7280..7560 |
7580..7680 |
7910..8210 |
8600..8940 |
9250..9600 |
14759..15109 |
15250..15590 |
18050..19950 |
20100..20250 |
21100..21700 |
23000..24000 |
24960..26900)
--
-- Test INTEGER constraints from fields in objects.
--
INT-HOLDER ::= CLASS {
&id INTEGER UNIQUE,
&obj INT-HOLDER OPTIONAL
} WITH SYNTAX {
ID &id
[OBJ &obj]
}
int-holder-1 INT-HOLDER ::= { ID 2 }
int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 }
IntObjectConstr ::= INTEGER (int-holder-2.&obj.&id..int-holder-2.&id)
--
-- INTEGER constraints defined using named INTEGERs.
--
ConstrainedNamedInt ::= INTEGER {v1(42)} (v1)
constrainedNamedInt-1 INTEGER {v1(42)} (v1) ::= 42
constrainedNamedInt-2 ConstrainedNamedInt ::= 100
SeqWithNamedInt ::= SEQUENCE {
int INTEGER {v2(7)} (v2)
}
--
-- Cover simpletable constraint checking code.
--
ContentInfo ::= SEQUENCE {
contentType ContentType
}
Contents TYPE-IDENTIFIER ::= {
{OCTET STRING IDENTIFIED BY {2 1 1 1 1 1 1}}
}
ContentType ::= TYPE-IDENTIFIER.&id({Contents})
END
| apache-2.0 |
asimshankar/tensorflow | tensorflow/compiler/tests/variable_ops_test.py | 5 | 21654 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for reading and writing variables."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training.gradient_descent import GradientDescentOptimizer
class VariableOpsTest(xla_test.XLATestCase):
"""Test cases for resource variable operators."""
def testWriteEmptyShape(self):
# Verifies that we can pass an uninitialized variable with an empty shape,
# assign it a value, and successfully return it.
for dtype in self.numeric_types:
with self.test_session() as sess, self.test_scope():
zeros = np.zeros([3, 0], dtype=dtype)
v = resource_variable_ops.ResourceVariable(zeros)
p = array_ops.placeholder(dtype)
x = v.assign(p)
with ops.control_dependencies([x]):
y = v.read_value()
self.assertAllClose(zeros, sess.run(y, {p: zeros}))
def testOneWriteOneOutput(self):
# Regression test for a bug where computations with one non-constant
# output and one variable update were mishandled.
for dtype in self.numeric_types:
init = np.array([[1, 2j], [3, 4]]).astype(dtype)
with self.test_session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(init)
sess.run(variables.variables_initializer([v]))
p = array_ops.placeholder(dtype)
x = v.assign_add(p)
with ops.control_dependencies([x]):
y = v.read_value()
self.assertAllClose(
np.array([[2, 1 + 2j], [4, 5]]).astype(dtype), sess.run(y, {p: 1}))
def testSparseRead0DIndices(self):
for dtype in self.numeric_types:
init = np.array([[0, 1, 2, 3], [4, 5, 6, 7], [8j, 9, 10,
11]]).astype(dtype)
with self.test_session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(init)
sess.run(variables.variables_initializer([v]))
x = v.sparse_read(2)
self.assertAllClose(
np.array([8j, 9, 10, 11]).astype(dtype), self.evaluate(x))
def testSparseRead1DIndices(self):
for dtype in self.numeric_types:
init = np.array([[0, 1, 2, 3], [4, 5, 6j, 7], [8, 9, 10,
11]]).astype(dtype)
with self.test_session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(init)
sess.run(variables.variables_initializer([v]))
x = v.sparse_read([2, 1])
self.assertAllClose(
np.array([[8, 9, 10, 11], [4, 5, 6j, 7]]).astype(dtype),
self.evaluate(x))
def testSparseRead2DIndices(self):
for dtype in self.numeric_types:
init = np.array([[0, 1, 2j, 3], [4, 5, 6, 7], [8, 9, 10,
11]]).astype(dtype)
with self.test_session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(init)
sess.run(variables.variables_initializer([v]))
x = v.sparse_read([[2, 1], [0, 2]])
self.assertAllClose(
np.array([[[8, 9, 10, 11], [4, 5, 6, 7]],
[[0, 1, 2j, 3], [8, 9, 10, 11]]]).astype(dtype),
self.evaluate(x))
def testSparseRead2DIndices3DTensor(self):
for dtype in self.numeric_types:
init = np.array([[[0, 1, 2], [3, 4, 5]], [[10, 11, 12], [13, 14, 15]],
[[20, 21, 22], [23, 24j, 25]],
[[30, 31, 32], [33, 34, 35]]]).astype(dtype)
with self.test_session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(init)
sess.run(variables.variables_initializer([v]))
x = v.sparse_read([[2, 1], [3, 0]])
self.assertAllClose(
np.array(
[[[[20, 21, 22], [23, 24j, 25]], [[10, 11, 12], [13, 14, 15]]],
[[[30, 31, 32], [33, 34, 35]], [[0, 1, 2], [3, 4, 5]]]
],).astype(dtype), self.evaluate(x))
def testShape(self):
for dtype in self.numeric_types:
init = np.ones([2, 3]).astype(dtype)
with self.test_session() as session, self.test_scope():
v = resource_variable_ops.ResourceVariable(init)
session.run(variables.variables_initializer([v]))
h = v.handle
s32, s64 = session.run([
resource_variable_ops.variable_shape(h),
resource_variable_ops.variable_shape(h, out_type=dtypes.int64)
])
self.assertEqual(s32.dtype, np.int32)
self.assertEqual(s64.dtype, np.int64)
self.assertAllEqual(s32, [2, 3])
self.assertAllEqual(s64, [2, 3])
def testReadWrite(self):
"""Tests initialization, reading, and writing a resource variable."""
for dtype in self.numeric_types:
with self.test_session() as session:
with self.test_scope():
with variable_scope.variable_scope("ascope", use_resource=True):
x = variable_scope.get_variable(
"x",
shape=[],
dtype=dtype,
initializer=init_ops.constant_initializer(2))
a = x.read_value()
with ops.control_dependencies([a]):
b = state_ops.assign(x, dtype(47))
with ops.control_dependencies([b]):
c = x.read_value()
with ops.control_dependencies([c]):
d = state_ops.assign_add(x, np.array(6 + 2j).astype(dtype))
with ops.control_dependencies([d]):
e = state_ops.assign_sub(x, dtype(3))
with ops.control_dependencies([e]):
f = x.read_value()
session.run(variables.global_variables_initializer())
v1, v2, v3 = session.run([a, c, f])
self.assertAllClose(dtype(2), v1)
self.assertAllClose(dtype(47), v2)
self.assertAllClose(np.array(50 + 2j).astype(dtype), v3)
def testTraining(self):
"""Tests a gradient descent step for a simple model."""
with self.test_session() as session:
with self.test_scope():
with variable_scope.variable_scope("ascope", use_resource=True):
w = variable_scope.get_variable(
"w",
shape=[4, 2],
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
np.array([[1, 2], [3, 4], [5, 6], [7, 8]], dtype=np.float32)))
b = variable_scope.get_variable(
"b",
shape=[2],
dtype=dtypes.float32,
initializer=init_ops.constant_initializer(
np.array([2, 3], dtype=np.float32)))
x = array_ops.placeholder(dtypes.float32, shape=[1, 4])
y = math_ops.matmul(x, w) + b
loss = math_ops.reduce_sum(y)
optimizer = GradientDescentOptimizer(0.1)
train = optimizer.minimize(loss)
session.run(variables.global_variables_initializer())
session.run(train, {x: np.array([[7, 3, 5, 9]], dtype=np.float32)})
vw, vb = session.run([w, b])
self.assertAllClose(
np.array(
[[0.3, 1.3], [2.7, 3.7], [4.5, 5.5], [6.1, 7.1]],
dtype=np.float32),
vw,
rtol=1e-4)
self.assertAllClose(np.array([1.9, 2.9], dtype=np.float32), vb, rtol=1e-4)
def testWriteOfAliasedTensor(self):
for dtype in self.numeric_types:
init = np.array([[1, 2j], [3, 4]]).astype(dtype)
update = np.array([[7, 1j], [2, 11]]).astype(dtype)
with self.test_session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable(init)
sess.run(variables.variables_initializer([v]))
p = array_ops.placeholder(dtype)
q = array_ops.identity(p)
x = v.read_value()
# Writes the value of 'p' to 'v', but keeps a reference to the original
# value of 'v' so the variable update cannot reuse its buffer.
with ops.control_dependencies([x]):
y = v.assign(q)
result = sess.run([x, y, q], {p: update})
self.assertAllClose(init, result[0])
self.assertAllClose(update, result[1])
self.assertAllClose(update, result[2])
def testScatterAdd(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[2, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1], [7]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertAllEqual(self.evaluate(read), [[3], [7]])
def testScatterSub(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[2, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[4], [1]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_sub(
handle, [1], constant_op.constant([[2]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertAllEqual(self.evaluate(read), [[4], [-1]])
def testScatterMul(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant([[5]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
def testScatterDiv(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertAllEqual(self.evaluate(read), [[2]])
def testScatterMin(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_min(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
def testScatterMax(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
def testScatterUpdate(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_update(
handle, [0], constant_op.constant([[3]], dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
def testScatterAddScalar(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_add(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
def testScatterSubScalar(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_sub(
handle, [0], constant_op.constant(2, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[-1]])
def testScatterMulScalar(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[1]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_mul(
handle, [0], constant_op.constant(5, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[5]])
def testScatterDivScalar(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_div(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[2]])
def testScatterMinScalar(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_min(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[3]])
def testScatterMaxScalar(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.int32, shape=[1, 1])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([[6]], dtype=dtypes.int32)))
sess.run(
resource_variable_ops.resource_scatter_max(
handle, [0], constant_op.constant(3, dtype=dtypes.int32)))
read = resource_variable_ops.read_variable_op(handle, dtype=dtypes.int32)
self.assertEqual(self.evaluate(read), [[6]])
def testScatterNdAddOps(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.float32, shape=[8])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1] * 8, dtype=dtypes.float32)))
indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
expected = np.array([1, 12, 1, 11, 10, 1, 1, 13])
sess.run(gen_state_ops.resource_scatter_nd_add(handle, indices, updates))
read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.float32)
self.assertAllClose(expected, self.evaluate(read))
def testScatterNdUpdateAddOps(self):
with self.test_session() as sess, self.test_scope():
handle = resource_variable_ops.var_handle_op(
dtype=dtypes.float32, shape=[8])
sess.run(
resource_variable_ops.assign_variable_op(
handle, constant_op.constant([1] * 8, dtype=dtypes.float32)))
indices = constant_op.constant([[4], [3], [1], [7]], dtype=dtypes.int32)
updates = constant_op.constant([9, 10, 11, 12], dtype=dtypes.float32)
expected = np.array([1, 11, 1, 10, 9, 1, 1, 12])
sess.run(
gen_state_ops.resource_scatter_nd_update(handle, indices, updates))
read = resource_variable_ops.read_variable_op(
handle, dtype=dtypes.float32)
self.assertAllClose(expected, self.evaluate(read))
class StridedSliceAssignChecker(object):
"""Compares the results of a slice assignment using Tensorflow and numpy."""
def __init__(self, test, x, dtype):
self.dtype = dtype
self.test = test
self.x_np = np.array(x).astype(dtype)
# Randomly start on mode 0 or 1.
self.which_mode = np.random.randint(2, size=1)[0]
def __setitem__(self, index, value):
self.which_mode = 1 - self.which_mode
value = np.array(value).astype(self.dtype)
with self.test.test_session() as sess, self.test.test_scope():
x = constant_op.constant(self.x_np, dtype=self.dtype)
var = resource_variable_ops.ResourceVariable(x)
sess.run(variables.variables_initializer([var]))
if self.which_mode == 0:
val = sess.run(var[index].assign(value))
else:
assert self.which_mode == 1
val = sess.run(state_ops.assign(var[index], value))
valnp = np.copy(self.x_np)
valnp[index] = np.array(value)
self.test.assertAllEqual(val, valnp)
class SliceAssignTest(xla_test.XLATestCase):
def testSliceAssign(self):
for dtype in self.numeric_types:
checker = StridedSliceAssignChecker(
self, [[1, 2, 3], [4, 5, 6]], dtype=dtype)
# No-op assignment
checker[:] = [[10, 20, 30], [40, 50, 60]]
# Checks trivial (1,1) shape tensor
checker[1:2, 1:2] = [[66]]
# shrink shape changes
checker[1:2, 1] = [66]
checker[1, 1:2] = [66]
if dtype != dtypes.bfloat16.as_numpy_dtype:
# TODO(b/68813416): valnp call above results in an ndarray and not a
# number for bfloat16s.
checker[1, 1] = 66
# newaxis shape changes
checker[:, None, :] = [[[10, 20, 30]], [[40, 50, 50]]]
# shrink and newaxis
checker[None, None, 0, 0:1] = [[[99]]]
# Non unit strides
checker[::1, 1::-1] = [[3, 33], [4, 44]]
# degenerate interval
checker[8:10, 0] = []
checker[8:10, 8:10] = [[]]
# Assign vector to scalar (rank-0) using newaxis
checker2 = StridedSliceAssignChecker(self, 222, dtype=dtype)
if dtype != dtypes.bfloat16.as_numpy_dtype:
# TODO(b/68813416): valnp call above results in an ndarray and not a
# number for bfloat16s.
checker2[()] = 6 # no indices
checker2[...] = 6 # ellipsis
checker2[None] = [6] # new axis
def testUninitialized(self):
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"uninitialized variable"):
with self.test_session() as sess, self.test_scope():
v = resource_variable_ops.ResourceVariable([1, 2])
sess.run(v[:].assign([1, 2]))
if __name__ == "__main__":
googletest.main()
| apache-2.0 |
mattseymour/django | django/template/loaders/cached.py | 2 | 3743 | """
Wrapper class that takes a list of template loaders as an argument and attempts
to load templates from them in order, caching the result.
"""
import hashlib
from django.template import TemplateDoesNotExist
from django.template.backends.django import copy_exception
from django.utils.encoding import force_bytes, force_text
from .base import Loader as BaseLoader
class Loader(BaseLoader):
def __init__(self, engine, loaders):
self.template_cache = {}
self.get_template_cache = {}
self.loaders = engine.get_template_loaders(loaders)
super(Loader, self).__init__(engine)
def get_contents(self, origin):
return origin.loader.get_contents(origin)
def get_template(self, template_name, skip=None):
"""
Perform the caching that gives this loader its name. Often many of the
templates attempted will be missing, so memory use is of concern here.
To keep it in check, caching behavior is a little complicated when a
template is not found. See ticket #26306 for more details.
With template debugging disabled, cache the TemplateDoesNotExist class
for every missing template and raise a new instance of it after
fetching it from the cache.
With template debugging enabled, a unique TemplateDoesNotExist object
is cached for each missing template to preserve debug data. When
raising an exception, Python sets __traceback__, __context__, and
__cause__ attributes on it. Those attributes can contain references to
all sorts of objects up the call chain and caching them creates a
memory leak. Thus, unraised copies of the exceptions are cached and
copies of those copies are raised after they're fetched from the cache.
"""
key = self.cache_key(template_name, skip)
cached = self.get_template_cache.get(key)
if cached:
if isinstance(cached, type) and issubclass(cached, TemplateDoesNotExist):
raise cached(template_name)
elif isinstance(cached, TemplateDoesNotExist):
raise copy_exception(cached)
return cached
try:
template = super(Loader, self).get_template(template_name, skip)
except TemplateDoesNotExist as e:
self.get_template_cache[key] = copy_exception(e) if self.engine.debug else TemplateDoesNotExist
raise
else:
self.get_template_cache[key] = template
return template
def get_template_sources(self, template_name):
for loader in self.loaders:
for origin in loader.get_template_sources(template_name):
yield origin
def cache_key(self, template_name, skip=None):
"""
Generate a cache key for the template name, dirs, and skip.
If skip is provided, only origins that match template_name are included
in the cache key. This ensures each template is only parsed and cached
once if contained in different extend chains like:
x -> a -> a
y -> a -> a
z -> a -> a
"""
dirs_prefix = ''
skip_prefix = ''
if skip:
matching = [origin.name for origin in skip if origin.template_name == template_name]
if matching:
skip_prefix = self.generate_hash(matching)
return '-'.join(filter(bool, [force_text(template_name), skip_prefix, dirs_prefix]))
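    # For instance (illustrative): cache_key('index.html') is just 'index.html',
    # while a lookup that skips already-visited origins becomes something like
    # 'index.html-<sha1 of the matching skipped origin names>'.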
def generate_hash(self, values):
return hashlib.sha1(force_bytes('|'.join(values))).hexdigest()
def reset(self):
"Empty the template cache."
self.template_cache.clear()
self.get_template_cache.clear()
| bsd-3-clause |
hashem78/G-Kernel | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <[email protected]>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions's names and the the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
	Wait some time, but not too much; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
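# Illustrative input/output (the trace line is invented but follows the
# function tracer format expected by the regex above):
#
#   parseLine("bash-4251  [001]  1237.125632: do_sys_open <-sys_open")
#   -> ("1237.125632", "do_sys_open", "sys_open")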
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
| gpl-2.0 |
auferack08/edx-platform | common/lib/xmodule/xmodule/tests/test_conditional.py | 37 | 12428 | import json
import unittest
from fs.memoryfs import MemoryFS
from mock import Mock, patch
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from xmodule.error_module import NonStaffErrorDescriptor
from opaque_keys.edx.locations import SlashSeparatedCourseKey, Location
from xmodule.modulestore.xml import ImportSystem, XMLModuleStore, CourseLocationGenerator
from xmodule.conditional_module import ConditionalDescriptor
from xmodule.tests import DATA_DIR, get_test_system, get_test_descriptor_system
from xmodule.x_module import STUDENT_VIEW
ORG = 'test_org'
COURSE = 'conditional' # name of directory with course data
class DummySystem(ImportSystem):
@patch('xmodule.modulestore.xml.OSFS', lambda directory: MemoryFS())
def __init__(self, load_error_modules):
xmlstore = XMLModuleStore("data_dir", course_dirs=[], load_error_modules=load_error_modules)
super(DummySystem, self).__init__(
xmlstore=xmlstore,
course_id=SlashSeparatedCourseKey(ORG, COURSE, 'test_run'),
course_dir='test_dir',
error_tracker=Mock(),
parent_tracker=Mock(),
load_error_modules=load_error_modules,
)
def render_template(self, template, context):
raise Exception("Shouldn't be called")
class ConditionalFactory(object):
"""
A helper class to create a conditional module and associated source and child modules
to allow for testing.
"""
@staticmethod
def create(system, source_is_error_module=False):
"""
return a dict of modules: the conditional with a single source and a single child.
Keys are 'cond_module', 'source_module', and 'child_module'.
if the source_is_error_module flag is set, create a real ErrorModule for the source.
"""
descriptor_system = get_test_descriptor_system()
# construct source descriptor and module:
source_location = Location("edX", "conditional_test", "test_run", "problem", "SampleProblem", None)
if source_is_error_module:
# Make an error descriptor and module
source_descriptor = NonStaffErrorDescriptor.from_xml(
'some random xml data',
system,
id_generator=CourseLocationGenerator(SlashSeparatedCourseKey('edX', 'conditional_test', 'test_run')),
error_msg='random error message'
)
else:
source_descriptor = Mock()
source_descriptor.location = source_location
source_descriptor.runtime = descriptor_system
source_descriptor.render = lambda view, context=None: descriptor_system.render(source_descriptor, view, context)
# construct other descriptors:
child_descriptor = Mock()
child_descriptor._xmodule.student_view.return_value.content = u'<p>This is a secret</p>'
child_descriptor.student_view = child_descriptor._xmodule.student_view
child_descriptor.displayable_items.return_value = [child_descriptor]
child_descriptor.runtime = descriptor_system
child_descriptor.xmodule_runtime = get_test_system()
child_descriptor.render = lambda view, context=None: descriptor_system.render(child_descriptor, view, context)
child_descriptor.location = source_location.replace(category='html', name='child')
descriptor_system.load_item = {
child_descriptor.location: child_descriptor,
source_location: source_descriptor
}.get
# construct conditional module:
cond_location = Location("edX", "conditional_test", "test_run", "conditional", "SampleConditional", None)
field_data = DictFieldData({
'data': '<conditional/>',
'xml_attributes': {'attempted': 'true'},
'children': [child_descriptor.location],
})
cond_descriptor = ConditionalDescriptor(
descriptor_system,
field_data,
ScopeIds(None, None, cond_location, cond_location)
)
cond_descriptor.xmodule_runtime = system
system.get_module = lambda desc: desc
cond_descriptor.get_required_module_descriptors = Mock(return_value=[source_descriptor])
# return dict:
return {'cond_module': cond_descriptor,
'source_module': source_descriptor,
'child_module': child_descriptor}
class ConditionalModuleBasicTest(unittest.TestCase):
"""
Make sure that conditional module works, using mocks for
other modules.
"""
def setUp(self):
self.test_system = get_test_system()
def test_icon_class(self):
        '''verify that get_icon_class works independently of condition satisfaction'''
modules = ConditionalFactory.create(self.test_system)
for attempted in ["false", "true"]:
for icon_class in ['other', 'problem', 'video']:
modules['source_module'].is_attempted = attempted
modules['child_module'].get_icon_class = lambda: icon_class
self.assertEqual(modules['cond_module'].get_icon_class(), icon_class)
def test_get_html(self):
modules = ConditionalFactory.create(self.test_system)
# because get_test_system returns the repr of the context dict passed to render_template,
# we reverse it here
html = modules['cond_module'].render(STUDENT_VIEW).content
expected = modules['cond_module'].xmodule_runtime.render_template('conditional_ajax.html', {
'ajax_url': modules['cond_module'].xmodule_runtime.ajax_url,
'element_id': u'i4x-edX-conditional_test-conditional-SampleConditional',
'depends': u'i4x-edX-conditional_test-problem-SampleProblem',
})
self.assertEquals(expected, html)
def test_handle_ajax(self):
modules = ConditionalFactory.create(self.test_system)
modules['source_module'].is_attempted = "false"
ajax = json.loads(modules['cond_module'].handle_ajax('', ''))
modules['cond_module'].save()
print "ajax: ", ajax
html = ajax['html']
self.assertFalse(any(['This is a secret' in item for item in html]))
# now change state of the capa problem to make it completed
modules['source_module'].is_attempted = "true"
ajax = json.loads(modules['cond_module'].handle_ajax('', ''))
modules['cond_module'].save()
print "post-attempt ajax: ", ajax
html = ajax['html']
self.assertTrue(any(['This is a secret' in item for item in html]))
def test_error_as_source(self):
'''
Check that handle_ajax works properly if the source is really an ErrorModule,
and that the condition is not satisfied.
'''
modules = ConditionalFactory.create(self.test_system, source_is_error_module=True)
ajax = json.loads(modules['cond_module'].handle_ajax('', ''))
modules['cond_module'].save()
html = ajax['html']
self.assertFalse(any(['This is a secret' in item for item in html]))
class ConditionalModuleXmlTest(unittest.TestCase):
"""
Make sure ConditionalModule works, by loading data in from an XML-defined course.
"""
@staticmethod
def get_system(load_error_modules=True):
'''Get a dummy system'''
return DummySystem(load_error_modules)
def setUp(self):
self.test_system = get_test_system()
def get_course(self, name):
"""Get a test course by directory name. If there's more than one, error."""
print "Importing {0}".format(name)
modulestore = XMLModuleStore(DATA_DIR, course_dirs=[name])
courses = modulestore.get_courses()
self.modulestore = modulestore
self.assertEquals(len(courses), 1)
return courses[0]
def test_conditional_module(self):
"""Make sure that conditional module works"""
print "Starting import"
course = self.get_course('conditional_and_poll')
print "Course: ", course
print "id: ", course.id
def inner_get_module(descriptor):
if isinstance(descriptor, Location):
location = descriptor
descriptor = self.modulestore.get_item(location, depth=None)
descriptor.xmodule_runtime = get_test_system()
descriptor.xmodule_runtime.get_module = inner_get_module
return descriptor
# edx - HarvardX
# cond_test - ER22x
location = Location("HarvardX", "ER22x", "2013_Spring", "conditional", "condone")
def replace_urls(text, staticfiles_prefix=None, replace_prefix='/static/', course_namespace=None):
return text
self.test_system.replace_urls = replace_urls
self.test_system.get_module = inner_get_module
module = inner_get_module(location)
print "module: ", module
print "module children: ", module.get_children()
print "module display items (children): ", module.get_display_items()
html = module.render(STUDENT_VIEW).content
print "html type: ", type(html)
print "html: ", html
html_expect = module.xmodule_runtime.render_template(
'conditional_ajax.html',
{
# Test ajax url is just usage-id / handler_name
'ajax_url': '{}/xmodule_handler'.format(location.to_deprecated_string()),
'element_id': u'i4x-HarvardX-ER22x-conditional-condone',
'depends': u'i4x-HarvardX-ER22x-problem-choiceprob'
}
)
self.assertEqual(html, html_expect)
gdi = module.get_display_items()
print "gdi=", gdi
ajax = json.loads(module.handle_ajax('', ''))
module.save()
print "ajax: ", ajax
html = ajax['html']
self.assertFalse(any(['This is a secret' in item for item in html]))
# Now change state of the capa problem to make it completed
inner_module = inner_get_module(location.replace(category="problem", name='choiceprob'))
inner_module.attempts = 1
# Save our modifications to the underlying KeyValueStore so they can be persisted
inner_module.save()
ajax = json.loads(module.handle_ajax('', ''))
module.save()
print "post-attempt ajax: ", ajax
html = ajax['html']
self.assertTrue(any(['This is a secret' in item for item in html]))
def test_conditional_module_with_empty_sources_list(self):
"""
If a ConditionalDescriptor is initialized with an empty sources_list, we assert that the sources_list is set
        by generating UsageKeys from the values in xml_attributes['sources']
"""
dummy_system = Mock()
dummy_location = Location("edX", "conditional_test", "test_run", "conditional", "SampleConditional", None)
dummy_scope_ids = ScopeIds(None, None, dummy_location, dummy_location)
dummy_field_data = DictFieldData({
'data': '<conditional/>',
'xml_attributes': {'sources': 'i4x://HarvardX/ER22x/poll_question/T15_poll'},
'children': None,
})
conditional = ConditionalDescriptor(
dummy_system,
dummy_field_data,
dummy_scope_ids,
)
self.assertEqual(
conditional.sources_list[0],
conditional.location.course_key.make_usage_key_from_deprecated_string(conditional.xml_attributes['sources'])
)
def test_conditional_module_parse_sources(self):
dummy_system = Mock()
dummy_location = Location("edX", "conditional_test", "test_run", "conditional", "SampleConditional", None)
dummy_scope_ids = ScopeIds(None, None, dummy_location, dummy_location)
dummy_field_data = DictFieldData({
'data': '<conditional/>',
'xml_attributes': {'sources': 'i4x://HarvardX/ER22x/poll_question/T15_poll;i4x://HarvardX/ER22x/poll_question/T16_poll'},
'children': None,
})
conditional = ConditionalDescriptor(
dummy_system,
dummy_field_data,
dummy_scope_ids,
)
self.assertEqual(
conditional.parse_sources(conditional.xml_attributes),
['i4x://HarvardX/ER22x/poll_question/T15_poll', 'i4x://HarvardX/ER22x/poll_question/T16_poll']
)
| agpl-3.0 |
EPDCenter/android_kernel_bq_dc_v1 | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
# and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval (msec) from src (nsec) to dst (nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrupts associated with
# a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
# check if this hunk should be showed
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, rc ,dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
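# Illustrative invocation (an assumption based on the options documented at the
# top of this script; the device name is just an example):
#   perf script record netdev-times
#   perf script report netdev-times dev=eth0 debug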
| gpl-2.0 |
abhi11/tanglu-dak | daklib/daksubprocess.py | 7 | 2601 | """subprocess management for dak
@copyright: 2013, Ansgar Burchardt <[email protected]>
@license: GPL-2+
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import signal
import subprocess
#
def fix_signal_handlers():
"""reset signal handlers to default action.
Python changes the signal handler to SIG_IGN for a few signals which
causes unexpected behaviour in child processes. This function resets
them to their default action.
Reference: http://bugs.python.org/issue1652
"""
for signal_name in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
try:
signal_number = getattr(signal, signal_name)
signal.signal(signal_number, signal.SIG_DFL)
except AttributeError:
pass
def _generate_preexec_fn(other_preexec_fn=None):
def preexec_fn():
fix_signal_handlers()
if other_preexec_fn is not None:
other_preexec_fn()
return preexec_fn
def call(*args, **kwargs):
"""wrapper around subprocess.call that fixes signal handling"""
preexec_fn = _generate_preexec_fn(kwargs.get('preexec_fn'))
kwargs['preexec_fn'] = preexec_fn
return subprocess.call(*args, **kwargs)
def check_call(*args, **kwargs):
"""wrapper around subprocess.check_call that fixes signal handling"""
preexec_fn = _generate_preexec_fn(kwargs.get('preexec_fn'))
kwargs['preexec_fn'] = preexec_fn
return subprocess.check_call(*args, **kwargs)
def check_output(*args, **kwargs):
"""wrapper around subprocess.check_output that fixes signal handling"""
preexec_fn = _generate_preexec_fn(kwargs.get('preexec_fn'))
kwargs['preexec_fn'] = preexec_fn
return subprocess.check_output(*args, **kwargs)
def Popen(*args, **kwargs):
"""wrapper around subprocess.Popen that fixes signal handling"""
preexec_fn = _generate_preexec_fn(kwargs.get('preexec_fn'))
kwargs['preexec_fn'] = preexec_fn
return subprocess.Popen(*args, **kwargs)
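# Illustrative usage (a sketch, not part of the original module; the command
# lists are arbitrary examples):
#
#   from daklib.daksubprocess import check_output, Popen
#   listing = check_output(['ls', '-l'])
#   proc = Popen(['cat'], stdin=subprocess.PIPE)
#
# Each wrapper forwards to its subprocess counterpart, but the child process
# starts with SIGPIPE/SIGXFZ/SIGXFSZ reset to their default actions.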
| gpl-2.0 |
eliasdesousa/indico | indico/modules/events/registration/placeholders/registrations.py | 2 | 3930 | # This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from markupsafe import Markup, escape
from indico.modules.events.registration.models.items import PersonalDataType
from indico.util.i18n import _
from indico.util.placeholders import ParametrizedPlaceholder, Placeholder
from indico.web.flask.util import url_for
class FirstNamePlaceholder(Placeholder):
name = 'first_name'
description = _("First name of the person")
@classmethod
def render(cls, regform, registration):
return registration.first_name
class LastNamePlaceholder(Placeholder):
name = 'last_name'
description = _("Last name of the person")
@classmethod
def render(cls, regform, registration):
return registration.last_name
class EventTitlePlaceholder(Placeholder):
name = 'event_title'
description = _("The title of the event")
@classmethod
def render(cls, regform, registration):
return registration.registration_form.event.title
class EventLinkPlaceholder(Placeholder):
name = 'event_link'
description = _("Link to the event")
@classmethod
def render(cls, regform, registration):
regform = registration.registration_form
return Markup('<a href="{url}" title="{title}">{url}</a>'.format(url=regform.event.short_external_url,
title=escape(regform.event.title)))
class IDPlaceholder(Placeholder):
name = 'id'
description = _("The ID of the registration")
@classmethod
def render(cls, regform, registration):
return registration.friendly_id
class LinkPlaceholder(Placeholder):
name = 'link'
description = _("The link to the registration details")
@classmethod
def render(cls, regform, registration):
url = url_for('.display_regform', registration.registration_form, token=registration.uuid, _external=True)
return Markup('<a href="{url}">{url}</a>'.format(url=url))
class FieldPlaceholder(ParametrizedPlaceholder):
name = 'field'
description = None
param_required = True
param_restricted = True
advanced = True
@classmethod
def render(cls, param, regform, registration):
if ':' in param:
field_id, key = param.split(':', 1)
else:
field_id = param
key = None
data = registration.data_by_field.get(int(field_id))
if data is None:
return ''
rv = data.field_data.field.field_impl.render_placeholder(data, key)
if isinstance(rv, list):
rv = ', '.join(rv)
return rv or '-'
@classmethod
def iter_param_info(cls, regform, registration):
own_placeholder_types = {PersonalDataType.email, PersonalDataType.first_name, PersonalDataType.last_name}
for field in sorted(regform.active_fields, key=lambda x: (x.parent.position, x.position)):
if field.personal_data_type in own_placeholder_types:
continue
for key, description in field.field_impl.iter_placeholder_info():
name = unicode(field.id) if key is None else '{}:{}'.format(field.id, key)
yield name, description
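# Illustrative sketch (not part of the original module; `regform` and
# `registration` stand for an existing registration form and registration,
# and the field id/key values are placeholders for illustration only):
#
#   FirstNamePlaceholder.render(regform, registration)           # -> registration.first_name
#   FieldPlaceholder.render('42', regform, registration)         # data of field id 42
#   FieldPlaceholder.render('42:somekey', regform, registration) # field id 42, sub-key 'somekey'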
| gpl-3.0 |
Liyier/learning_log | env/Lib/site-packages/django/test/client.py | 44 | 28212 | from __future__ import unicode_literals
import json
import mimetypes
import os
import re
import sys
from copy import copy
from importlib import import_module
from io import BytesIO
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import ISO_8859_1, UTF_8, WSGIRequest
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.template import TemplateDoesNotExist
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils import six
from django.utils.encoding import force_bytes, force_str, uri_to_iri
from django.utils.functional import SimpleLazyObject, curry
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.six.moves.urllib.parse import urljoin, urlparse, urlsplit
__all__ = ('Client', 'RedirectCycleError', 'RequestFactory', 'encode_file', 'encode_multipart')
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = re.compile(r'.*; charset=([\w\d-]+);?')
# JSON Vendor Tree spec: https://tools.ietf.org/html/rfc6838#section-3.2
JSON_CONTENT_TYPE_RE = re.compile(r'^application\/(vnd\..+\+)?json')
class RedirectCycleError(Exception):
"""
The test client has been asked to follow a redirect loop.
"""
def __init__(self, message, last_response):
super(RedirectCycleError, self).__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload(object):
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be seeked and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in Real Life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
            raise ValueError("Unable to write a payload after it has been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
try:
for item in iterable:
yield item
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most Web servers by removing the content of
responses for HEAD requests, 1xx, 204, and 304 responses. Ensures
compliance with RFC 7230, section 3.3.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b''
response['Content-Length'] = '0'
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = b''
return response
class ClientHandler(BaseHandler):
"""
A HTTP Handler that can be used for testing purposes. Uses the WSGI
interface to compose requests, but returns the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super(ClientHandler, self).__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
# sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# We're emulating a WSGI server; we must call the close method
# on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Stores templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
if 'context' not in store:
store['context'] = ContextList()
store['context'].append(copy(context))
def encode_multipart(boundary, data):
"""
Encodes multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, six.string_types) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
])
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# file.name might not be a string. For example, it's an int for
# tempfile.TemporaryFile().
file_has_string_name = hasattr(file, 'name') and isinstance(file.name, six.string_types)
filename = os.path.basename(file.name) if file_has_string_name else ''
if hasattr(file, 'content_type'):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = 'application/octet-stream'
if not filename:
filename = key
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, filename)),
to_bytes('Content-Type: %s' % content_type),
b'',
to_bytes(file.read())
]
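# Illustrative usage (a sketch, not part of Django; the form values are
# arbitrary examples):
#
#   payload = encode_multipart(BOUNDARY, {'name': 'value'})
#
# `payload` is the bytes body to submit with content type MULTIPART_CONTENT,
# which is what RequestFactory.post() does internally for form data.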
class RequestFactory(object):
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, **defaults):
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See http://www.python.org/dev/peps/pep-3333/#environ-variables
environ = {
'HTTP_COOKIE': self.cookies.output(header='', sep='; '),
'PATH_INFO': str('/'),
'REMOTE_ADDR': str('127.0.0.1'),
'REQUEST_METHOD': str('GET'),
'SCRIPT_NAME': str(''),
'SERVER_NAME': str('testserver'),
'SERVER_PORT': str('80'),
'SERVER_PROTOCOL': str('HTTP/1.1'),
'wsgi.version': (1, 0),
'wsgi.url_scheme': str('http'),
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
}
environ.update(self.defaults)
environ.update(request)
return environ
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match.group(1)
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _get_path(self, parsed):
path = force_str(parsed[2])
# If there are parameters, add them
if parsed[3]:
path += str(";") + force_str(parsed[3])
path = uri_to_iri(path).encode(UTF_8)
# Under Python 3, non-ASCII values in the WSGI environ are arbitrarily
# decoded with ISO-8859-1. We replicate this behavior here.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode(ISO_8859_1) if six.PY3 else path
def get(self, path, data=None, secure=False, **extra):
"Construct a GET request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('GET', path, secure=secure, **r)
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"Construct a POST request."
data = {} if data is None else data
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"Construct a HEAD request."
data = {} if data is None else data
r = {
'QUERY_STRING': urlencode(data, doseq=True),
}
r.update(extra)
return self.generic('HEAD', path, secure=secure, **r)
def trace(self, path, secure=False, **extra):
"Construct a TRACE request."
return self.generic('TRACE', path, secure=secure, **extra)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PUT request."
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a PATCH request."
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct a DELETE request."
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Constructs an arbitrary HTTP request."""
parsed = urlparse(force_str(path))
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': str(method),
'SERVER_PORT': str('443') if secure else str('80'),
'wsgi.url_scheme': str('https') if secure else str('http'),
}
if data:
r.update({
'CONTENT_LENGTH': len(data),
'CONTENT_TYPE': str(content_type),
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
query_string = force_bytes(parsed[4])
# WSGI requires latin-1 encoded strings. See get_path_info().
if six.PY3:
query_string = query_string.decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
class Client(RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, **defaults):
super(Client, self).__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.exc_info = None
def store_exc_info(self, **kwargs):
"""
Stores exceptions when they are generated by a view.
"""
self.exc_info = sys.exc_info()
@property
def session(self):
"""
Obtains the current session variables.
"""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
def request(self, **request):
"""
The master request method. Composes the environment dictionary
        and passes it to the handler, returning the handler's result.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = curry(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
try:
response = self.handler(environ)
except TemplateDoesNotExist as e:
# If the view raises an exception, Django will attempt to show
# the 500.html template. If that template is not available,
# we should ignore the error in favor of re-raising the
# underlying exception that caused the 500 error. Any other
# template found to be missing during view error handling
# should be reported as-is.
if e.args != ('500.html',):
raise
# Look for a signalled exception, clear the current context
# exception data, then re-raise the signalled exception.
# Also make sure that the signalled exception is cleared from
# the local cache!
if self.exc_info:
exc_info = self.exc_info
self.exc_info = None
six.reraise(*exc_info)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = curry(self._parse_json, response)
# Attach the ResolverMatch instance to the response
response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to
# the __getattr__ flattening in ContextList, but has some edge-case
# backwards-compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
def get(self, path, data=None, follow=False, secure=False, **extra):
"""
Requests a response from the server using GET.
"""
response = super(Client, self).get(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""
Requests a response from the server using POST.
"""
response = super(Client, self).post(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""
Request a response from the server using HEAD.
"""
response = super(Client, self).head(path, data=data, secure=secure,
**extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Request a response from the server using OPTIONS.
"""
response = super(Client, self).options(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PUT.
"""
response = super(Client, self).put(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a resource to the server using PATCH.
"""
response = super(Client, self).patch(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""
Send a DELETE request to the server.
"""
response = super(Client, self).delete(path, data=data,
content_type=content_type,
secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""
Send a TRACE request to the server.
"""
response = super(Client, self).trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, **extra)
return response
def login(self, **credentials):
"""
Sets the Factory to appear as if it has successfully logged into a site.
Returns True if login is possible; False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
else:
return False
def force_login(self, user, backend=None):
def get_backend():
from django.contrib.auth import load_backend
for backend_path in settings.AUTHENTICATION_BACKENDS:
backend = load_backend(backend_path)
if hasattr(backend, 'get_user'):
return backend_path
if backend is None:
backend = get_backend()
user.backend = backend
self._login(user, backend)
def _login(self, user, backend=None):
from django.contrib.auth import login
engine = import_module(settings.SESSION_ENGINE)
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""
Removes the authenticated user's cookies and session object.
Causes the authenticated user to be logged out.
"""
from django.contrib.auth import get_user, logout
request = HttpRequest()
engine = import_module(settings.SESSION_ENGINE)
if self.session:
request.session = self.session
request.user = get_user(request)
else:
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if not hasattr(response, '_json'):
if not JSON_CONTENT_TYPE_RE.match(response.get('Content-Type')):
raise ValueError(
'Content-Type header is "{0}", not "application/json"'
.format(response.get('Content-Type'))
)
response._json = json.loads(response.content.decode(), **extra)
return response._json
def _handle_redirects(self, response, **extra):
"Follows any redirects by requesting responses from the server using GET."
response.redirect_chain = []
while response.status_code in (301, 302, 303, 307):
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
# Prepend the request path to handle relative path redirects
path = url.path
if not path.startswith('/'):
path = urljoin(response.request['PATH_INFO'], path)
response = self.get(path, QueryDict(url.query), follow=False, **extra)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError("Redirect loop detected.", last_response=response)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
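# Illustrative usage (a sketch, not part of Django; the URL and credentials are
# assumptions):
#
#   client = Client()
#   response = client.post('/accounts/login/', {'username': 'bob', 'password': 'secret'}, follow=True)
#   response.status_code      # final status after following redirects
#   response.redirect_chain   # [(url, status_code), ...] built by _handle_redirects()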
| mit |
Perferom/android_external_chromium_org | native_client_sdk/src/tools/tests/chrome_mock.py | 107 | 1498 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import sys
import time
import urllib2
def PrintAndFlush(s):
print s
sys.stdout.flush()
def main(args):
parser = optparse.OptionParser(usage='%prog [options] <URL to load>')
parser.add_option('--post', help='POST to URL.', dest='post',
action='store_true')
parser.add_option('--get', help='GET to URL.', dest='get',
action='store_true')
parser.add_option('--sleep',
help='Number of seconds to sleep after reading URL',
dest='sleep', default=0)
parser.add_option('--expect-to-be-killed', help='If set, the script will warn'
' if it isn\'t killed before it finishes sleeping.',
dest='expect_to_be_killed', action='store_true')
options, args = parser.parse_args(args)
if len(args) != 1:
parser.error('Expected URL to load.')
PrintAndFlush('Starting %s.' % sys.argv[0])
if options.post:
urllib2.urlopen(args[0], data='').read()
elif options.get:
urllib2.urlopen(args[0]).read()
else:
# Do nothing but wait to be killed.
pass
time.sleep(float(options.sleep))
if options.expect_to_be_killed:
PrintAndFlush('Done sleeping. Expected to be killed.')
sys.exit(0)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
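# Example invocation (illustrative; the URL is an assumption):
#   python chrome_mock.py --get --sleep=5 --expect-to-be-killed http://localhost:8000/page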
| bsd-3-clause |
renzon/scripts3tarde | backend/appengine/manager.py | 31 | 37077 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import importlib
import sys
import os
import shutil
if 'GAE_SDK' in os.environ:
SDK_PATH = os.environ['GAE_SDK']
sys.path.insert(0, SDK_PATH)
import dev_appserver
dev_appserver.fix_sys_path()
else:
    print "GAE_SDK environment variable must be set and point to App Engine's SDK folder"
from gaeforms.ndb.property import SimpleCurrency, SimpleDecimal
from google.appengine.ext.ndb.model import StringProperty, TextProperty, DateProperty, DateTimeProperty, \
IntegerProperty, \
FloatProperty, BooleanProperty
PROJECT_DIR = os.path.dirname(__file__)
PROJECT_DIR = os.path.abspath(os.path.join(PROJECT_DIR, '..'))
APPS_DIR = os.path.join(PROJECT_DIR, 'apps')
TEST_DIR = os.path.join(PROJECT_DIR, 'test')
sys.path.insert(1, APPS_DIR)
APPENGINE_DIR = os.path.join(PROJECT_DIR, 'appengine')
WEB_DIR = os.path.join(APPENGINE_DIR, 'routes')
TEMPLATES_DIR = os.path.join(APPENGINE_DIR, 'templates')
# Templates
REST_TESTS_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from datetime import datetime, date
from decimal import Decimal
from base import GAETestCase
from %(app)s_app.%(app)s_model import %(model)s
from routes.%(app)ss import rest
from gaegraph.model import Node
from mock import Mock
from mommygae import mommy
class IndexTests(GAETestCase):
def test_success(self):
mommy.save_one(%(model)s)
mommy.save_one(%(model)s)
json_response = rest.index()
context = json_response.context
self.assertEqual(2, len(context))
%(model_underscore)s_dct = context[0]
self.assertSetEqual(set(['id', 'creation', %(model_properties)s]), set(%(model_underscore)s_dct.iterkeys()))
self.assert_can_serialize_as_json(json_response)
class NewTests(GAETestCase):
def test_success(self):
self.assertIsNone(%(model)s.query().get())
json_response = rest.new(None, %(request_values)s)
db_%(model_underscore)s = %(model)s.query().get()
self.assertIsNotNone(db_%(model_underscore)s)
%(model_assertions)s
self.assert_can_serialize_as_json(json_response)
def test_error(self):
resp = Mock()
json_response = rest.new(resp)
errors = json_response.context
self.assertEqual(500, resp.status_code)
self.assertSetEqual(set([%(model_properties)s]), set(errors.keys()))
self.assert_can_serialize_as_json(json_response)
class EditTests(GAETestCase):
def test_success(self):
%(model_underscore)s = mommy.save_one(%(model)s)
old_properties = %(model_underscore)s.to_dict()
json_response = rest.edit(None, %(model_underscore)s.key.id(), %(request_values)s)
db_%(model_underscore)s = %(model_underscore)s.key.get()
%(model_assertions)s
self.assertNotEqual(old_properties, db_%(model_underscore)s.to_dict())
self.assert_can_serialize_as_json(json_response)
def test_error(self):
%(model_underscore)s = mommy.save_one(%(model)s)
old_properties = %(model_underscore)s.to_dict()
resp = Mock()
json_response = rest.edit(resp, %(model_underscore)s.key.id())
errors = json_response.context
self.assertEqual(500, resp.status_code)
self.assertSetEqual(set([%(model_properties)s]), set(errors.keys()))
self.assertEqual(old_properties, %(model_underscore)s.key.get().to_dict())
self.assert_can_serialize_as_json(json_response)
class DeleteTests(GAETestCase):
def test_success(self):
%(model_underscore)s = mommy.save_one(%(model)s)
rest.delete(None, %(model_underscore)s.key.id())
self.assertIsNone(%(model_underscore)s.key.get())
def test_non_%(model_underscore)s_deletion(self):
non_%(model_underscore)s = mommy.save_one(Node)
response = Mock()
json_response = rest.delete(response, non_%(model_underscore)s.key.id())
self.assertIsNotNone(non_%(model_underscore)s.key.get())
self.assertEqual(500, response.status_code)
self.assert_can_serialize_as_json(json_response)
'''
HOME_TESTS_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from base import GAETestCase
from %(app)s_app.%(app)s_model import %(model)s
from routes.%(app)ss.home import index, delete
from gaebusiness.business import CommandExecutionException
from gaegraph.model import Node
from mommygae import mommy
from tekton.gae.middleware.redirect import RedirectResponse
class IndexTests(GAETestCase):
def test_success(self):
mommy.save_one(%(model)s)
template_response = index()
self.assert_can_render(template_response)
class DeleteTests(GAETestCase):
def test_success(self):
%(model_underscore)s = mommy.save_one(%(model)s)
redirect_response = delete(%(model_underscore)s.key.id())
self.assertIsInstance(redirect_response, RedirectResponse)
self.assertIsNone(%(model_underscore)s.key.get())
def test_non_%(model_underscore)s_deletion(self):
non_%(model_underscore)s = mommy.save_one(Node)
self.assertRaises(CommandExecutionException, delete, non_%(model_underscore)s.key.id())
self.assertIsNotNone(non_%(model_underscore)s.key.get())
'''
EDIT_TESTS_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from base import GAETestCase
from datetime import datetime, date
from decimal import Decimal
from %(app)s_app.%(app)s_model import %(model)s
from routes.%(app)ss.edit import index, save
from mommygae import mommy
from tekton.gae.middleware.redirect import RedirectResponse
class IndexTests(GAETestCase):
def test_success(self):
%(model_underscore)s = mommy.save_one(%(model)s)
template_response = index(%(model_underscore)s.key.id())
self.assert_can_render(template_response)
class EditTests(GAETestCase):
def test_success(self):
%(model_underscore)s = mommy.save_one(%(model)s)
old_properties = %(model_underscore)s.to_dict()
redirect_response = save(%(model_underscore)s.key.id(), %(request_values)s)
self.assertIsInstance(redirect_response, RedirectResponse)
edited_%(model_underscore)s = %(model_underscore)s.key.get()
%(model_assertions)s
self.assertNotEqual(old_properties, edited_%(model_underscore)s.to_dict())
def test_error(self):
%(model_underscore)s = mommy.save_one(%(model)s)
old_properties = %(model_underscore)s.to_dict()
template_response = save(%(model_underscore)s.key.id())
errors = template_response.context['errors']
self.assertSetEqual(set([%(model_properties)s]), set(errors.keys()))
self.assertEqual(old_properties, %(model_underscore)s.key.get().to_dict())
self.assert_can_render(template_response)
'''
NEW_TESTS_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from base import GAETestCase
from datetime import datetime, date
from decimal import Decimal
from %(app)s_app.%(app)s_model import %(model)s
from routes.%(app)ss.new import index, save
from tekton.gae.middleware.redirect import RedirectResponse
class IndexTests(GAETestCase):
def test_success(self):
template_response = index()
self.assert_can_render(template_response)
class SaveTests(GAETestCase):
def test_success(self):
self.assertIsNone(%(model)s.query().get())
redirect_response = save(%(request_values)s)
self.assertIsInstance(redirect_response, RedirectResponse)
saved_%(model_underscore)s = %(model)s.query().get()
self.assertIsNotNone(saved_%(model_underscore)s)
%(model_assertions)s
def test_error(self):
template_response = save()
errors = template_response.context['errors']
self.assertSetEqual(set([%(model_properties)s]), set(errors.keys()))
self.assert_can_render(template_response)
'''
MODEL_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.ext import ndb
from gaegraph.model import Node
from gaeforms.ndb import property
class %(model)s(Node):
%(properties)s
'''
COMMANDS_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaebusiness.gaeutil import SaveCommand, ModelSearchCommand
from gaeforms.ndb.form import ModelForm
from gaegraph.business_base import UpdateNode, NodeSearch, DeleteNode
from %(app_path)s.%(app)s_model import %(model)s
class %(model)sSaveForm(ModelForm):
"""
Form used to save and update %(model)s
"""
_model_class = %(model)s
_include = [%(form_properties)s]
class %(model)sForm(ModelForm):
"""
Form used to expose %(model)s's properties for list or json
"""
_model_class = %(model)s
class Get%(model)sCommand(NodeSearch):
_model_class = %(model)s
class Delete%(model)sCommand(DeleteNode):
_model_class = %(model)s
class Save%(model)sCommand(SaveCommand):
_model_form_class = %(model)sSaveForm
class Update%(model)sCommand(UpdateNode):
_model_form_class = %(model)sSaveForm
class List%(model)sCommand(ModelSearchCommand):
def __init__(self):
super(List%(model)sCommand, self).__init__(%(model)s.query_by_creation())
'''
FACADE_TEMPLATE = r'''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaegraph.business_base import NodeSearch, DeleteNode
from %(app_path)s.%(app)s_commands import List%(model)sCommand, Save%(model)sCommand, Update%(model)sCommand, %(model)sForm,\
Get%(model)sCommand, Delete%(model)sCommand
def save_%(model_underscore)s_cmd(**%(model_underscore)s_properties):
"""
Command to save %(model)s entity
:param %(model_underscore)s_properties: a dict of properties to save on model
:return: a Command that save %(model)s, validating and localizing properties received as strings
"""
return Save%(model)sCommand(**%(model_underscore)s_properties)
def update_%(model_underscore)s_cmd(%(model_underscore)s_id, **%(model_underscore)s_properties):
"""
Command to update %(model)s entity with id equals '%(model_underscore)s_id'
:param %(model_underscore)s_properties: a dict of properties to update model
:return: a Command that update %(model)s, validating and localizing properties received as strings
"""
return Update%(model)sCommand(%(model_underscore)s_id, **%(model_underscore)s_properties)
def list_%(model_underscore)ss_cmd():
"""
Command to list %(model)s entities ordered by their creation dates
    :return: a Command that performs the db operations when executed
"""
return List%(model)sCommand()
def %(model_underscore)s_form(**kwargs):
"""
Function to get %(model)s's detail form.
:param kwargs: form properties
:return: Form
"""
return %(model)sForm(**kwargs)
def get_%(model_underscore)s_cmd(%(model_underscore)s_id):
"""
    Find %(model_underscore)s by its id
:param %(model_underscore)s_id: the %(model_underscore)s id
:return: Command
"""
return Get%(model)sCommand(%(model_underscore)s_id)
def delete_%(model_underscore)s_cmd(%(model_underscore)s_id):
"""
Construct a command to delete a %(model)s
:param %(model_underscore)s_id: %(model_underscore)s's id
:return: Command
"""
return Delete%(model)sCommand(%(model_underscore)s_id)
'''
HOME_SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from tekton import router
from gaecookie.decorator import no_csrf
from %(app_name)s import %(app)s_facade
from routes.%(web_name)s import new, edit
from tekton.gae.middleware.redirect import RedirectResponse
@no_csrf
def index():
cmd = %(app)s_facade.list_%(model_underscore)ss_cmd()
%(model_underscore)ss = cmd()
edit_path = router.to_path(edit)
delete_path = router.to_path(delete)
%(model_underscore)s_form = %(app)s_facade.%(model_underscore)s_form()
def localize_%(model_underscore)s(%(model_underscore)s):
%(model_underscore)s_dct = %(model_underscore)s_form.fill_with_model(%(model_underscore)s)
%(model_underscore)s_dct['edit_path'] = router.to_path(edit_path, %(model_underscore)s_dct['id'])
%(model_underscore)s_dct['delete_path'] = router.to_path(delete_path, %(model_underscore)s_dct['id'])
return %(model_underscore)s_dct
localized_%(model_underscore)ss = [localize_%(model_underscore)s(%(model_underscore)s) for %(model_underscore)s in %(model_underscore)ss]
context = {'%(model_underscore)ss': localized_%(model_underscore)ss,
'new_path': router.to_path(new)}
return TemplateResponse(context, '%(app)ss/%(app)s_home.html')
def delete(%(model_underscore)s_id):
%(app)s_facade.delete_%(model_underscore)s_cmd(%(model_underscore)s_id)()
return RedirectResponse(router.to_path(index))
'''
NEW_SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaebusiness.business import CommandExecutionException
from tekton import router
from gaecookie.decorator import no_csrf
from %(app_name)s import %(app)s_facade
from routes import %(web_name)s
from tekton.gae.middleware.redirect import RedirectResponse
@no_csrf
def index():
return TemplateResponse({'save_path': router.to_path(save)}, '%(web_name)s/%(app)s_form.html')
def save(**%(model_underscore)s_properties):
cmd = %(app)s_facade.save_%(model_underscore)s_cmd(**%(model_underscore)s_properties)
try:
cmd()
except CommandExecutionException:
context = {'errors': cmd.errors,
'%(model_underscore)s': %(model_underscore)s_properties}
return TemplateResponse(context, '%(web_name)s/%(app)s_form.html')
return RedirectResponse(router.to_path(%(web_name)s))
'''
EDIT_SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaebusiness.business import CommandExecutionException
from tekton import router
from gaecookie.decorator import no_csrf
from %(app_name)s import %(app)s_facade
from routes import %(web_name)s
from tekton.gae.middleware.redirect import RedirectResponse
@no_csrf
def index(%(model_underscore)s_id):
%(model_underscore)s = %(app)s_facade.get_%(model_underscore)s_cmd(%(model_underscore)s_id)()
%(model_underscore)s_form = %(app)s_facade.%(model_underscore)s_form()
context = {'save_path': router.to_path(save, %(model_underscore)s_id), '%(model_underscore)s': %(model_underscore)s_form.fill_with_model(%(model_underscore)s)}
return TemplateResponse(context, '%(web_name)s/%(app)s_form.html')
def save(%(model_underscore)s_id, **%(model_underscore)s_properties):
cmd = %(app)s_facade.update_%(model_underscore)s_cmd(%(model_underscore)s_id, **%(model_underscore)s_properties)
try:
cmd()
except CommandExecutionException:
context = {'errors': cmd.errors, '%(model_underscore)s': %(model_underscore)s_properties}
return TemplateResponse(context, '%(web_name)s/%(app)s_form.html')
return RedirectResponse(router.to_path(%(web_name)s))
'''
REST_SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaebusiness.business import CommandExecutionException
from tekton.gae.middleware.json_middleware import JsonResponse
from %(app_name)s import %(app)s_facade
def index():
cmd = %(app)s_facade.list_%(model_underscore)ss_cmd()
%(model_underscore)s_list = cmd()
%(model_underscore)s_form = %(app)s_facade.%(model_underscore)s_form()
%(model_underscore)s_dcts = [%(model_underscore)s_form.fill_with_model(m) for m in %(model_underscore)s_list]
return JsonResponse(%(model_underscore)s_dcts)
def new(_resp, **%(model_underscore)s_properties):
cmd = %(app)s_facade.save_%(model_underscore)s_cmd(**%(model_underscore)s_properties)
return _save_or_update_json_response(cmd, _resp)
def edit(_resp, id, **%(model_underscore)s_properties):
cmd = %(app)s_facade.update_%(model_underscore)s_cmd(id, **%(model_underscore)s_properties)
return _save_or_update_json_response(cmd, _resp)
def delete(_resp, id):
cmd = %(app)s_facade.delete_%(model_underscore)s_cmd(id)
try:
cmd()
except CommandExecutionException:
_resp.status_code = 500
return JsonResponse(cmd.errors)
def _save_or_update_json_response(cmd, _resp):
try:
%(model_underscore)s = cmd()
except CommandExecutionException:
_resp.status_code = 500
return JsonResponse(cmd.errors)
%(model_underscore)s_form = %(app)s_facade.%(model_underscore)s_form()
return JsonResponse(%(model_underscore)s_form.fill_with_model(%(model_underscore)s))
'''
HOME_HTML_TEMPLATE = '''{%% extends '%(web_name)s/%(app)s_base.html' %%}
{%% block body %%}
<div class="container">
<div class="row">
<div class="col-md-12">
<h1>{%% trans %%}This is a generic home for %(app_name)s {%% endtrans %%} </h1>
<a href="{{ new_path }}" class="btn btn-success">{%% trans %%}Create New %(model)s{%% endtrans %%}</a>
<hr/>
<h2>{%% trans %%}List of %(model)ss{%% endtrans %%}</h2>
<table class="table table-striped table-hover">
<thead>
<tr>
<th/>
<th>{%% trans %%}Id{%% endtrans %%}</th>
<th>{%% trans %%}Creation{%% endtrans %%}</th>
%(headers)s
</tr>
</thead>
<tbody>
{%% for %(model_underscore)s in %(model_underscore)ss %%}
<tr>
<td><a href="{{ %(model_underscore)s.edit_path }}" class="btn btn-success btn-sm"><i
class="glyphicon glyphicon-pencil"></i></a></td>
<td>{{ %(model_underscore)s.id }}</td>
<td>{{ %(model_underscore)s.creation }}</td>
%(columns)s
<td>
<form action="{{ %(model_underscore)s.delete_path }}" method="post" onsubmit="return confirm('{{_('Are you sure to delete? Press cancel to avoid deletion.')}}');">
{{ csrf_input() }}
<button class="btn btn-danger btn-sm"><i
class="glyphicon glyphicon-trash"></i></button>
</form>
</td>
</tr>
{%% endfor %%}
</tbody>
</table>
</div>
</div>
</div>
{%% endblock %%}'''
FORM_HTML_TEMPLATE = '''{%% extends '%(web_name)s/%(app)s_base.html' %%}
{%% block body %%}
{%% set %(model_underscore)s=%(model_underscore)s or None %%}
{%% set errors=errors or None %%}
<div class="container">
<div class="row">
<div class="col-md-6 col-md-offset-3">
<br/>
<div class="well">
<h1 class="text-center">{%% trans %%}%(model)s Form{%% endtrans %%}</h1>
<form action="{{ save_path }}" method="post" role="form">
{{ csrf_input() }}
%(inputs)s
<button type="submit" class="btn btn-success">{%% trans %%}Save{%% endtrans %%}</button>
</form>
</div>
</div>
</div>
</div>
{%% endblock %%}'''
def _create_dir_if_not_existing(package_path):
if not os.path.exists(package_path):
os.mkdir(package_path)
def _create_file_if_not_existing(file_path, content=''):
if not os.path.isfile(file_path):
with open(file_path, 'w') as f:
f.write(content.encode('utf8'))
def _create_package(package_path):
_create_dir_if_not_existing(package_path)
_create_file_if_not_existing(os.path.join(package_path, '__init__.py'))
def _create_app(name, app_path, model, *properties):
properties = '\n'.join(parse_property(p) for p in properties)
properties = properties or ' pass'
_create_package(app_path)
_create_file_if_not_existing(os.path.join(app_path, '%s_model.py' % name),
MODEL_TEMPLATE % {'model': model, 'properties': properties})
def parse_property(p):
name, type_alias = p.split(':')
types = {'string': 'ndb.StringProperty(required=True)',
'date': 'ndb.DateProperty(required=True)',
'datetime': 'ndb.DateTimeProperty(required=True)',
'int': 'ndb.IntegerProperty(required=True)',
'float': 'ndb.FloatProperty(required=True)',
'decimal': 'property.SimpleDecimal(required=True)',
'currency': 'property.SimpleCurrency(required=True)',
'bool': 'ndb.BooleanProperty(required=True)'}
return ' %s = %s' % (name, types[type_alias])
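# Illustrative mapping (property names below are made up; outputs follow the table above):
#     parse_property('price:currency')  ->  "    price = property.SimpleCurrency(required=True)"
#     parse_property('title:string')    ->  "    title = ndb.StringProperty(required=True)"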
def init_app(name, model, *properties):
_title('Creating app package')
app_path = os.path.join(APPS_DIR, name + '_app')
_create_app(name, app_path, model, *properties)
PROPERTY = '%(model)s.%(property)s'
def _build_properties(model, properties):
return ', \n '.join([PROPERTY % {'model': model, 'property': p} for p in properties])
def _model_class(app, model):
app_path = app + '_app'
model_module = importlib.import_module(app_path + '.%s_model' % app)
model_class = getattr(model_module, model)
return model_class
def _model_properties(app, model):
model_class = _model_class(app, model)
properties = set(model_class._properties.keys())
properties = properties.difference(set(['class']))
return properties
def commands_code_for(app, model):
app_path = app + '_app'
properties = _model_properties(app, model)
full_properties = _build_properties(model, properties)
form_properties = properties.difference(set(['creation']))
form_properties = _build_properties(model, form_properties)
dct = {'app': app, 'app_path': app_path, 'model': model, 'full_properties': full_properties,
'form_properties': form_properties}
return COMMANDS_TEMPLATE % dct
def _title(param):
n = 15
print ('- ' * n) + param + (' -' * n)
def _to_app_name(app):
return app + '_app'
def _to_underscore_case(model):
model_underscore = model[0].lower() + model[1:]
return ''.join(('_' + letter.lower() if letter.isupper() else letter) for letter in model_underscore)
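# e.g. _to_underscore_case('LongModelName') -> 'long_model_name' (hypothetical model name)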
def generate_generic(app, model, template_path_function, file_name, content_function):
app_template_path = template_path_function(app)
template_file = os.path.join(app_template_path, file_name)
content = content_function(app, model)
_create_file_if_not_existing(template_file, content)
return content
def _to_app_path(app):
return os.path.join(APPS_DIR, app + '_app')
def generate_app_file(app, model, file_name, content_function):
file_name = '%s_%s.py' % (app, file_name)
return generate_generic(app, model, _to_app_path, file_name, content_function)
def init_commands(app, model):
return generate_app_file(app, model, 'commands', commands_code_for)
def facade_code_for(app, model):
app_path = _to_app_name(app)
model_underscore = _to_underscore_case(model)
dct = {'app': app, 'app_path': app_path, 'model': model, 'model_underscore': model_underscore}
return FACADE_TEMPLATE % dct
def init_facade(app, model):
return generate_app_file(app, model, 'facade', facade_code_for)
def _to_routes_name(app):
return app + 's'
def init_routes(app):
web_path = _to_routes_path(app)
_create_package(web_path)
def _to_routes_path(app):
return os.path.join(WEB_DIR, _to_routes_name(app))
def generate_routes(app, model, file_name, content_function):
file_name = '%s.py' % file_name
return generate_generic(app, model, _to_routes_path, file_name, content_function)
def code_for_home_script(app, model):
web_name = _to_routes_name(app)
app_name = _to_app_name(app)
return HOME_SCRIPT_TEMPLATE % {'app_name': app_name,
'model_underscore': _to_underscore_case(model),
'web_name': web_name,
'app': app}
def init_home_script(app, model):
return generate_routes(app, model, 'home', code_for_home_script)
def code_for_new_script(app, model):
web_name = _to_routes_name(app)
app_name = _to_app_name(app)
return NEW_SCRIPT_TEMPLATE % {'app_name': app_name,
'model_underscore': _to_underscore_case(model),
'web_name': web_name,
'app': app}
def init_new_script(app, model):
return generate_routes(app, model, 'new', code_for_new_script)
def code_for_edit_script(app, model):
web_name = _to_routes_name(app)
app_name = _to_app_name(app)
return EDIT_SCRIPT_TEMPLATE % {'app_name': app_name,
'model_underscore': _to_underscore_case(model),
'web_name': web_name,
'app': app}
def init_edit_script(app, model):
return generate_routes(app, model, 'edit', code_for_edit_script)
def code_for_rest_script(app, model):
web_name = _to_routes_name(app)
app_name = _to_app_name(app)
return REST_SCRIPT_TEMPLATE % {'app_name': app_name,
'model_underscore': _to_underscore_case(model),
'web_name': web_name,
'app': app}
def init_rest_script(app, model):
return generate_routes(app, model, 'rest', code_for_rest_script)
APP_BASE_HTML_TEMPLATE = '''{%% extends 'base/base.html' %%}
{%% block tabs %%}
{{ select_tab('%(app_name_upper)s') }}
{%% endblock %%}'''
def _to_template_path(app):
return os.path.join(TEMPLATES_DIR, _to_routes_name(app))
def init_html_templates(app):
template_path = _to_template_path(app)
content = APP_BASE_HTML_TEMPLATE % {'app_name_upper': _to_routes_name(app).upper()}
_create_dir_if_not_existing(template_path)
base_dir = os.path.join(template_path, '%s_base.html' % app)
_create_file_if_not_existing(base_dir, content)
def _to_label(label):
names = label.split('_')
upper_names = [n[0].upper() + n[1:] for n in names]
return ' '.join(upper_names)
def _to_html_table_header(properties):
template = ' ' * 24 + '<th>{%% trans %%}%s{%% endtrans %%}</th>'
properties = [_to_label(p) for p in properties]
rendered = [template % p for p in properties]
return '\n'.join(rendered)
def _to_html_table_columns(model_underscore, properties):
template = ' ' * 28 + '<td>{{ %(model_underscore)s.%(property)s }}</td>'
rendered = [template % {'model_underscore': model_underscore, 'property': p} for p in properties]
return '\n'.join(rendered)
def _to_html_form_inputs(model_underscore, properties):
template = "{{ form_input(_('%(label)s'),'%(property)s',%(model_underscore)s.%(property)s,errors.%(property)s) }}"
template = ' ' * 24 + template
rendered = [template % {'model_underscore': model_underscore, 'property': p, 'label': _to_label(p)} for p in
properties]
return '\n'.join(rendered)
def generate_template(app, model, file_name, content_function):
file_name = '%s_%s.html' % (app, file_name)
return generate_generic(app, model, _to_template_path, file_name, content_function)
def code_for_home_html(app, model):
web_name = _to_routes_name(app)
app_name = _to_app_name(app)
properties = _model_properties(app, model)
properties = properties.difference(set(['creation']))
model_underscore = _to_underscore_case(model)
return HOME_HTML_TEMPLATE % {'app_name': app_name,
'model_underscore': model_underscore,
'model': model,
'web_name': web_name,
'headers': _to_html_table_header(properties),
'columns': _to_html_table_columns(model_underscore, properties),
'app': app}
def init_home_html(app, model):
return generate_template(app, model, 'home', code_for_home_html)
def code_for_form_html(app, model):
web_name = _to_routes_name(app)
app_name = _to_app_name(app)
properties = _model_properties(app, model)
properties = properties.difference(set(['creation']))
model_underscore = _to_underscore_case(model)
return FORM_HTML_TEMPLATE % {'app_name': app_name,
'model_underscore': model_underscore,
'model': model,
'web_name': web_name,
'inputs': _to_html_form_inputs(model_underscore, properties),
'app': app}
def init_form_html(app, model):
return generate_template(app, model, 'form', code_for_form_html)
def init_test(name, model, *properties):
_title('Creating test package')
test_path = os.path.join(TEST_DIR, name + '_tests')
_create_package(test_path)
def _to_test_path(app):
return os.path.join(TEST_DIR, app + '_tests')
def generate_tests(app, model, file_name, content_function):
file_name = '%s_%s_tests.py' % (app, file_name)
return generate_generic(app, model, _to_test_path, file_name, content_function)
def _to_default_model_value(descriptor, name, index):
if isinstance(descriptor, (StringProperty, TextProperty)):
return "'%s_string'" % name
if isinstance(descriptor, DateProperty):
return "date(2014, 1, %s)" % (index + 1)
if isinstance(descriptor, DateTimeProperty):
return "datetime(2014, 1, 1, 1, %s, 0)" % (index + 1)
if isinstance(descriptor, (SimpleCurrency, SimpleDecimal)):
return "Decimal('1.%s')" % (index + 1 if index >= 9 else '0%s' % (index + 1))
if isinstance(descriptor, IntegerProperty):
return "%s" % (index + 1)
if isinstance(descriptor, FloatProperty):
return "1.%s" % (index + 1)
if isinstance(descriptor, BooleanProperty):
return "True"
def _to_model_assertions(variable, descriptors_dct):
template = " self.assertEquals(%(value)s, %(variable)s.%(property)s)"
rendered = [template % {'variable': variable, 'property': p, 'value': _to_default_model_value(descriptor, p, i)} for
i, (p, descriptor) in
enumerate(descriptors_dct.iteritems())]
return '\n'.join(rendered)
def _to_default_request_value(descriptor, name, index):
if isinstance(descriptor, (StringProperty, TextProperty)):
return "'%s_string'" % name
if isinstance(descriptor, DateProperty):
return "'1/%s/2014'" % (index + 1)
if isinstance(descriptor, DateTimeProperty):
return "'1/1/2014 01:%s:0'" % (index + 1)
if isinstance(descriptor, (SimpleCurrency, SimpleDecimal)):
return "'1.%s'" % (index + 1 if index >= 9 else '0%s' % (index + 1))
if isinstance(descriptor, IntegerProperty):
return "'%s'" % (index + 1)
if isinstance(descriptor, FloatProperty):
return "'1.%s'" % (index + 1)
if isinstance(descriptor, BooleanProperty):
return "'True'"
def _to_request_values(variable, descriptors_dct):
template = "%(property)s=%(value)s"
    rendered = [template % {'variable': variable, 'property': p, 'value': _to_default_request_value(descriptor, p, i)}
for
i, (p, descriptor) in
enumerate(descriptors_dct.iteritems())]
return ', '.join(rendered)
def _model_descriptors(app, model):
model_class = _model_class(app, model)
return {k: p for k, p in model_class._properties.iteritems() if k not in ['class', 'creation']}
def code_new_tests(app, model):
descriptors_dct = _model_descriptors(app, model)
model_underscore = _to_underscore_case(model)
model_assertions = _to_model_assertions('saved_' + model_underscore, descriptors_dct)
model_properties = ', '.join("'%s'" % k for k in descriptors_dct)
request_values = _to_request_values('saved_' + model_underscore, descriptors_dct)
return NEW_TESTS_TEMPLATE % {'app': app, 'model': model, 'model_underscore': model_underscore,
'model_assertions': model_assertions, 'request_values': request_values,
'model_properties': model_properties}
def code_edit_tests(app, model):
descriptors_dct = _model_descriptors(app, model)
model_underscore = _to_underscore_case(model)
model_assertions = _to_model_assertions('edited_' + model_underscore, descriptors_dct)
model_properties = ', '.join("'%s'" % k for k in descriptors_dct)
request_values = _to_request_values('edited_' + model_underscore, descriptors_dct)
return EDIT_TESTS_TEMPLATE % {'app': app, 'model': model, 'model_underscore': model_underscore,
'model_assertions': model_assertions, 'request_values': request_values,
'model_properties': model_properties}
def code_home_tests(app, model):
model_underscore = _to_underscore_case(model)
return HOME_TESTS_TEMPLATE % {'app': app, 'model': model, 'model_underscore': model_underscore}
def code_rest_tests(app, model):
descriptors_dct = _model_descriptors(app, model)
model_underscore = _to_underscore_case(model)
model_assertions = _to_model_assertions('db_' + model_underscore, descriptors_dct)
model_properties = ', '.join("'%s'" % k for k in descriptors_dct)
request_values = _to_request_values('request_' + model_underscore, descriptors_dct)
return REST_TESTS_TEMPLATE % {'app': app, 'model': model, 'model_underscore': model_underscore,
'model_assertions': model_assertions, 'request_values': request_values,
'model_properties': model_properties}
def init_new_tests(app, model):
return generate_tests(app, model, 'new', code_new_tests)
def init_edit_tests(app, model):
return generate_tests(app, model, 'edit', code_edit_tests)
def init_home_tests(app, model):
return generate_tests(app, model, 'home', code_home_tests)
def init_rest_tests(app, model):
return generate_tests(app, model, 'rest', code_rest_tests)
def scaffold(app, model, *properties):
init_app(app, model, *properties)
_title('commands.py')
print init_commands(app, model)
_title('facade.py')
print init_facade(app, model)
_title('creating routes folder')
init_routes(app)
_title('routes home.py')
print init_home_script(app, model)
_title('routes.new.py')
print init_new_script(app, model)
_title('routes.edit.py')
print init_edit_script(app, model)
_title('routes rest.py')
print init_rest_script(app, model)
    _title('creating template folder and base.html')
init_html_templates(app)
_title('templates/home.html')
print init_home_html(app, model)
_title('templates/form.html')
print init_form_html(app, model)
init_test(app, model)
_title('creating new tests')
print init_new_tests(app, model)
_title('creating edit tests')
print init_edit_tests(app, model)
_title('creating home tests')
print init_home_tests(app, model)
_title('creating rest tests')
print init_rest_tests(app, model)
def delete_app(app):
    flag = raw_input('Are you sure you want to delete app %s (yes or no)? ' % app)
if flag.lower() == 'yes':
app_dir = os.path.join(APPS_DIR, app + '_app')
shutil.rmtree(app_dir)
template_dir = os.path.join(TEMPLATES_DIR, app + 's')
shutil.rmtree(template_dir)
web_dir = os.path.join(WEB_DIR, app + 's')
shutil.rmtree(web_dir)
test_dir = os.path.join(TEST_DIR, app + '_tests')
shutil.rmtree(test_dir)
FUNC_DICT = {'model': init_app, 'app': scaffold, 'delete': delete_app}
if __name__ == '__main__':
if len(sys.argv) == 1:
print 'Commands available:'
print '\n '.join([''] + FUNC_DICT.keys())
        print 'both model and app must be followed by <app> <model>'
elif len(sys.argv) >= 3:
fcn = FUNC_DICT.get(sys.argv[1])
if fcn:
fcn(*sys.argv[2:])
else:
print 'Invalid command: %s' % sys.argv[1]
else:
print 'Must use command %s followed by params: <app> <model>' % sys.argv[1]
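# Example invocations (a sketch; the script name is a placeholder, property syntax is
# name:type as handled by parse_property above):
#
#     python scaffold.py app book Book title:string price:currency published:date
#     python scaffold.py model book Book title:string
#     python scaffold.py delete book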
| mit |
joedursun/.emacs.d | elpa/elpy-20150226.1148/elpy/tests/support.py | 1 | 26998 | # coding: utf-8
"""Support classes and functions for the elpy test code.
Elpy uses a bit of a peculiar test setup to avoid redundancy. For the
tests of the two backends, we provide generic test cases for generic
tests and for specific callback tests.
These mixins can be included in the actual test classes. We can't add
these tests to a BackendTestCase subclass directly because the test
discovery would find them there and try to run them, which would fail.
"""
import os
import shutil
import tempfile
import unittest
from elpy.tests import compat
class BackendTestCase(unittest.TestCase):
"""Base class for backend tests.
This class sets up a project root directory and provides an easy
way to create files within the project root.
"""
def setUp(self):
"""Create the project root and make sure it gets cleaned up."""
super(BackendTestCase, self).setUp()
self.project_root = tempfile.mkdtemp(prefix="elpy-test")
self.addCleanup(shutil.rmtree, self.project_root, True)
def project_file(self, relname, contents):
"""Create a file named relname within the project root.
Write contents into that file.
"""
full_name = os.path.join(self.project_root, relname)
try:
os.makedirs(os.path.dirname(full_name))
except OSError:
pass
with open(full_name, "w") as f:
f.write(contents)
return full_name
class GenericRPCTests(object):
"""Generic RPC test methods.
This is a mixin to add tests that should be run for all RPC
methods that follow the generic (filename, source, offset) calling
conventions.
"""
METHOD = None
def rpc(self, filename, source, offset):
method = getattr(self.backend, self.METHOD)
return method(filename, source, offset)
def test_should_not_fail_on_inexisting_file(self):
filename = self.project_root + "/doesnotexist.py"
self.rpc(filename, "", 0)
def test_should_not_fail_on_empty_file(self):
filename = self.project_file("test.py", "")
self.rpc(filename, "", 0)
def test_should_not_fail_if_file_is_none(self):
self.rpc(None, "", 0)
def test_should_not_fail_for_module_syntax_errors(self):
source, offset = source_and_offset(
"class Foo(object):\n"
" def bar(self):\n"
" foo(_|_"
" bar("
"\n"
" def a(self):\n"
" pass\n"
"\n"
" def b(self):\n"
" pass\n"
"\n"
" def b(self):\n"
" pass\n"
"\n"
" def b(self):\n"
" pass\n"
"\n"
" def b(self):\n"
" pass\n"
"\n"
" def b(self):\n"
" pass\n"
)
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_for_bad_indentation(self):
# Bug in Rope: rope#80
source, offset = source_and_offset(
"def foo():\n"
" print(23)_|_\n"
" print(17)\n")
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_for_relative_import(self):
# Bug in Rope: rope#81 and rope#82
source, offset = source_and_offset(
"from .. import foo_|_"
)
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_on_keyword(self):
source, offset = source_and_offset(
"_|_try:\n"
" pass\n"
"except:\n"
" pass\n")
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_with_bad_encoding(self):
# Bug in Rope: rope#83
source, offset = source_and_offset(
u'# coding: utf-8X_|_\n'
)
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_with_form_feed_characters(self):
# Bug in Jedi: jedi#424
source, offset = source_and_offset(
"\f\n"
"class Test(object):_|_\n"
" pass"
)
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_for_dictionaries_in_weird_places(self):
# Bug in Jedi: jedi#417
source, offset = source_and_offset(
"import json\n"
"\n"
"def foo():\n"
" json.loads(_|_\n"
"\n"
" json.load.return_value = {'foo': [],\n"
" 'bar': True}\n"
"\n"
" c = Foo()\n"
)
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_break_with_binary_characters_in_docstring(self):
# Bug in Jedi: jedi#427
template = '''\
class Foo(object):
def __init__(self):
"""
COMMUNITY instance that this conversion belongs to.
DISPERSY_VERSION is the dispersy conversion identifier (on the wire version; must be one byte).
COMMUNIY_VERSION is the community conversion identifier (on the wire version; must be one byte).
COMMUNIY_VERSION may not be '\\x00' or '\\xff'. '\\x00' is used by the DefaultConversion until
a proper conversion instance can be made for the Community. '\\xff' is reserved for when
more than one byte is needed as a version indicator.
"""
pass
x = Foo()
x._|_
'''
source, offset = source_and_offset(template)
filename = self.project_file("test.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_for_def_without_name(self):
# Bug jedi#429
source, offset = source_and_offset(
"def_|_():\n"
" if True:\n"
" return True\n"
" else:\n"
" return False\n"
)
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_on_lambda(self):
# Bug #272 / jedi#431
source, offset = source_and_offset(
"map(lambda_|_"
)
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_on_literals(self):
# Bug #314, #344 / jedi#466
source = u'lit = u"""\\\n# -*- coding: utf-8 -*-\n"""\n'
offset = 0
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_with_args_as_args(self):
# Bug #347 in rope_py3k
source, offset = source_and_offset(
"def my_function(*args):\n"
" ret_|_"
)
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_for_unicode_chars_in_string(self):
# Bug #358 / jedi#482
source = '''\
# coding: utf-8
logging.info(u"Saving «{}»...".format(title))
requests.get(u"https://web.archive.org/save/{}".format(url))
'''
offset = 57
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_for_bad_escape_sequence(self):
# Bug #360 / jedi#485
source = r"v = '\x'"
offset = 8
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_for_coding_declarations_in_strings(self):
# Bug #314 / jedi#465 / python#22221
source = u'lit = """\\\n# -*- coding: utf-8 -*-\n"""'
offset = 8
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
def test_should_not_fail_if_root_vanishes(self):
# Bug #353
source, offset = source_and_offset(
"import foo\n"
"foo._|_"
)
filename = self.project_file("project.py", source)
shutil.rmtree(self.project_root)
self.rpc(filename, source, offset)
# For some reason, this breaks a lot of other tests. Couldn't
# figure out why.
#
# def test_should_not_fail_for_sys_path(self):
# # Bug #365 / jedi#486
# source, offset = source_and_offset(
# "import sys\n"
# "\n"
# "sys.path.index(_|_\n"
# )
# filename = self.project_file("project.py", source)
#
# self.rpc(filename, source, offset)
class RPCGetCompletionsTests(GenericRPCTests):
METHOD = "rpc_get_completions"
def test_should_complete_builtin(self):
source, offset = source_and_offset("o_|_")
expected = ["object", "oct", "open", "or", "ord"]
actual = [cand['name'] for cand in
self.backend.rpc_get_completions("test.py",
source, offset)]
for candidate in expected:
self.assertIn(candidate, actual)
def test_should_complete_imports(self):
source, offset = source_and_offset("import json\n"
"json.J_|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
self.assertEqual(
sorted([cand['suffix'] for cand in completions]),
sorted(["SONDecoder", "SONEncoder"]))
def test_should_complete_top_level_modules_for_import(self):
source, offset = source_and_offset("import multi_|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
if compat.PYTHON3:
expected = ["processing"]
else:
expected = ["file", "processing"]
self.assertEqual(sorted([cand['suffix'] for cand in completions]),
sorted(expected))
def test_should_complete_packages_for_import(self):
source, offset = source_and_offset("import elpy.tes_|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
self.assertEqual([cand['suffix'] for cand in completions],
["ts"])
def test_should_not_complete_for_import(self):
source, offset = source_and_offset("import foo.Conf_|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
self.assertEqual([cand['suffix'] for cand in completions],
[])
def test_should_not_fail_for_short_module(self):
source, offset = source_and_offset("from .. import foo_|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
self.assertIsNotNone(completions)
def test_should_complete_sys(self):
source, offset = source_and_offset("import sys\nsys._|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
self.assertIn('path', [cand['suffix'] for cand in completions])
def test_should_find_with_trailing_text(self):
source, offset = source_and_offset(
"import threading\nthreading.T_|_mumble mumble")
expected = ["Thread", "ThreadError", "Timer"]
actual = [cand['name'] for cand in
self.backend.rpc_get_completions("test.py", source, offset)]
for candidate in expected:
self.assertIn(candidate, actual)
def test_should_find_completion_different_package(self):
# See issue #74
self.project_file("project/__init__.py", "")
source1 = ("class Add:\n"
" def add(self, a, b):\n"
" return a + b\n")
self.project_file("project/add.py", source1)
source2, offset = source_and_offset(
"from project.add import Add\n"
"class Calculator:\n"
" def add(self, a, b):\n"
" c = Add()\n"
" c.ad_|_\n")
file2 = self.project_file("project/calculator.py", source2)
proposals = self.backend.rpc_get_completions(file2,
source2,
offset)
self.assertEqual(["add"],
[proposal["name"] for proposal in proposals])
class RPCGetCompletionDocstringTests(object):
def test_should_return_docstring(self):
source, offset = source_and_offset("import json\n"
"json.J_|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
completions.sort(key=lambda p: p["name"])
prop = completions[0]
self.assertEqual(prop["name"], "JSONDecoder")
docs = self.backend.rpc_get_completion_docstring("JSONDecoder")
self.assertIn("Simple JSON", docs)
def test_should_return_none_if_unknown(self):
docs = self.backend.rpc_get_completion_docstring("Foo")
self.assertIsNone(docs)
class RPCGetCompletionLocationTests(object):
def test_should_return_location(self):
source, offset = source_and_offset("donaudampfschiff = 1\n"
"donau_|_")
filename = self.project_file("test.py", source)
completions = self.backend.rpc_get_completions(filename,
source,
offset)
prop = completions[0]
self.assertEqual(prop["name"], "donaudampfschiff")
loc = self.backend.rpc_get_completion_location("donaudampfschiff")
self.assertEqual((filename, 1), loc)
def test_should_return_none_if_unknown(self):
docs = self.backend.rpc_get_completion_location("Foo")
self.assertIsNone(docs)
class RPCGetDefinitionTests(GenericRPCTests):
METHOD = "rpc_get_definition"
def test_should_return_definition_location_same_file(self):
source, offset = source_and_offset("import threading\n"
"def test_function(a, b):\n"
" return a + b\n"
"\n"
"test_func_|_tion(\n")
filename = self.project_file("test.py", source)
location = self.backend.rpc_get_definition(filename,
source,
offset)
self.assertEqual(location[0], filename)
# On def or on the function name
self.assertIn(location[1], (17, 21))
def test_should_return_location_in_same_file_if_not_saved(self):
source, offset = source_and_offset(
"import threading\n"
"\n"
"\n"
"def other_function():\n"
" test_f_|_unction(1, 2)\n"
"\n"
"\n"
"def test_function(a, b):\n"
" return a + b\n")
filename = self.project_file("test.py", "")
location = self.backend.rpc_get_definition(filename,
source,
offset)
self.assertEqual(location[0], filename)
# def or function name
self.assertIn(location[1], (67, 71))
def test_should_return_location_in_different_file(self):
source1 = ("def test_function(a, b):\n"
" return a + b\n")
file1 = self.project_file("test1.py", source1)
source2, offset = source_and_offset("from test1 import test_function\n"
"test_funct_|_ion(1, 2)\n")
file2 = self.project_file("test2.py", source2)
definition = self.backend.rpc_get_definition(file2,
source2,
offset)
self.assertEqual(definition[0], file1)
# Either on the def or on the function name
self.assertIn(definition[1], (0, 4))
def test_should_return_none_if_location_not_found(self):
source, offset = source_and_offset("test_f_|_unction()\n")
filename = self.project_file("test.py", source)
definition = self.backend.rpc_get_definition(filename,
source,
offset)
self.assertIsNone(definition)
def test_should_return_none_if_outside_of_symbol(self):
source, offset = source_and_offset("test_function(_|_)\n")
filename = self.project_file("test.py", source)
definition = self.backend.rpc_get_definition(filename,
source,
offset)
self.assertIsNone(definition)
def test_should_return_definition_location_different_package(self):
# See issue #74
self.project_file("project/__init__.py", "")
source1 = ("class Add:\n"
" def add(self, a, b):\n"
" return a + b\n")
file1 = self.project_file("project/add.py", source1)
source2, offset = source_and_offset(
"from project.add import Add\n"
"class Calculator:\n"
" def add(self, a, b):\n"
" return Add_|_().add(a, b)\n")
file2 = self.project_file("project/calculator.py", source2)
location = self.backend.rpc_get_definition(file2,
source2,
offset)
self.assertEqual(location[0], file1)
# class or class name
self.assertIn(location[1], (0, 6))
def test_should_find_variable_definition(self):
source, offset = source_and_offset("SOME_VALUE = 1\n"
"\n"
"variable = _|_SOME_VALUE\n")
filename = self.project_file("test.py", source)
self.assertEqual(self.backend.rpc_get_definition(filename,
source,
offset),
(filename, 0))
class RPCGetCalltipTests(GenericRPCTests):
METHOD = "rpc_get_calltip"
def test_should_get_calltip(self):
source, offset = source_and_offset(
"import threading\nthreading.Thread(_|_")
filename = self.project_file("test.py", source)
calltip = self.backend.rpc_get_calltip(filename,
source,
offset)
expected = self.THREAD_CALLTIP
self.assertEqual(calltip, expected)
def test_should_get_calltip_even_after_parens(self):
source, offset = source_and_offset(
"import threading\nthreading.Thread(foo()_|_")
filename = self.project_file("test.py", source)
actual = self.backend.rpc_get_calltip(filename,
source,
offset)
self.assertEqual(self.THREAD_CALLTIP, actual)
def test_should_get_calltip_at_closing_paren(self):
source, offset = source_and_offset(
"import threading\nthreading.Thread(_|_)")
filename = self.project_file("test.py", source)
actual = self.backend.rpc_get_calltip(filename,
source,
offset)
self.assertEqual(self.THREAD_CALLTIP, actual)
def test_should_return_none_for_bad_identifier(self):
source, offset = source_and_offset(
"froblgoo(_|_")
filename = self.project_file("test.py", source)
calltip = self.backend.rpc_get_calltip(filename,
source,
offset)
self.assertIsNone(calltip)
def test_should_remove_self_argument(self):
source, offset = source_and_offset(
"d = dict()\n"
"d.keys(_|_")
filename = self.project_file("test.py", source)
actual = self.backend.rpc_get_calltip(filename,
source,
offset)
self.assertEqual(self.KEYS_CALLTIP, actual)
def test_should_remove_package_prefix(self):
source, offset = source_and_offset(
"import decimal\n"
"d = decimal.Decimal('1.5')\n"
"d.radix(_|_")
filename = self.project_file("test.py", source)
actual = self.backend.rpc_get_calltip(filename,
source,
offset)
self.assertEqual(self.RADIX_CALLTIP, actual)
def test_should_return_none_outside_of_all(self):
filename = self.project_file("test.py", "")
source, offset = source_and_offset("import thr_|_eading\n")
calltip = self.backend.rpc_get_calltip(filename,
source, offset)
self.assertIsNone(calltip)
def test_should_find_calltip_different_package(self):
# See issue #74
self.project_file("project/__init__.py", "")
source1 = ("class Add:\n"
" def add(self, a, b):\n"
" return a + b\n")
self.project_file("project/add.py", source1)
source2, offset = source_and_offset(
"from project.add import Add\n"
"class Calculator:\n"
" def add(self, a, b):\n"
" c = Add()\n"
" c.add(_|_\n")
file2 = self.project_file("project/calculator.py", source2)
actual = self.backend.rpc_get_calltip(file2,
source2,
offset)
self.assertEqual(self.ADD_CALLTIP, actual)
class RPCGetDocstringTests(GenericRPCTests):
METHOD = "rpc_get_docstring"
def test_should_get_docstring(self):
source, offset = source_and_offset(
"import threading\nthreading.Thread.join_|_(")
filename = self.project_file("test.py", source)
docstring = self.backend.rpc_get_docstring(filename,
source,
offset)
def first_line(s):
return s[:s.index("\n")]
self.assertEqual(first_line(docstring),
self.THREAD_JOIN_DOCSTRING)
def test_should_return_none_for_bad_identifier(self):
source, offset = source_and_offset(
"froblgoo_|_(\n")
filename = self.project_file("test.py", source)
docstring = self.backend.rpc_get_docstring(filename,
source,
offset)
self.assertIsNone(docstring)
class RPCGetUsagesTests(GenericRPCTests):
METHOD = "rpc_get_usages"
def test_should_return_uses_in_same_file(self):
filename = self.project_file("test.py", "")
source, offset = source_and_offset(
"def foo(x):\n"
" return _|_x + x\n")
usages = self.backend.rpc_get_usages(filename,
source,
offset)
self.assertEqual(usages,
[{'name': 'x',
'offset': 8,
'filename': filename},
{'name': 'x',
'filename': filename,
'offset': 23},
{'name': u'x',
'filename': filename,
'offset': 27}])
def test_should_return_uses_in_other_file(self):
file1 = self.project_file("file1.py", "")
file2 = self.project_file("file2.py", "\n\n\n\n\nx = 5")
source, offset = source_and_offset(
"import file2\n"
"file2._|_x\n")
usages = self.backend.rpc_get_usages(file1,
source,
offset)
self.assertEqual(usages,
[{'name': 'x',
'filename': file1,
'offset': 19},
{'name': 'x',
'filename': file2,
'offset': 5}])
def test_should_not_fail_without_symbol(self):
filename = self.project_file("file.py", "")
usages = self.backend.rpc_get_usages(filename,
"",
0)
self.assertEqual(usages, [])
def source_and_offset(source):
"""Return a source and offset from a source description.
    >>> source_and_offset("hello, _|_world")
    ('hello, world', 7)
    >>> source_and_offset("_|_hello, world")
    ('hello, world', 0)
    >>> source_and_offset("hello, world_|_")
    ('hello, world', 12)
"""
offset = source.index("_|_")
return source[:offset] + source[offset + 3:], offset
| gpl-3.0 |
gnmiller/craig-bot | craig-bot/lib/python3.6/site-packages/urllib3/util/wait.py | 63 | 5403 | import errno
from functools import partial
import select
import sys
try:
from time import monotonic
except ImportError:
from time import time as monotonic
__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
class NoWayToWaitForSocketError(Exception):
pass
# How should we wait on sockets?
#
# There are two types of APIs you can use for waiting on sockets: the fancy
# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
# select/poll. The stateful APIs are more efficient when you have a lots of
# sockets to keep track of, because you can set them up once and then use them
# lots of times. But we only ever want to wait on a single socket at a time
# and don't want to keep track of state, so the stateless APIs are actually
# more efficient. So we want to use select() or poll().
#
# Now, how do we choose between select() and poll()? On traditional Unixes,
# select() has a strange calling convention that makes it slow, or fail
# altogether, for high-numbered file descriptors. The point of poll() is to fix
# that, so on Unixes, we prefer poll().
#
# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
# for it), but that's OK, because on Windows, select() doesn't have this
# strange calling convention; plain select() works fine.
#
# So: on Windows we use select(), and everywhere else we use poll(). We also
# fall back to select() in case poll() is somehow broken or missing.
if sys.version_info >= (3, 5):
# Modern Python, that retries syscalls by default
def _retry_on_intr(fn, timeout):
return fn(timeout)
else:
# Old and broken Pythons.
def _retry_on_intr(fn, timeout):
if timeout is None:
deadline = float("inf")
else:
deadline = monotonic() + timeout
while True:
try:
return fn(timeout)
# OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
except (OSError, select.error) as e:
# 'e.args[0]' incantation works for both OSError and select.error
if e.args[0] != errno.EINTR:
raise
else:
timeout = deadline - monotonic()
if timeout < 0:
timeout = 0
if timeout == float("inf"):
timeout = None
continue
def select_wait_for_socket(sock, read=False, write=False, timeout=None):
if not read and not write:
raise RuntimeError("must specify at least one of read=True, write=True")
rcheck = []
wcheck = []
if read:
rcheck.append(sock)
if write:
wcheck.append(sock)
# When doing a non-blocking connect, most systems signal success by
# marking the socket writable. Windows, though, signals success by marked
# it as "exceptional". We paper over the difference by checking the write
# sockets for both conditions. (The stdlib selectors module does the same
# thing.)
fn = partial(select.select, rcheck, wcheck, wcheck)
rready, wready, xready = _retry_on_intr(fn, timeout)
return bool(rready or wready or xready)
def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
if not read and not write:
raise RuntimeError("must specify at least one of read=True, write=True")
mask = 0
if read:
mask |= select.POLLIN
if write:
mask |= select.POLLOUT
poll_obj = select.poll()
poll_obj.register(sock, mask)
# For some reason, poll() takes timeout in milliseconds
def do_poll(t):
if t is not None:
t *= 1000
return poll_obj.poll(t)
return bool(_retry_on_intr(do_poll, timeout))
def null_wait_for_socket(*args, **kwargs):
raise NoWayToWaitForSocketError("no select-equivalent available")
def _have_working_poll():
# Apparently some systems have a select.poll that fails as soon as you try
# to use it, either due to strange configuration or broken monkeypatching
# from libraries like eventlet/greenlet.
try:
poll_obj = select.poll()
_retry_on_intr(poll_obj.poll, 0)
except (AttributeError, OSError):
return False
else:
return True
def wait_for_socket(*args, **kwargs):
# We delay choosing which implementation to use until the first time we're
# called. We could do it at import time, but then we might make the wrong
# decision if someone goes wild with monkeypatching select.poll after
# we're imported.
global wait_for_socket
if _have_working_poll():
wait_for_socket = poll_wait_for_socket
elif hasattr(select, "select"):
wait_for_socket = select_wait_for_socket
else: # Platform-specific: Appengine.
wait_for_socket = null_wait_for_socket
return wait_for_socket(*args, **kwargs)
def wait_for_read(sock, timeout=None):
""" Waits for reading to be available on a given socket.
Returns True if the socket is readable, or False if the timeout expired.
"""
return wait_for_socket(sock, read=True, timeout=timeout)
def wait_for_write(sock, timeout=None):
""" Waits for writing to be available on a given socket.
Returns True if the socket is readable, or False if the timeout expired.
"""
return wait_for_socket(sock, write=True, timeout=timeout)
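# Minimal usage sketch (the socket and request below are placeholders, not part of this module):
#
#     import socket
#     s = socket.create_connection(("example.org", 80))
#     s.sendall(b"GET / HTTP/1.0\r\nHost: example.org\r\n\r\n")
#     if wait_for_read(s, timeout=5.0):
#         data = s.recv(4096)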
| mit |
EeOneDown/spbu4u | telebot_login/__init__.py | 1 | 4022 | from functools import wraps
from flask import g
from app.constants import (
ask_to_register_answer, student_required_answer, educator_required_answer
)
from app.models import User
from tg_bot import bot
def login_required_message(func):
@wraps(func)
def wrapper(message):
user = User.query.filter_by(tg_id=message.chat.id).first()
if user:
g.current_tbot_user = user
func(message)
else:
bot.reply_to(message, ask_to_register_answer)
return wrapper
def login_required_callback(func):
@wraps(func)
def wrapper(call_back):
user = User.query.filter_by(tg_id=call_back.message.chat.id).first()
if user:
g.current_tbot_user = user
func(call_back)
else:
bot.edit_message_text(
text=ask_to_register_answer,
chat_id=call_back.message.chat.id,
message_id=call_back.message.message_id,
parse_mode="HTML"
)
return wrapper
def login_required_inline(func):
@wraps(func)
def wrapper(inline_query):
user = User.query.filter_by(tg_id=inline_query.from_user.id).first()
if user:
g.current_tbot_user = user
func(inline_query)
else:
bot.answer_inline_query(
inline_query_id=inline_query.id,
results=[],
switch_pm_text=ask_to_register_answer,
switch_pm_parameter="new_from_inline",
cache_time=1,
is_personal=True
)
return wrapper
def student_required_message(func):
@wraps(func)
def wrapper(message):
if not g.current_tbot_user.is_educator:
func(message)
else:
bot.reply_to(message, student_required_answer)
return wrapper
def student_required_callback(func):
@wraps(func)
def wrapper(call_back):
if not g.current_tbot_user.is_educator:
func(call_back)
else:
bot.edit_message_text(
text=student_required_answer,
chat_id=g.current_tbot_user.tg_id,
message_id=call_back.message.message_id,
parse_mode="HTML"
)
return wrapper
def student_required_inline(func):
@wraps(func)
def wrapper(inline_query):
if not g.current_tbot_user.is_educator:
func(inline_query)
else:
bot.answer_inline_query(
inline_query_id=inline_query.id,
results=[],
switch_pm_text=student_required_answer,
switch_pm_parameter="educator_from_inline",
cache_time=10,
is_personal=True
)
return wrapper
def educator_required_message(func):
@wraps(func)
def wrapper(message):
if g.current_tbot_user.is_educator:
func(message)
else:
bot.reply_to(message, educator_required_answer)
return wrapper
def educator_required_callback(func):
@wraps(func)
def wrapper(call_back):
if g.current_tbot_user.is_educator:
func(call_back)
else:
bot.edit_message_text(
text=educator_required_answer,
chat_id=g.current_tbot_user.tg_id,
message_id=call_back.message.message_id,
parse_mode="HTML"
)
return wrapper
def educator_required_inline(func):
@wraps(func)
def wrapper(inline_query):
if g.current_tbot_user.is_educator:
func(inline_query)
else:
bot.answer_inline_query(
inline_query_id=inline_query.id,
results=[],
switch_pm_text=educator_required_answer,
switch_pm_parameter="student_from_inline",
cache_time=10,
is_personal=True
)
return wrapper
from telebot_login import help_decorators
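# Typical composition on a handler (a sketch; the handler name and command are made up).
# The login_required_* decorator must be outermost so g.current_tbot_user is set before
# the role check runs:
#
#     @bot.message_handler(commands=["schedule"])
#     @login_required_message
#     @student_required_message
#     def schedule_handler(message):
#         bot.send_message(g.current_tbot_user.tg_id, "...")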
| apache-2.0 |
stuntman723/rap-analyzer | rap_analyzer/lib/python2.7/site-packages/psycopg2/extras.py | 25 | 31956 | """Miscellaneous goodies for psycopg2
This module is a generic place used to hold little helper functions
and classes until a better place in the distribution is found.
"""
# psycopg/extras.py - miscellaneous extra goodies for psycopg
#
# Copyright (C) 2003-2010 Federico Di Gregorio <[email protected]>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import os as _os
import sys as _sys
import time as _time
import re as _re
try:
import logging as _logging
except:
_logging = None
import psycopg2
from psycopg2 import extensions as _ext
from psycopg2.extensions import cursor as _cursor
from psycopg2.extensions import connection as _connection
from psycopg2.extensions import adapt as _A
from psycopg2.extensions import b
class DictCursorBase(_cursor):
"""Base class for all dict-like cursors."""
def __init__(self, *args, **kwargs):
if 'row_factory' in kwargs:
row_factory = kwargs['row_factory']
del kwargs['row_factory']
else:
raise NotImplementedError(
"DictCursorBase can't be instantiated without a row factory.")
super(DictCursorBase, self).__init__(*args, **kwargs)
self._query_executed = 0
self._prefetch = 0
self.row_factory = row_factory
def fetchone(self):
if self._prefetch:
res = super(DictCursorBase, self).fetchone()
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super(DictCursorBase, self).fetchone()
return res
def fetchmany(self, size=None):
if self._prefetch:
res = super(DictCursorBase, self).fetchmany(size)
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super(DictCursorBase, self).fetchmany(size)
return res
def fetchall(self):
if self._prefetch:
res = super(DictCursorBase, self).fetchall()
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super(DictCursorBase, self).fetchall()
return res
def __iter__(self):
if self._prefetch:
res = super(DictCursorBase, self).__iter__()
first = res.next()
if self._query_executed:
self._build_index()
if not self._prefetch:
res = super(DictCursorBase, self).__iter__()
first = res.next()
yield first
while 1:
yield res.next()
class DictConnection(_connection):
"""A connection that uses `DictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', DictCursor)
return super(DictConnection, self).cursor(*args, **kwargs)
class DictCursor(DictCursorBase):
"""A cursor that keeps a list of column name -> index mappings."""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = DictRow
super(DictCursor, self).__init__(*args, **kwargs)
self._prefetch = 1
def execute(self, query, vars=None):
self.index = {}
self._query_executed = 1
return super(DictCursor, self).execute(query, vars)
def callproc(self, procname, vars=None):
self.index = {}
self._query_executed = 1
return super(DictCursor, self).callproc(procname, vars)
def _build_index(self):
if self._query_executed == 1 and self.description:
for i in range(len(self.description)):
self.index[self.description[i][0]] = i
self._query_executed = 0
class DictRow(list):
"""A row object that allow by-column-name access to data."""
__slots__ = ('_index',)
def __init__(self, cursor):
self._index = cursor.index
self[:] = [None] * len(cursor.description)
def __getitem__(self, x):
if not isinstance(x, (int, slice)):
x = self._index[x]
return list.__getitem__(self, x)
def __setitem__(self, x, v):
if not isinstance(x, (int, slice)):
x = self._index[x]
list.__setitem__(self, x, v)
def items(self):
return list(self.iteritems())
def keys(self):
return self._index.keys()
def values(self):
return tuple(self[:])
def has_key(self, x):
return x in self._index
def get(self, x, default=None):
try:
return self[x]
except:
return default
def iteritems(self):
for n, v in self._index.iteritems():
yield n, list.__getitem__(self, v)
def iterkeys(self):
return self._index.iterkeys()
def itervalues(self):
return list.__iter__(self)
def copy(self):
return dict(self.iteritems())
def __contains__(self, x):
return x in self._index
def __getstate__(self):
return self[:], self._index.copy()
def __setstate__(self, data):
self[:] = data[0]
self._index = data[1]
# drop the crusty Py2 methods
if _sys.version_info[0] > 2:
items = iteritems; del iteritems
keys = iterkeys; del iterkeys
values = itervalues; del itervalues
del has_key
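# Illustrative usage of DictCursor/DictRow (editor's sketch, not part of the
# original module; the DSN below is a placeholder for any reachable database):
#
# >>> conn = psycopg2.connect("dbname=test")
# >>> curs = conn.cursor(cursor_factory=DictCursor)
# >>> curs.execute("SELECT 1 AS id, 'abc' AS data")
# >>> rec = curs.fetchone()
# >>> rec['data']              # by column name
# 'abc'
# >>> rec[0]                   # by position, like a regular tuple
# 1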
class RealDictConnection(_connection):
"""A connection that uses `RealDictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', RealDictCursor)
return super(RealDictConnection, self).cursor(*args, **kwargs)
class RealDictCursor(DictCursorBase):
"""A cursor that uses a real dict as the base type for rows.
Note that this cursor is extremely specialized and does not allow
the normal access (using integer indices) to fetched data. If you need
to access database rows both as a dictionary and a list, then use
the generic `DictCursor` instead of `!RealDictCursor`.
"""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = RealDictRow
super(RealDictCursor, self).__init__(*args, **kwargs)
self._prefetch = 0
def execute(self, query, vars=None):
self.column_mapping = []
self._query_executed = 1
return super(RealDictCursor, self).execute(query, vars)
def callproc(self, procname, vars=None):
self.column_mapping = []
self._query_executed = 1
return super(RealDictCursor, self).callproc(procname, vars)
def _build_index(self):
if self._query_executed == 1 and self.description:
for i in range(len(self.description)):
self.column_mapping.append(self.description[i][0])
self._query_executed = 0
class RealDictRow(dict):
"""A `!dict` subclass representing a data record."""
__slots__ = ('_column_mapping')
def __init__(self, cursor):
dict.__init__(self)
# Required for named cursors
if cursor.description and not cursor.column_mapping:
cursor._build_index()
self._column_mapping = cursor.column_mapping
def __setitem__(self, name, value):
if type(name) == int:
name = self._column_mapping[name]
return dict.__setitem__(self, name, value)
def __getstate__(self):
return (self.copy(), self._column_mapping[:])
def __setstate__(self, data):
self.update(data[0])
self._column_mapping = data[1]
class NamedTupleConnection(_connection):
"""A connection that uses `NamedTupleCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', NamedTupleCursor)
return super(NamedTupleConnection, self).cursor(*args, **kwargs)
class NamedTupleCursor(_cursor):
"""A cursor that generates results as `~collections.namedtuple`.
`!fetch*()` methods will return named tuples instead of regular tuples, so
    their elements can be accessed both as regular numeric items and as
attributes.
>>> nt_cur = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
>>> rec = nt_cur.fetchone()
>>> rec
Record(id=1, num=100, data="abc'def")
>>> rec[1]
100
>>> rec.data
"abc'def"
"""
Record = None
def execute(self, query, vars=None):
self.Record = None
return super(NamedTupleCursor, self).execute(query, vars)
def executemany(self, query, vars):
self.Record = None
return super(NamedTupleCursor, self).executemany(query, vars)
def callproc(self, procname, vars=None):
self.Record = None
return super(NamedTupleCursor, self).callproc(procname, vars)
def fetchone(self):
t = super(NamedTupleCursor, self).fetchone()
if t is not None:
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return nt._make(t)
def fetchmany(self, size=None):
ts = super(NamedTupleCursor, self).fetchmany(size)
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return map(nt._make, ts)
def fetchall(self):
ts = super(NamedTupleCursor, self).fetchall()
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return map(nt._make, ts)
def __iter__(self):
it = super(NamedTupleCursor, self).__iter__()
t = it.next()
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
yield nt._make(t)
while 1:
yield nt._make(it.next())
try:
from collections import namedtuple
except ImportError, _exc:
def _make_nt(self):
raise self._exc
else:
def _make_nt(self, namedtuple=namedtuple):
return namedtuple("Record", [d[0] for d in self.description or ()])
class LoggingConnection(_connection):
"""A connection that logs all queries to a file or logger__ object.
.. __: http://docs.python.org/library/logging.html
"""
def initialize(self, logobj):
"""Initialize the connection to log to `!logobj`.
The `!logobj` parameter can be an open file object or a Logger
instance from the standard logging module.
"""
self._logobj = logobj
if _logging and isinstance(logobj, _logging.Logger):
self.log = self._logtologger
else:
self.log = self._logtofile
def filter(self, msg, curs):
"""Filter the query before logging it.
        This is the method to override to filter unwanted queries out of the
log or to add some extra data to the output. The default implementation
just does nothing.
"""
return msg
def _logtofile(self, msg, curs):
msg = self.filter(msg, curs)
if msg: self._logobj.write(msg + _os.linesep)
def _logtologger(self, msg, curs):
msg = self.filter(msg, curs)
if msg: self._logobj.debug(msg)
def _check(self):
if not hasattr(self, '_logobj'):
raise self.ProgrammingError(
"LoggingConnection object has not been initialize()d")
def cursor(self, *args, **kwargs):
self._check()
kwargs.setdefault('cursor_factory', LoggingCursor)
return super(LoggingConnection, self).cursor(*args, **kwargs)
class LoggingCursor(_cursor):
"""A cursor that logs queries using its connection logging facilities."""
def execute(self, query, vars=None):
try:
return super(LoggingCursor, self).execute(query, vars)
finally:
self.connection.log(self.query, self)
def callproc(self, procname, vars=None):
try:
return super(LoggingCursor, self).callproc(procname, vars)
finally:
self.connection.log(self.query, self)
class MinTimeLoggingConnection(LoggingConnection):
"""A connection that logs queries based on execution time.
This is just an example of how to sub-class `LoggingConnection` to
provide some extra filtering for the logged queries. Both the
    `initialize()` and `filter()` methods are overridden to make sure
that only queries executing for more than ``mintime`` ms are logged.
Note that this connection uses the specialized cursor
`MinTimeLoggingCursor`.
"""
def initialize(self, logobj, mintime=0):
LoggingConnection.initialize(self, logobj)
self._mintime = mintime
def filter(self, msg, curs):
t = (_time.time() - curs.timestamp) * 1000
if t > self._mintime:
return msg + _os.linesep + " (execution time: %d ms)" % t
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', MinTimeLoggingCursor)
return LoggingConnection.cursor(self, *args, **kwargs)
class MinTimeLoggingCursor(LoggingCursor):
"""The cursor sub-class companion to `MinTimeLoggingConnection`."""
def execute(self, query, vars=None):
self.timestamp = _time.time()
return LoggingCursor.execute(self, query, vars)
def callproc(self, procname, vars=None):
self.timestamp = _time.time()
return LoggingCursor.callproc(self, procname, vars)
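# Illustrative usage of the logging connections (editor's sketch; the DSN and
# the 500 ms threshold are placeholders):
#
# >>> import sys
# >>> conn = psycopg2.connect("dbname=test",
# ...                         connection_factory=MinTimeLoggingConnection)
# >>> conn.initialize(sys.stderr, mintime=500)
# >>> curs = conn.cursor()
# >>> curs.execute("SELECT pg_sleep(1)")   # slower than 500 ms, so it is logged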
# a dbtype and adapter for Python UUID type
class UUID_adapter(object):
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
.. __: http://docs.python.org/library/uuid.html
.. __: http://www.postgresql.org/docs/current/static/datatype-uuid.html
"""
def __init__(self, uuid):
self._uuid = uuid
def __conform__(self, proto):
if proto is _ext.ISQLQuote:
return self
def getquoted(self):
return b("'%s'::uuid" % self._uuid)
def __str__(self):
return "'%s'::uuid" % self._uuid
def register_uuid(oids=None, conn_or_curs=None):
"""Create the UUID type and an uuid.UUID adapter.
    :param oids: oid for the PostgreSQL :sql:`uuid` type, or a 2-item sequence
with oids of the type and the array. If not specified, use PostgreSQL
standard oids.
:param conn_or_curs: where to register the typecaster. If not specified,
register it globally.
"""
import uuid
if not oids:
oid1 = 2950
oid2 = 2951
elif isinstance(oids, (list, tuple)):
oid1, oid2 = oids
else:
oid1 = oids
oid2 = 2951
_ext.UUID = _ext.new_type((oid1, ), "UUID",
lambda data, cursor: data and uuid.UUID(data) or None)
_ext.UUIDARRAY = _ext.new_array_type((oid2,), "UUID[]", _ext.UUID)
_ext.register_type(_ext.UUID, conn_or_curs)
_ext.register_type(_ext.UUIDARRAY, conn_or_curs)
_ext.register_adapter(uuid.UUID, UUID_adapter)
return _ext.UUID
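# Illustrative round-trip with register_uuid (editor's sketch; connection
# parameters are placeholders):
#
# >>> import uuid
# >>> register_uuid()
# >>> conn = psycopg2.connect("dbname=test"); curs = conn.cursor()
# >>> curs.execute("SELECT %s", (uuid.uuid4(),))
# >>> curs.fetchone()[0]        # comes back as a uuid.UUID instance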
# a type, dbtype and adapter for PostgreSQL inet type
class Inet(object):
"""Wrap a string to allow for correct SQL-quoting of inet values.
Note that this adapter does NOT check the passed value to make
sure it really is an inet-compatible address but DOES call adapt()
on it to make sure it is impossible to execute an SQL-injection
by passing an evil value to the initializer.
"""
def __init__(self, addr):
self.addr = addr
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.addr)
def prepare(self, conn):
self._conn = conn
def getquoted(self):
obj = _A(self.addr)
if hasattr(obj, 'prepare'):
obj.prepare(self._conn)
return obj.getquoted() + b("::inet")
def __conform__(self, proto):
if proto is _ext.ISQLQuote:
return self
def __str__(self):
return str(self.addr)
def register_inet(oid=None, conn_or_curs=None):
"""Create the INET type and an Inet adapter.
    :param oid: oid for the PostgreSQL :sql:`inet` type, or a 2-item sequence
with oids of the type and the array. If not specified, use PostgreSQL
standard oids.
:param conn_or_curs: where to register the typecaster. If not specified,
register it globally.
"""
if not oid:
oid1 = 869
oid2 = 1041
elif isinstance(oid, (list, tuple)):
oid1, oid2 = oid
else:
oid1 = oid
oid2 = 1041
_ext.INET = _ext.new_type((oid1, ), "INET",
lambda data, cursor: data and Inet(data) or None)
_ext.INETARRAY = _ext.new_array_type((oid2, ), "INETARRAY", _ext.INET)
_ext.register_type(_ext.INET, conn_or_curs)
_ext.register_type(_ext.INETARRAY, conn_or_curs)
return _ext.INET
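# Illustrative usage of register_inet (editor's sketch; `curs` is a cursor on
# any open connection, as in the sketches above):
#
# >>> register_inet()
# >>> curs.execute("SELECT '192.168.100.128/25'::inet")
# >>> curs.fetchone()[0]        # returned wrapped in an Inet instance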
def register_tstz_w_secs(oids=None, conn_or_curs=None):
"""The function used to register an alternate type caster for
:sql:`TIMESTAMP WITH TIME ZONE` to deal with historical time zones with
seconds in the UTC offset.
These are now correctly handled by the default type caster, so currently
the function doesn't do anything.
"""
import warnings
warnings.warn("deprecated", DeprecationWarning)
def wait_select(conn):
"""Wait until a connection or cursor has data available.
The function is an example of a wait callback to be registered with
`~psycopg2.extensions.set_wait_callback()`. This function uses
:py:func:`~select.select()` to wait for data available.
"""
import select
from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE
while 1:
state = conn.poll()
if state == POLL_OK:
break
elif state == POLL_READ:
select.select([conn.fileno()], [], [])
elif state == POLL_WRITE:
select.select([], [conn.fileno()], [])
else:
raise conn.OperationalError("bad state from poll: %s" % state)
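# Illustrative registration of the callback above (editor's sketch): once a
# wait callback is set, regular blocking calls are serviced through it.
#
# >>> _ext.set_wait_callback(wait_select)
# >>> conn = psycopg2.connect("dbname=test")   # now polled via wait_select()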
def _solve_conn_curs(conn_or_curs):
"""Return the connection and a DBAPI cursor from a connection or cursor."""
if conn_or_curs is None:
raise psycopg2.ProgrammingError("no connection or cursor provided")
if hasattr(conn_or_curs, 'execute'):
conn = conn_or_curs.connection
curs = conn.cursor(cursor_factory=_cursor)
else:
conn = conn_or_curs
curs = conn.cursor(cursor_factory=_cursor)
return conn, curs
class HstoreAdapter(object):
"""Adapt a Python dict to the hstore syntax."""
def __init__(self, wrapped):
self.wrapped = wrapped
def prepare(self, conn):
self.conn = conn
# use an old-style getquoted implementation if required
if conn.server_version < 90000:
self.getquoted = self._getquoted_8
def _getquoted_8(self):
"""Use the operators available in PG pre-9.0."""
if not self.wrapped:
return b("''::hstore")
adapt = _ext.adapt
rv = []
for k, v in self.wrapped.iteritems():
k = adapt(k)
k.prepare(self.conn)
k = k.getquoted()
if v is not None:
v = adapt(v)
v.prepare(self.conn)
v = v.getquoted()
else:
v = b('NULL')
# XXX this b'ing is painfully inefficient!
rv.append(b("(") + k + b(" => ") + v + b(")"))
return b("(") + b('||').join(rv) + b(")")
def _getquoted_9(self):
"""Use the hstore(text[], text[]) function."""
if not self.wrapped:
return b("''::hstore")
k = _ext.adapt(self.wrapped.keys())
k.prepare(self.conn)
v = _ext.adapt(self.wrapped.values())
v.prepare(self.conn)
return b("hstore(") + k.getquoted() + b(", ") + v.getquoted() + b(")")
getquoted = _getquoted_9
_re_hstore = _re.compile(r"""
# hstore key:
# a string of normal or escaped chars
"((?: [^"\\] | \\. )*)"
\s*=>\s* # hstore value
(?:
        NULL                    # the value can be null - not captured
# or a quoted string like the key
| "((?: [^"\\] | \\. )*)"
)
(?:\s*,\s*|$) # pairs separated by comma or end of string.
""", _re.VERBOSE)
@classmethod
def parse(self, s, cur, _bsdec=_re.compile(r"\\(.)")):
"""Parse an hstore representation in a Python string.
The hstore is represented as something like::
"a"=>"1", "b"=>"2"
with backslash-escaped strings.
"""
if s is None:
return None
rv = {}
start = 0
for m in self._re_hstore.finditer(s):
if m is None or m.start() != start:
raise psycopg2.InterfaceError(
"error parsing hstore pair at char %d" % start)
k = _bsdec.sub(r'\1', m.group(1))
v = m.group(2)
if v is not None:
v = _bsdec.sub(r'\1', v)
rv[k] = v
start = m.end()
if start < len(s):
raise psycopg2.InterfaceError(
"error parsing hstore: unparsed data after char %d" % start)
return rv
@classmethod
def parse_unicode(self, s, cur):
"""Parse an hstore returning unicode keys and values."""
if s is None:
return None
s = s.decode(_ext.encodings[cur.connection.encoding])
return self.parse(s, cur)
@classmethod
def get_oids(self, conn_or_curs):
"""Return the lists of OID of the hstore and hstore[] types.
"""
conn, curs = _solve_conn_curs(conn_or_curs)
# Store the transaction status of the connection to revert it after use
conn_status = conn.status
# column typarray not available before PG 8.3
typarray = conn.server_version >= 80300 and "typarray" or "NULL"
rv0, rv1 = [], []
# get the oid for the hstore
curs.execute("""\
SELECT t.oid, %s
FROM pg_type t JOIN pg_namespace ns
ON typnamespace = ns.oid
WHERE typname = 'hstore';
""" % typarray)
for oids in curs:
rv0.append(oids[0])
rv1.append(oids[1])
# revert the status of the connection as before the command
if (conn_status != _ext.STATUS_IN_TRANSACTION
and not conn.autocommit):
conn.rollback()
return tuple(rv0), tuple(rv1)
def register_hstore(conn_or_curs, globally=False, unicode=False,
oid=None, array_oid=None):
"""Register adapter and typecaster for `!dict`\-\ |hstore| conversions.
:param conn_or_curs: a connection or cursor: the typecaster will be
registered only on this object unless *globally* is set to `!True`
:param globally: register the adapter globally, not only on *conn_or_curs*
:param unicode: if `!True`, keys and values returned from the database
will be `!unicode` instead of `!str`. The option is not available on
Python 3
:param oid: the OID of the |hstore| type if known. If not, it will be
queried on *conn_or_curs*.
:param array_oid: the OID of the |hstore| array type if known. If not, it
will be queried on *conn_or_curs*.
The connection or cursor passed to the function will be used to query the
database and look for the OID of the |hstore| type (which may be different
across databases). If querying is not desirable (e.g. with
:ref:`asynchronous connections <async-support>`) you may specify it in the
*oid* parameter, which can be found using a query such as :sql:`SELECT
'hstore'::regtype::oid`. Analogously you can obtain a value for *array_oid*
using a query such as :sql:`SELECT 'hstore[]'::regtype::oid`.
Note that, when passing a dictionary from Python to the database, both
strings and unicode keys and values are supported. Dictionaries returned
from the database have keys/values according to the *unicode* parameter.
The |hstore| contrib module must be already installed in the database
(executing the ``hstore.sql`` script in your ``contrib`` directory).
Raise `~psycopg2.ProgrammingError` if the type is not found.
"""
if oid is None:
oid = HstoreAdapter.get_oids(conn_or_curs)
if oid is None or not oid[0]:
raise psycopg2.ProgrammingError(
"hstore type not found in the database. "
"please install it from your 'contrib/hstore.sql' file")
else:
array_oid = oid[1]
oid = oid[0]
if isinstance(oid, int):
oid = (oid,)
if array_oid is not None:
if isinstance(array_oid, int):
array_oid = (array_oid,)
else:
array_oid = tuple([x for x in array_oid if x])
# create and register the typecaster
if _sys.version_info[0] < 3 and unicode:
cast = HstoreAdapter.parse_unicode
else:
cast = HstoreAdapter.parse
HSTORE = _ext.new_type(oid, "HSTORE", cast)
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
_ext.register_adapter(dict, HstoreAdapter)
if array_oid:
HSTOREARRAY = _ext.new_array_type(array_oid, "HSTOREARRAY", HSTORE)
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
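# Illustrative usage of register_hstore (editor's sketch; the hstore extension
# must already be installed in the target database):
#
# >>> register_hstore(conn)
# >>> curs = conn.cursor()
# >>> curs.execute("SELECT %s", ({'a': '1', 'b': None},))
# >>> curs.fetchone()[0]
# {'a': '1', 'b': None}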
class CompositeCaster(object):
"""Helps conversion of a PostgreSQL composite type into a Python object.
The class is usually created by the `register_composite()` function.
  You may want to create and register instances of the class manually if
  querying the database at registration time is not desirable (such as when
  using an :ref:`asynchronous connection <async-support>`).
"""
def __init__(self, name, oid, attrs, array_oid=None, schema=None):
self.name = name
self.schema = schema
self.oid = oid
self.array_oid = array_oid
self.attnames = [ a[0] for a in attrs ]
self.atttypes = [ a[1] for a in attrs ]
self._create_type(name, self.attnames)
self.typecaster = _ext.new_type((oid,), name, self.parse)
if array_oid:
self.array_typecaster = _ext.new_array_type(
(array_oid,), "%sARRAY" % name, self.typecaster)
else:
self.array_typecaster = None
def parse(self, s, curs):
if s is None:
return None
tokens = self.tokenize(s)
if len(tokens) != len(self.atttypes):
raise psycopg2.DataError(
"expecting %d components for the type %s, %d found instead" %
(len(self.atttypes), self.name, len(tokens)))
values = [ curs.cast(oid, token)
for oid, token in zip(self.atttypes, tokens) ]
return self.make(values)
def make(self, values):
"""Return a new Python object representing the data being casted.
*values* is the list of attributes, already casted into their Python
representation.
You can subclass this method to :ref:`customize the composite cast
<custom-composite>`.
"""
return self._ctor(values)
_re_tokenize = _re.compile(r"""
\(? ([,)]) # an empty token, representing NULL
| \(? " ((?: [^"] | "")*) " [,)] # or a quoted string
| \(? ([^",)]+) [,)] # or an unquoted string
""", _re.VERBOSE)
_re_undouble = _re.compile(r'(["\\])\1')
@classmethod
def tokenize(self, s):
rv = []
for m in self._re_tokenize.finditer(s):
if m is None:
raise psycopg2.InterfaceError("can't parse type: %r" % s)
if m.group(1) is not None:
rv.append(None)
elif m.group(2) is not None:
rv.append(self._re_undouble.sub(r"\1", m.group(2)))
else:
rv.append(m.group(3))
return rv
def _create_type(self, name, attnames):
try:
from collections import namedtuple
except ImportError:
self.type = tuple
self._ctor = self.type
else:
self.type = namedtuple(name, attnames)
self._ctor = self.type._make
@classmethod
def _from_db(self, name, conn_or_curs):
"""Return a `CompositeCaster` instance for the type *name*.
Raise `ProgrammingError` if the type is not found.
"""
conn, curs = _solve_conn_curs(conn_or_curs)
# Store the transaction status of the connection to revert it after use
conn_status = conn.status
# Use the correct schema
if '.' in name:
schema, tname = name.split('.', 1)
else:
tname = name
schema = 'public'
# column typarray not available before PG 8.3
typarray = conn.server_version >= 80300 and "typarray" or "NULL"
# get the type oid and attributes
curs.execute("""\
SELECT t.oid, %s, attname, atttypid
FROM pg_type t
JOIN pg_namespace ns ON typnamespace = ns.oid
JOIN pg_attribute a ON attrelid = typrelid
WHERE typname = %%s AND nspname = %%s
AND attnum > 0 AND NOT attisdropped
ORDER BY attnum;
""" % typarray, (tname, schema))
recs = curs.fetchall()
# revert the status of the connection as before the command
if (conn_status != _ext.STATUS_IN_TRANSACTION
and not conn.autocommit):
conn.rollback()
if not recs:
raise psycopg2.ProgrammingError(
"PostgreSQL type '%s' not found" % name)
type_oid = recs[0][0]
array_oid = recs[0][1]
type_attrs = [ (r[2], r[3]) for r in recs ]
return self(tname, type_oid, type_attrs,
array_oid=array_oid, schema=schema)
def register_composite(name, conn_or_curs, globally=False, factory=None):
"""Register a typecaster to convert a composite type into a tuple.
:param name: the name of a PostgreSQL composite type, e.g. created using
the |CREATE TYPE|_ command
:param conn_or_curs: a connection or cursor used to find the type oid and
components; the typecaster is registered in a scope limited to this
object, unless *globally* is set to `!True`
:param globally: if `!False` (default) register the typecaster only on
*conn_or_curs*, otherwise register it globally
:param factory: if specified it should be a `CompositeCaster` subclass: use
it to :ref:`customize how to cast composite types <custom-composite>`
:return: the registered `CompositeCaster` or *factory* instance
responsible for the conversion
"""
if factory is None:
factory = CompositeCaster
caster = factory._from_db(name, conn_or_curs)
_ext.register_type(caster.typecaster, not globally and conn_or_curs or None)
if caster.array_typecaster is not None:
_ext.register_type(caster.array_typecaster, not globally and conn_or_curs or None)
return caster
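# Illustrative usage of register_composite (editor's sketch; the "card" type
# is hypothetical and must exist in the target database):
#
# >>> curs.execute("CREATE TYPE card AS (value int, suit text)")
# >>> register_composite('card', curs)
# >>> curs.execute("SELECT (8, 'hearts')::card")
# >>> curs.fetchone()[0]
# card(value=8, suit='hearts')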
# expose the json adaptation stuff into the module
from psycopg2._json import json, Json, register_json
from psycopg2._json import register_default_json, register_default_jsonb
# Expose range-related objects
from psycopg2._range import Range, NumericRange
from psycopg2._range import DateRange, DateTimeRange, DateTimeTZRange
from psycopg2._range import register_range, RangeAdapter, RangeCaster
| mit |
cudeso/AIL-framework | bin/indexer_lookup.py | 2 | 2895 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of AIL framework - Analysis Information Leak framework
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Copyright (c) 2014 Alexandre Dulaunoy - [email protected]
import ConfigParser
import argparse
import gzip
import os
def readdoc(path=None):
if path is None:
return False
f = gzip.open(path, 'r')
return f.read()
configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
cfg = ConfigParser.ConfigParser()
cfg.read(configfile)
# Indexer configuration - index dir and schema setup
indexpath = os.path.join(os.environ['AIL_HOME'], cfg.get("Indexer", "path"))
indexertype = cfg.get("Indexer", "type")
argParser = argparse.ArgumentParser(description='Fulltext search for AIL')
argParser.add_argument('-q', action='append', help='query to lookup (one or more)')
argParser.add_argument('-n', action='store_true', default=False, help='return numbers of indexed documents')
argParser.add_argument('-t', action='store_true', default=False, help='dump top 500 terms')
argParser.add_argument('-l', action='store_true', default=False, help='dump all terms encountered in indexed documents')
argParser.add_argument('-f', action='store_true', default=False, help='dump each matching document')
argParser.add_argument('-s', action='append', help='search similar documents')
args = argParser.parse_args()
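# Illustrative invocations (editor's sketch; AIL_BIN/AIL_HOME must point at an
# existing AIL install whose Whoosh index has already been built):
#
#   python indexer_lookup.py -n                # number of indexed documents
#   python indexer_lookup.py -q credentials    # paths of pastes matching the query
#   python indexer_lookup.py -q leak -f        # dump each matching document's content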
from whoosh import index
from whoosh.fields import Schema, TEXT, ID
schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)
ix = index.open_dir(indexpath)
from whoosh.qparser import QueryParser
if args.n:
print ix.doc_count_all()
exit(0)
if args.l:
xr = ix.searcher().reader()
for x in xr.lexicon("content"):
print (x)
exit(0)
if args.t:
xr = ix.searcher().reader()
for x in xr.most_frequent_terms("content", number=500, prefix=''):
print (x)
exit(0)
if args.s:
    # By default, the index does not store the document vector (see the Whoosh
    # document schema). Similarity search won't work unless you change the
    # schema of the index for the content. It depends on your storage strategy.
docnum = ix.searcher().document_number(path=args.s)
r = ix.searcher().more_like(docnum, "content")
for hit in r:
print(hit["path"])
exit(0)
if args.q is None:
argParser.print_help()
exit(1)
with ix.searcher() as searcher:
query = QueryParser("content", ix.schema).parse(" ".join(args.q))
results = searcher.search(query, limit=None)
for x in results:
if args.f:
print (readdoc(path=x.items()[0][1]))
else:
print (x.items()[0][1])
print
| agpl-3.0 |
IllusionRom-deprecated/android_platform_external_chromium_org_tools_grit | grit/tool/android2grd.py | 7 | 19811 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The 'grit android2grd' tool."""
import getopt
import os.path
import StringIO
from xml.dom import Node
import xml.dom.minidom
import grit.node.empty
from grit.node import io
from grit.node import message
from grit.tool import interface
from grit import grd_reader
from grit import lazy_re
from grit import tclib
from grit import util
# The name of a string in strings.xml
_STRING_NAME = lazy_re.compile(r'[a-z0-9_]+\Z')
# A string's character limit in strings.xml
_CHAR_LIMIT = lazy_re.compile(r'\[CHAR-LIMIT=(\d+)\]')
# Finds String.Format() style format specifiers such as "%-5.2f".
_FORMAT_SPECIFIER = lazy_re.compile(
'%'
'([1-9][0-9]*\$|<)?' # argument_index
'([-#+ 0,(]*)' # flags
'([0-9]+)?' # width
'(\.[0-9]+)?' # precision
'([bBhHsScCdoxXeEfgGaAtT%n])') # conversion
class Android2Grd(interface.Tool):
"""Tool for converting Android string.xml files into chrome Grd files.
Usage: grit [global options] android2grd [OPTIONS] STRINGS_XML
The Android2Grd tool will convert an Android strings.xml file (whose path is
specified by STRINGS_XML) and create a chrome style grd file containing the
relevant information.
  Because grd documents are much richer than strings.xml documents, we supplement
  the information required by grds using OPTIONS, which have sensible defaults.
OPTIONS may be any of the following:
--name FILENAME Specify the base FILENAME. This should be without
any file type suffix. By default
"chrome_android_strings" will be used.
--languages LANGUAGES Comma separated list of ISO language codes (e.g.
en-US, en-GB, ru, zh-CN). These codes will be used
to determine the names of resource and translations
files that will be declared by the output grd file.
--grd-dir GRD_DIR Specify where the resultant grd file
(FILENAME.grd) should be output. By default this
will be the present working directory.
--header-dir HEADER_DIR Specify the location of the directory where grit
generated C++ headers (whose name will be
FILENAME.h) will be placed. Use an empty string to
disable rc generation. Default: empty.
--rc-dir RC_DIR Specify the directory where resource files will
be located relative to grit build's output
directory. Use an empty string to disable rc
generation. Default: empty.
--xml-dir XML_DIR Specify where to place localized strings.xml files
relative to grit build's output directory. For each
language xx a values-xx/strings.xml file will be
generated. Use an empty string to disable
strings.xml generation. Default: '.'.
--xtb-dir XTB_DIR Specify where the xtb files containing translations
will be located relative to the grd file. Default:
'.'.
"""
_NAME_FLAG = 'name'
_LANGUAGES_FLAG = 'languages'
_GRD_DIR_FLAG = 'grd-dir'
_RC_DIR_FLAG = 'rc-dir'
_HEADER_DIR_FLAG = 'header-dir'
_XTB_DIR_FLAG = 'xtb-dir'
_XML_DIR_FLAG = 'xml-dir'
def __init__(self):
self.name = 'chrome_android_strings'
self.languages = []
self.grd_dir = '.'
self.rc_dir = None
self.xtb_dir = '.'
self.xml_res_dir = '.'
self.header_dir = None
def ShortDescription(self):
"""Returns a short description of the Android2Grd tool.
Overridden from grit.interface.Tool
Returns:
A string containing a short description of the android2grd tool.
"""
return 'Converts Android string.xml files into Chrome grd files.'
def ParseOptions(self, args):
"""Set this objects and return all non-option arguments."""
flags = [
Android2Grd._NAME_FLAG,
Android2Grd._LANGUAGES_FLAG,
Android2Grd._GRD_DIR_FLAG,
Android2Grd._RC_DIR_FLAG,
Android2Grd._HEADER_DIR_FLAG,
Android2Grd._XTB_DIR_FLAG,
Android2Grd._XML_DIR_FLAG, ]
(opts, args) = getopt.getopt(args, None, ['%s=' % o for o in flags])
for key, val in opts:
# Get rid of the preceding hypens.
k = key[2:]
if k == Android2Grd._NAME_FLAG:
self.name = val
elif k == Android2Grd._LANGUAGES_FLAG:
self.languages = val.split(',')
elif k == Android2Grd._GRD_DIR_FLAG:
self.grd_dir = val
elif k == Android2Grd._RC_DIR_FLAG:
self.rc_dir = val
elif k == Android2Grd._HEADER_DIR_FLAG:
self.header_dir = val
elif k == Android2Grd._XTB_DIR_FLAG:
self.xtb_dir = val
elif k == Android2Grd._XML_DIR_FLAG:
self.xml_res_dir = val
return args
def Run(self, opts, args):
"""Runs the Android2Grd tool.
Inherited from grit.interface.Tool.
Args:
opts: List of string arguments that should be parsed.
args: String containing the path of the strings.xml file to be converted.
"""
args = self.ParseOptions(args)
if len(args) != 1:
print ('Tool requires one argument, the path to the Android '
'strings.xml resource file to be converted.')
return 2
self.SetOptions(opts)
android_path = args[0]
# Read and parse the Android strings.xml file.
with open(android_path) as android_file:
android_dom = xml.dom.minidom.parse(android_file)
# Do the hard work -- convert the Android dom to grd file contents.
grd_dom = self.AndroidDomToGrdDom(android_dom)
grd_string = unicode(grd_dom)
# Write the grd string to a file in grd_dir.
grd_filename = self.name + '.grd'
grd_path = os.path.join(self.grd_dir, grd_filename)
with open(grd_path, 'w') as grd_file:
grd_file.write(grd_string)
def AndroidDomToGrdDom(self, android_dom):
"""Converts a strings.xml DOM into a DOM representing the contents of
a grd file.
Args:
android_dom: A xml.dom.Document containing the contents of the Android
string.xml document.
Returns:
The DOM for the grd xml document produced by converting the Android DOM.
"""
# Start with a basic skeleton for the .grd file.
root = grd_reader.Parse(StringIO.StringIO(
'''<?xml version="1.0" encoding="UTF-8"?>
<grit base_dir="." latest_public_release="0"
current_release="1" source_lang_id="en">
<release allow_pseudo="false" seq="1">
<messages fallback_to_english="true" />
</release>
<translations />
<outputs />
</grit>'''), dir='.')
messages = root.children[0].children[0]
translations = root.children[1]
outputs = root.children[2]
assert (isinstance(messages, grit.node.empty.MessagesNode) and
isinstance(translations, grit.node.empty.TranslationsNode) and
isinstance(outputs, grit.node.empty.OutputsNode))
if self.header_dir:
cpp_header = self.__CreateCppHeaderOutputNode(outputs, self.header_dir)
for lang in self.languages:
# Create an output element for each language.
if self.rc_dir:
self.__CreateRcOutputNode(outputs, lang, self.rc_dir)
if self.xml_res_dir:
self.__CreateAndroidXmlOutputNode(outputs, lang, self.xml_res_dir)
if lang != 'en':
self.__CreateFileNode(translations, lang)
# Convert all the strings.xml strings into grd messages.
self.__CreateMessageNodes(messages, android_dom.documentElement)
return root
def __CreateMessageNodes(self, messages, resources):
"""Creates the <message> elements and adds them as children of <messages>.
Args:
messages: the <messages> element in the strings.xml dom.
resources: the <resources> element in the grd dom.
"""
# <string> elements contain the definition of the resource.
# The description of a <string> element is contained within the comment
    # node element immediately preceding the string element in question.
description = ''
for child in resources.childNodes:
if child.nodeType == Node.COMMENT_NODE:
# Remove leading/trailing whitespace; collapse consecutive whitespaces.
description = ' '.join(child.data.split())
elif child.nodeType == Node.ELEMENT_NODE:
if child.tagName != 'string':
print 'Warning: ignoring unknown tag <%s>' % child.tagName
else:
translatable = self.IsTranslatable(child)
raw_name = child.getAttribute('name')
product = child.getAttribute('product') or None
grd_name = self.__FormatName(raw_name, product)
# Transform the <string> node contents into a tclib.Message, taking
# care to handle whitespace transformations and escaped characters,
          # and converting <xliff:g> placeholders into <ph> placeholders.
msg = self.CreateTclibMessage(child)
msg_node = self.__CreateMessageNode(messages, grd_name, description,
msg, translatable)
messages.AddChild(msg_node)
# Reset the description once a message has been parsed.
description = ''
def __FormatName(self, name, product=None):
"""Formats the message name.
Names in the strings.xml files should be lowercase with underscores. In grd
    files message names should be mostly uppercase with an IDS prefix. We also
    annotate names with product information (lowercase) where appropriate.
Args:
name: The message name as found in the string.xml file.
product: An optional product annotation.
Returns:
String containing the grd style name that will be used in the translation
console.
"""
if not _STRING_NAME.match(name):
print 'Error: string name contains illegal characters: %s' % name
grd_name = 'IDS_%s' % name.upper()
product_suffix = ('_product_%s' % product.lower()) if product else ''
return grd_name + product_suffix
def CreateTclibMessage(self, android_string):
"""Transforms a <string/> element from strings.xml into a tclib.Message.
Interprets whitespace, quotes, and escaped characters in the android_string
according to Android's formatting and styling rules for strings. Also
converts <xliff:g> placeholders into <ph> placeholders, e.g.:
<xliff:g id="website" example="google.com">%s</xliff:g>
becomes
<ph name="website"><ex>google.com</ex>%s</ph>
Returns:
The tclib.Message.
"""
msg = tclib.Message()
current_text = '' # Accumulated text that hasn't yet been added to msg.
nodes = android_string.childNodes
for i, node in enumerate(nodes):
# Handle text nodes.
if node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
current_text += node.data
# Handle <xliff:g> and other tags.
elif node.nodeType == Node.ELEMENT_NODE:
if node.tagName == 'xliff:g':
assert node.hasAttribute('id'), 'missing id: ' + node.data()
placeholder_id = node.getAttribute('id')
placeholder_text = self.__FormatPlaceholderText(node)
placeholder_example = node.getAttribute('example')
if not placeholder_example:
print ('Info: placeholder does not contain an example: %s' %
node.toxml())
placeholder_example = placeholder_id.upper()
msg.AppendPlaceholder(tclib.Placeholder(placeholder_id,
placeholder_text, placeholder_example))
else:
print ('Warning: removing tag <%s> which must be inside a '
'placeholder: %s' % (node.tagName, node.toxml()))
msg.AppendText(self.__FormatPlaceholderText(node))
# Handle other nodes.
elif node.nodeType != Node.COMMENT_NODE:
assert False, 'Unknown node type: %s' % node.nodeType
is_last_node = (i == len(nodes) - 1)
if (current_text and
(is_last_node or nodes[i + 1].nodeType == Node.ELEMENT_NODE)):
# For messages containing just text and comments (no xml tags) Android
# strips leading and trailing whitespace. We mimic that behavior.
if not msg.GetContent() and is_last_node:
current_text = current_text.strip()
msg.AppendText(self.__FormatAndroidString(current_text))
current_text = ''
return msg
def __FormatAndroidString(self, android_string, inside_placeholder=False):
r"""Returns android_string formatted for a .grd file.
* Collapses consecutive whitespaces, except when inside double-quotes.
* Replaces \\, \n, \t, \", \' with \, newline, tab, ", '.
"""
backslash_map = {'\\' : '\\', 'n' : '\n', 't' : '\t', '"' : '"', "'" : "'"}
is_quoted_section = False # True when we're inside double quotes.
is_backslash_sequence = False # True after seeing an unescaped backslash.
prev_char = ''
output = []
for c in android_string:
if is_backslash_sequence:
# Unescape \\, \n, \t, \", and \'.
assert c in backslash_map, 'Illegal escape sequence: \\%s' % c
output.append(backslash_map[c])
is_backslash_sequence = False
elif c == '\\':
is_backslash_sequence = True
elif c.isspace() and not is_quoted_section:
# Turn whitespace into ' ' and collapse consecutive whitespaces.
if not prev_char.isspace():
output.append(' ')
elif c == '"':
is_quoted_section = not is_quoted_section
else:
output.append(c)
prev_char = c
output = ''.join(output)
if is_quoted_section:
print 'Warning: unbalanced quotes in string: %s' % android_string
if is_backslash_sequence:
print 'Warning: trailing backslash in string: %s' % android_string
# Check for format specifiers outside of placeholder tags.
if not inside_placeholder:
format_specifier = _FORMAT_SPECIFIER.search(output)
if format_specifier:
print ('Warning: format specifiers are not inside a placeholder '
'<xliff:g/> tag: %s' % output)
return output
def __FormatPlaceholderText(self, placeholder_node):
"""Returns the text inside of an <xliff:g> placeholder node."""
text = []
for childNode in placeholder_node.childNodes:
if childNode.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
text.append(childNode.data)
elif childNode.nodeType != Node.COMMENT_NODE:
assert False, 'Unknown node type in ' + placeholder_node.toxml()
return self.__FormatAndroidString(''.join(text), inside_placeholder=True)
def __CreateMessageNode(self, messages_node, grd_name, description, msg,
translatable):
"""Creates and initializes a <message> element.
Message elements correspond to Android <string> elements in that they
declare a string resource along with a programmatic id.
"""
if not description:
print 'Warning: no description for %s' % grd_name
# Check that we actually fit within the character limit we've specified.
match = _CHAR_LIMIT.search(description)
if match:
char_limit = int(match.group(1))
msg_content = msg.GetRealContent()
if len(msg_content) > char_limit:
print ('Warning: char-limit for %s is %d, but length is %d: %s' %
(grd_name, char_limit, len(msg_content), msg_content))
return message.MessageNode.Construct(parent=messages_node,
name=grd_name,
message=msg,
desc=description,
translateable=translatable)
def __CreateFileNode(self, translations_node, lang):
"""Creates and initializes the <file> elements.
File elements provide information on the location of translation files
(xtbs)
"""
xtb_file = os.path.normpath(os.path.join(
self.xtb_dir, '%s_%s.xtb' % (self.name, lang)))
fnode = io.FileNode()
fnode.StartParsing(u'file', translations_node)
fnode.HandleAttribute('path', xtb_file)
fnode.HandleAttribute('lang', lang)
fnode.EndParsing()
translations_node.AddChild(fnode)
return fnode
def __CreateCppHeaderOutputNode(self, outputs_node, header_dir):
"""Creates the <output> element corresponding to the generated c header."""
header_file_name = os.path.join(header_dir, self.name + '.h')
header_node = io.OutputNode()
header_node.StartParsing(u'output', outputs_node)
header_node.HandleAttribute('filename', header_file_name)
header_node.HandleAttribute('type', 'rc_header')
emit_node = io.EmitNode()
emit_node.StartParsing(u'emit', header_node)
emit_node.HandleAttribute('emit_type', 'prepend')
emit_node.EndParsing()
header_node.AddChild(emit_node)
header_node.EndParsing()
outputs_node.AddChild(header_node)
return header_node
def __CreateRcOutputNode(self, outputs_node, lang, rc_dir):
"""Creates the <output> element corresponding to various rc file output."""
rc_file_name = self.name + '_' + lang + ".rc"
rc_path = os.path.join(rc_dir, rc_file_name)
node = io.OutputNode()
node.StartParsing(u'output', outputs_node)
node.HandleAttribute('filename', rc_path)
node.HandleAttribute('lang', lang)
node.HandleAttribute('type', 'rc_all')
node.EndParsing()
outputs_node.AddChild(node)
return node
def __CreateAndroidXmlOutputNode(self, outputs_node, locale, xml_res_dir):
"""Creates the <output> element corresponding to various rc file output."""
# Need to check to see if the locale has a region, e.g. the GB in en-GB.
# When a locale has a region Android expects the region to be prefixed
# with an 'r'. For example for en-GB Android expects a values-en-rGB
# directory. Also, Android expects nb, tl, in, iw, ji as the language
# codes for Norwegian, Tagalog/Filipino, Indonesian, Hebrew, and Yiddish:
# http://developer.android.com/reference/java/util/Locale.html
if locale == 'es-419':
android_locale = 'es-rUS'
else:
android_lang, dash, region = locale.partition('-')
lang_map = {'no': 'nb', 'fil': 'tl', 'id': 'in', 'he': 'iw', 'yi': 'ji'}
android_lang = lang_map.get(android_lang, android_lang)
android_locale = android_lang + ('-r' + region if region else '')
values = 'values-' + android_locale if android_locale != 'en' else 'values'
xml_path = os.path.normpath(os.path.join(
xml_res_dir, values, 'strings.xml'))
node = io.OutputNode()
node.StartParsing(u'output', outputs_node)
node.HandleAttribute('filename', xml_path)
node.HandleAttribute('lang', locale)
node.HandleAttribute('type', 'android')
node.EndParsing()
outputs_node.AddChild(node)
return node
def IsTranslatable(self, android_string):
"""Determines if a <string> element is a candidate for translation.
A <string> element is by default translatable unless otherwise marked.
"""
if android_string.hasAttribute('translatable'):
value = android_string.getAttribute('translatable').lower()
if value not in ('true', 'false'):
print 'Warning: translatable attribute has invalid value: %s' % value
return value == 'true'
else:
return True
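# Illustrative invocation (editor's sketch; paths and the language list are
# placeholders):
#
#   grit android2grd --name chrome_android_strings \
#       --languages en-GB,ru,zh-CN \
#       --grd-dir . --xtb-dir translations \
#       path/to/res/values/strings.xml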
| bsd-2-clause |
miyouzi/ch-docker-ssr | shadowsocks/manager.py | 19 | 9889 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import errno
import traceback
import socket
import logging
import json
import collections
from shadowsocks import common, eventloop, tcprelay, udprelay, asyncdns, shell
BUF_SIZE = 1506
STAT_SEND_LIMIT = 50
class Manager(object):
def __init__(self, config):
self._config = config
self._relays = {} # (tcprelay, udprelay)
self._loop = eventloop.EventLoop()
self._dns_resolver = asyncdns.DNSResolver()
self._dns_resolver.add_to_loop(self._loop)
self._statistics = collections.defaultdict(int)
self._control_client_addr = None
try:
manager_address = common.to_str(config['manager_address'])
if ':' in manager_address:
addr = manager_address.rsplit(':', 1)
addr = addr[0], int(addr[1])
addrs = socket.getaddrinfo(addr[0], addr[1])
if addrs:
family = addrs[0][0]
else:
logging.error('invalid address: %s', manager_address)
exit(1)
else:
addr = manager_address
family = socket.AF_UNIX
self._control_socket = socket.socket(family,
socket.SOCK_DGRAM)
self._control_socket.bind(addr)
self._control_socket.setblocking(False)
except (OSError, IOError) as e:
logging.error(e)
logging.error('can not bind to manager address')
exit(1)
self._loop.add(self._control_socket,
eventloop.POLL_IN, self)
self._loop.add_periodic(self.handle_periodic)
port_password = config['port_password']
del config['port_password']
for port, password in port_password.items():
a_config = config.copy()
a_config['server_port'] = int(port)
a_config['password'] = password
self.add_port(a_config)
def add_port(self, config):
port = int(config['server_port'])
servers = self._relays.get(port, None)
if servers:
logging.error("server already exists at %s:%d" % (config['server'],
port))
return
logging.info("adding server at %s:%d" % (config['server'], port))
t = tcprelay.TCPRelay(config, self._dns_resolver, False,
stat_callback=self.stat_callback)
u = udprelay.UDPRelay(config, self._dns_resolver, False,
stat_callback=self.stat_callback)
t.add_to_loop(self._loop)
u.add_to_loop(self._loop)
self._relays[port] = (t, u)
def remove_port(self, config):
port = int(config['server_port'])
servers = self._relays.get(port, None)
if servers:
logging.info("removing server at %s:%d" % (config['server'], port))
t, u = servers
t.close(next_tick=False)
u.close(next_tick=False)
del self._relays[port]
else:
logging.error("server not exist at %s:%d" % (config['server'],
port))
def handle_event(self, sock, fd, event):
if sock == self._control_socket and event == eventloop.POLL_IN:
data, self._control_client_addr = sock.recvfrom(BUF_SIZE)
parsed = self._parse_command(data)
if parsed:
command, config = parsed
a_config = self._config.copy()
if config:
# let the command override the configuration file
a_config.update(config)
if 'server_port' not in a_config:
logging.error('can not find server_port in config')
else:
if command == 'add':
self.add_port(a_config)
self._send_control_data(b'ok')
elif command == 'remove':
self.remove_port(a_config)
self._send_control_data(b'ok')
elif command == 'ping':
self._send_control_data(b'pong')
else:
logging.error('unknown command %s', command)
def _parse_command(self, data):
# commands:
# add: {"server_port": 8000, "password": "foobar"}
# remove: {"server_port": 8000"}
data = common.to_str(data)
parts = data.split(':', 1)
if len(parts) < 2:
return data, None
command, config_json = parts
try:
config = shell.parse_json_in_str(config_json)
return command, config
except Exception as e:
logging.error(e)
return None
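    # Illustrative manager exchange (editor's sketch; the address and port are
    # the same placeholders used in test() below). Commands arrive as UDP
    # datagrams on manager_address and are answered on the same socket:
    #
    # >>> cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # >>> cli.connect(('127.0.0.1', 6001))
    # >>> cli.send(b'add: {"server_port": 8001, "password": "secret"}')
    # >>> cli.recv(1506)
    # b'ok'
    # >>> cli.send(b'ping')
    # >>> cli.recv(1506)
    # b'pong'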
def stat_callback(self, port, data_len):
self._statistics[port] += data_len
def handle_periodic(self):
r = {}
i = 0
def send_data(data_dict):
if data_dict:
# use compact JSON format (without space)
data = common.to_bytes(json.dumps(data_dict,
separators=(',', ':')))
self._send_control_data(b'stat: ' + data)
for k, v in self._statistics.items():
r[k] = v
i += 1
# split the data into segments that fit in UDP packets
if i >= STAT_SEND_LIMIT:
send_data(r)
r.clear()
i = 0
if len(r) > 0 :
send_data(r)
self._statistics.clear()
def _send_control_data(self, data):
if self._control_client_addr:
try:
self._control_socket.sendto(data, self._control_client_addr)
except (socket.error, OSError, IOError) as e:
error_no = eventloop.errno_from_exception(e)
if error_no in (errno.EAGAIN, errno.EINPROGRESS,
errno.EWOULDBLOCK):
return
else:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
def run(self):
self._loop.run()
def run(config):
Manager(config).run()
def test():
import time
import threading
import struct
from shadowsocks import encrypt
logging.basicConfig(level=5,
format='%(asctime)s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
enc = []
eventloop.TIMEOUT_PRECISION = 1
def run_server():
config = shell.get_config(True)
config = config.copy()
a_config = {
'server': '127.0.0.1',
'local_port': 1081,
'port_password': {
'8381': 'foobar1',
'8382': 'foobar2'
},
'method': 'aes-256-cfb',
'manager_address': '127.0.0.1:6001',
'timeout': 60,
'fast_open': False,
'verbose': 2
}
config.update(a_config)
manager = Manager(config)
enc.append(manager)
manager.run()
t = threading.Thread(target=run_server)
t.start()
time.sleep(1)
manager = enc[0]
cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
cli.connect(('127.0.0.1', 6001))
# test add and remove
time.sleep(1)
cli.send(b'add: {"server_port":7001, "password":"asdfadsfasdf"}')
time.sleep(1)
assert 7001 in manager._relays
data, addr = cli.recvfrom(1506)
assert b'ok' in data
cli.send(b'remove: {"server_port":8381}')
time.sleep(1)
assert 8381 not in manager._relays
data, addr = cli.recvfrom(1506)
assert b'ok' in data
logging.info('add and remove test passed')
# test statistics for TCP
header = common.pack_addr(b'google.com') + struct.pack('>H', 80)
data = encrypt.encrypt_all(b'asdfadsfasdf', 'aes-256-cfb', 1,
header + b'GET /\r\n\r\n')
tcp_cli = socket.socket()
tcp_cli.connect(('127.0.0.1', 7001))
tcp_cli.send(data)
tcp_cli.recv(4096)
tcp_cli.close()
data, addr = cli.recvfrom(1506)
data = common.to_str(data)
assert data.startswith('stat: ')
data = data.split('stat:')[1]
stats = shell.parse_json_in_str(data)
assert '7001' in stats
logging.info('TCP statistics test passed')
# test statistics for UDP
header = common.pack_addr(b'127.0.0.1') + struct.pack('>H', 80)
data = encrypt.encrypt_all(b'foobar2', 'aes-256-cfb', 1,
header + b'test')
udp_cli = socket.socket(type=socket.SOCK_DGRAM)
udp_cli.sendto(data, ('127.0.0.1', 8382))
    udp_cli.close()
data, addr = cli.recvfrom(1506)
data = common.to_str(data)
assert data.startswith('stat: ')
data = data.split('stat:')[1]
stats = json.loads(data)
assert '8382' in stats
logging.info('UDP statistics test passed')
manager._loop.stop()
t.join()
if __name__ == '__main__':
test()
| apache-2.0 |
HailStorm32/Q.bo_stacks | qbo_object_recognition/src/qbo_object_recognition/srv/_RecognizeObject.py | 1 | 7433 | """autogenerated by genpy from qbo_object_recognition/RecognizeObjectRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class RecognizeObjectRequest(genpy.Message):
_md5sum = "d41d8cd98f00b204e9800998ecf8427e"
_type = "qbo_object_recognition/RecognizeObjectRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
"""
__slots__ = []
_slot_types = []
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(RecognizeObjectRequest, self).__init__(*args, **kwds)
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
pass
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
"""autogenerated by genpy from qbo_object_recognition/RecognizeObjectResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class RecognizeObjectResponse(genpy.Message):
_md5sum = "20fe0fa539e86f0dcf82db65a3df666b"
_type = "qbo_object_recognition/RecognizeObjectResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """string object_name
bool recognized
"""
__slots__ = ['object_name','recognized']
_slot_types = ['string','bool']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
object_name,recognized
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(RecognizeObjectResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.object_name is None:
self.object_name = ''
if self.recognized is None:
self.recognized = False
else:
self.object_name = ''
self.recognized = False
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.object_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_B.pack(self.recognized))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.object_name = str[start:end].decode('utf-8')
else:
self.object_name = str[start:end]
start = end
end += 1
(self.recognized,) = _struct_B.unpack(str[start:end])
self.recognized = bool(self.recognized)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.object_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_B.pack(self.recognized))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.object_name = str[start:end].decode('utf-8')
else:
self.object_name = str[start:end]
start = end
end += 1
(self.recognized,) = _struct_B.unpack(str[start:end])
self.recognized = bool(self.recognized)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_B = struct.Struct("<B")
class RecognizeObject(object):
_type = 'qbo_object_recognition/RecognizeObject'
_md5sum = '20fe0fa539e86f0dcf82db65a3df666b'
_request_class = RecognizeObjectRequest
_response_class = RecognizeObjectResponse
| lgpl-2.1 |
madan96/sympy | sympy/series/order.py | 16 | 16906 | from __future__ import print_function, division
from sympy.core import S, sympify, Expr, Rational, Symbol, Dummy
from sympy.core import Add, Mul, expand_power_base, expand_log
from sympy.core.cache import cacheit
from sympy.core.compatibility import default_sort_key, is_sequence
from sympy.core.containers import Tuple
from sympy.utilities.iterables import uniq
from sympy.sets.sets import Complement
class Order(Expr):
r""" Represents the limiting behavior of some function
The order of a function characterizes the function based on the limiting
behavior of the function as it goes to some limit. Only taking the limit
point to be a number is currently supported. This is expressed in
big O notation [1]_.
The formal definition for the order of a function `g(x)` about a point `a`
is such that `g(x) = O(f(x))` as `x \rightarrow a` if and only if for any
`\delta > 0` there exists a `M > 0` such that `|g(x)| \leq M|f(x)|` for
`|x-a| < \delta`. This is equivalent to `\lim_{x \rightarrow a}
\sup |g(x)/f(x)| < \infty`.
Let's illustrate it on the following example by taking the expansion of
`\sin(x)` about 0:
.. math ::
\sin(x) = x - x^3/3! + O(x^5)
where in this case `O(x^5) = x^5/5! - x^7/7! + \cdots`. By the definition
of `O`, for any `\delta > 0` there is an `M` such that:
.. math ::
|x^5/5! - x^7/7! + ....| <= M|x^5| \text{ for } |x| < \delta
or by the alternate definition:
.. math ::
\lim_{x \rightarrow 0} | (x^5/5! - x^7/7! + ....) / x^5| < \infty
which surely is true, because
.. math ::
\lim_{x \rightarrow 0} | (x^5/5! - x^7/7! + ....) / x^5| = 1/5!
    As it is usually used, the order of a function can be intuitively thought
    of as representing all terms of powers greater than the one specified. For
example, `O(x^3)` corresponds to any terms proportional to `x^3,
x^4,\ldots` and any higher power. For a polynomial, this leaves terms
proportional to `x^2`, `x` and constants.
Examples
========
>>> from sympy import O, oo, cos, pi
>>> from sympy.abc import x, y
>>> O(x + x**2)
O(x)
>>> O(x + x**2, (x, 0))
O(x)
>>> O(x + x**2, (x, oo))
O(x**2, (x, oo))
>>> O(1 + x*y)
O(1, x, y)
>>> O(1 + x*y, (x, 0), (y, 0))
O(1, x, y)
>>> O(1 + x*y, (x, oo), (y, oo))
O(x*y, (x, oo), (y, oo))
>>> O(1) in O(1, x)
True
>>> O(1, x) in O(1)
False
>>> O(x) in O(1, x)
True
>>> O(x**2) in O(x)
True
>>> O(x)*x
O(x**2)
>>> O(x) - O(x)
O(x)
>>> O(cos(x))
O(1)
>>> O(cos(x), (x, pi/2))
O(x - pi/2, (x, pi/2))
References
==========
.. [1] `Big O notation <http://en.wikipedia.org/wiki/Big_O_notation>`_
Notes
=====
In ``O(f(x), x)`` the expression ``f(x)`` is assumed to have a leading
term. ``O(f(x), x)`` is automatically transformed to
``O(f(x).as_leading_term(x),x)``.
``O(expr*f(x), x)`` is ``O(f(x), x)``
``O(expr, x)`` is ``O(1)``
``O(0, x)`` is 0.
Multivariate O is also supported:
``O(f(x, y), x, y)`` is transformed to
``O(f(x, y).as_leading_term(x,y).as_leading_term(y), x, y)``
In the multivariate case, it is assumed the limits w.r.t. the various
symbols commute.
If no symbols are passed then all symbols in the expression are used
and the limit point is assumed to be zero.
"""
is_Order = True
__slots__ = []
@cacheit
def __new__(cls, expr, *args, **kwargs):
expr = sympify(expr)
if not args:
if expr.is_Order:
variables = expr.variables
point = expr.point
else:
variables = list(expr.free_symbols)
point = [S.Zero]*len(variables)
else:
args = list(args if is_sequence(args) else [args])
variables, point = [], []
if is_sequence(args[0]):
for a in args:
v, p = list(map(sympify, a))
variables.append(v)
point.append(p)
else:
variables = list(map(sympify, args))
point = [S.Zero]*len(variables)
if not all(v.is_Symbol for v in variables):
raise TypeError('Variables are not symbols, got %s' % variables)
if len(list(uniq(variables))) != len(variables):
raise ValueError('Variables are supposed to be unique symbols, got %s' % variables)
if expr.is_Order:
expr_vp = dict(expr.args[1:])
new_vp = dict(expr_vp)
vp = dict(zip(variables, point))
for v, p in vp.items():
if v in new_vp.keys():
if p != new_vp[v]:
raise NotImplementedError(
"Mixing Order at different points is not supported.")
else:
new_vp[v] = p
if set(expr_vp.keys()) == set(new_vp.keys()):
return expr
else:
variables = list(new_vp.keys())
point = [new_vp[v] for v in variables]
if expr is S.NaN:
return S.NaN
if any(x in p.free_symbols for x in variables for p in point):
raise ValueError('Got %s as a point.' % point)
if variables:
if any(p != point[0] for p in point):
raise NotImplementedError
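            # Map a limit point at oo or at a finite nonzero value back to 0
            # via the substitution s (rs is its inverse), so the leading-term
            # extraction below only has to handle the x -> 0 case.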
if point[0] is S.Infinity:
s = {k: 1/Dummy() for k in variables}
rs = {1/v: 1/k for k, v in s.items()}
elif point[0] is not S.Zero:
                s = {k: Dummy() + point[0] for k in variables}
                rs = {v - point[0]: k - point[0] for k, v in s.items()}
else:
s = ()
rs = ()
expr = expr.subs(s)
if expr.is_Add:
from sympy import expand_multinomial
expr = expand_multinomial(expr)
if s:
args = tuple([r[0] for r in rs.items()])
else:
args = tuple(variables)
if len(variables) > 1:
# XXX: better way? We need this expand() to
# workaround e.g: expr = x*(x + y).
# (x*(x + y)).as_leading_term(x, y) currently returns
# x*y (wrong order term!). That's why we want to deal with
# expand()'ed expr (handled in "if expr.is_Add" branch below).
expr = expr.expand()
if expr.is_Add:
lst = expr.extract_leading_order(args)
expr = Add(*[f.expr for (e, f) in lst])
elif expr:
expr = expr.as_leading_term(*args)
expr = expr.as_independent(*args, as_Add=False)[1]
expr = expand_power_base(expr)
expr = expand_log(expr)
if len(args) == 1:
# The definition of O(f(x)) symbol explicitly stated that
# the argument of f(x) is irrelevant. That's why we can
# combine some power exponents (only "on top" of the
# expression tree for f(x)), e.g.:
# x**p * (-x)**q -> x**(p+q) for real p, q.
x = args[0]
margs = list(Mul.make_args(
expr.as_independent(x, as_Add=False)[1]))
for i, t in enumerate(margs):
if t.is_Pow:
b, q = t.args
if b in (x, -x) and q.is_real and not q.has(x):
margs[i] = x**q
elif b.is_Pow and not b.exp.has(x):
b, r = b.args
if b in (x, -x) and r.is_real:
margs[i] = x**(r*q)
elif b.is_Mul and b.args[0] is S.NegativeOne:
b = -b
if b.is_Pow and not b.exp.has(x):
b, r = b.args
if b in (x, -x) and r.is_real:
margs[i] = x**(r*q)
expr = Mul(*margs)
expr = expr.subs(rs)
if expr is S.Zero:
return expr
if expr.is_Order:
expr = expr.expr
if not expr.has(*variables):
expr = S.One
# create Order instance:
vp = dict(zip(variables, point))
variables.sort(key=default_sort_key)
point = [vp[v] for v in variables]
args = (expr,) + Tuple(*zip(variables, point))
obj = Expr.__new__(cls, *args)
return obj
def _eval_nseries(self, x, n, logx):
return self
@property
def expr(self):
return self.args[0]
@property
def variables(self):
if self.args[1:]:
return tuple(x[0] for x in self.args[1:])
else:
return ()
@property
def point(self):
if self.args[1:]:
return tuple(x[1] for x in self.args[1:])
else:
return ()
@property
def free_symbols(self):
return self.expr.free_symbols | set(self.variables)
def _eval_power(b, e):
if e.is_Number and e.is_nonnegative:
return b.func(b.expr ** e, *b.args[1:])
if e == O(1):
return b
return
def as_expr_variables(self, order_symbols):
if order_symbols is None:
order_symbols = self.args[1:]
else:
if not all(o[1] == order_symbols[0][1] for o in order_symbols) and \
not all(p == self.point[0] for p in self.point):
raise NotImplementedError('Order at points other than 0 '
                    'or oo not supported, got %s as a point.' % str(self.point))
if order_symbols and order_symbols[0][1] != self.point[0]:
raise NotImplementedError(
"Multiplying Order at different points is not supported.")
order_symbols = dict(order_symbols)
for s, p in dict(self.args[1:]).items():
if s not in order_symbols.keys():
order_symbols[s] = p
order_symbols = sorted(order_symbols.items(), key=lambda x: default_sort_key(x[0]))
return self.expr, tuple(order_symbols)
def removeO(self):
return S.Zero
def getO(self):
return self
@cacheit
def contains(self, expr):
"""
Return True if expr belongs to Order(self.expr, \*self.variables).
Return False if self belongs to expr.
Return None if the inclusion relation cannot be determined
(e.g. when self and expr have different symbols).
"""
from sympy import powsimp
if expr is S.Zero:
return True
if expr is S.NaN:
return False
if expr.is_Order:
if not all(p == expr.point[0] for p in expr.point) and \
not all(p == self.point[0] for p in self.point):
raise NotImplementedError('Order at points other than 0 '
                    'or oo not supported, got %s as a point.' % str(expr.point))
else:
# self and/or expr is O(1):
if any(not p for p in [expr.point, self.point]):
point = self.point + expr.point
if point:
point = point[0]
else:
point = S.Zero
else:
point = self.point[0]
if expr.expr == self.expr:
# O(1) + O(1), O(1) + O(1, x), etc.
return all([x in self.args[1:] for x in expr.args[1:]])
if expr.expr.is_Add:
return all([self.contains(x) for x in expr.expr.args])
if self.expr.is_Add:
return any([self.func(x, *self.args[1:]).contains(expr)
for x in self.expr.args])
if self.variables and expr.variables:
common_symbols = tuple(
[s for s in self.variables if s in expr.variables])
elif self.variables:
common_symbols = self.variables
else:
common_symbols = expr.variables
if not common_symbols:
return None
if (self.expr.is_Pow and self.expr.base.is_Symbol
and self.expr.exp.is_positive):
if expr.expr.is_Pow and self.expr.base == expr.expr.base:
return not (self.expr.exp-expr.expr.exp).is_positive
if expr.expr.is_Mul:
for arg in expr.expr.args:
if (arg.is_Pow and self.expr.base == arg.base
and (expr.expr/arg).is_number):
r = (self.expr.exp-arg.exp).is_positive
if not (r is None):
return not r
r = None
ratio = self.expr/expr.expr
ratio = powsimp(ratio, deep=True, combine='exp')
for s in common_symbols:
l = ratio.limit(s, point)
from sympy.series.limits import Limit
if not isinstance(l, Limit):
l = l != 0
else:
l = None
if r is None:
r = l
else:
if r != l:
return
return r
if (self.expr.is_Pow and self.expr.base.is_Symbol
and self.expr.exp.is_positive):
if expr.is_Pow and self.expr.base == expr.base:
return not (self.expr.exp-expr.exp).is_positive
if expr.is_Mul:
for arg in expr.args:
if (arg.is_Pow and self.expr.base == arg.base
and (expr/arg).is_number):
r = (self.expr.exp-arg.exp).is_positive
if not (r is None):
return not r
obj = self.func(expr, *self.args[1:])
return self.contains(obj)
def __contains__(self, other):
result = self.contains(other)
if result is None:
raise TypeError('contains did not evaluate to a bool')
return result
def _eval_subs(self, old, new):
if old in self.variables:
newexpr = self.expr.subs(old, new)
i = self.variables.index(old)
newvars = list(self.variables)
newpt = list(self.point)
if new.is_Symbol:
newvars[i] = new
else:
syms = new.free_symbols
if len(syms) == 1 or old in syms:
if old in syms:
var = self.variables[i]
else:
var = syms.pop()
# First, try to substitute self.point in the "new"
# expr to see if this is a fixed point.
# E.g. O(y).subs(y, sin(x))
point = new.subs(var, self.point[i])
if point != self.point[i]:
from sympy.solvers.solveset import solveset
d = Dummy()
sol = solveset(old - new.subs(var, d), d)
if isinstance(sol, Complement):
e1 = sol.args[0]
e2 = sol.args[1]
sol = set(e1) - set(e2)
res = [dict(zip((d, ), sol))]
point = d.subs(res[0]).limit(old, self.point[i])
newvars[i] = var
newpt[i] = point
elif old not in syms:
del newvars[i], newpt[i]
if not syms and new == self.point[i]:
newvars.extend(syms)
newpt.extend([S.Zero]*len(syms))
else:
return
return Order(newexpr, *zip(newvars, newpt))
def _eval_conjugate(self):
expr = self.expr._eval_conjugate()
if expr is not None:
return self.func(expr, *self.args[1:])
def _eval_derivative(self, x):
return self.func(self.expr.diff(x), *self.args[1:]) or self
def _eval_transpose(self):
expr = self.expr._eval_transpose()
if expr is not None:
return self.func(expr, *self.args[1:])
def _sage_(self):
#XXX: SAGE doesn't have Order yet. Let's return 0 instead.
return Rational(0)._sage_()
O = Order
| bsd-3-clause |
luiseduardohdbackup/odoo | addons/l10n_be_intrastat/l10n_be_intrastat.py | 258 | 7828 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Business Applications
# Copyright (C) 2014-2015 Odoo S.A. <http://www.odoo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_invoice(osv.osv):
_inherit = "account.invoice"
_columns = {
'incoterm_id': fields.many2one(
'stock.incoterms', 'Incoterm',
help="International Commercial Terms are a series of predefined commercial terms "
"used in international transactions."),
'intrastat_transaction_id': fields.many2one(
'l10n_be_intrastat.transaction', 'Intrastat Transaction Type',
help="Intrastat nature of transaction"),
'transport_mode_id': fields.many2one(
'l10n_be_intrastat.transport_mode', 'Intrastat Transport Mode'),
'intrastat_country_id': fields.many2one(
'res.country', 'Intrastat Country',
help='Intrastat country, delivery for sales, origin for purchases',
domain=[('intrastat','=',True)]),
}
class intrastat_region(osv.osv):
_name = 'l10n_be_intrastat.region'
_columns = {
'code': fields.char('Code', required=True),
'country_id': fields.many2one('res.country', 'Country'),
'name': fields.char('Name', translate=True),
'description': fields.char('Description'),
}
_sql_constraints = [
('l10n_be_intrastat_regioncodeunique', 'UNIQUE (code)', 'Code must be unique.'),
]
class intrastat_transaction(osv.osv):
_name = 'l10n_be_intrastat.transaction'
_rec_name = 'code'
_columns = {
'code': fields.char('Code', required=True, readonly=True),
'description': fields.text('Description', readonly=True),
}
_sql_constraints = [
('l10n_be_intrastat_trcodeunique', 'UNIQUE (code)', 'Code must be unique.'),
]
class intrastat_transport_mode(osv.osv):
_name = 'l10n_be_intrastat.transport_mode'
_columns = {
'code': fields.char('Code', required=True, readonly=True),
'name': fields.char('Description', readonly=True),
}
_sql_constraints = [
('l10n_be_intrastat_trmodecodeunique', 'UNIQUE (code)', 'Code must be unique.'),
]
class product_category(osv.osv):
_name = "product.category"
_inherit = "product.category"
_columns = {
'intrastat_id': fields.many2one('report.intrastat.code', 'Intrastat Code'),
}
def get_intrastat_recursively(self, cr, uid, category, context=None):
""" Recursively search in categories to find an intrastat code id
:param category : Browse record of a category
"""
if category.intrastat_id:
res = category.intrastat_id.id
elif category.parent_id:
res = self.get_intrastat_recursively(cr, uid, category.parent_id, context=context)
else:
res = None
return res
class product_product(osv.osv):
_name = "product.product"
_inherit = "product.product"
def get_intrastat_recursively(self, cr, uid, id, context=None):
""" Recursively search in categories to find an intrastat code id
"""
product = self.browse(cr, uid, id, context=context)
if product.intrastat_id:
res = product.intrastat_id.id
elif product.categ_id:
res = self.pool['product.category'].get_intrastat_recursively(
cr, uid, product.categ_id, context=context)
else:
res = None
return res
class purchase_order(osv.osv):
_inherit = "purchase.order"
def _prepare_invoice(self, cr, uid, order, line_ids, context=None):
"""
copy incoterm from purchase order to invoice
"""
invoice = super(purchase_order, self)._prepare_invoice(
cr, uid, order, line_ids, context=context)
if order.incoterm_id:
invoice['incoterm_id'] = order.incoterm_id.id
        # Try to determine the products' origin
        if order.partner_id.country_id:
            # It comes from the supplier
            invoice['intrastat_country_id'] = order.partner_id.country_id.id
return invoice
class report_intrastat_code(osv.osv):
_inherit = "report.intrastat.code"
_columns = {
'description': fields.text('Description', translate=True),
}
class res_company(osv.osv):
_inherit = "res.company"
_columns = {
'region_id': fields.many2one('l10n_be_intrastat.region', 'Intrastat region'),
'transport_mode_id': fields.many2one('l10n_be_intrastat.transport_mode',
'Default transport mode'),
'incoterm_id': fields.many2one('stock.incoterms', 'Default incoterm for Intrastat',
help="International Commercial Terms are a series of "
"predefined commercial terms used in international "
"transactions."),
}
class sale_order(osv.osv):
_inherit = "sale.order"
def _prepare_invoice(self, cr, uid, saleorder, lines, context=None):
"""
copy incoterm from sale order to invoice
"""
invoice = super(sale_order, self)._prepare_invoice(
cr, uid, saleorder, lines, context=context)
if saleorder.incoterm:
invoice['incoterm_id'] = saleorder.incoterm.id
# Guess products destination
if saleorder.partner_shipping_id.country_id:
invoice['intrastat_country_id'] = saleorder.partner_shipping_id.country_id.id
elif saleorder.partner_id.country_id:
invoice['intrastat_country_id'] = saleorder.partner_id.country_id.id
elif saleorder.partner_invoice_id.country_id:
invoice['intrastat_country_id'] = saleorder.partner_invoice_id.country_id.id
return invoice
class stock_warehouse(osv.osv):
_inherit = "stock.warehouse"
_columns = {
'region_id': fields.many2one('l10n_be_intrastat.region', 'Intrastat region'),
}
def get_regionid_from_locationid(self, cr, uid, location_id, context=None):
location_model = self.pool['stock.location']
location = location_model.browse(cr, uid, location_id, context=context)
location_ids = location_model.search(cr, uid,
[('parent_left', '<=', location.parent_left),
('parent_right', '>=', location.parent_right)],
context=context)
warehouse_ids = self.search(cr, uid,
[('lot_stock_id', 'in', location_ids),
('region_id', '!=', False)],
context=context)
warehouses = self.browse(cr, uid, warehouse_ids, context=context)
if warehouses and warehouses[0]:
return warehouses[0].region_id.id
return None
| agpl-3.0 |
tumbl3w33d/ansible | lib/ansible/modules/network/fortios/fortios_firewall_profile_group.py | 13 | 14468 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_profile_group
short_description: Configure profile groups in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify firewall feature and profile_group category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
firewall_profile_group:
description:
- Configure profile groups.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
application_list:
description:
- Name of an existing Application list. Source application.list.name.
type: str
av_profile:
description:
- Name of an existing Antivirus profile. Source antivirus.profile.name.
type: str
dlp_sensor:
description:
- Name of an existing DLP sensor. Source dlp.sensor.name.
type: str
dnsfilter_profile:
description:
- Name of an existing DNS filter profile. Source dnsfilter.profile.name.
type: str
icap_profile:
description:
- Name of an existing ICAP profile. Source icap.profile.name.
type: str
ips_sensor:
description:
- Name of an existing IPS sensor. Source ips.sensor.name.
type: str
name:
description:
- Profile group name.
required: true
type: str
profile_protocol_options:
description:
- Name of an existing Protocol options profile. Source firewall.profile-protocol-options.name.
type: str
spamfilter_profile:
description:
- Name of an existing Spam filter profile. Source spamfilter.profile.name.
type: str
ssh_filter_profile:
description:
- Name of an existing SSH filter profile. Source ssh-filter.profile.name.
type: str
ssl_ssh_profile:
description:
- Name of an existing SSL SSH profile. Source firewall.ssl-ssh-profile.name.
type: str
voip_profile:
description:
- Name of an existing VoIP profile. Source voip.profile.name.
type: str
waf_profile:
description:
- Name of an existing Web application firewall profile. Source waf.profile.name.
type: str
webfilter_profile:
description:
- Name of an existing Web filter profile. Source webfilter.profile.name.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure profile groups.
fortios_firewall_profile_group:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
firewall_profile_group:
application_list: "<your_own_value> (source application.list.name)"
av_profile: "<your_own_value> (source antivirus.profile.name)"
dlp_sensor: "<your_own_value> (source dlp.sensor.name)"
dnsfilter_profile: "<your_own_value> (source dnsfilter.profile.name)"
icap_profile: "<your_own_value> (source icap.profile.name)"
ips_sensor: "<your_own_value> (source ips.sensor.name)"
name: "default_name_9"
profile_protocol_options: "<your_own_value> (source firewall.profile-protocol-options.name)"
spamfilter_profile: "<your_own_value> (source spamfilter.profile.name)"
ssh_filter_profile: "<your_own_value> (source ssh-filter.profile.name)"
ssl_ssh_profile: "<your_own_value> (source firewall.ssl-ssh-profile.name)"
voip_profile: "<your_own_value> (source voip.profile.name)"
waf_profile: "<your_own_value> (source waf.profile.name)"
webfilter_profile: "<your_own_value> (source webfilter.profile.name)"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_firewall_profile_group_data(json):
option_list = ['application_list', 'av_profile', 'dlp_sensor',
'dnsfilter_profile', 'icap_profile', 'ips_sensor',
'name', 'profile_protocol_options', 'spamfilter_profile',
'ssh_filter_profile', 'ssl_ssh_profile', 'voip_profile',
'waf_profile', 'webfilter_profile']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
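    # Converts underscore-style keys into the hyphenated names the FortiOS API
    # expects, e.g. {'av_profile': 'x'} -> {'av-profile': 'x'} (illustrative).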
    if isinstance(data, list):
        # Rebuild the list so converted elements are actually kept.
        data = [underscore_to_hyphen(elem) for elem in data]
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def firewall_profile_group(data, fos):
vdom = data['vdom']
if 'state' in data and data['state']:
state = data['state']
elif 'state' in data['firewall_profile_group'] and data['firewall_profile_group']:
state = data['firewall_profile_group']['state']
else:
state = True
firewall_profile_group_data = data['firewall_profile_group']
filtered_data = underscore_to_hyphen(filter_firewall_profile_group_data(firewall_profile_group_data))
if state == "present":
return fos.set('firewall',
'profile-group',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('firewall',
'profile-group',
mkey=filtered_data['name'],
vdom=vdom)
def is_successful_status(status):
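    # A plain "success" status passes; a DELETE answered with HTTP 404 is also
    # treated as success so that removing an absent object stays idempotent.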
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_firewall(data, fos):
if data['firewall_profile_group']:
resp = firewall_profile_group(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"firewall_profile_group": {
"required": False, "type": "dict", "default": None,
"options": {
"state": {"required": False, "type": "str",
"choices": ["present", "absent"]},
"application_list": {"required": False, "type": "str"},
"av_profile": {"required": False, "type": "str"},
"dlp_sensor": {"required": False, "type": "str"},
"dnsfilter_profile": {"required": False, "type": "str"},
"icap_profile": {"required": False, "type": "str"},
"ips_sensor": {"required": False, "type": "str"},
"name": {"required": True, "type": "str"},
"profile_protocol_options": {"required": False, "type": "str"},
"spamfilter_profile": {"required": False, "type": "str"},
"ssh_filter_profile": {"required": False, "type": "str"},
"ssl_ssh_profile": {"required": False, "type": "str"},
"voip_profile": {"required": False, "type": "str"},
"waf_profile": {"required": False, "type": "str"},
"webfilter_profile": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_firewall(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_firewall(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
obruns/gtest | test/gtest_xml_outfiles_test.py | 2526 | 5340 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "[email protected] (Keith Ray)"
import os
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuites>
"""
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuites>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
"""Unit test for Google Test's XML output functionality."""
def setUp(self):
# We want the trailing '/' that the last "" provides in os.path.join, for
# telling Google Test to create an output directory instead of a single file
# for xml output.
self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_OUTPUT_SUBDIR, "")
self.DeleteFilesAndDir()
def tearDown(self):
self.DeleteFilesAndDir()
def DeleteFilesAndDir(self):
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
except os.error:
pass
try:
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
except os.error:
pass
try:
os.rmdir(self.output_dir_)
except os.error:
pass
def testOutfile1(self):
self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)
def testOutfile2(self):
self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)
def _TestOutFile(self, test_name, expected_xml):
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
p = gtest_test_utils.Subprocess(command,
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
# TODO([email protected]): libtool causes the built test binary to be
# named lt-gtest_xml_outfiles_test_ instead of
  # gtest_xml_outfiles_test_.  To account for this possibility, we
# allow both names in the following code. We should remove this
# hack when Chandler Carruth's libtool replacement tool is ready.
output_file_name1 = test_name + ".xml"
output_file1 = os.path.join(self.output_dir_, output_file_name1)
output_file_name2 = 'lt-' + output_file_name1
output_file2 = os.path.join(self.output_dir_, output_file_name2)
self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
output_file1)
expected = minidom.parseString(expected_xml)
if os.path.isfile(output_file1):
actual = minidom.parse(output_file1)
else:
actual = minidom.parse(output_file2)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
if __name__ == "__main__":
os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
gtest_test_utils.Main()
| bsd-3-clause |
valentin-krasontovitsch/ansible | test/units/modules/network/f5/test_bigip_profile_http_compression.py | 21 | 3949 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_profile_http_compression import ApiParameters
from library.modules.bigip_profile_http_compression import ModuleParameters
from library.modules.bigip_profile_http_compression import ModuleManager
from library.modules.bigip_profile_http_compression import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_profile_http_compression import ApiParameters
from ansible.modules.network.f5.bigip_profile_http_compression import ModuleParameters
from ansible.modules.network.f5.bigip_profile_http_compression import ModuleManager
from ansible.modules.network.f5.bigip_profile_http_compression import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
name='foo',
parent='bar',
description='description1',
buffer_size=1024,
gzip_memory_level=64,
gzip_level=2,
gzip_window_size=128
)
p = ModuleParameters(params=args)
assert p.name == 'foo'
assert p.parent == '/Common/bar'
assert p.description == 'description1'
assert p.buffer_size == 1024
assert p.gzip_memory_level == 64
assert p.gzip_level == 2
assert p.gzip_window_size == 128
def test_api_parameters(self):
p = ApiParameters(params=load_fixture('load_ltm_profile_http_compression_1.json'))
assert p.description == 'my profile'
assert p.buffer_size == 4096
assert p.gzip_memory_level == 8
assert p.gzip_level == 1
assert p.gzip_window_size == 16
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create(self, *args):
# Configure the arguments that would be sent to the Ansible module
set_module_args(dict(
name='foo',
parent='bar',
description='description1',
buffer_size=1024,
gzip_memory_level=64,
gzip_level=2,
gzip_window_size=128,
password='password',
server='localhost',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.exists = Mock(return_value=False)
mm.create_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
| gpl-3.0 |
cbertinato/pandas | pandas/tests/frame/test_combine_concat.py | 1 | 34741 | from datetime import datetime
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Index, Series, Timestamp, date_range
import pandas.util.testing as tm
from pandas.util.testing import assert_frame_equal, assert_series_equal
class TestDataFrameConcatCommon:
def test_concat_multiple_frames_dtypes(self):
# GH 2759
A = DataFrame(data=np.ones((10, 2)), columns=[
'foo', 'bar'], dtype=np.float64)
B = DataFrame(data=np.ones((10, 2)), dtype=np.float32)
results = pd.concat((A, B), axis=1).get_dtype_counts()
expected = Series(dict(float64=2, float32=2))
assert_series_equal(results, expected)
@pytest.mark.parametrize('data', [
pd.date_range('2000', periods=4),
pd.date_range('2000', periods=4, tz="US/Central"),
pd.period_range('2000', periods=4),
pd.timedelta_range(0, periods=4),
])
def test_combine_datetlike_udf(self, data):
# https://github.com/pandas-dev/pandas/issues/23079
df = pd.DataFrame({"A": data})
other = df.copy()
df.iloc[1, 0] = None
def combiner(a, b):
return b
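        # combine() passes whole columns to the combiner; always returning `b`
        # makes the result mirror `other`, even where df was set to None above.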
result = df.combine(other, combiner)
tm.assert_frame_equal(result, other)
def test_concat_multiple_tzs(self):
# GH 12467
# combining datetime tz-aware and naive DataFrames
ts1 = Timestamp('2015-01-01', tz=None)
ts2 = Timestamp('2015-01-01', tz='UTC')
ts3 = Timestamp('2015-01-01', tz='EST')
df1 = DataFrame(dict(time=[ts1]))
df2 = DataFrame(dict(time=[ts2]))
df3 = DataFrame(dict(time=[ts3]))
results = pd.concat([df1, df2]).reset_index(drop=True)
expected = DataFrame(dict(time=[ts1, ts2]), dtype=object)
assert_frame_equal(results, expected)
results = pd.concat([df1, df3]).reset_index(drop=True)
expected = DataFrame(dict(time=[ts1, ts3]), dtype=object)
assert_frame_equal(results, expected)
results = pd.concat([df2, df3]).reset_index(drop=True)
expected = DataFrame(dict(time=[ts2, ts3]))
assert_frame_equal(results, expected)
@pytest.mark.parametrize(
't1',
[
'2015-01-01',
pytest.param(pd.NaT, marks=pytest.mark.xfail(
reason='GH23037 incorrect dtype when concatenating'))])
def test_concat_tz_NaT(self, t1):
# GH 22796
# Concating tz-aware multicolumn DataFrames
ts1 = Timestamp(t1, tz='UTC')
ts2 = Timestamp('2015-01-01', tz='UTC')
ts3 = Timestamp('2015-01-01', tz='UTC')
df1 = DataFrame([[ts1, ts2]])
df2 = DataFrame([[ts3]])
result = pd.concat([df1, df2])
expected = DataFrame([[ts1, ts2], [ts3, pd.NaT]], index=[0, 0])
assert_frame_equal(result, expected)
def test_concat_tz_not_aligned(self):
# GH 22796
ts = pd.to_datetime([1, 2]).tz_localize("UTC")
a = pd.DataFrame({"A": ts})
b = pd.DataFrame({"A": ts, "B": ts})
result = pd.concat([a, b], sort=True, ignore_index=True)
expected = pd.DataFrame({"A": list(ts) + list(ts),
"B": [pd.NaT, pd.NaT] + list(ts)})
assert_frame_equal(result, expected)
def test_concat_tuple_keys(self):
# GH 14438
df1 = pd.DataFrame(np.ones((2, 2)), columns=list('AB'))
df2 = pd.DataFrame(np.ones((3, 2)) * 2, columns=list('AB'))
results = pd.concat((df1, df2), keys=[('bee', 'bah'), ('bee', 'boo')])
expected = pd.DataFrame(
{'A': {('bee', 'bah', 0): 1.0,
('bee', 'bah', 1): 1.0,
('bee', 'boo', 0): 2.0,
('bee', 'boo', 1): 2.0,
('bee', 'boo', 2): 2.0},
'B': {('bee', 'bah', 0): 1.0,
('bee', 'bah', 1): 1.0,
('bee', 'boo', 0): 2.0,
('bee', 'boo', 1): 2.0,
('bee', 'boo', 2): 2.0}})
assert_frame_equal(results, expected)
def test_append_series_dict(self):
df = DataFrame(np.random.randn(5, 4),
columns=['foo', 'bar', 'baz', 'qux'])
series = df.loc[4]
msg = 'Indexes have overlapping values'
with pytest.raises(ValueError, match=msg):
df.append(series, verify_integrity=True)
series.name = None
msg = 'Can only append a Series if ignore_index=True'
with pytest.raises(TypeError, match=msg):
df.append(series, verify_integrity=True)
result = df.append(series[::-1], ignore_index=True)
expected = df.append(DataFrame({0: series[::-1]}, index=df.columns).T,
ignore_index=True)
assert_frame_equal(result, expected)
# dict
result = df.append(series.to_dict(), ignore_index=True)
assert_frame_equal(result, expected)
result = df.append(series[::-1][:3], ignore_index=True)
expected = df.append(DataFrame({0: series[::-1][:3]}).T,
ignore_index=True, sort=True)
assert_frame_equal(result, expected.loc[:, result.columns])
# can append when name set
row = df.loc[4]
row.name = 5
result = df.append(row)
expected = df.append(df[-1:], ignore_index=True)
assert_frame_equal(result, expected)
def test_append_list_of_series_dicts(self):
df = DataFrame(np.random.randn(5, 4),
columns=['foo', 'bar', 'baz', 'qux'])
dicts = [x.to_dict() for idx, x in df.iterrows()]
result = df.append(dicts, ignore_index=True)
expected = df.append(df, ignore_index=True)
assert_frame_equal(result, expected)
# different columns
dicts = [{'foo': 1, 'bar': 2, 'baz': 3, 'peekaboo': 4},
{'foo': 5, 'bar': 6, 'baz': 7, 'peekaboo': 8}]
result = df.append(dicts, ignore_index=True, sort=True)
expected = df.append(DataFrame(dicts), ignore_index=True, sort=True)
assert_frame_equal(result, expected)
def test_append_missing_cols(self):
# GH22252
# exercise the conditional branch in append method where the data
# to be appended is a list and does not contain all columns that are in
# the target DataFrame
df = DataFrame(np.random.randn(5, 4),
columns=['foo', 'bar', 'baz', 'qux'])
dicts = [{'foo': 9}, {'bar': 10}]
with tm.assert_produces_warning(None):
result = df.append(dicts, ignore_index=True, sort=True)
expected = df.append(DataFrame(dicts), ignore_index=True, sort=True)
assert_frame_equal(result, expected)
def test_append_empty_dataframe(self):
# Empty df append empty df
df1 = DataFrame()
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
# Non-empty df append empty df
df1 = DataFrame(np.random.randn(5, 2))
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
# Empty df with columns append empty df
df1 = DataFrame(columns=['bar', 'foo'])
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
# Non-Empty df with columns append empty df
df1 = DataFrame(np.random.randn(5, 2), columns=['bar', 'foo'])
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
def test_append_dtypes(self):
# GH 5754
# row appends of different dtypes (so need to do by-item)
# can sometimes infer the correct type
df1 = DataFrame({'bar': Timestamp('20130101')}, index=range(5))
df2 = DataFrame()
result = df1.append(df2)
expected = df1.copy()
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=range(1))
df2 = DataFrame({'bar': 'foo'}, index=range(1, 2))
result = df1.append(df2)
expected = DataFrame({'bar': [Timestamp('20130101'), 'foo']})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=range(1))
df2 = DataFrame({'bar': np.nan}, index=range(1, 2))
result = df1.append(df2)
expected = DataFrame(
{'bar': Series([Timestamp('20130101'), np.nan], dtype='M8[ns]')})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=range(1))
df2 = DataFrame({'bar': np.nan}, index=range(1, 2), dtype=object)
result = df1.append(df2)
expected = DataFrame(
{'bar': Series([Timestamp('20130101'), np.nan], dtype='M8[ns]')})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': np.nan}, index=range(1))
df2 = DataFrame({'bar': Timestamp('20130101')}, index=range(1, 2))
result = df1.append(df2)
expected = DataFrame(
{'bar': Series([np.nan, Timestamp('20130101')], dtype='M8[ns]')})
assert_frame_equal(result, expected)
df1 = DataFrame({'bar': Timestamp('20130101')}, index=range(1))
df2 = DataFrame({'bar': 1}, index=range(1, 2), dtype=object)
result = df1.append(df2)
expected = DataFrame({'bar': Series([Timestamp('20130101'), 1])})
assert_frame_equal(result, expected)
def test_update(self):
df = DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[3.6, 2., np.nan],
[np.nan, np.nan, 7]], index=[1, 3])
df.update(other)
expected = DataFrame([[1.5, np.nan, 3],
[3.6, 2, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 7.]])
assert_frame_equal(df, expected)
def test_update_dtypes(self):
# gh 3016
df = DataFrame([[1., 2., False, True], [4., 5., True, False]],
columns=['A', 'B', 'bool1', 'bool2'])
other = DataFrame([[45, 45]], index=[0], columns=['A', 'B'])
df.update(other)
expected = DataFrame([[45., 45., False, True], [4., 5., True, False]],
columns=['A', 'B', 'bool1', 'bool2'])
assert_frame_equal(df, expected)
def test_update_nooverwrite(self):
df = DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[3.6, 2., np.nan],
[np.nan, np.nan, 7]], index=[1, 3])
df.update(other, overwrite=False)
expected = DataFrame([[1.5, np.nan, 3],
[1.5, 2, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 3.]])
assert_frame_equal(df, expected)
def test_update_filtered(self):
df = DataFrame([[1.5, np.nan, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[3.6, 2., np.nan],
[np.nan, np.nan, 7]], index=[1, 3])
df.update(other, filter_func=lambda x: x > 2)
expected = DataFrame([[1.5, np.nan, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 3],
[1.5, np.nan, 7.]])
assert_frame_equal(df, expected)
@pytest.mark.parametrize('bad_kwarg, exception, msg', [
# errors must be 'ignore' or 'raise'
({'errors': 'something'}, ValueError, 'The parameter errors must.*'),
({'join': 'inner'}, NotImplementedError, 'Only left join is supported')
])
def test_update_raise_bad_parameter(self, bad_kwarg, exception, msg):
df = DataFrame([[1.5, 1, 3.]])
with pytest.raises(exception, match=msg):
df.update(df, **bad_kwarg)
def test_update_raise_on_overlap(self):
df = DataFrame([[1.5, 1, 3.],
[1.5, np.nan, 3.],
[1.5, np.nan, 3],
[1.5, np.nan, 3]])
other = DataFrame([[2., np.nan],
[np.nan, 7]], index=[1, 3], columns=[1, 2])
with pytest.raises(ValueError, match="Data overlaps"):
df.update(other, errors='raise')
@pytest.mark.parametrize('raise_conflict', [True, False])
def test_update_deprecation(self, raise_conflict):
df = DataFrame([[1.5, 1, 3.]])
other = DataFrame()
with tm.assert_produces_warning(FutureWarning):
df.update(other, raise_conflict=raise_conflict)
def test_update_from_non_df(self):
d = {'a': Series([1, 2, 3, 4]), 'b': Series([5, 6, 7, 8])}
df = DataFrame(d)
d['a'] = Series([5, 6, 7, 8])
df.update(d)
expected = DataFrame(d)
assert_frame_equal(df, expected)
d = {'a': [1, 2, 3, 4], 'b': [5, 6, 7, 8]}
df = DataFrame(d)
d['a'] = [5, 6, 7, 8]
df.update(d)
expected = DataFrame(d)
assert_frame_equal(df, expected)
def test_update_datetime_tz(self):
# GH 25807
result = DataFrame([pd.Timestamp('2019', tz='UTC')])
result.update(result)
expected = DataFrame([pd.Timestamp('2019', tz='UTC')])
assert_frame_equal(result, expected)
def test_join_str_datetime(self):
str_dates = ['20120209', '20120222']
dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)]
A = DataFrame(str_dates, index=range(2), columns=['aa'])
C = DataFrame([[1, 2], [3, 4]], index=str_dates, columns=dt_dates)
tst = A.join(C, on='aa')
assert len(tst.columns) == 3
def test_join_multiindex_leftright(self):
# GH 10741
df1 = (pd.DataFrame([['a', 'x', 0.471780], ['a', 'y', 0.774908],
['a', 'z', 0.563634], ['b', 'x', -0.353756],
['b', 'y', 0.368062], ['b', 'z', -1.721840],
['c', 'x', 1], ['c', 'y', 2], ['c', 'z', 3]],
columns=['first', 'second', 'value1'])
.set_index(['first', 'second']))
df2 = (pd.DataFrame([['a', 10], ['b', 20]],
columns=['first', 'value2'])
.set_index(['first']))
exp = pd.DataFrame([[0.471780, 10], [0.774908, 10], [0.563634, 10],
[-0.353756, 20], [0.368062, 20],
[-1.721840, 20],
[1.000000, np.nan], [2.000000, np.nan],
[3.000000, np.nan]],
index=df1.index, columns=['value1', 'value2'])
# these must be the same results (but columns are flipped)
assert_frame_equal(df1.join(df2, how='left'), exp)
assert_frame_equal(df2.join(df1, how='right'),
exp[['value2', 'value1']])
exp_idx = pd.MultiIndex.from_product([['a', 'b'], ['x', 'y', 'z']],
names=['first', 'second'])
exp = pd.DataFrame([[0.471780, 10], [0.774908, 10], [0.563634, 10],
[-0.353756, 20], [0.368062, 20], [-1.721840, 20]],
index=exp_idx, columns=['value1', 'value2'])
assert_frame_equal(df1.join(df2, how='right'), exp)
assert_frame_equal(df2.join(df1, how='left'),
exp[['value2', 'value1']])
def test_concat_named_keys(self):
# GH 14252
df = pd.DataFrame({'foo': [1, 2], 'bar': [0.1, 0.2]})
index = Index(['a', 'b'], name='baz')
concatted_named_from_keys = pd.concat([df, df], keys=index)
expected_named = pd.DataFrame(
{'foo': [1, 2, 1, 2], 'bar': [0.1, 0.2, 0.1, 0.2]},
index=pd.MultiIndex.from_product((['a', 'b'], [0, 1]),
names=['baz', None]))
assert_frame_equal(concatted_named_from_keys, expected_named)
index_no_name = Index(['a', 'b'], name=None)
concatted_named_from_names = pd.concat(
[df, df], keys=index_no_name, names=['baz'])
assert_frame_equal(concatted_named_from_names, expected_named)
concatted_unnamed = pd.concat([df, df], keys=index_no_name)
expected_unnamed = pd.DataFrame(
{'foo': [1, 2, 1, 2], 'bar': [0.1, 0.2, 0.1, 0.2]},
index=pd.MultiIndex.from_product((['a', 'b'], [0, 1]),
names=[None, None]))
assert_frame_equal(concatted_unnamed, expected_unnamed)
def test_concat_axis_parameter(self):
# GH 14369
df1 = pd.DataFrame({'A': [0.1, 0.2]}, index=range(2))
df2 = pd.DataFrame({'A': [0.3, 0.4]}, index=range(2))
# Index/row/0 DataFrame
expected_index = pd.DataFrame(
{'A': [0.1, 0.2, 0.3, 0.4]}, index=[0, 1, 0, 1])
concatted_index = pd.concat([df1, df2], axis='index')
assert_frame_equal(concatted_index, expected_index)
concatted_row = pd.concat([df1, df2], axis='rows')
assert_frame_equal(concatted_row, expected_index)
concatted_0 = pd.concat([df1, df2], axis=0)
assert_frame_equal(concatted_0, expected_index)
# Columns/1 DataFrame
expected_columns = pd.DataFrame(
[[0.1, 0.3], [0.2, 0.4]], index=[0, 1], columns=['A', 'A'])
concatted_columns = pd.concat([df1, df2], axis='columns')
assert_frame_equal(concatted_columns, expected_columns)
concatted_1 = pd.concat([df1, df2], axis=1)
assert_frame_equal(concatted_1, expected_columns)
series1 = pd.Series([0.1, 0.2])
series2 = pd.Series([0.3, 0.4])
# Index/row/0 Series
expected_index_series = pd.Series(
[0.1, 0.2, 0.3, 0.4], index=[0, 1, 0, 1])
concatted_index_series = pd.concat([series1, series2], axis='index')
assert_series_equal(concatted_index_series, expected_index_series)
concatted_row_series = pd.concat([series1, series2], axis='rows')
assert_series_equal(concatted_row_series, expected_index_series)
concatted_0_series = pd.concat([series1, series2], axis=0)
assert_series_equal(concatted_0_series, expected_index_series)
# Columns/1 Series
expected_columns_series = pd.DataFrame(
[[0.1, 0.3], [0.2, 0.4]], index=[0, 1], columns=[0, 1])
concatted_columns_series = pd.concat(
[series1, series2], axis='columns')
assert_frame_equal(concatted_columns_series, expected_columns_series)
concatted_1_series = pd.concat([series1, series2], axis=1)
assert_frame_equal(concatted_1_series, expected_columns_series)
# Testing ValueError
with pytest.raises(ValueError, match='No axis named'):
pd.concat([series1, series2], axis='something')
def test_concat_numerical_names(self):
# #15262 # #12223
df = pd.DataFrame({'col': range(9)},
dtype='int32',
index=(pd.MultiIndex
.from_product([['A0', 'A1', 'A2'],
['B0', 'B1', 'B2']],
names=[1, 2])))
result = pd.concat((df.iloc[:2, :], df.iloc[-2:, :]))
expected = pd.DataFrame({'col': [0, 1, 7, 8]},
dtype='int32',
index=pd.MultiIndex.from_tuples([('A0', 'B0'),
('A0', 'B1'),
('A2', 'B1'),
('A2', 'B2')],
names=[1, 2]))
tm.assert_frame_equal(result, expected)
def test_concat_astype_dup_col(self):
# gh 23049
df = pd.DataFrame([{'a': 'b'}])
df = pd.concat([df, df], axis=1)
result = df.astype('category')
expected = pd.DataFrame(np.array(["b", "b"]).reshape(1, 2),
columns=["a", "a"]).astype("category")
tm.assert_frame_equal(result, expected)
class TestDataFrameCombineFirst:
def test_combine_first_mixed(self):
a = Series(['a', 'b'], index=range(2))
b = Series(range(2), index=range(2))
f = DataFrame({'A': a, 'B': b})
a = Series(['a', 'b'], index=range(5, 7))
b = Series(range(2), index=range(5, 7))
g = DataFrame({'A': a, 'B': b})
exp = pd.DataFrame({'A': list('abab'), 'B': [0., 1., 0., 1.]},
index=[0, 1, 5, 6])
combined = f.combine_first(g)
tm.assert_frame_equal(combined, exp)
def test_combine_first(self, float_frame):
# disjoint
head, tail = float_frame[:5], float_frame[5:]
combined = head.combine_first(tail)
reordered_frame = float_frame.reindex(combined.index)
assert_frame_equal(combined, reordered_frame)
assert tm.equalContents(combined.columns, float_frame.columns)
assert_series_equal(combined['A'], reordered_frame['A'])
# same index
fcopy = float_frame.copy()
fcopy['A'] = 1
del fcopy['C']
fcopy2 = float_frame.copy()
fcopy2['B'] = 0
del fcopy2['D']
combined = fcopy.combine_first(fcopy2)
assert (combined['A'] == 1).all()
assert_series_equal(combined['B'], fcopy['B'])
assert_series_equal(combined['C'], fcopy2['C'])
assert_series_equal(combined['D'], fcopy['D'])
# overlap
head, tail = reordered_frame[:10].copy(), reordered_frame
head['A'] = 1
combined = head.combine_first(tail)
assert (combined['A'][:10] == 1).all()
# reverse overlap
tail['A'][:10] = 0
combined = tail.combine_first(head)
assert (combined['A'][:10] == 0).all()
# no overlap
f = float_frame[:10]
g = float_frame[10:]
combined = f.combine_first(g)
assert_series_equal(combined['A'].reindex(f.index), f['A'])
assert_series_equal(combined['A'].reindex(g.index), g['A'])
# corner cases
comb = float_frame.combine_first(DataFrame())
assert_frame_equal(comb, float_frame)
comb = DataFrame().combine_first(float_frame)
assert_frame_equal(comb, float_frame)
comb = float_frame.combine_first(DataFrame(index=["faz", "boo"]))
assert "faz" in comb.index
# #2525
df = DataFrame({'a': [1]}, index=[datetime(2012, 1, 1)])
df2 = DataFrame(columns=['b'])
result = df.combine_first(df2)
assert 'b' in result
def test_combine_first_mixed_bug(self):
idx = Index(['a', 'b', 'c', 'e'])
ser1 = Series([5.0, -9.0, 4.0, 100.], index=idx)
ser2 = Series(['a', 'b', 'c', 'e'], index=idx)
ser3 = Series([12, 4, 5, 97], index=idx)
frame1 = DataFrame({"col0": ser1,
"col2": ser2,
"col3": ser3})
idx = Index(['a', 'b', 'c', 'f'])
ser1 = Series([5.0, -9.0, 4.0, 100.], index=idx)
ser2 = Series(['a', 'b', 'c', 'f'], index=idx)
ser3 = Series([12, 4, 5, 97], index=idx)
frame2 = DataFrame({"col1": ser1,
"col2": ser2,
"col5": ser3})
combined = frame1.combine_first(frame2)
assert len(combined.columns) == 5
# gh 3016 (same as in update)
df = DataFrame([[1., 2., False, True], [4., 5., True, False]],
columns=['A', 'B', 'bool1', 'bool2'])
other = DataFrame([[45, 45]], index=[0], columns=['A', 'B'])
result = df.combine_first(other)
assert_frame_equal(result, df)
df.loc[0, 'A'] = np.nan
result = df.combine_first(other)
df.loc[0, 'A'] = 45
assert_frame_equal(result, df)
# doc example
df1 = DataFrame({'A': [1., np.nan, 3., 5., np.nan],
'B': [np.nan, 2., 3., np.nan, 6.]})
df2 = DataFrame({'A': [5., 2., 4., np.nan, 3., 7.],
'B': [np.nan, np.nan, 3., 4., 6., 8.]})
result = df1.combine_first(df2)
expected = DataFrame(
{'A': [1, 2, 3, 5, 3, 7.], 'B': [np.nan, 2, 3, 4, 6, 8]})
assert_frame_equal(result, expected)
# GH3552, return object dtype with bools
df1 = DataFrame(
[[np.nan, 3., True], [-4.6, np.nan, True], [np.nan, 7., False]])
df2 = DataFrame(
[[-42.6, np.nan, True], [-5., 1.6, False]], index=[1, 2])
result = df1.combine_first(df2)[2]
expected = Series([True, True, False], name=2)
assert_series_equal(result, expected)
# GH 3593, converting datetime64[ns] incorrectly
df0 = DataFrame({"a": [datetime(2000, 1, 1),
datetime(2000, 1, 2),
datetime(2000, 1, 3)]})
df1 = DataFrame({"a": [None, None, None]})
df2 = df1.combine_first(df0)
assert_frame_equal(df2, df0)
df2 = df0.combine_first(df1)
assert_frame_equal(df2, df0)
df0 = DataFrame({"a": [datetime(2000, 1, 1),
datetime(2000, 1, 2),
datetime(2000, 1, 3)]})
df1 = DataFrame({"a": [datetime(2000, 1, 2), None, None]})
df2 = df1.combine_first(df0)
result = df0.copy()
result.iloc[0, :] = df1.iloc[0, :]
assert_frame_equal(df2, result)
df2 = df0.combine_first(df1)
assert_frame_equal(df2, df0)
def test_combine_first_align_nan(self):
# GH 7509 (not fixed)
dfa = pd.DataFrame([[pd.Timestamp('2011-01-01'), 2]],
columns=['a', 'b'])
dfb = pd.DataFrame([[4], [5]], columns=['b'])
assert dfa['a'].dtype == 'datetime64[ns]'
assert dfa['b'].dtype == 'int64'
res = dfa.combine_first(dfb)
exp = pd.DataFrame({'a': [pd.Timestamp('2011-01-01'), pd.NaT],
'b': [2., 5.]}, columns=['a', 'b'])
tm.assert_frame_equal(res, exp)
assert res['a'].dtype == 'datetime64[ns]'
# ToDo: this must be int64
assert res['b'].dtype == 'float64'
res = dfa.iloc[:0].combine_first(dfb)
exp = pd.DataFrame({'a': [np.nan, np.nan],
'b': [4, 5]}, columns=['a', 'b'])
tm.assert_frame_equal(res, exp)
# ToDo: this must be datetime64
assert res['a'].dtype == 'float64'
# ToDo: this must be int64
assert res['b'].dtype == 'int64'
def test_combine_first_timezone(self):
# see gh-7630
data1 = pd.to_datetime('20100101 01:01').tz_localize('UTC')
df1 = pd.DataFrame(columns=['UTCdatetime', 'abc'],
data=data1,
index=pd.date_range('20140627', periods=1))
data2 = pd.to_datetime('20121212 12:12').tz_localize('UTC')
df2 = pd.DataFrame(columns=['UTCdatetime', 'xyz'],
data=data2,
index=pd.date_range('20140628', periods=1))
res = df2[['UTCdatetime']].combine_first(df1)
exp = pd.DataFrame({'UTCdatetime': [pd.Timestamp('2010-01-01 01:01',
tz='UTC'),
pd.Timestamp('2012-12-12 12:12',
tz='UTC')],
'abc': [pd.Timestamp('2010-01-01 01:01:00',
tz='UTC'), pd.NaT]},
columns=['UTCdatetime', 'abc'],
index=pd.date_range('20140627', periods=2,
freq='D'))
tm.assert_frame_equal(res, exp)
assert res['UTCdatetime'].dtype == 'datetime64[ns, UTC]'
assert res['abc'].dtype == 'datetime64[ns, UTC]'
# see gh-10567
dts1 = pd.date_range('2015-01-01', '2015-01-05', tz='UTC')
df1 = pd.DataFrame({'DATE': dts1})
dts2 = pd.date_range('2015-01-03', '2015-01-05', tz='UTC')
df2 = pd.DataFrame({'DATE': dts2})
res = df1.combine_first(df2)
tm.assert_frame_equal(res, df1)
assert res['DATE'].dtype == 'datetime64[ns, UTC]'
dts1 = pd.DatetimeIndex(['2011-01-01', 'NaT', '2011-01-03',
'2011-01-04'], tz='US/Eastern')
df1 = pd.DataFrame({'DATE': dts1}, index=[1, 3, 5, 7])
dts2 = pd.DatetimeIndex(['2012-01-01', '2012-01-02',
'2012-01-03'], tz='US/Eastern')
df2 = pd.DataFrame({'DATE': dts2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = pd.DatetimeIndex(['2011-01-01', '2012-01-01', 'NaT',
'2012-01-02', '2011-01-03', '2011-01-04'],
tz='US/Eastern')
exp = pd.DataFrame({'DATE': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
# different tz
dts1 = pd.date_range('2015-01-01', '2015-01-05', tz='US/Eastern')
df1 = pd.DataFrame({'DATE': dts1})
dts2 = pd.date_range('2015-01-03', '2015-01-05')
df2 = pd.DataFrame({'DATE': dts2})
# if df1 doesn't have NaN, keep its dtype
res = df1.combine_first(df2)
tm.assert_frame_equal(res, df1)
assert res['DATE'].dtype == 'datetime64[ns, US/Eastern]'
dts1 = pd.date_range('2015-01-01', '2015-01-02', tz='US/Eastern')
df1 = pd.DataFrame({'DATE': dts1})
dts2 = pd.date_range('2015-01-01', '2015-01-03')
df2 = pd.DataFrame({'DATE': dts2})
res = df1.combine_first(df2)
exp_dts = [pd.Timestamp('2015-01-01', tz='US/Eastern'),
pd.Timestamp('2015-01-02', tz='US/Eastern'),
pd.Timestamp('2015-01-03')]
exp = pd.DataFrame({'DATE': exp_dts})
tm.assert_frame_equal(res, exp)
assert res['DATE'].dtype == 'object'
def test_combine_first_timedelta(self):
data1 = pd.TimedeltaIndex(['1 day', 'NaT', '3 day', '4day'])
df1 = pd.DataFrame({'TD': data1}, index=[1, 3, 5, 7])
data2 = pd.TimedeltaIndex(['10 day', '11 day', '12 day'])
df2 = pd.DataFrame({'TD': data2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = pd.TimedeltaIndex(['1 day', '10 day', 'NaT',
'11 day', '3 day', '4 day'])
exp = pd.DataFrame({'TD': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
assert res['TD'].dtype == 'timedelta64[ns]'
def test_combine_first_period(self):
data1 = pd.PeriodIndex(['2011-01', 'NaT', '2011-03',
'2011-04'], freq='M')
df1 = pd.DataFrame({'P': data1}, index=[1, 3, 5, 7])
data2 = pd.PeriodIndex(['2012-01-01', '2012-02',
'2012-03'], freq='M')
df2 = pd.DataFrame({'P': data2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = pd.PeriodIndex(['2011-01', '2012-01', 'NaT',
'2012-02', '2011-03', '2011-04'],
freq='M')
exp = pd.DataFrame({'P': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
assert res['P'].dtype == data1.dtype
# different freq
dts2 = pd.PeriodIndex(['2012-01-01', '2012-01-02',
'2012-01-03'], freq='D')
df2 = pd.DataFrame({'P': dts2}, index=[2, 4, 5])
res = df1.combine_first(df2)
exp_dts = [pd.Period('2011-01', freq='M'),
pd.Period('2012-01-01', freq='D'),
pd.NaT,
pd.Period('2012-01-02', freq='D'),
pd.Period('2011-03', freq='M'),
pd.Period('2011-04', freq='M')]
exp = pd.DataFrame({'P': exp_dts}, index=[1, 2, 3, 4, 5, 7])
tm.assert_frame_equal(res, exp)
assert res['P'].dtype == 'object'
def test_combine_first_int(self):
        # GH14687 - integer series that do not align exactly
df1 = pd.DataFrame({'a': [0, 1, 3, 5]}, dtype='int64')
df2 = pd.DataFrame({'a': [1, 4]}, dtype='int64')
res = df1.combine_first(df2)
tm.assert_frame_equal(res, df1)
assert res['a'].dtype == 'int64'
@pytest.mark.parametrize("val", [1, 1.0])
def test_combine_first_with_asymmetric_other(self, val):
# see gh-20699
df1 = pd.DataFrame({'isNum': [val]})
df2 = pd.DataFrame({'isBool': [True]})
res = df1.combine_first(df2)
exp = pd.DataFrame({'isBool': [True], 'isNum': [val]})
tm.assert_frame_equal(res, exp)
def test_concat_datetime_datetime64_frame(self):
# #2624
rows = []
rows.append([datetime(2010, 1, 1), 1])
rows.append([datetime(2010, 1, 2), 'hi'])
df2_obj = DataFrame.from_records(rows, columns=['date', 'test'])
ind = date_range(start="2000/1/1", freq="D", periods=10)
df1 = DataFrame({'date': ind, 'test': range(10)})
# it works!
pd.concat([df1, df2_obj])
class TestDataFrameUpdate:
def test_update_nan(self):
# #15593 #15617
# test 1
df1 = DataFrame({'A': [1.0, 2, 3], 'B': date_range('2000', periods=3)})
df2 = DataFrame({'A': [None, 2, 3]})
expected = df1.copy()
df1.update(df2, overwrite=False)
tm.assert_frame_equal(df1, expected)
# test 2
df1 = DataFrame({'A': [1.0, None, 3],
'B': date_range('2000', periods=3)})
df2 = DataFrame({'A': [None, 2, 3]})
expected = DataFrame({'A': [1.0, 2, 3],
'B': date_range('2000', periods=3)})
df1.update(df2, overwrite=False)
tm.assert_frame_equal(df1, expected)
| bsd-3-clause |
nilsbore/aaf_deployment | info_terminal/approach_person_of_interest/scripts/go_to_person_server.py | 4 | 6954 | #! /usr/bin/env python
import roslib
import rospy
import actionlib
from geometry_msgs.msg import PoseStamped, PointStamped
from approach_person_of_interest.msg import *
from sensor_msgs.msg import JointState
from std_msgs.msg import String, Float32, Bool, Int32
from strands_navigation_msgs.msg import MonitoredNavigationAction, MonitoredNavigationGoal
import strands_webserver.client_utils
from strands_executive_msgs.srv import IsTaskInterruptible
import strands_gazing.msg
import scitos_ptu.msg
from strands_gazing.msg import GazeAtPoseAction, GazeAtPoseGoal
from monitored_navigation import *
from flir_pantilt_d46.msg import *
class GoToPersonAction(object):
_feedback = GoToPersonFeedback()
_result = GoToPersonResult()
def __init__(self, name):
self._action_name = name
self._as = actionlib.SimpleActionServer(self._action_name, approach_person_of_interest.msg.GoToPersonAction, execute_cb=self.execute_cb, auto_start = False)
self._as.start()
rospy.Service(self._action_name + '_is_interruptible', IsTaskInterruptible, self.is_interruptible)
# this will be set to true while actively engaging with someone
self._is_in_active_time = False
self._activity_timer = None
rospy.loginfo("Action server up: %s"%self._action_name)
self._mon_nav_client = actionlib.SimpleActionClient('monitored_navigation', MonitoredNavigationAction)
print 'Waiting for monitored navigation to start'
self._mon_nav_client.wait_for_server();
print 'Monitored navigation is ready'
self._timeout = 100
rospy.Subscriber("info_terminal/active_screen", Int32, self.button_pressed_callback)
print 'Waiting for ptu...'
self.ptuclient = actionlib.SimpleActionClient("SetPTUState", flir_pantilt_d46.msg.PtuGotoAction)
self.ptuclient.wait_for_server()
print 'ptu ready!'
self.ptugoal = flir_pantilt_d46.msg.PtuGotoGoal()
self.ptugoal.pan_vel = 60
self.ptugoal.tilt_vel = 60
# blink eyes when there is an interaction (i.e. GUI button pressed)
self.pub = rospy.Publisher('/head/commanded_state', JointState, queue_size=2)
# Publishing the gaze pose
self.gaze_topic_pub=rospy.Publisher('/info_terminal/gaze_pose',PoseStamped,queue_size=0)
# Create a gaze action client
self.gaze_act_client = actionlib.SimpleActionClient('gaze_at_pose', GazeAtPoseAction)
def is_interruptible(self, req):
return not self._is_in_active_time
def _reset_activitity_timer(self, event):
rospy.loginfo('Activity timer complete')
self._activity_timer = None
self._is_in_active_time = False
def _start_activity_timer(self):
if self._activity_timer is None:
rospy.loginfo('Starting activity timer')
self._is_in_active_time = True
self._activity_timer = rospy.Timer(rospy.Duration(self._timeout), self._reset_activitity_timer, oneshot=True)
else:
rospy.loginfo('Activity timer to be extended')
# shutdown previous timer and try again
self._activity_timer.shutdown()
self._activity_timer = None
self._start_activity_timer()
def execute_cb(self, goal):
# helper variables
print goal.go_to_person
# goal.timeout is how long to run this behaviour for
        # self._timeout is how long to extend the behaviour for on each interaction (e.g. a GUI button press)
self._timeout_after = rospy.get_rostime() + rospy.Duration(goal.timeout)
if goal.go_to_person:
print 'going to person'
# prevent interruption
self._is_in_active_time = True
self.send_feedback('going to person')
mon_nav_goal=MonitoredNavigationGoal(action_server='move_base', target_pose=goal.pose)
self._mon_nav_client.send_goal(mon_nav_goal)
print "CREATING GAZE"
gaze_dir_goal= GazeAtPoseGoal(topic_name='/info_terminal/gaze_pose', runtime_sec=0)
print "SENDING GOAL"
self.gaze_act_client.send_goal(gaze_dir_goal)
print "DONE GAZING"
self.gaze_topic_pub.publish(goal.pose)
finished_moving=self._mon_nav_client.wait_for_result(rospy.Duration(1))
while not finished_moving:
self.gaze_topic_pub.publish(goal.pose)
finished_moving=self._mon_nav_client.wait_for_result(rospy.Duration(1))
self.send_feedback('Reached the right position')
self.gaze_act_client.cancel_all_goals()
# assume some default activity
self._start_activity_timer()
strands_webserver.client_utils.display_url(0, 'http://localhost:8080')
self.currentPan=-180
self.currentTilt=0
self.head_command = JointState()
self.head_command.name=["HeadPan", "HeadTilt"]
self.head_command.position=[self.currentPan, self.currentTilt]
self.pub.publish(self.head_command)
self.send_feedback('Turning the camera to the person...')
#turn head cam to person
self.ptugoal.pan = -180
self.ptugoal.tilt = 20
self.ptuclient.send_goal(self.ptugoal)
self.ptuclient.wait_for_result()
self.send_feedback('camera turned successfully!')
rate = rospy.Rate(1)
# preempt will not be requested while activity is happening
while not rospy.is_shutdown() and not self._as.is_preempt_requested() and rospy.get_rostime() < self._timeout_after:
# loop for duration
rate.sleep()
self.exit_as()
def button_pressed_callback(self, active_screen):
# reset timeout
rospy.loginfo('button_pressed_callback')
self._start_activity_timer()
#blink eyes
self.eyelid_command = JointState()
self.eyelid_command.name=["EyeLids"]
self.eyelid_command.position=[20]
self.pub.publish(self.eyelid_command)
rospy.sleep(0.5)
self.eyelid_command.position=[100]
self.pub.publish(self.eyelid_command)
# self.currentPan=20
# self.currentTilt=0
# self.head_command = JointState()
# self.head_command.name=["HeadPan", "HeadTilt"]
# self.head_command.position=[self.currentPan, self.currentTilt]
# self.pub.publish(self.head_command)
def send_feedback(self, txt):
self._feedback.status = txt
self._as.publish_feedback(self._feedback)
rospy.loginfo(txt)
def exit_as(self):
self.send_feedback('Turning head camera to default position...')
#turn head cam to person
self.ptugoal.pan = 0
self.ptugoal.tilt = 0
self.ptuclient.send_goal(self.ptugoal)
self.ptuclient.wait_for_result()
self.send_feedback('head camera turned successfully to default position!')
self._result.success = True
rospy.loginfo('%s: Succeeded' % self._action_name)
self._as.set_succeeded(self._result)
if __name__ == '__main__':
rospy.init_node('go_to_person_action')
GoToPersonAction(rospy.get_name())
rospy.spin()
# Add stuff to move head randomly.
# Add stuff to rotate head and blink eyes at target Point
# Add stuff to check time out twice and then warn and move away
# Add stuff to rotate head also by 180deg after robot rotate
| mit |
luci/luci-py | appengine/auth_service/realms/config_test.py | 2 | 11855 | #!/usr/bin/env vpython
# Copyright 2020 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
# pylint: disable=no-value-for-parameter
import logging
import os
import sys
import unittest
import test_env
test_env.setup_test_env()
import mock
import parameterized
from test_support import test_case
from components.auth import model
from components.config import fs
from realms import config
from realms import permissions
def fake_db(rev, perms=None):
b = permissions.Builder(rev)
for p in (perms or []):
b.permission(p)
return b.finish()
def fake_realms_rev(project_id, config_digest, perms_rev):
return config.RealmsCfgRev(
project_id, 'config-rev', config_digest, 'config-body', perms_rev)
class CheckConfigChangesTest(test_case.TestCase):
@mock.patch('realms.config.update_realms', autospec=True)
@mock.patch('realms.config.delete_realms', autospec=True)
def call(self, db, latest, stored, delete_realms_mock, update_realms_mock):
updated = set()
deleted = set()
batches = []
def do_update(_db, revs, _comment):
batches.append(len(revs))
for r in revs:
self.assertNotIn(r.project_id, updated)
self.assertNotIn(r.project_id, deleted)
updated.add(r.project_id)
update_realms_mock.side_effect = do_update
def do_delete(project_id):
self.assertNotIn(project_id, updated)
self.assertNotIn(project_id, deleted)
deleted.add(project_id)
delete_realms_mock.side_effect = do_delete
jobs = config.check_config_changes(db, latest, stored)
self.assertTrue(config.execute_jobs(jobs, 0.0))
return updated, deleted, batches
def test_noop_when_up_to_date(self):
updated, deleted, _ = self.call(
fake_db('db-rev'),
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
fake_realms_rev('proj2', 'digest1', 'db-rev'),
],
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
fake_realms_rev('proj2', 'digest1', 'db-rev'),
])
self.assertEqual(updated, set())
self.assertEqual(deleted, set())
def test_new_projects(self):
updated, deleted, _ = self.call(
fake_db('db-rev'),
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
fake_realms_rev('proj2', 'digest1', 'db-rev'),
],
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
])
self.assertEqual(updated, {'proj2'})
self.assertEqual(deleted, set())
def test_updated_projects(self):
updated, deleted, _ = self.call(
fake_db('db-rev'),
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
fake_realms_rev('proj2', 'digest1', 'db-rev'),
],
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
fake_realms_rev('proj2', 'digest2', 'db-rev'),
])
self.assertEqual(updated, {'proj2'})
self.assertEqual(deleted, set())
def test_deleted_projects(self):
updated, deleted, _ = self.call(
fake_db('db-rev'),
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
],
[
fake_realms_rev('proj1', 'digest1', 'db-rev'),
fake_realms_rev('proj2', 'digest2', 'db-rev'),
])
self.assertEqual(updated, set())
self.assertEqual(deleted, {'proj2'})
def test_perms_revision_change(self):
revs = [
fake_realms_rev('proj%d' % i, 'digest1', 'db-rev1')
for i in range(20)
]
updated, deleted, batches = self.call(fake_db('db-rev2'), revs, revs)
self.assertEqual(updated, {p.project_id for p in revs}) # all of them
self.assertEqual(deleted, set())
self.assertEqual(len(batches), config.DB_REEVAL_REVISIONS)
class CheckPermissionChangesTest(test_case.TestCase):
def call(self, db):
jobs = config.check_permission_changes(db)
self.assertTrue(config.execute_jobs(jobs, 0.0))
def test_works(self):
def perms_from_authdb():
e = model.realms_globals_key().get()
return [p.name for p in e.permissions] if e else []
# The initial state.
self.assertEqual(model.get_auth_db_revision(), 0)
self.assertEqual(perms_from_authdb(), [])
# Create the initial copy of AuthRealmsGlobals.
self.call(fake_db('rev1', ['luci.dev.p1', 'luci.dev.p2']))
self.assertEqual(model.get_auth_db_revision(), 1)
self.assertEqual(perms_from_authdb(), ['luci.dev.p1', 'luci.dev.p2'])
# Noop change.
self.call(fake_db('rev1', ['luci.dev.p1', 'luci.dev.p2']))
self.assertEqual(model.get_auth_db_revision(), 1)
self.assertEqual(perms_from_authdb(), ['luci.dev.p1', 'luci.dev.p2'])
# Real change.
self.call(fake_db('rev2', ['luci.dev.p3']))
self.assertEqual(model.get_auth_db_revision(), 2)
self.assertEqual(perms_from_authdb(), ['luci.dev.p3'])
class ProjectConfigFetchTest(test_case.TestCase):
@mock.patch('components.config.common.self_config_set', autospec=True)
@mock.patch('components.config.fs.get_provider', autospec=True)
def test_works(self, get_provider_mock, self_config_set_mock):
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
get_provider_mock.return_value = fs.Provider(
os.path.join(TESTS_DIR, 'test_data'))
# See test_data/... layout.
self_config_set_mock.return_value = 'services/auth-service-app-id'
revs = config.get_latest_revs_async().get_result()
self.assertEqual(sorted(revs, key=lambda r: r.project_id), [
config.RealmsCfgRev(
project_id='@internal',
config_rev='unknown',
config_digest='90549bf56e8be6c0ff6001d2376db' +
'def519b97cc89e65b2813237b252300dea8',
config_body='realms {\n name: "internal-realm"\n}\n',
perms_rev=None,
),
config.RealmsCfgRev(
project_id='proj1',
config_rev='unknown',
config_digest='05105846cbabf80e1ab2979b7787' +
'f1df1aca9751661fe4b4d28494e0b442459b',
config_body='realms {\n name: "realm1"\n}\n',
perms_rev=None,
),
config.RealmsCfgRev(
project_id='proj2',
config_rev='unknown',
config_digest='fe0857c4fe4282083c0295ee835e7' +
'96403027d13c652f4959a0c6a41957dbc18',
config_body='realms {\n name: "realm2"\n}\n',
perms_rev=None,
),
])
class RealmsUpdateTest(test_case.TestCase):
@parameterized.parameterized.expand([
('some-proj',),
('@internal',),
])
def test_realms_config_lifecycle(self, project_id):
self.assertEqual(model.get_auth_db_revision(), 0)
# A new config appears.
rev = config.RealmsCfgRev(
project_id=project_id,
config_rev='cfg_rev1',
config_digest='digest1',
config_body='realms{ name: "realm1" }',
perms_rev=None)
config.update_realms(fake_db('db-rev1'), [rev], 'New config')
# Generated new AuthDB revisions.
self.assertEqual(model.get_auth_db_revision(), 1)
# Stored now in the expanded form.
ent = model.project_realms_key(project_id).get()
self.assertEqual(
[r.name for r in ent.realms.realms],
['%s:@root' % project_id, '%s:realm1' % project_id])
self.assertEqual(ent.config_rev, 'cfg_rev1')
self.assertEqual(ent.perms_rev, 'db-rev1')
# Permissions DB changes in a way that doesn't affect the expanded form.
config.update_realms(fake_db('db-rev2'), [rev], 'Reeval')
# Seeing the same AuthDB version.
self.assertEqual(model.get_auth_db_revision(), 1)
# The config body changes in a way that doesn't affect the expanded form.
rev = config.RealmsCfgRev(
project_id=project_id,
config_rev='cfg_rev2',
config_digest='digest2',
config_body='realms{ name: "realm1" } # blah blah',
perms_rev=None)
config.update_realms(fake_db('db-rev2'), [rev], 'Updated config')
# Still the same AuthDB version.
self.assertEqual(model.get_auth_db_revision(), 1)
# The config change significantly now.
rev = config.RealmsCfgRev(
project_id=project_id,
config_rev='cfg_rev3',
config_digest='digest3',
config_body='realms{ name: "realm2" }',
perms_rev=None)
config.update_realms(fake_db('db-rev2'), [rev], 'Updated config')
# New revision.
self.assertEqual(model.get_auth_db_revision(), 2)
# And new body.
ent = model.project_realms_key(project_id).get()
self.assertEqual(
[r.name for r in ent.realms.realms],
['%s:@root' % project_id, '%s:realm2' % project_id])
self.assertEqual(ent.config_rev, 'cfg_rev3')
self.assertEqual(ent.perms_rev, 'db-rev2')
# The config is gone.
config.delete_realms(project_id)
# This generated a new revision.
self.assertEqual(model.get_auth_db_revision(), 3)
# And it is indeed gone.
ent = model.project_realms_key(project_id).get()
self.assertIsNone(ent)
# The second deletion is noop.
config.delete_realms(project_id)
self.assertEqual(model.get_auth_db_revision(), 3)
def test_update_many_projects(self):
self.assertEqual(model.get_auth_db_revision(), 0)
cfg_rev = lambda proj, realm, rev_sfx: config.RealmsCfgRev(
project_id=proj,
config_rev='cfg-rev-'+rev_sfx,
config_digest='digest-'+rev_sfx,
config_body='realms{ name: "%s" }' % realm,
perms_rev=None)
# Create a bunch of project configs at once.
config.update_realms(
fake_db('db-rev1'),
[
cfg_rev('proj1', 'realm1', 'p1s1'),
cfg_rev('proj2', 'realm1', 'p2s1'),
],
'New config')
# Produced a single revision.
self.assertEqual(model.get_auth_db_revision(), 1)
# Present now.
revs = config.get_stored_revs_async().get_result()
self.assertEqual(revs, [
config.RealmsCfgRev(
project_id='proj1',
config_rev=u'cfg-rev-p1s1',
config_digest=u'digest-p1s1',
config_body=None,
perms_rev=u'db-rev1',
),
config.RealmsCfgRev(
project_id='proj2',
config_rev=u'cfg-rev-p2s1',
config_digest=u'digest-p2s1',
config_body=None,
perms_rev=u'db-rev1',
),
])
self.assertEqual(
model.project_realms_key('proj1').get().config_rev, 'cfg-rev-p1s1')
self.assertEqual(
model.project_realms_key('proj2').get().config_rev, 'cfg-rev-p2s1')
# One is modified significantly, another not.
config.update_realms(
fake_db('db-rev1'),
[
cfg_rev('proj1', 'realm1', 'p1s2'), # noop change
cfg_rev('proj2', 'realm2', 'p2s2'), # significant change
],
'New config')
revs = config.get_stored_revs_async().get_result()
self.assertEqual(
model.project_realms_key('proj1').get().config_rev, 'cfg-rev-p1s1')
self.assertEqual(
model.project_realms_key('proj2').get().config_rev, 'cfg-rev-p2s2')
# One config is broken.
config.update_realms(
fake_db('db-rev1'),
[
cfg_rev('proj1', 'realm3', 'p1s3'),
cfg_rev('proj2', '@@@@@@', 'p2s3'),
],
'New config')
revs = config.get_stored_revs_async().get_result()
self.assertEqual(
model.project_realms_key('proj1').get().config_rev, 'cfg-rev-p1s3')
self.assertEqual(
model.project_realms_key('proj2').get().config_rev, 'cfg-rev-p2s2')
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.FATAL)
unittest.main()
| apache-2.0 |
dongguangming/python-phonenumbers | python/phonenumbers/data/region_BZ.py | 10 | 1751 | """Auto-generated file, do not edit by hand. BZ metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_BZ = PhoneMetadata(id='BZ', country_code=501, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[2-8]\\d{6}|0\\d{10}', possible_number_pattern='\\d{7}(?:\\d{4})?'),
fixed_line=PhoneNumberDesc(national_number_pattern='[234578][02]\\d{5}', possible_number_pattern='\\d{7}', example_number='2221234'),
mobile=PhoneNumberDesc(national_number_pattern='6[0-367]\\d{5}', possible_number_pattern='\\d{7}', example_number='6221234'),
toll_free=PhoneNumberDesc(national_number_pattern='0800\\d{7}', possible_number_pattern='\\d{11}', example_number='08001234123'),
premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
number_format=[NumberFormat(pattern='(\\d{3})(\\d{4})', format='\\1-\\2', leading_digits_pattern=['[2-8]']),
NumberFormat(pattern='(0)(800)(\\d{4})(\\d{3})', format='\\1-\\2-\\3-\\4', leading_digits_pattern=['0'])],
leading_zero_possible=True)
| apache-2.0 |
mbjadhav/AliPhysics | PWGJE/EMCALJetTasks/Tracks/analysis/base/struct/JetTHnSparse.py | 41 | 7138 | #**************************************************************************
#* Copyright(c) 1998-2014, ALICE Experiment at CERN, All rights reserved. *
#* *
#* Author: The ALICE Off-line Project. *
#* Contributors are mentioned in the code where appropriate. *
#* *
#* Permission to use, copy, modify and distribute this software and its *
#* documentation strictly for non-commercial purposes is hereby granted *
#* without fee, provided that the above copyright notice appears in all *
#* copies and that both the copyright notice and this permission notice *
#* appear in the supporting documentation. The authors make no claims *
#* about the suitability of this software for any purpose. It is *
#* provided "as is" without express or implied warranty. *
#**************************************************************************
"""
Representation of a jet-based THnSparse
@author: Markus Fasel
"""
from PWGJE.EMCALJetTasks.Tracks.analysis.base.struct.THnSparseWrapper import AxisFormat
from PWGJE.EMCALJetTasks.Tracks.analysis.base.struct.THnSparseWrapper import THnSparseWrapper
from copy import copy, deepcopy
from numpy import array as nparray
class AxisFormatJetTHnSparse(AxisFormat):
'''
Axis format for jet-based track THnSparse
'''
def __init__(self):
'''
Constructor
'''
AxisFormat.__init__(self, "jets")
self._axes["tracktpt"] = 0
self._axes["jetpt"] = 1
self._axes["tracketa"] = 2
self._axes["trackphi"] = 3
self._axes["vertexz"] = 4
self._axes["mbtrigger"] = 5
def __deepcopy__(self, other, memo):
'''
Deep copy constructor
'''
newobj = AxisFormatJetTHnSparse()
newobj._Deepcopy(other, memo)
return newobj
def __copy__(self, other):
'''
Shallow copy constructor
'''
newobj = AxisFormatJetTHnSparse()
newobj._Copy()
return newobj
class AxisFormatReducedJetTHnSparse(AxisFormat):
'''
Axis format for projected THnSparse
'''
def __init__(self):
'''
Constructor
'''
AxisFormat.__init__(self, "jetsreduced")
self._axes["tracktpt"] = 0
self._axes["tracketa"] = 1
self._axes["trackphi"] = 2
self._axes["vertexz"] = 3
self._axes["mbtrigger"] = 4
def __deepcopy__(self, other, memo):
'''
Deep copy constructor
'''
newobj = AxisFormatReducedJetTHnSparse()
newobj._Deepcopy(other, memo)
return newobj
def __copy__(self, other):
'''
Shallow copy constructor
'''
newobj = AxisFormatReducedJetTHnSparse()
newobj._Copy()
return newobj
class JetTHnSparseBase(THnSparseWrapper):
'''
Base class for Jet THnSparses
Can not be used directly, but classes must inherit from it
'''
def __init__(self, roothist):
'''
Constructor
'''
THnSparseWrapper.__init__(self, roothist)
def SetEtaCut(self, etamin, etamax):
'''
Apply eta cut
'''
self.ApplyCut("tracketa", etamin, etamax)
def SetPhiCut(self, phimin, phimax):
'''
Apply phi cut
'''
self.ApplyCut("trackphi", phimin, phimax)
def SetVertexCut(self, vzmin, vzmax):
'''
Apply cut on the position of the z-vertex
'''
self.ApplyCut("vertexz", vzmin, vzmax)
def SetRequestSeenInMB(self, vzmin, vzmax):
'''
Request that the track was also in a min. bias event
'''
self.ApplyCut("mbtrigger", 1., 1.)
class JetTHnSparse(JetTHnSparseBase):
'''
THnSparse with information for Tracks in jets
'''
def __init__(self, roothist):
'''
Constructor
'''
JetTHnSparseBase.__init__(self, roothist)
self._axisdefinition = AxisFormatJetTHnSparse()
def __deepcopy__(self, memo):
'''
Deep copy constructor
'''
result = JetTHnSparse(deepcopy(self._rootthnsparse))
result.CopyCuts(self._cutlist, True)
return result
def __copy__(self):
'''
Shallow copy constructor
'''
result = JetTHnSparse(copy(self._rootthnsparse))
result.CopyCuts(self._cutlist, False)
return result
def MakeProjectionMinJetPt(self, minpt):
'''
Reduce THnSparse restricted to track axis, selecting tracks from jets with given
minimum jet pt
'''
self._PrepareProjection()
finaldims = nparray([\
self._axisdefinition.FindAxis("trackpt"),\
self._axisdefinition.FindAxis("tracketa"),\
self._axisdefinition.FindAxis("trackphi"),\
self._axisdefinition.FindAxis("vertexz"),\
self._axisdefinition.FindAxis("mbtrigger"),\
])
currentlimits = {\
"min":self._rootthnsparse.GetAxis(self._axisdefinition.FindAxis("jetpt")).GetFirst(),\
"max":self._rootthnsparse.GetAxis(self._axisdefinition.FindAxis("jetpt")).GetLast()\
}
newlimits = {\
"min":self._rootthnsparse.GetAxis(self._axisdefinition.FindAxis("jetpt")).FindBin(minpt),\
"max":currentlimits["max"],\
}
# Make cut in jet pt
self._rootthnsparse.GetAxis(self._axisdefinition.FindAxis("jetpt")).SetRange(newlimits["min"], newlimits["max"])
# create projected Matrix
result = self._rootthnsparse.Projection(len(finaldims), finaldims)
jetptstring= "jetpt%03d" %(minpt)
result.SetName("%s%s" %(self._rootthnsparse.GetName(), jetptstring))
#reset axis range
self._rootthnsparse.GetAxis(self._axisdefinition.FindAxis("jetpt")).SetRange(currentlimits["min"], currentlimits["max"])
self._CleanumProjection()
return result
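# Illustrative (hypothetical) usage of the wrappers above -- the histogram name,
# source file and cut values are made up and only sketch the intended call sequence:
#   jetsparse = JetTHnSparse(rootfile.Get("hTracksInJets"))
#   jetsparse.SetEtaCut(-0.8, 0.8)
#   jetsparse.SetVertexCut(-10., 10.)
#   reduced = ReducedJetTHnSparse(jetsparse.MakeProjectionMinJetPt(20.))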
class ReducedJetTHnSparse(JetTHnSparseBase):
'''
Class for Jet THnSparse after projecting for different minimum jet pts
'''
def __init__(self, roothist):
'''
Constructor
'''
JetTHnSparseBase.__init__(self, roothist)
self._axisdefinition = AxisFormatReducedJetTHnSparse()
def __deepcopy__(self, memo):
'''
Deep copy constructor
'''
result = ReducedJetTHnSparse(deepcopy(self._rootthnsparse))
result.CopyCuts(self._cutlist, True)
return result
def __copy__(self):
'''
Shallow copy constructor
'''
result = ReducedJetTHnSparse(copy(self._rootthnsparse))
result.CopyCuts(self._cutlist, False)
return result
| bsd-3-clause |
efortuna/AndroidSDKClone | ndk_experimental/prebuilt/linux-x86_64/lib/python2.7/email/quoprimime.py | 246 | 10848 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Ben Gertzfield
# Contact: [email protected]
"""Quoted-printable content transfer encoding per RFCs 2045-2047.
This module handles the content transfer encoding method defined in RFC 2045
to encode US ASCII-like 8-bit data called `quoted-printable'. It is used to
safely encode text that is in a character set similar to the 7-bit US ASCII
character set, but that includes some 8-bit characters that are normally not
allowed in email bodies or headers.
Quoted-printable is very space-inefficient for encoding binary files; use the
email.base64mime module for that instead.
This module provides an interface to encode and decode both headers and bodies
with quoted-printable encoding.
RFC 2045 defines a method for including character set information in an
`encoded-word' in a header. This method is commonly used for 8-bit real names
in To:/From:/Cc: etc. fields, as well as Subject: lines.
This module does not do the line wrapping or end-of-line character
conversion necessary for proper internationalized headers; it only
does dumb encoding and decoding. To deal with the various line
wrapping issues, use the email.header module.
"""
__all__ = [
'body_decode',
'body_encode',
'body_quopri_check',
'body_quopri_len',
'decode',
'decodestring',
'encode',
'encodestring',
'header_decode',
'header_encode',
'header_quopri_check',
'header_quopri_len',
'quote',
'unquote',
]
import re
from string import hexdigits
from email.utils import fix_eols
CRLF = '\r\n'
NL = '\n'
# See also Charset.py
MISC_LEN = 7
hqre = re.compile(r'[^-a-zA-Z0-9!*+/ ]')
bqre = re.compile(r'[^ !-<>-~\t]')
# Helpers
def header_quopri_check(c):
"""Return True if the character should be escaped with header quopri."""
return bool(hqre.match(c))
def body_quopri_check(c):
"""Return True if the character should be escaped with body quopri."""
return bool(bqre.match(c))
def header_quopri_len(s):
"""Return the length of str when it is encoded with header quopri."""
count = 0
for c in s:
if hqre.match(c):
count += 3
else:
count += 1
return count
def body_quopri_len(str):
"""Return the length of str when it is encoded with body quopri."""
count = 0
for c in str:
if bqre.match(c):
count += 3
else:
count += 1
return count
def _max_append(L, s, maxlen, extra=''):
if not L:
L.append(s.lstrip())
elif len(L[-1]) + len(s) <= maxlen:
L[-1] += extra + s
else:
L.append(s.lstrip())
def unquote(s):
"""Turn a string in the form =AB to the ASCII character with value 0xab"""
return chr(int(s[1:3], 16))
def quote(c):
return "=%02X" % ord(c)
def header_encode(header, charset="iso-8859-1", keep_eols=False,
maxlinelen=76, eol=NL):
"""Encode a single header line with quoted-printable (like) encoding.
Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but
used specifically for email header fields to allow charsets with mostly 7
bit characters (and some 8 bit) to remain more or less readable in non-RFC
2045 aware mail clients.
charset names the character set to use to encode the header. It defaults
to iso-8859-1.
The resulting string will be in the form:
"=?charset?q?I_f=E2rt_in_your_g=E8n=E8ral_dire=E7tion?\\n
=?charset?q?Silly_=C8nglish_Kn=EEghts?="
with each line wrapped safely at, at most, maxlinelen characters (defaults
to 76 characters). If maxlinelen is None, the entire string is encoded in
one chunk with no splitting.
End-of-line characters (\\r, \\n, \\r\\n) will be automatically converted
to the canonical email line separator \\r\\n unless the keep_eols
parameter is True (the default is False).
Each line of the header will be terminated in the value of eol, which
defaults to "\\n". Set this to "\\r\\n" if you are using the result of
this function directly in email.
"""
# Return empty headers unchanged
if not header:
return header
if not keep_eols:
header = fix_eols(header)
# Quopri encode each line, in encoded chunks no greater than maxlinelen in
# length, after the RFC chrome is added in.
quoted = []
if maxlinelen is None:
# An obnoxiously large number that's good enough
max_encoded = 100000
else:
max_encoded = maxlinelen - len(charset) - MISC_LEN - 1
for c in header:
# Space may be represented as _ instead of =20 for readability
if c == ' ':
_max_append(quoted, '_', max_encoded)
# These characters can be included verbatim
elif not hqre.match(c):
_max_append(quoted, c, max_encoded)
# Otherwise, replace with hex value like =E2
else:
_max_append(quoted, "=%02X" % ord(c), max_encoded)
# Now add the RFC chrome to each encoded chunk and glue the chunks
# together. BAW: should we be able to specify the leading whitespace in
# the joiner?
joiner = eol + ' '
return joiner.join(['=?%s?q?%s?=' % (charset, line) for line in quoted])
def encode(body, binary=False, maxlinelen=76, eol=NL):
"""Encode with quoted-printable, wrapping at maxlinelen characters.
If binary is False (the default), end-of-line characters will be converted
to the canonical email end-of-line sequence \\r\\n. Otherwise they will
be left verbatim.
Each line of encoded text will end with eol, which defaults to "\\n". Set
this to "\\r\\n" if you will be using the result of this function directly
in an email.
Each line will be wrapped at, at most, maxlinelen characters (defaults to
76 characters). Long lines will have the `soft linefeed' quoted-printable
character "=" appended to them, so the decoded text will be identical to
the original text.
"""
if not body:
return body
if not binary:
body = fix_eols(body)
# BAW: We're accumulating the body text by string concatenation. That
# can't be very efficient, but I don't have time now to rewrite it. It
# just feels like this algorithm could be more efficient.
encoded_body = ''
lineno = -1
    # Preserve line endings here so we can check later to see if an eol needs to
# be added to the output later.
lines = body.splitlines(1)
for line in lines:
# But strip off line-endings for processing this line.
if line.endswith(CRLF):
line = line[:-2]
elif line[-1] in CRLF:
line = line[:-1]
lineno += 1
encoded_line = ''
prev = None
linelen = len(line)
# Now we need to examine every character to see if it needs to be
# quopri encoded. BAW: again, string concatenation is inefficient.
for j in range(linelen):
c = line[j]
prev = c
if bqre.match(c):
c = quote(c)
elif j+1 == linelen:
# Check for whitespace at end of line; special case
if c not in ' \t':
encoded_line += c
prev = c
continue
            # Check to see if the line has reached its maximum length
if len(encoded_line) + len(c) >= maxlinelen:
encoded_body += encoded_line + '=' + eol
encoded_line = ''
encoded_line += c
# Now at end of line..
if prev and prev in ' \t':
# Special case for whitespace at end of file
if lineno + 1 == len(lines):
prev = quote(prev)
if len(encoded_line) + len(prev) > maxlinelen:
encoded_body += encoded_line + '=' + eol + prev
else:
encoded_body += encoded_line + prev
# Just normal whitespace at end of line
else:
encoded_body += encoded_line + prev + '=' + eol
encoded_line = ''
        # Now look at the line we just finished; if it has a line ending, we
# need to add eol to the end of the line.
if lines[lineno].endswith(CRLF) or lines[lineno][-1] in CRLF:
encoded_body += encoded_line + eol
else:
encoded_body += encoded_line
encoded_line = ''
return encoded_body
# For convenience and backwards compatibility w/ standard base64 module
body_encode = encode
encodestring = encode
# BAW: I'm not sure if the intent was for the signature of this function to be
# the same as base64MIME.decode() or not...
def decode(encoded, eol=NL):
"""Decode a quoted-printable string.
Lines are separated with eol, which defaults to \\n.
"""
if not encoded:
return encoded
# BAW: see comment in encode() above. Again, we're building up the
# decoded string with string concatenation, which could be done much more
# efficiently.
decoded = ''
for line in encoded.splitlines():
line = line.rstrip()
if not line:
decoded += eol
continue
i = 0
n = len(line)
while i < n:
c = line[i]
if c != '=':
decoded += c
i += 1
# Otherwise, c == "=". Are we at the end of the line? If so, add
# a soft line break.
elif i+1 == n:
i += 1
continue
# Decode if in form =AB
elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits:
decoded += unquote(line[i:i+3])
i += 3
# Otherwise, not in form =AB, pass literally
else:
decoded += c
i += 1
if i == n:
decoded += eol
# Special case if original string did not end with eol
if not encoded.endswith(eol) and decoded.endswith(eol):
decoded = decoded[:-1]
return decoded
# For convenience and backwards compatibility w/ standard base64 module
body_decode = decode
decodestring = decode
def _unquote_match(match):
"""Turn a match in the form =AB to the ASCII character with value 0xab"""
s = match.group(0)
return unquote(s)
# Header decoding is done a bit differently
def header_decode(s):
"""Decode a string encoded with RFC 2045 MIME header `Q' encoding.
This function does not parse a full MIME header value encoded with
quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use
the high level email.header class for that functionality.
"""
s = s.replace('_', ' ')
return re.sub(r'=[a-fA-F0-9]{2}', _unquote_match, s)
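# A minimal, hedged usage sketch (not part of the original module): it round-trips
# a made-up Latin-1 body through body_encode/body_decode defined above and shows a
# header encoding. It only runs when the module is executed directly, so normal
# imports are unaffected.
if __name__ == '__main__':
    sample = 'caf\xe9 au lait\nplain ascii line\n'
    encoded = body_encode(sample)
    print encoded
    assert body_decode(encoded) == sample
    print header_encode('caf\xe9 au lait', charset='iso-8859-1')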
| apache-2.0 |
40223246/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/unittest/main.py | 739 | 10385 | """Unittest main program"""
import sys
import optparse
import os
from . import loader, runner
from .signals import installHandler
__unittest = True
FAILFAST = " -f, --failfast Stop on first failure\n"
CATCHBREAK = " -c, --catch Catch control-C and display results\n"
BUFFEROUTPUT = " -b, --buffer Buffer stdout and stderr during test runs\n"
USAGE_AS_MAIN = """\
Usage: %(progName)s [options] [tests]
Options:
-h, --help Show this message
-v, --verbose Verbose output
-q, --quiet Minimal output
%(failfast)s%(catchbreak)s%(buffer)s
Examples:
%(progName)s test_module - run tests from test_module
%(progName)s module.TestClass - run tests from module.TestClass
%(progName)s module.Class.test_method - run specified test method
[tests] can be a list of any number of test modules, classes and test
methods.
Alternative Usage: %(progName)s discover [options]
Options:
-v, --verbose Verbose output
%(failfast)s%(catchbreak)s%(buffer)s -s directory Directory to start discovery ('.' default)
-p pattern Pattern to match test files ('test*.py' default)
-t directory Top level directory of project (default to
start directory)
For test discovery all test modules must be importable from the top
level directory of the project.
"""
USAGE_FROM_MODULE = """\
Usage: %(progName)s [options] [test] [...]
Options:
-h, --help Show this message
-v, --verbose Verbose output
-q, --quiet Minimal output
%(failfast)s%(catchbreak)s%(buffer)s
Examples:
%(progName)s - run default set of tests
%(progName)s MyTestSuite - run suite 'MyTestSuite'
%(progName)s MyTestCase.testSomething - run MyTestCase.testSomething
%(progName)s MyTestCase - run all 'test*' test methods
in MyTestCase
"""
def _convert_name(name):
# on Linux / Mac OS X 'foo.PY' is not importable, but on
    # Windows it is. Simpler to do a case insensitive match;
# a better check would be to check that the name is a
# valid Python module name.
if os.path.isfile(name) and name.lower().endswith('.py'):
if os.path.isabs(name):
rel_path = os.path.relpath(name, os.getcwd())
if os.path.isabs(rel_path) or rel_path.startswith(os.pardir):
return name
name = rel_path
# on Windows both '\' and '/' are used as path
# separators. Better to replace both than rely on os.path.sep
return name[:-3].replace('\\', '.').replace('/', '.')
return name
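# For illustration only (not part of the original module): if pkg/tests/test_foo.py
# exists on disk, _convert_name('pkg/tests/test_foo.py') returns 'pkg.tests.test_foo';
# arguments that do not point at existing .py files are returned unchanged.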
def _convert_names(names):
return [_convert_name(name) for name in names]
class TestProgram(object):
"""A command-line program that runs a set of tests; this is primarily
for making test modules conveniently executable.
"""
USAGE = USAGE_FROM_MODULE
# defaults for testing
failfast = catchbreak = buffer = progName = warnings = None
def __init__(self, module='__main__', defaultTest=None, argv=None,
testRunner=None, testLoader=loader.defaultTestLoader,
exit=True, verbosity=1, failfast=None, catchbreak=None,
buffer=None, warnings=None):
if isinstance(module, str):
self.module = __import__(module)
for part in module.split('.')[1:]:
self.module = getattr(self.module, part)
else:
self.module = module
if argv is None:
argv = sys.argv
self.exit = exit
self.failfast = failfast
self.catchbreak = catchbreak
self.verbosity = verbosity
self.buffer = buffer
if warnings is None and not sys.warnoptions:
            # even if DeprecationWarnings are ignored by default
# print them anyway unless other warnings settings are
# specified by the warnings arg or the -W python flag
self.warnings = 'default'
else:
# here self.warnings is set either to the value passed
# to the warnings args or to None.
# If the user didn't pass a value self.warnings will
# be None. This means that the behavior is unchanged
# and depends on the values passed to -W.
self.warnings = warnings
self.defaultTest = defaultTest
self.testRunner = testRunner
self.testLoader = testLoader
self.progName = os.path.basename(argv[0])
self.parseArgs(argv)
self.runTests()
def usageExit(self, msg=None):
if msg:
print(msg)
usage = {'progName': self.progName, 'catchbreak': '', 'failfast': '',
'buffer': ''}
if self.failfast != False:
usage['failfast'] = FAILFAST
if self.catchbreak != False:
usage['catchbreak'] = CATCHBREAK
if self.buffer != False:
usage['buffer'] = BUFFEROUTPUT
print(self.USAGE % usage)
sys.exit(2)
def parseArgs(self, argv):
if ((len(argv) > 1 and argv[1].lower() == 'discover') or
(len(argv) == 1 and self.module is None)):
self._do_discovery(argv[2:])
return
parser = self._getOptParser()
options, args = parser.parse_args(argv[1:])
self._setAttributesFromOptions(options)
if len(args) == 0 and self.module is None:
# this allows "python -m unittest -v" to still work for
# test discovery. This means -c / -b / -v / -f options will
# be handled twice, which is harmless but not ideal.
self._do_discovery(argv[1:])
return
if len(args) == 0 and self.defaultTest is None:
# createTests will load tests from self.module
self.testNames = None
elif len(args) > 0:
self.testNames = _convert_names(args)
if __name__ == '__main__':
# to support python -m unittest ...
self.module = None
else:
self.testNames = (self.defaultTest,)
self.createTests()
def createTests(self):
if self.testNames is None:
self.test = self.testLoader.loadTestsFromModule(self.module)
else:
self.test = self.testLoader.loadTestsFromNames(self.testNames,
self.module)
def _getOptParser(self):
import optparse
parser = optparse.OptionParser()
parser.prog = self.progName
parser.add_option('-v', '--verbose', dest='verbose', default=False,
help='Verbose output', action='store_true')
parser.add_option('-q', '--quiet', dest='quiet', default=False,
help='Quiet output', action='store_true')
if self.failfast != False:
parser.add_option('-f', '--failfast', dest='failfast', default=False,
help='Stop on first fail or error',
action='store_true')
if self.catchbreak != False:
parser.add_option('-c', '--catch', dest='catchbreak', default=False,
help='Catch ctrl-C and display results so far',
action='store_true')
if self.buffer != False:
parser.add_option('-b', '--buffer', dest='buffer', default=False,
help='Buffer stdout and stderr during tests',
action='store_true')
return parser
def _setAttributesFromOptions(self, options):
# only set options from the parsing here
# if they weren't set explicitly in the constructor
if self.failfast is None:
self.failfast = options.failfast
if self.catchbreak is None:
self.catchbreak = options.catchbreak
if self.buffer is None:
self.buffer = options.buffer
if options.verbose:
self.verbosity = 2
elif options.quiet:
self.verbosity = 0
def _addDiscoveryOptions(self, parser):
parser.add_option('-s', '--start-directory', dest='start', default='.',
help="Directory to start discovery ('.' default)")
parser.add_option('-p', '--pattern', dest='pattern', default='test*.py',
help="Pattern to match tests ('test*.py' default)")
parser.add_option('-t', '--top-level-directory', dest='top', default=None,
help='Top level directory of project (defaults to start directory)')
def _do_discovery(self, argv, Loader=None):
if Loader is None:
Loader = lambda: self.testLoader
# handle command line args for test discovery
self.progName = '%s discover' % self.progName
parser = self._getOptParser()
self._addDiscoveryOptions(parser)
options, args = parser.parse_args(argv)
if len(args) > 3:
self.usageExit()
for name, value in zip(('start', 'pattern', 'top'), args):
setattr(options, name, value)
self._setAttributesFromOptions(options)
start_dir = options.start
pattern = options.pattern
top_level_dir = options.top
loader = Loader()
self.test = loader.discover(start_dir, pattern, top_level_dir)
def runTests(self):
if self.catchbreak:
installHandler()
if self.testRunner is None:
self.testRunner = runner.TextTestRunner
if isinstance(self.testRunner, type):
try:
testRunner = self.testRunner(verbosity=self.verbosity,
failfast=self.failfast,
buffer=self.buffer,
warnings=self.warnings)
except TypeError:
# didn't accept the verbosity, buffer or failfast arguments
testRunner = self.testRunner()
else:
# it is assumed to be a TestRunner instance
testRunner = self.testRunner
self.result = testRunner.run(self.test)
if self.exit:
sys.exit(not self.result.wasSuccessful())
main = TestProgram
| agpl-3.0 |
Alwnikrotikz/sulley | requests/trend.py | 6 | 5080 | from sulley import *
import struct
# crap ass trend xor "encryption" routine for control manager (20901)
def trend_xor_encode (str):
'''
Simple bidirectional XOR "encryption" routine used by this service.
'''
key = 0xA8534344
ret = ""
# pad to 4 byte boundary.
pad = 4 - (len(str) % 4)
if pad == 4:
pad = 0
str += "\x00" * pad
while str:
dword = struct.unpack("<L", str[:4])[0]
str = str[4:]
dword ^= key
ret += struct.pack("<L", dword)
key = dword
return ret
# crap ass trend xor "encryption" routine for control manager (20901)
def trend_xor_decode (str):
key = 0xA8534344
ret = ""
while str:
dword = struct.unpack("<L", str[:4])[0]
str = str[4:]
tmp = dword
tmp ^= key
ret += struct.pack("<L", tmp)
key = dword
return ret
# dce rpc request encoder used for trend server protect 5168 RPC service.
# opnum is always zero.
def rpc_request_encoder (data):
return utils.dcerpc.request(0, data)
########################################################################################################################
s_initialize("20901")
"""
Trend Micro Control Manager (DcsProcessor.exe)
http://bakemono/mediawiki/index.php/Trend_Micro:Control_Manager
This fuzz found nothing! need to uncover more protocol details. See also: pedram's pwned notebook page 3, 4.
"""
# dword 1, error: 0x10000001, do something:0x10000002, 0x10000003 (>0x10000002)
s_group("magic", values=["\x02\x00\x00\x10", "\x03\x00\x00\x10"])
# dword 2, size of body
s_size("body")
# dword 3, crc32(block) (copy from eax at 0041EE8B)
# XXX - CRC is non standard, nop out jmp at 0041EE99 and use bogus value:
#s_checksum("body", algorithm="crc32")
s_static("\xff\xff\xff\xff")
# the body of the trend request contains a variable number of (2-byte) TLVs
if s_block_start("body", encoder=trend_xor_encode):
s_word(0x0000, full_range=True) # completely fuzz the type
s_size("string1", length=2) # valid length
if s_block_start("string1"): # fuzz string
s_string("A"*1000)
s_block_end()
s_random("\x00\x00", 2, 2) # random type
s_size("string2", length=2) # valid length
if s_block_start("string2"): # fuzz string
s_string("B"*10)
s_block_end()
# try a table overflow.
if s_block_start("repeat me"):
s_random("\x00\x00", 2, 2) # random type
s_size("string3", length=2) # valid length
if s_block_start("string3"): # fuzz string
s_string("C"*10)
s_block_end()
s_block_end()
# repeat string3 a bunch of times.
s_repeat("repeat me", min_reps=100, max_reps=1000, step=50)
s_block_end("body")
########################################################################################################################
"""
Trend Micro Server Protect (SpNTsvc.exe)
This fuzz uncovered a bunch of DoS and code exec bugs. The obvious code exec bugs were documented and released to
the vendor. See also: pedram's pwned notebook page 1, 2.
// opcode: 0x00, address: 0x65741030
// uuid: 25288888-bd5b-11d1-9d53-0080c83a5c2c
// version: 1.0
error_status_t rpc_opnum_0 (
[in] handle_t arg_1, // not sent on wire
[in] long trend_req_num,
[in][size_is(arg_4)] byte overflow_str[],
[in] long arg_4,
[out][size_is(arg_6)] byte arg_5[], // not sent on wire
[in] long arg_6
);
"""
for op, submax in [(0x1, 22), (0x2, 19), (0x3, 85), (0x5, 25), (0xa, 49), (0x1f, 25)]:
s_initialize("5168: op-%x" % op)
if s_block_start("everything", encoder=rpc_request_encoder):
# [in] long trend_req_num,
s_group("subs", values=map(chr, range(1, submax)))
s_static("\x00") # subs is actually a little endian word
s_static(struct.pack("<H", op)) # opcode
# [in][size_is(arg_4)] byte overflow_str[],
s_size("the string")
if s_block_start("the string", group="subs"):
s_static("A" * 0x5000, name="arg3")
s_block_end()
# [in] long arg_4,
s_size("the string")
# [in] long arg_6
s_static(struct.pack("<L", 0x5000)) # output buffer size
s_block_end()
########################################################################################################################
s_initialize("5005")
"""
Trend Micro Server Protect (EarthAgent.exe)
Some custom protocol listening on TCP port 5005
"""
s_static("\x21\x43\x65\x87") # magic
# command
s_static("\x00\x00\x00\x00") # dunno
s_static("\x01\x00\x00\x00") # dunno, but observed static
# length
s_static("\xe8\x03\x00\x00") # dunno, but observed static
s_static("\x00\x00\x00\x00") # dunno, but observed static
| gpl-2.0 |
kernevil/samba | python/samba/tests/process_limits.py | 1 | 3057 | # Tests for limiting processes forked on accept by the standard process model
#
# Copyright (C) Andrew Bartlett <[email protected]> 2018
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
"""Tests limits on processes forked by fork on accept in the standard process
model.
NOTE: This test runs in an environment with an artificially low setting for
smbd max processes
"""
import os
from samba.tests import TestCase
from samba.samdb import SamDB
from ldb import LdbError, ERR_OPERATIONS_ERROR
class StandardModelProcessLimitTests(TestCase):
def setUp(self):
super(StandardModelProcessLimitTests, self).setUp()
def tearDown(self):
super(StandardModelProcessLimitTests, self).tearDown()
def simple_bind(self):
creds = self.insta_creds(template=self.get_credentials())
creds.set_bind_dn("%s\\%s" % (creds.get_domain(),
creds.get_username()))
return SamDB(url="ldaps://%s" % os.environ["SERVER"],
lp=self.get_loadparm(),
credentials=creds)
def test_process_limits(self):
creds = self.insta_creds(template=self.get_credentials())
creds.set_bind_dn("%s\\%s" % (creds.get_domain(),
creds.get_username()))
connections = []
try:
            # Open a series of LDAP connections; the maximum number of
            # active connections should be 20, so the 21st should fail.
            # But because other processes may already be holding connections,
            # allow for earlier connection failures as well.
for _ in range(21):
connections.append(self.simple_bind())
self.fail(
"Processes not limited, able to make more than 20 connections")
except LdbError as e:
(errno, estr) = e.args
if errno != ERR_OPERATIONS_ERROR:
raise
if not (estr.endswith("NT_STATUS_CONNECTION_DISCONNECTED") or
estr.endswith("NT_STATUS_CONNECTION_RESET")):
raise
pass
#
# Clean up the connections we've just opened, by deleting the
# connection in python. This should invoke the talloc destructor to
# release any resources and close the actual connection to the server.
for c in connections:
del c
| gpl-3.0 |
Jay-Jay-D/LeanSTP | Algorithm.Python/G10CurrencySelectionModelFrameworkAlgorithm.py | 1 | 3703 | # QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Algorithm.Framework")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Orders import *
from QuantConnect.Algorithm import *
from QuantConnect.Algorithm.Framework import *
from QuantConnect.Algorithm.Framework.Alphas import *
from QuantConnect.Algorithm.Framework.Execution import *
from QuantConnect.Algorithm.Framework.Portfolio import *
from QuantConnect.Algorithm.Framework.Risk import *
from Selection.ManualUniverseSelectionModel import ManualUniverseSelectionModel
from datetime import timedelta
### <summary>
### Framework algorithm that uses the G10CurrencySelectionModel,
### a Universe Selection Model that inherits from ManualUniverseSelectionModel
### </summary>
class G10CurrencySelectionModelFrameworkAlgorithm(QCAlgorithm):
'''Framework algorithm that uses the G10CurrencySelectionModel,
    a Universe Selection Model that inherits from ManualUniverseSelectionModel'''
def Initialize(self):
        ''' Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.'''
# Set requested data resolution
self.UniverseSettings.Resolution = Resolution.Minute
self.SetStartDate(2013,10,7) #Set Start Date
self.SetEndDate(2013,10,11) #Set End Date
self.SetCash(100000) #Set Strategy Cash
# set algorithm framework models
self.SetUniverseSelection(self.G10CurrencySelectionModel())
self.SetAlpha(ConstantAlphaModel(InsightType.Price, InsightDirection.Up, timedelta(minutes = 20), 0.025, None))
self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel())
self.SetExecution(ImmediateExecutionModel())
self.SetRiskManagement(MaximumDrawdownPercentPerSecurity(0.01))
def OnOrderEvent(self, orderEvent):
if orderEvent.Status == OrderStatus.Filled:
self.Debug("Purchased Stock: {0}".format(orderEvent.Symbol))
class G10CurrencySelectionModel(ManualUniverseSelectionModel):
'''Provides an implementation of IUniverseSelectionModel that simply subscribes to G10 currencies'''
def __init__(self):
'''Initializes a new instance of the G10CurrencySelectionModel class
using the algorithm's security initializer and universe settings'''
super().__init__([Symbol.Create(x, SecurityType.Forex, Market.Oanda)
for x in [ "EURUSD",
"GBPUSD",
"USDJPY",
"AUDUSD",
"NZDUSD",
"USDCAD",
"USDCHF",
"NOKUSD",
"SEKUSD" ]]) | apache-2.0 |
alexryndin/ambari | ambari-server/src/main/resources/stacks/ADH/1.5/services/TEZ/package/scripts/service_check.py | 4 | 3606 | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import os
from resource_management.libraries.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
from resource_management.libraries.functions import format
from resource_management.libraries.functions.version import compare_versions
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
from resource_management.core.resources.system import File, Execute
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
from resource_management.core.logger import Logger
class TezServiceCheck(Script):
pass
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class TezServiceCheckLinux(TezServiceCheck):
def service_check(self, env):
import params
env.set_params(params)
path_to_tez_jar = format(params.tez_examples_jar)
wordcount_command = format("jar {path_to_tez_jar} orderedwordcount /tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
File(format("{tmp_dir}/sample-tez-test"),
content = "foo\nbar\nfoo\nbar\nfoo",
mode = 0755
)
params.HdfsResource("/tmp/tezsmokeoutput",
action = "delete_on_execute",
type = "directory"
)
params.HdfsResource("/tmp/tezsmokeinput",
action = "create_on_execute",
type = "directory",
owner = params.smokeuser,
)
params.HdfsResource("/tmp/tezsmokeinput/sample-tez-test",
action = "create_on_execute",
type = "file",
owner = params.smokeuser,
source = format("{tmp_dir}/sample-tez-test"),
)
copy_to_hdfs("tez", params.user_group, params.hdfs_user, skip=params.host_sys_prepped)
params.HdfsResource(None, action = "execute")
if params.security_enabled:
kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
Execute(kinit_cmd,
user=params.smokeuser
)
ExecuteHadoop(wordcount_command,
tries = 3,
try_sleep = 5,
user = params.smokeuser,
conf_dir = params.hadoop_conf_dir,
bin_dir = params.hadoop_bin_dir
)
ExecuteHadoop(test_command,
tries = 10,
try_sleep = 6,
user = params.smokeuser,
conf_dir = params.hadoop_conf_dir,
bin_dir = params.hadoop_bin_dir
)
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class TezServiceCheckWindows(TezServiceCheck):
def service_check(self, env):
import params
env.set_params(params)
smoke_cmd = os.path.join(params.hdp_root,"Run-SmokeTests.cmd")
service = "TEZ"
Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True, user=params.tez_user)
if __name__ == "__main__":
TezServiceCheck().execute()
| apache-2.0 |
mrtnrdl/.macdots | scripts/bin/platform-tools/systrace/catapult/third_party/pyserial/serial/win32.py | 155 | 10120 | from ctypes import *
from ctypes.wintypes import HANDLE
from ctypes.wintypes import BOOL
from ctypes.wintypes import LPCWSTR
_stdcall_libraries = {}
_stdcall_libraries['kernel32'] = WinDLL('kernel32')
from ctypes.wintypes import DWORD
from ctypes.wintypes import WORD
from ctypes.wintypes import BYTE
INVALID_HANDLE_VALUE = HANDLE(-1).value
# some details of the Windows API differ between 32 and 64 bit systems.
def is_64bit():
"""Returns true when running on a 64 bit system"""
return sizeof(c_ulong) != sizeof(c_void_p)
# ULONG_PTR is an ordinary number, not a pointer, and contrary to the name it
# is either 32 or 64 bits, depending on the type of Windows...
# so test if this is a 32 bit Windows...
if is_64bit():
# assume 64 bits
ULONG_PTR = c_int64
else:
# 32 bits
ULONG_PTR = c_ulong
class _SECURITY_ATTRIBUTES(Structure):
pass
LPSECURITY_ATTRIBUTES = POINTER(_SECURITY_ATTRIBUTES)
try:
CreateEventW = _stdcall_libraries['kernel32'].CreateEventW
except AttributeError:
# Fallback to non wide char version for old OS...
from ctypes.wintypes import LPCSTR
CreateEventA = _stdcall_libraries['kernel32'].CreateEventA
CreateEventA.restype = HANDLE
CreateEventA.argtypes = [LPSECURITY_ATTRIBUTES, BOOL, BOOL, LPCSTR]
CreateEvent=CreateEventA
CreateFileA = _stdcall_libraries['kernel32'].CreateFileA
CreateFileA.restype = HANDLE
CreateFileA.argtypes = [LPCSTR, DWORD, DWORD, LPSECURITY_ATTRIBUTES, DWORD, DWORD, HANDLE]
CreateFile = CreateFileA
else:
CreateEventW.restype = HANDLE
CreateEventW.argtypes = [LPSECURITY_ATTRIBUTES, BOOL, BOOL, LPCWSTR]
CreateEvent = CreateEventW # alias
CreateFileW = _stdcall_libraries['kernel32'].CreateFileW
CreateFileW.restype = HANDLE
CreateFileW.argtypes = [LPCWSTR, DWORD, DWORD, LPSECURITY_ATTRIBUTES, DWORD, DWORD, HANDLE]
CreateFile = CreateFileW # alias
class _OVERLAPPED(Structure):
pass
OVERLAPPED = _OVERLAPPED
class _COMSTAT(Structure):
pass
COMSTAT = _COMSTAT
class _DCB(Structure):
pass
DCB = _DCB
class _COMMTIMEOUTS(Structure):
pass
COMMTIMEOUTS = _COMMTIMEOUTS
GetLastError = _stdcall_libraries['kernel32'].GetLastError
GetLastError.restype = DWORD
GetLastError.argtypes = []
LPOVERLAPPED = POINTER(_OVERLAPPED)
LPDWORD = POINTER(DWORD)
GetOverlappedResult = _stdcall_libraries['kernel32'].GetOverlappedResult
GetOverlappedResult.restype = BOOL
GetOverlappedResult.argtypes = [HANDLE, LPOVERLAPPED, LPDWORD, BOOL]
ResetEvent = _stdcall_libraries['kernel32'].ResetEvent
ResetEvent.restype = BOOL
ResetEvent.argtypes = [HANDLE]
LPCVOID = c_void_p
WriteFile = _stdcall_libraries['kernel32'].WriteFile
WriteFile.restype = BOOL
WriteFile.argtypes = [HANDLE, LPCVOID, DWORD, LPDWORD, LPOVERLAPPED]
LPVOID = c_void_p
ReadFile = _stdcall_libraries['kernel32'].ReadFile
ReadFile.restype = BOOL
ReadFile.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, LPOVERLAPPED]
CloseHandle = _stdcall_libraries['kernel32'].CloseHandle
CloseHandle.restype = BOOL
CloseHandle.argtypes = [HANDLE]
ClearCommBreak = _stdcall_libraries['kernel32'].ClearCommBreak
ClearCommBreak.restype = BOOL
ClearCommBreak.argtypes = [HANDLE]
LPCOMSTAT = POINTER(_COMSTAT)
ClearCommError = _stdcall_libraries['kernel32'].ClearCommError
ClearCommError.restype = BOOL
ClearCommError.argtypes = [HANDLE, LPDWORD, LPCOMSTAT]
SetupComm = _stdcall_libraries['kernel32'].SetupComm
SetupComm.restype = BOOL
SetupComm.argtypes = [HANDLE, DWORD, DWORD]
EscapeCommFunction = _stdcall_libraries['kernel32'].EscapeCommFunction
EscapeCommFunction.restype = BOOL
EscapeCommFunction.argtypes = [HANDLE, DWORD]
GetCommModemStatus = _stdcall_libraries['kernel32'].GetCommModemStatus
GetCommModemStatus.restype = BOOL
GetCommModemStatus.argtypes = [HANDLE, LPDWORD]
LPDCB = POINTER(_DCB)
GetCommState = _stdcall_libraries['kernel32'].GetCommState
GetCommState.restype = BOOL
GetCommState.argtypes = [HANDLE, LPDCB]
LPCOMMTIMEOUTS = POINTER(_COMMTIMEOUTS)
GetCommTimeouts = _stdcall_libraries['kernel32'].GetCommTimeouts
GetCommTimeouts.restype = BOOL
GetCommTimeouts.argtypes = [HANDLE, LPCOMMTIMEOUTS]
PurgeComm = _stdcall_libraries['kernel32'].PurgeComm
PurgeComm.restype = BOOL
PurgeComm.argtypes = [HANDLE, DWORD]
SetCommBreak = _stdcall_libraries['kernel32'].SetCommBreak
SetCommBreak.restype = BOOL
SetCommBreak.argtypes = [HANDLE]
SetCommMask = _stdcall_libraries['kernel32'].SetCommMask
SetCommMask.restype = BOOL
SetCommMask.argtypes = [HANDLE, DWORD]
SetCommState = _stdcall_libraries['kernel32'].SetCommState
SetCommState.restype = BOOL
SetCommState.argtypes = [HANDLE, LPDCB]
SetCommTimeouts = _stdcall_libraries['kernel32'].SetCommTimeouts
SetCommTimeouts.restype = BOOL
SetCommTimeouts.argtypes = [HANDLE, LPCOMMTIMEOUTS]
WaitForSingleObject = _stdcall_libraries['kernel32'].WaitForSingleObject
WaitForSingleObject.restype = DWORD
WaitForSingleObject.argtypes = [HANDLE, DWORD]
ONESTOPBIT = 0 # Variable c_int
TWOSTOPBITS = 2 # Variable c_int
ONE5STOPBITS = 1
NOPARITY = 0 # Variable c_int
ODDPARITY = 1 # Variable c_int
EVENPARITY = 2 # Variable c_int
MARKPARITY = 3
SPACEPARITY = 4
RTS_CONTROL_HANDSHAKE = 2 # Variable c_int
RTS_CONTROL_DISABLE = 0 # Variable c_int
RTS_CONTROL_ENABLE = 1 # Variable c_int
RTS_CONTROL_TOGGLE = 3 # Variable c_int
SETRTS = 3
CLRRTS = 4
DTR_CONTROL_HANDSHAKE = 2 # Variable c_int
DTR_CONTROL_DISABLE = 0 # Variable c_int
DTR_CONTROL_ENABLE = 1 # Variable c_int
SETDTR = 5
CLRDTR = 6
MS_DSR_ON = 32 # Variable c_ulong
EV_RING = 256 # Variable c_int
EV_PERR = 512 # Variable c_int
EV_ERR = 128 # Variable c_int
SETXOFF = 1 # Variable c_int
EV_RXCHAR = 1 # Variable c_int
GENERIC_WRITE = 1073741824 # Variable c_long
PURGE_TXCLEAR = 4 # Variable c_int
FILE_FLAG_OVERLAPPED = 1073741824 # Variable c_int
EV_DSR = 16 # Variable c_int
MAXDWORD = 4294967295L # Variable c_uint
EV_RLSD = 32 # Variable c_int
ERROR_IO_PENDING = 997 # Variable c_long
MS_CTS_ON = 16 # Variable c_ulong
EV_EVENT1 = 2048 # Variable c_int
EV_RX80FULL = 1024 # Variable c_int
PURGE_RXABORT = 2 # Variable c_int
FILE_ATTRIBUTE_NORMAL = 128 # Variable c_int
PURGE_TXABORT = 1 # Variable c_int
SETXON = 2 # Variable c_int
OPEN_EXISTING = 3 # Variable c_int
MS_RING_ON = 64 # Variable c_ulong
EV_TXEMPTY = 4 # Variable c_int
EV_RXFLAG = 2 # Variable c_int
MS_RLSD_ON = 128 # Variable c_ulong
GENERIC_READ = 2147483648L # Variable c_ulong
EV_EVENT2 = 4096 # Variable c_int
EV_CTS = 8 # Variable c_int
EV_BREAK = 64 # Variable c_int
PURGE_RXCLEAR = 8 # Variable c_int
INFINITE = 0xFFFFFFFFL
class N11_OVERLAPPED4DOLLAR_48E(Union):
pass
class N11_OVERLAPPED4DOLLAR_484DOLLAR_49E(Structure):
pass
N11_OVERLAPPED4DOLLAR_484DOLLAR_49E._fields_ = [
('Offset', DWORD),
('OffsetHigh', DWORD),
]
PVOID = c_void_p
N11_OVERLAPPED4DOLLAR_48E._anonymous_ = ['_0']
N11_OVERLAPPED4DOLLAR_48E._fields_ = [
('_0', N11_OVERLAPPED4DOLLAR_484DOLLAR_49E),
('Pointer', PVOID),
]
_OVERLAPPED._anonymous_ = ['_0']
_OVERLAPPED._fields_ = [
('Internal', ULONG_PTR),
('InternalHigh', ULONG_PTR),
('_0', N11_OVERLAPPED4DOLLAR_48E),
('hEvent', HANDLE),
]
_SECURITY_ATTRIBUTES._fields_ = [
('nLength', DWORD),
('lpSecurityDescriptor', LPVOID),
('bInheritHandle', BOOL),
]
_COMSTAT._fields_ = [
('fCtsHold', DWORD, 1),
('fDsrHold', DWORD, 1),
('fRlsdHold', DWORD, 1),
('fXoffHold', DWORD, 1),
('fXoffSent', DWORD, 1),
('fEof', DWORD, 1),
('fTxim', DWORD, 1),
('fReserved', DWORD, 25),
('cbInQue', DWORD),
('cbOutQue', DWORD),
]
_DCB._fields_ = [
('DCBlength', DWORD),
('BaudRate', DWORD),
('fBinary', DWORD, 1),
('fParity', DWORD, 1),
('fOutxCtsFlow', DWORD, 1),
('fOutxDsrFlow', DWORD, 1),
('fDtrControl', DWORD, 2),
('fDsrSensitivity', DWORD, 1),
('fTXContinueOnXoff', DWORD, 1),
('fOutX', DWORD, 1),
('fInX', DWORD, 1),
('fErrorChar', DWORD, 1),
('fNull', DWORD, 1),
('fRtsControl', DWORD, 2),
('fAbortOnError', DWORD, 1),
('fDummy2', DWORD, 17),
('wReserved', WORD),
('XonLim', WORD),
('XoffLim', WORD),
('ByteSize', BYTE),
('Parity', BYTE),
('StopBits', BYTE),
('XonChar', c_char),
('XoffChar', c_char),
('ErrorChar', c_char),
('EofChar', c_char),
('EvtChar', c_char),
('wReserved1', WORD),
]
_COMMTIMEOUTS._fields_ = [
('ReadIntervalTimeout', DWORD),
('ReadTotalTimeoutMultiplier', DWORD),
('ReadTotalTimeoutConstant', DWORD),
('WriteTotalTimeoutMultiplier', DWORD),
('WriteTotalTimeoutConstant', DWORD),
]
__all__ = ['GetLastError', 'MS_CTS_ON', 'FILE_ATTRIBUTE_NORMAL',
'DTR_CONTROL_ENABLE', '_COMSTAT', 'MS_RLSD_ON',
'GetOverlappedResult', 'SETXON', 'PURGE_TXABORT',
'PurgeComm', 'N11_OVERLAPPED4DOLLAR_48E', 'EV_RING',
'ONESTOPBIT', 'SETXOFF', 'PURGE_RXABORT', 'GetCommState',
'RTS_CONTROL_ENABLE', '_DCB', 'CreateEvent',
'_COMMTIMEOUTS', '_SECURITY_ATTRIBUTES', 'EV_DSR',
'EV_PERR', 'EV_RXFLAG', 'OPEN_EXISTING', 'DCB',
'FILE_FLAG_OVERLAPPED', 'EV_CTS', 'SetupComm',
'LPOVERLAPPED', 'EV_TXEMPTY', 'ClearCommBreak',
'LPSECURITY_ATTRIBUTES', 'SetCommBreak', 'SetCommTimeouts',
'COMMTIMEOUTS', 'ODDPARITY', 'EV_RLSD',
'GetCommModemStatus', 'EV_EVENT2', 'PURGE_TXCLEAR',
'EV_BREAK', 'EVENPARITY', 'LPCVOID', 'COMSTAT', 'ReadFile',
'PVOID', '_OVERLAPPED', 'WriteFile', 'GetCommTimeouts',
'ResetEvent', 'EV_RXCHAR', 'LPCOMSTAT', 'ClearCommError',
'ERROR_IO_PENDING', 'EscapeCommFunction', 'GENERIC_READ',
'RTS_CONTROL_HANDSHAKE', 'OVERLAPPED',
'DTR_CONTROL_HANDSHAKE', 'PURGE_RXCLEAR', 'GENERIC_WRITE',
'LPDCB', 'CreateEventW', 'SetCommMask', 'EV_EVENT1',
'SetCommState', 'LPVOID', 'CreateFileW', 'LPDWORD',
'EV_RX80FULL', 'TWOSTOPBITS', 'LPCOMMTIMEOUTS', 'MAXDWORD',
'MS_DSR_ON', 'MS_RING_ON',
'N11_OVERLAPPED4DOLLAR_484DOLLAR_49E', 'EV_ERR',
'ULONG_PTR', 'CreateFile', 'NOPARITY', 'CloseHandle']
| unlicense |
dgrat/Book | Tools/autotest/arducopter.py | 39 | 44394 | # fly ArduCopter in SITL
# Flight mode switch positions are set-up in arducopter.param to be
# switch 1 = Circle
# switch 2 = Land
# switch 3 = RTL
# switch 4 = Auto
# switch 5 = Loiter
# switch 6 = Stabilize
import util, pexpect, sys, time, math, shutil, os
from common import *
from pymavlink import mavutil, mavwp
import random
# get location of scripts
testdir=os.path.dirname(os.path.realpath(__file__))
FRAME='+'
TARGET='sitl'
HOME=mavutil.location(-35.362938,149.165085,584,270)
AVCHOME=mavutil.location(40.072842,-105.230575,1586,0)
homeloc = None
num_wp = 0
speedup_default = 5
def hover(mavproxy, mav, hover_throttle=1450):
mavproxy.send('rc 3 %u\n' % hover_throttle)
return True
def arm_motors(mavproxy, mav):
'''arm motors'''
print("Arming motors")
mavproxy.send('switch 6\n') # stabilize mode
wait_mode(mav, 'STABILIZE')
mavproxy.send('rc 3 1000\n')
mavproxy.send('rc 4 2000\n')
mavproxy.expect('APM: ARMING MOTORS')
mavproxy.send('rc 4 1500\n')
mav.motors_armed_wait()
print("MOTORS ARMED OK")
return True
def disarm_motors(mavproxy, mav):
'''disarm motors'''
print("Disarming motors")
mavproxy.send('switch 6\n') # stabilize mode
wait_mode(mav, 'STABILIZE')
mavproxy.send('rc 3 1000\n')
mavproxy.send('rc 4 1000\n')
mavproxy.expect('APM: DISARMING MOTORS')
mavproxy.send('rc 4 1500\n')
mav.motors_disarmed_wait()
print("MOTORS DISARMED OK")
return True
def takeoff(mavproxy, mav, alt_min = 30, takeoff_throttle=1700):
'''takeoff get to 30m altitude'''
mavproxy.send('switch 6\n') # stabilize mode
wait_mode(mav, 'STABILIZE')
mavproxy.send('rc 3 %u\n' % takeoff_throttle)
m = mav.recv_match(type='VFR_HUD', blocking=True)
if (m.alt < alt_min):
wait_altitude(mav, alt_min, (alt_min + 5))
hover(mavproxy, mav)
print("TAKEOFF COMPLETE")
return True
# loiter - fly south west, then hold loiter within 5m position and altitude
def loiter(mavproxy, mav, holdtime=10, maxaltchange=5, maxdistchange=5):
'''hold loiter position'''
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
# first aim south east
print("turn south east")
mavproxy.send('rc 4 1580\n')
if not wait_heading(mav, 170):
return False
mavproxy.send('rc 4 1500\n')
#fly south east 50m
mavproxy.send('rc 2 1100\n')
if not wait_distance(mav, 50):
return False
mavproxy.send('rc 2 1500\n')
# wait for copter to slow moving
if not wait_groundspeed(mav, 0, 2):
return False
success = True
m = mav.recv_match(type='VFR_HUD', blocking=True)
start_altitude = m.alt
start = mav.location()
tstart = get_sim_time(mav)
tholdstart = get_sim_time(mav)
print("Holding loiter at %u meters for %u seconds" % (start_altitude, holdtime))
while get_sim_time(mav) < tstart + holdtime:
m = mav.recv_match(type='VFR_HUD', blocking=True)
pos = mav.location()
delta = get_distance(start, pos)
alt_delta = math.fabs(m.alt - start_altitude)
print("Loiter Dist: %.2fm, alt:%u" % (delta, m.alt))
if alt_delta > maxaltchange:
print("Loiter alt shifted %u meters (> limit of %u)" % (alt_delta, maxaltchange))
success = False
if delta > maxdistchange:
print("Loiter shifted %u meters (> limit of %u)" % (delta, maxdistchange))
success = False
if success:
print("Loiter OK for %u seconds" % holdtime)
else:
print("Loiter FAILED")
return success
def change_alt(mavproxy, mav, alt_min, climb_throttle=1920, descend_throttle=1080):
'''change altitude'''
m = mav.recv_match(type='VFR_HUD', blocking=True)
if(m.alt < alt_min):
print("Rise to alt:%u from %u" % (alt_min, m.alt))
mavproxy.send('rc 3 %u\n' % climb_throttle)
wait_altitude(mav, alt_min, (alt_min + 5))
else:
print("Lower to alt:%u from %u" % (alt_min, m.alt))
mavproxy.send('rc 3 %u\n' % descend_throttle)
wait_altitude(mav, (alt_min -5), alt_min)
hover(mavproxy, mav)
return True
# fly a square in stabilize mode
def fly_square(mavproxy, mav, side=50, timeout=300):
'''fly a square, flying N then E'''
tstart = get_sim_time(mav)
success = True
# ensure all sticks in the middle
mavproxy.send('rc 1 1500\n')
mavproxy.send('rc 2 1500\n')
mavproxy.send('rc 3 1500\n')
mavproxy.send('rc 4 1500\n')
# switch to loiter mode temporarily to stop us from rising
mavproxy.send('switch 5\n')
wait_mode(mav, 'LOITER')
# first aim north
print("turn right towards north")
mavproxy.send('rc 4 1580\n')
if not wait_heading(mav, 10):
print("Failed to reach heading")
success = False
mavproxy.send('rc 4 1500\n')
mav.recv_match(condition='RC_CHANNELS_RAW.chan4_raw==1500', blocking=True)
# save bottom left corner of box as waypoint
print("Save WP 1 & 2")
save_wp(mavproxy, mav)
# switch back to stabilize mode
mavproxy.send('rc 3 1430\n')
mavproxy.send('switch 6\n')
wait_mode(mav, 'STABILIZE')
# pitch forward to fly north
print("Going north %u meters" % side)
mavproxy.send('rc 2 1300\n')
if not wait_distance(mav, side):
print("Failed to reach distance of %u") % side
success = False
mavproxy.send('rc 2 1500\n')
# save top left corner of square as waypoint
print("Save WP 3")
save_wp(mavproxy, mav)
# roll right to fly east
print("Going east %u meters" % side)
mavproxy.send('rc 1 1700\n')
if not wait_distance(mav, side):
print("Failed to reach distance of %u") % side
success = False
mavproxy.send('rc 1 1500\n')
# save top right corner of square as waypoint
print("Save WP 4")
save_wp(mavproxy, mav)
# pitch back to fly south
print("Going south %u meters" % side)
mavproxy.send('rc 2 1700\n')
if not wait_distance(mav, side):
print("Failed to reach distance of %u") % side
success = False
mavproxy.send('rc 2 1500\n')
# save bottom right corner of square as waypoint
print("Save WP 5")
save_wp(mavproxy, mav)
# roll left to fly west
print("Going west %u meters" % side)
mavproxy.send('rc 1 1300\n')
if not wait_distance(mav, side):
print("Failed to reach distance of %u") % side
success = False
mavproxy.send('rc 1 1500\n')
# save bottom left corner of square (should be near home) as waypoint
print("Save WP 6")
save_wp(mavproxy, mav)
# descend to 10m
print("Descend to 10m in Loiter")
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
mavproxy.send('rc 3 1300\n')
time_left = timeout - (get_sim_time(mav) - tstart)
print("timeleft = %u" % time_left)
if time_left < 20:
time_left = 20
if not wait_altitude(mav, -10, 10, time_left):
print("Failed to reach alt of 10m")
success = False
save_wp(mavproxy, mav)
return success
def fly_RTL(mavproxy, mav, side=60, timeout=250):
'''Return, land'''
print("# Enter RTL")
mavproxy.send('switch 3\n')
tstart = get_sim_time(mav)
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
pos = mav.location()
home_distance = get_distance(HOME, pos)
print("Alt: %u HomeDistance: %.0f" % (m.alt, home_distance))
if(m.alt <= 1 and home_distance < 10):
return True
return False
def fly_throttle_failsafe(mavproxy, mav, side=60, timeout=180):
'''Fly east, Failsafe, return, land'''
# switch to loiter mode temporarily to stop us from rising
mavproxy.send('switch 5\n')
wait_mode(mav, 'LOITER')
# first aim east
print("turn east")
mavproxy.send('rc 4 1580\n')
if not wait_heading(mav, 135):
return False
mavproxy.send('rc 4 1500\n')
# switch to stabilize mode
mavproxy.send('switch 6\n')
wait_mode(mav, 'STABILIZE')
hover(mavproxy, mav)
failed = False
# fly east 60 meters
print("# Going forward %u meters" % side)
mavproxy.send('rc 2 1350\n')
if not wait_distance(mav, side, 5, 60):
failed = True
mavproxy.send('rc 2 1500\n')
# pull throttle low
print("# Enter Failsafe")
mavproxy.send('rc 3 900\n')
tstart = get_sim_time(mav)
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
pos = mav.location()
home_distance = get_distance(HOME, pos)
print("Alt: %u HomeDistance: %.0f" % (m.alt, home_distance))
# check if we've reached home
if m.alt <= 1 and home_distance < 10:
# reduce throttle
mavproxy.send('rc 3 1100\n')
# switch back to stabilize
mavproxy.send('switch 2\n') # land mode
wait_mode(mav, 'LAND')
mavproxy.send('switch 6\n') # stabilize mode
wait_mode(mav, 'STABILIZE')
print("Reached failsafe home OK")
return True
print("Failed to land on failsafe RTL - timed out after %u seconds" % timeout)
# reduce throttle
mavproxy.send('rc 3 1100\n')
# switch back to stabilize mode
mavproxy.send('switch 2\n') # land mode
wait_mode(mav, 'LAND')
mavproxy.send('switch 6\n') # stabilize mode
wait_mode(mav, 'STABILIZE')
return False
def fly_battery_failsafe(mavproxy, mav, timeout=30):
# assume failure
success = False
# switch to loiter mode so that we hold position
mavproxy.send('switch 5\n')
wait_mode(mav, 'LOITER')
mavproxy.send("rc 3 1500\n")
# enable battery failsafe
mavproxy.send("param set FS_BATT_ENABLE 1\n")
# trigger low voltage
mavproxy.send('param set SIM_BATT_VOLTAGE 10\n')
# wait for LAND mode
new_mode = wait_mode(mav, 'LAND')
if new_mode == 'LAND':
success = True
# disable battery failsafe
mavproxy.send('param set FS_BATT_ENABLE 0\n')
# return status
if success:
print("Successfully entered LAND mode after battery failsafe")
else:
print("Failed to enter LAND mode after battery failsafe")
return success
# fly_stability_patch - fly south, then hold loiter within 5m position and altitude and reduce 1 motor to 55% efficiency
def fly_stability_patch(mavproxy, mav, holdtime=30, maxaltchange=5, maxdistchange=10):
'''hold loiter position'''
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
# first south
print("turn south")
mavproxy.send('rc 4 1580\n')
if not wait_heading(mav, 180):
return False
mavproxy.send('rc 4 1500\n')
    #fly south 80m
mavproxy.send('rc 2 1100\n')
if not wait_distance(mav, 80):
return False
mavproxy.send('rc 2 1500\n')
# wait for copter to slow moving
if not wait_groundspeed(mav, 0, 2):
return False
success = True
m = mav.recv_match(type='VFR_HUD', blocking=True)
start_altitude = m.alt
start = mav.location()
tstart = get_sim_time(mav)
tholdstart = get_sim_time(mav)
print("Holding loiter at %u meters for %u seconds" % (start_altitude, holdtime))
# cut motor 1 to 55% efficiency
print("Cutting motor 1 to 55% efficiency")
mavproxy.send('param set SIM_ENGINE_MUL 0.55\n')
while get_sim_time(mav) < tstart + holdtime:
m = mav.recv_match(type='VFR_HUD', blocking=True)
pos = mav.location()
delta = get_distance(start, pos)
alt_delta = math.fabs(m.alt - start_altitude)
print("Loiter Dist: %.2fm, alt:%u" % (delta, m.alt))
if alt_delta > maxaltchange:
print("Loiter alt shifted %u meters (> limit of %u)" % (alt_delta, maxaltchange))
success = False
if delta > maxdistchange:
print("Loiter shifted %u meters (> limit of %u)" % (delta, maxdistchange))
success = False
# restore motor 1 to 100% efficiency
mavproxy.send('param set SIM_ENGINE_MUL 1.0\n')
if success:
print("Stability patch and Loiter OK for %u seconds" % holdtime)
else:
print("Stability Patch FAILED")
return success
# fly_fence_test - fly east until you hit the horizontal circular fence
def fly_fence_test(mavproxy, mav, timeout=180):
'''hold loiter position'''
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
# enable fence
mavproxy.send('param set FENCE_ENABLE 1\n')
# first east
print("turn east")
mavproxy.send('rc 4 1580\n')
if not wait_heading(mav, 160):
return False
mavproxy.send('rc 4 1500\n')
# fly forward (east) at least 20m
pitching_forward = True
mavproxy.send('rc 2 1100\n')
if not wait_distance(mav, 20):
return False
# start timer
tstart = get_sim_time(mav)
while get_sim_time(mav) < tstart + timeout:
m = mav.recv_match(type='VFR_HUD', blocking=True)
pos = mav.location()
home_distance = get_distance(HOME, pos)
print("Alt: %u HomeDistance: %.0f" % (m.alt, home_distance))
# recenter pitch sticks once we reach home so we don't fly off again
if pitching_forward and home_distance < 10 :
pitching_forward = False
mavproxy.send('rc 2 1500\n')
# disable fence
mavproxy.send('param set FENCE_ENABLE 0\n')
if m.alt <= 1 and home_distance < 10:
# reduce throttle
mavproxy.send('rc 3 1000\n')
# switch mode to stabilize
mavproxy.send('switch 2\n') # land mode
wait_mode(mav, 'LAND')
mavproxy.send('switch 6\n') # stabilize mode
wait_mode(mav, 'STABILIZE')
print("Reached home OK")
return True
# disable fence
mavproxy.send('param set FENCE_ENABLE 0\n')
# reduce throttle
mavproxy.send('rc 3 1000\n')
# switch mode to stabilize
mavproxy.send('switch 2\n') # land mode
wait_mode(mav, 'LAND')
mavproxy.send('switch 6\n') # stabilize mode
wait_mode(mav, 'STABILIZE')
print("Fence test failed to reach home - timed out after %u seconds" % timeout)
return False
def show_gps_and_sim_positions(mavproxy, on_off):
if on_off == True:
# turn on simulator display of gps and actual position
mavproxy.send('map set showgpspos 1\n')
mavproxy.send('map set showsimpos 1\n')
else:
# turn off simulator display of gps and actual position
mavproxy.send('map set showgpspos 0\n')
mavproxy.send('map set showsimpos 0\n')
# fly_gps_glitch_loiter_test - fly south east in loiter and test reaction to gps glitch
def fly_gps_glitch_loiter_test(mavproxy, mav, timeout=30, max_distance=20):
'''hold loiter position'''
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
# turn on simulator display of gps and actual position
show_gps_and_sim_positions(mavproxy, True)
# set-up gps glitch array
glitch_lat = [0.0002996,0.0006958,0.0009431,0.0009991,0.0009444,0.0007716,0.0006221]
glitch_lon = [0.0000717,0.0000912,0.0002761,0.0002626,0.0002807,0.0002049,0.0001304]
glitch_num = len(glitch_lat)
print("GPS Glitches:")
for i in range(1,glitch_num):
print("glitch %d %.7f %.7f" % (i,glitch_lat[i],glitch_lon[i]))
# turn south east
print("turn south east")
mavproxy.send('rc 4 1580\n')
if not wait_heading(mav, 150):
show_gps_and_sim_positions(mavproxy, False)
return False
mavproxy.send('rc 4 1500\n')
# fly forward (south east) at least 60m
mavproxy.send('rc 2 1100\n')
if not wait_distance(mav, 60):
show_gps_and_sim_positions(mavproxy, False)
return False
mavproxy.send('rc 2 1500\n')
# wait for copter to slow down
if not wait_groundspeed(mav, 0, 1):
show_gps_and_sim_positions(mavproxy, False)
return False
# record time and position
tstart = get_sim_time(mav)
tnow = tstart
start_pos = sim_location(mav)
success = True
# initialise current glitch
glitch_current = 0;
print("Apply first glitch")
mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
# record position for 30 seconds
while tnow < tstart + timeout:
tnow = get_sim_time(mav)
desired_glitch_num = int((tnow - tstart) * 2.2)
if desired_glitch_num > glitch_current and glitch_current != -1:
glitch_current = desired_glitch_num
# turn off glitching if we've reached the end of the glitch list
if glitch_current >= glitch_num:
glitch_current = -1
print("Completed Glitches")
mavproxy.send('param set SIM_GPS_GLITCH_X 0\n')
mavproxy.send('param set SIM_GPS_GLITCH_Y 0\n')
else:
print("Applying glitch %u" % glitch_current)
#move onto the next glitch
mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
# start displaying distance moved after all glitches applied
if (glitch_current == -1):
m = mav.recv_match(type='VFR_HUD', blocking=True)
curr_pos = sim_location(mav)
moved_distance = get_distance(curr_pos, start_pos)
print("Alt: %u Moved: %.0f" % (m.alt, moved_distance))
if moved_distance > max_distance:
print("Moved over %u meters, Failed!" % max_distance)
success = False
# disable gps glitch
if glitch_current != -1:
glitch_current = -1
mavproxy.send('param set SIM_GPS_GLITCH_X 0\n')
mavproxy.send('param set SIM_GPS_GLITCH_Y 0\n')
show_gps_and_sim_positions(mavproxy, False)
if success:
print("GPS glitch test passed! stayed within %u meters for %u seconds" % (max_distance, timeout))
else:
print("GPS glitch test FAILED!")
return success
# fly_gps_glitch_auto_test - fly mission and test reaction to gps glitch
def fly_gps_glitch_auto_test(mavproxy, mav, timeout=30, max_distance=100):
# set-up gps glitch array
glitch_lat = [0.0002996,0.0006958,0.0009431,0.0009991,0.0009444,0.0007716,0.0006221]
glitch_lon = [0.0000717,0.0000912,0.0002761,0.0002626,0.0002807,0.0002049,0.0001304]
glitch_num = len(glitch_lat)
print("GPS Glitches:")
for i in range(1,glitch_num):
print("glitch %d %.7f %.7f" % (i,glitch_lat[i],glitch_lon[i]))
# Fly mission #1
print("# Load copter_glitch_mission")
if not load_mission_from_file(mavproxy, mav, os.path.join(testdir, "copter_glitch_mission.txt")):
print("load copter_glitch_mission failed")
return False
# turn on simulator display of gps and actual position
show_gps_and_sim_positions(mavproxy, True)
# load the waypoint count
global homeloc
global num_wp
print("test: Fly a mission from 1 to %u" % num_wp)
mavproxy.send('wp set 1\n')
# switch into AUTO mode and raise throttle
mavproxy.send('switch 4\n') # auto mode
wait_mode(mav, 'AUTO')
mavproxy.send('rc 3 1500\n')
# wait until 100m from home
if not wait_distance(mav, 100, 5, 60):
show_gps_and_sim_positions(mavproxy, False)
return False
# record time and position
tstart = get_sim_time(mav)
tnow = tstart
start_pos = sim_location(mav)
# initialise current glitch
glitch_current = 0;
print("Apply first glitch")
mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
# record position for 30 seconds
while glitch_current < glitch_num:
tnow = get_sim_time(mav)
desired_glitch_num = int((tnow - tstart) * 2)
if desired_glitch_num > glitch_current and glitch_current != -1:
glitch_current = desired_glitch_num
# apply next glitch
if glitch_current < glitch_num:
print("Applying glitch %u" % glitch_current)
mavproxy.send('param set SIM_GPS_GLITCH_X %.7f\n' % glitch_lat[glitch_current])
mavproxy.send('param set SIM_GPS_GLITCH_Y %.7f\n' % glitch_lon[glitch_current])
# turn off glitching
print("Completed Glitches")
mavproxy.send('param set SIM_GPS_GLITCH_X 0\n')
mavproxy.send('param set SIM_GPS_GLITCH_Y 0\n')
# continue with the mission
ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
# wait for arrival back home
m = mav.recv_match(type='VFR_HUD', blocking=True)
pos = mav.location()
dist_to_home = get_distance(HOME, pos)
while dist_to_home > 5:
m = mav.recv_match(type='VFR_HUD', blocking=True)
pos = mav.location()
dist_to_home = get_distance(HOME, pos)
print("Dist from home: %u" % dist_to_home)
# turn off simulator display of gps and actual position
show_gps_and_sim_positions(mavproxy, False)
print("GPS Glitch test Auto completed: passed=%s" % ret)
return ret
#fly_simple - assumes the simple bearing is initialised to be directly north
#    flies a box: 50m south, 8 seconds west, 25m north, 8 seconds east
def fly_simple(mavproxy, mav, side=50, timeout=120):
failed = False
# hold position in loiter
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
#set SIMPLE mode for all flight modes
mavproxy.send('param set SIMPLE 63\n')
# switch to stabilize mode
mavproxy.send('switch 6\n')
wait_mode(mav, 'STABILIZE')
mavproxy.send('rc 3 1430\n')
# fly south 50m
print("# Flying south %u meters" % side)
mavproxy.send('rc 1 1300\n')
if not wait_distance(mav, side, 5, 60):
failed = True
mavproxy.send('rc 1 1500\n')
# fly west 8 seconds
print("# Flying west for 8 seconds")
mavproxy.send('rc 2 1300\n')
tstart = get_sim_time(mav)
while get_sim_time(mav) < (tstart + 8):
m = mav.recv_match(type='VFR_HUD', blocking=True)
delta = (get_sim_time(mav) - tstart)
#print("%u" % delta)
mavproxy.send('rc 2 1500\n')
# fly north 25 meters
print("# Flying north %u meters" % (side/2.0))
mavproxy.send('rc 1 1700\n')
if not wait_distance(mav, side/2, 5, 60):
failed = True
mavproxy.send('rc 1 1500\n')
# fly east 8 seconds
print("# Flying east for 8 seconds")
mavproxy.send('rc 2 1700\n')
tstart = get_sim_time(mav)
while get_sim_time(mav) < (tstart + 8):
m = mav.recv_match(type='VFR_HUD', blocking=True)
delta = (get_sim_time(mav) - tstart)
#print("%u" % delta)
mavproxy.send('rc 2 1500\n')
#restore to default
mavproxy.send('param set SIMPLE 0\n')
#hover in place
hover(mavproxy, mav)
return not failed
#fly_super_simple - flies a circle around home for 45 seconds
def fly_super_simple(mavproxy, mav, timeout=45):
failed = False
# hold position in loiter
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
# fly forward 20m
print("# Flying forward 20 meters")
mavproxy.send('rc 2 1300\n')
if not wait_distance(mav, 20, 5, 60):
failed = True
mavproxy.send('rc 2 1500\n')
#set SUPER SIMPLE mode for all flight modes
mavproxy.send('param set SUPER_SIMPLE 63\n')
# switch to stabilize mode
mavproxy.send('switch 6\n')
wait_mode(mav, 'STABILIZE')
mavproxy.send('rc 3 1430\n')
# start copter yawing slowly
mavproxy.send('rc 4 1550\n')
# roll left for timeout seconds
print("# rolling left from pilot's point of view for %u seconds" % timeout)
mavproxy.send('rc 1 1300\n')
tstart = get_sim_time(mav)
while get_sim_time(mav) < (tstart + timeout):
m = mav.recv_match(type='VFR_HUD', blocking=True)
delta = (get_sim_time(mav) - tstart)
# stop rolling and yawing
mavproxy.send('rc 1 1500\n')
mavproxy.send('rc 4 1500\n')
#restore simple mode parameters to default
mavproxy.send('param set SUPER_SIMPLE 0\n')
#hover in place
hover(mavproxy, mav)
return not failed
#fly_circle - flies a circle with 20m radius
def fly_circle(mavproxy, mav, maxaltchange=10, holdtime=36):
# hold position in loiter
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
# face west
print("turn west")
mavproxy.send('rc 4 1580\n')
if not wait_heading(mav, 270):
return False
mavproxy.send('rc 4 1500\n')
#set CIRCLE radius
mavproxy.send('param set CIRCLE_RADIUS 3000\n')
# fly forward (east) at least 100m
mavproxy.send('rc 2 1100\n')
if not wait_distance(mav, 100):
return False
# return pitch stick back to middle
mavproxy.send('rc 2 1500\n')
# set CIRCLE mode
mavproxy.send('switch 1\n') # circle mode
wait_mode(mav, 'CIRCLE')
# wait
m = mav.recv_match(type='VFR_HUD', blocking=True)
start_altitude = m.alt
tstart = get_sim_time(mav)
tholdstart = get_sim_time(mav)
print("Circle at %u meters for %u seconds" % (start_altitude, holdtime))
while get_sim_time(mav) < tstart + holdtime:
m = mav.recv_match(type='VFR_HUD', blocking=True)
print("heading %u" % m.heading)
print("CIRCLE OK for %u seconds" % holdtime)
return True
# fly_auto_test - fly mission which tests a significant number of commands
def fly_auto_test(mavproxy, mav):
# Fly mission #1
print("# Load copter_mission")
if not load_mission_from_file(mavproxy, mav, os.path.join(testdir, "copter_mission.txt")):
print("load copter_mission failed")
return False
# load the waypoint count
global homeloc
global num_wp
print("test: Fly a mission from 1 to %u" % num_wp)
mavproxy.send('wp set 1\n')
# switch into AUTO mode and raise throttle
mavproxy.send('switch 4\n') # auto mode
wait_mode(mav, 'AUTO')
mavproxy.send('rc 3 1500\n')
# fly the mission
ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
# set throttle to minimum
mavproxy.send('rc 3 1000\n')
# wait for disarm
mav.motors_disarmed_wait()
print("MOTORS DISARMED OK")
print("Auto mission completed: passed=%s" % ret)
return ret
# fly_avc_test - fly AVC mission
def fly_avc_test(mavproxy, mav):
# upload mission from file
print("# Load copter_AVC2013_mission")
if not load_mission_from_file(mavproxy, mav, os.path.join(testdir, "copter_AVC2013_mission.txt")):
print("load copter_AVC2013_mission failed")
return False
# load the waypoint count
global homeloc
global num_wp
print("Fly AVC mission from 1 to %u" % num_wp)
mavproxy.send('wp set 1\n')
# switch into AUTO mode and raise throttle
mavproxy.send('switch 4\n') # auto mode
wait_mode(mav, 'AUTO')
mavproxy.send('rc 3 1500\n')
# fly the mission
ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
# set throttle to minimum
mavproxy.send('rc 3 1000\n')
# wait for disarm
mav.motors_disarmed_wait()
print("MOTORS DISARMED OK")
print("AVC mission completed: passed=%s" % ret)
return ret
def land(mavproxy, mav, timeout=60):
'''land the quad'''
print("STARTING LANDING")
mavproxy.send('switch 2\n') # land mode
wait_mode(mav, 'LAND')
print("Entered Landing Mode")
ret = wait_altitude(mav, -5, 1)
print("LANDING: ok= %s" % ret)
return ret
def fly_mission(mavproxy, mav, height_accuracy=-1, target_altitude=None):
'''fly a mission from a file'''
global homeloc
global num_wp
print("test: Fly a mission from 1 to %u" % num_wp)
mavproxy.send('wp set 1\n')
mavproxy.send('switch 4\n') # auto mode
wait_mode(mav, 'AUTO')
ret = wait_waypoint(mav, 0, num_wp-1, timeout=500, mode='AUTO')
expect_msg = "Reached Command #%u" % (num_wp-1)
if (ret):
mavproxy.expect(expect_msg)
print("test: MISSION COMPLETE: passed=%s" % ret)
# wait here until ready
mavproxy.send('switch 5\n') # loiter mode
wait_mode(mav, 'LOITER')
return ret
def load_mission_from_file(mavproxy, mav, filename):
'''Load a mission from a file to flight controller'''
global num_wp
mavproxy.send('wp load %s\n' % filename)
mavproxy.expect('flight plan received')
mavproxy.send('wp list\n')
mavproxy.expect('Requesting [0-9]+ waypoints')
# update num_wp
wploader = mavwp.MAVWPLoader()
wploader.load(filename)
num_wp = wploader.count()
return True
def save_mission_to_file(mavproxy, mav, filename):
global num_wp
mavproxy.send('wp save %s\n' % filename)
mavproxy.expect('Saved ([0-9]+) waypoints')
num_wp = int(mavproxy.match.group(1))
print("num_wp: %d" % num_wp)
return True
def setup_rc(mavproxy):
'''setup RC override control'''
for chan in range(1,9):
mavproxy.send('rc %u 1500\n' % chan)
# zero throttle
mavproxy.send('rc 3 1000\n')
def fly_ArduCopter(viewerip=None, map=False):
'''fly ArduCopter in SIL
you can pass viewerip as an IP address to optionally send fg and
mavproxy packets too for local viewing of the flight in real time
'''
global homeloc
if TARGET != 'sitl':
util.build_SIL('ArduCopter', target=TARGET)
home = "%f,%f,%u,%u" % (HOME.lat, HOME.lng, HOME.alt, HOME.heading)
sil = util.start_SIL('ArduCopter', wipe=True, model='+', home=home, speedup=speedup_default)
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options='--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter')
mavproxy.expect('Received [0-9]+ parameters')
# setup test parameters
mavproxy.send("param load %s/copter_params.parm\n" % testdir)
mavproxy.expect('Loaded [0-9]+ parameters')
# reboot with new parameters
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sil = util.start_SIL('ArduCopter', model='+', home=home, speedup=speedup_default)
options = '--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter --streamrate=5'
if viewerip:
options += ' --out=%s:14550' % viewerip
if map:
options += ' --map'
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options=options)
mavproxy.expect('Telemetry log: (\S+)')
logfile = mavproxy.match.group(1)
print("LOGFILE %s" % logfile)
buildlog = util.reltopdir("../buildlogs/ArduCopter-test.tlog")
print("buildlog=%s" % buildlog)
copyTLog = False
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
print( "WARN: Failed to create symlink: " + logfile + " => " + buildlog + ", Will copy tlog manually to target location" )
copyTLog = True
# the received parameters can come before or after the ready to fly message
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
util.expect_setup_callback(mavproxy, expect_callback)
expect_list_clear()
expect_list_extend([sil, mavproxy])
# get a mavlink connection going
try:
mav = mavutil.mavlink_connection('127.0.0.1:19550', robust_parsing=True)
except Exception, msg:
print("Failed to start mavlink connection on 127.0.0.1:19550" % msg)
raise
mav.message_hooks.append(message_hook)
mav.idle_hooks.append(idle_hook)
failed = False
failed_test_msg = "None"
try:
mav.wait_heartbeat()
setup_rc(mavproxy)
homeloc = mav.location()
# wait 10sec to allow EKF to settle
wait_seconds(mav, 10)
# Arm
print("# Arm motors")
if not arm_motors(mavproxy, mav):
failed_test_msg = "arm_motors failed"
print(failed_test_msg)
failed = True
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fly a square in Stabilize mode
print("#")
print("########## Fly a square and save WPs with CH7 switch ##########")
print("#")
if not fly_square(mavproxy, mav):
failed_test_msg = "fly_square failed"
print(failed_test_msg)
failed = True
# save the stored mission to file
print("# Save out the CH7 mission to file")
if not save_mission_to_file(mavproxy, mav, os.path.join(testdir, "ch7_mission.txt")):
failed_test_msg = "save_mission_to_file failed"
print(failed_test_msg)
failed = True
# fly the stored mission
print("# Fly CH7 saved mission")
if not fly_mission(mavproxy, mav,height_accuracy = 0.5, target_altitude=10):
failed_test_msg = "fly ch7_mission failed"
print(failed_test_msg)
failed = True
# Throttle Failsafe
print("#")
print("########## Test Failsafe ##########")
print("#")
if not fly_throttle_failsafe(mavproxy, mav):
failed_test_msg = "fly_throttle_failsafe failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Battery failsafe
if not fly_battery_failsafe(mavproxy, mav):
failed_test_msg = "fly_battery_failsafe failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Stability patch
print("#")
print("########## Test Stability Patch ##########")
print("#")
if not fly_stability_patch(mavproxy, mav, 30):
failed_test_msg = "fly_stability_patch failed"
print(failed_test_msg)
failed = True
# RTL
print("# RTL #")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after stab patch failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fence test
print("#")
print("########## Test Horizontal Fence ##########")
print("#")
if not fly_fence_test(mavproxy, mav, 180):
failed_test_msg = "fly_fence_test failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fly GPS Glitch Loiter test
print("# GPS Glitch Loiter Test")
if not fly_gps_glitch_loiter_test(mavproxy, mav):
failed_test_msg = "fly_gps_glitch_loiter_test failed"
print(failed_test_msg)
failed = True
# RTL after GPS Glitch Loiter test
print("# RTL #")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL failed"
print(failed_test_msg)
failed = True
# Fly GPS Glitch test in auto mode
print("# GPS Glitch Auto Test")
if not fly_gps_glitch_auto_test(mavproxy, mav):
failed_test_msg = "fly_gps_glitch_auto_test failed"
print(failed_test_msg)
failed = True
# take-off ahead of next test
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Loiter for 10 seconds
print("#")
print("########## Test Loiter for 10 seconds ##########")
print("#")
if not loiter(mavproxy, mav):
failed_test_msg = "loiter failed"
print(failed_test_msg)
failed = True
# Loiter Climb
print("#")
print("# Loiter - climb to 30m")
print("#")
if not change_alt(mavproxy, mav, 30):
failed_test_msg = "change_alt climb failed"
print(failed_test_msg)
failed = True
# Loiter Descend
print("#")
print("# Loiter - descend to 20m")
print("#")
if not change_alt(mavproxy, mav, 20):
failed_test_msg = "change_alt descend failed"
print(failed_test_msg)
failed = True
# RTL
print("#")
print("########## Test RTL ##########")
print("#")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after Loiter climb/descend failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Simple mode
print("# Fly in SIMPLE mode")
if not fly_simple(mavproxy, mav):
failed_test_msg = "fly_simple failed"
print(failed_test_msg)
failed = True
# RTL
print("#")
print("########## Test RTL ##########")
print("#")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after simple mode failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Fly a circle in super simple mode
print("# Fly a circle in SUPER SIMPLE mode")
if not fly_super_simple(mavproxy, mav):
failed_test_msg = "fly_super_simple failed"
print(failed_test_msg)
failed = True
# RTL
print("# RTL #")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after super simple mode failed"
print(failed_test_msg)
failed = True
# Takeoff
print("# Takeoff")
if not takeoff(mavproxy, mav, 10):
failed_test_msg = "takeoff failed"
print(failed_test_msg)
failed = True
# Circle mode
print("# Fly CIRCLE mode")
if not fly_circle(mavproxy, mav):
failed_test_msg = "fly_circle failed"
print(failed_test_msg)
failed = True
# RTL
print("#")
print("########## Test RTL ##########")
print("#")
if not fly_RTL(mavproxy, mav):
failed_test_msg = "fly_RTL after circle failed"
print(failed_test_msg)
failed = True
print("# Fly copter mission")
if not fly_auto_test(mavproxy, mav):
failed_test_msg = "fly_auto_test failed"
print(failed_test_msg)
failed = True
else:
print("Flew copter mission OK")
# wait for disarm
mav.motors_disarmed_wait()
if not log_download(mavproxy, mav, util.reltopdir("../buildlogs/ArduCopter-log.bin")):
failed_test_msg = "log_download failed"
print(failed_test_msg)
failed = True
except pexpect.TIMEOUT, failed_test_msg:
failed_test_msg = "Timeout"
failed = True
mav.close()
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
if os.path.exists('ArduCopter-valgrind.log'):
os.chmod('ArduCopter-valgrind.log', 0644)
shutil.copy("ArduCopter-valgrind.log", util.reltopdir("../buildlogs/ArduCopter-valgrind.log"))
# [2014/05/07] FC Because I'm doing a cross machine build (source is on host, build is on guest VM) I cannot hard link
# This flag tells me that I need to copy the data out
if copyTLog:
shutil.copy(logfile, buildlog)
if failed:
print("FAILED: %s" % failed_test_msg)
return False
return True
def fly_CopterAVC(viewerip=None, map=False):
'''fly ArduCopter in SIL for AVC2013 mission
'''
global homeloc
if TARGET != 'sitl':
util.build_SIL('ArduCopter', target=TARGET)
home = "%f,%f,%u,%u" % (AVCHOME.lat, AVCHOME.lng, AVCHOME.alt, AVCHOME.heading)
sil = util.start_SIL('ArduCopter', wipe=True, model='+', home=home, speedup=speedup_default)
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options='--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter')
mavproxy.expect('Received [0-9]+ parameters')
# setup test parameters
mavproxy.send("param load %s/copter_AVC2013_params.parm\n" % testdir)
mavproxy.expect('Loaded [0-9]+ parameters')
# reboot with new parameters
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
sil = util.start_SIL('ArduCopter', model='+', home=home, speedup=speedup_default)
options = '--sitl=127.0.0.1:5501 --out=127.0.0.1:19550 --quadcopter --streamrate=5'
if viewerip:
options += ' --out=%s:14550' % viewerip
if map:
options += ' --map'
mavproxy = util.start_MAVProxy_SIL('ArduCopter', options=options)
mavproxy.expect('Telemetry log: (\S+)')
logfile = mavproxy.match.group(1)
print("LOGFILE %s" % logfile)
buildlog = util.reltopdir("../buildlogs/CopterAVC-test.tlog")
print("buildlog=%s" % buildlog)
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
pass
# the received parameters can come before or after the ready to fly message
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
mavproxy.expect(['Received [0-9]+ parameters', 'Ready to FLY'])
util.expect_setup_callback(mavproxy, expect_callback)
expect_list_clear()
expect_list_extend([sil, mavproxy])
if map:
mavproxy.send('map icon 40.072467969730496 -105.2314389590174\n')
mavproxy.send('map icon 40.072600990533829 -105.23146100342274\n')
# get a mavlink connection going
try:
mav = mavutil.mavlink_connection('127.0.0.1:19550', robust_parsing=True)
except Exception, msg:
print("Failed to start mavlink connection on 127.0.0.1:19550" % msg)
raise
mav.message_hooks.append(message_hook)
mav.idle_hooks.append(idle_hook)
failed = False
failed_test_msg = "None"
try:
mav.wait_heartbeat()
setup_rc(mavproxy)
homeloc = mav.location()
# wait 10sec to allow EKF to settle
wait_seconds(mav, 10)
# Arm
print("# Arm motors")
if not arm_motors(mavproxy, mav):
failed_test_msg = "arm_motors failed"
print(failed_test_msg)
failed = True
print("# Fly AVC mission")
if not fly_avc_test(mavproxy, mav):
failed_test_msg = "fly_avc_test failed"
print(failed_test_msg)
failed = True
else:
print("Flew AVC mission OK")
#mission includes disarm at end so should be ok to download logs now
if not log_download(mavproxy, mav, util.reltopdir("../buildlogs/CopterAVC-log.bin")):
failed_test_msg = "log_download failed"
print(failed_test_msg)
failed = True
except pexpect.TIMEOUT, failed_test_msg:
failed_test_msg = "Timeout"
failed = True
mav.close()
util.pexpect_close(mavproxy)
util.pexpect_close(sil)
if failed:
print("FAILED: %s" % failed_test_msg)
return False
return True
| gpl-3.0 |
kevinpt/hdlparse | hdlparse/verilog_parser.py | 1 | 7107 | # -*- coding: utf-8 -*-
# Copyright © 2017 Kevin Thibedeau
# Distributed under the terms of the MIT license
from __future__ import print_function
import re, os, io, ast, pprint, collections
from minilexer import MiniLexer
'''Verilog documentation parser'''
verilog_tokens = {
'root': [
(r'\bmodule\s+(\w+)\s*', 'module', 'module'),
(r'/\*', 'block_comment', 'block_comment'),
(r'//#+(.*)\n', 'metacomment'),
(r'//.*\n', None),
],
'module': [
(r'parameter\s*(signed|integer|realtime|real|time)?\s*(\[[^]]+\])?', 'parameter_start', 'parameters'),
(r'(input|inout|output)\s*(reg|supply0|supply1|tri|triand|trior|tri0|tri1|wire|wand|wor)?\s*(signed)?\s*(\[[^]]+\])?', 'module_port_start', 'module_port'),
(r'endmodule', 'end_module', '#pop'),
(r'/\*', 'block_comment', 'block_comment'),
(r'//#\s*{{(.*)}}\n', 'section_meta'),
(r'//.*\n', None),
],
'parameters': [
(r'\s*parameter\s*(signed|integer|realtime|real|time)?\s*(\[[^]]+\])?', 'parameter_start'),
(r'\s*(\w+)[^),;]*', 'param_item'),
(r',', None),
(r'[);]', None, '#pop'),
],
'module_port': [
(r'\s*(input|inout|output)\s*(reg|supply0|supply1|tri|triand|trior|tri0|tri1|wire|wand|wor)?\s*(signed)?\s*(\[[^]]+\])?', 'module_port_start'),
(r'\s*(\w+)\s*,?', 'port_param'),
(r'[);]', None, '#pop'),
(r'//#\s*{{(.*)}}\n', 'section_meta'),
(r'//.*\n', None),
],
'block_comment': [
(r'\*/', 'end_comment', '#pop'),
],
}
VerilogLexer = MiniLexer(verilog_tokens)
class VerilogObject(object):
'''Base class for parsed Verilog objects'''
def __init__(self, name, desc=None):
self.name = name
self.kind = 'unknown'
self.desc = desc
class VerilogParameter(object):
'''Parameter and port to a module'''
def __init__(self, name, mode=None, data_type=None, default_value=None, desc=None):
self.name = name
self.mode = mode
self.data_type = data_type
self.default_value = default_value
self.desc = desc
def __str__(self):
if self.mode is not None:
param = '{} : {} {}'.format(self.name, self.mode, self.data_type)
else:
param = '{} : {}'.format(self.name, self.data_type)
if self.default_value is not None:
param = '{} := {}'.format(param, self.default_value)
return param
def __repr__(self):
return "VerilogParameter('{}')".format(self.name)
class VerilogModule(VerilogObject):
'''Module definition'''
def __init__(self, name, ports, generics=None, sections=None, desc=None):
VerilogObject.__init__(self, name, desc)
self.kind = 'module'
# Verilog params
self.generics = generics if generics is not None else []
self.ports = ports
self.sections = sections if sections is not None else {}
def __repr__(self):
return "VerilogModule('{}') {}".format(self.name, self.ports)
def parse_verilog_file(fname):
'''Parse a named Verilog file
Args:
fname (str): File to parse.
Returns:
List of parsed objects.
'''
with open(fname, 'rt') as fh:
text = fh.read()
return parse_verilog(text)
def parse_verilog(text):
'''Parse a text buffer of Verilog code
Args:
text (str): Source code to parse
Returns:
List of parsed objects.
'''
lex = VerilogLexer
name = None
kind = None
saved_type = None
mode = 'input'
ptype = 'wire'
metacomments = []
parameters = []
param_items = []
generics = []
ports = collections.OrderedDict()
sections = []
port_param_index = 0
last_item = None
array_range_start_pos = 0
objects = []
for pos, action, groups in lex.run(text):
if action == 'metacomment':
if last_item is None:
metacomments.append(groups[0])
else:
last_item.desc = groups[0]
if action == 'section_meta':
sections.append((port_param_index, groups[0]))
elif action == 'module':
kind = 'module'
name = groups[0]
generics = []
ports = collections.OrderedDict()
param_items = []
sections = []
port_param_index = 0
elif action == 'parameter_start':
net_type, vec_range = groups
new_ptype = ''
if net_type is not None:
new_ptype += net_type
if vec_range is not None:
new_ptype += ' ' + vec_range
ptype = new_ptype
elif action == 'param_item':
generics.append(VerilogParameter(groups[0], 'in', ptype))
elif action == 'module_port_start':
new_mode, net_type, signed, vec_range = groups
new_ptype = ''
if net_type is not None:
new_ptype += net_type
if signed is not None:
new_ptype += ' ' + signed
if vec_range is not None:
new_ptype += ' ' + vec_range
# Complete pending items
for i in param_items:
ports[i] = VerilogParameter(i, mode, ptype)
param_items = []
if len(ports) > 0:
last_item = next(reversed(ports))
# Start with new mode
mode = new_mode
ptype = new_ptype
elif action == 'port_param':
ident = groups[0]
param_items.append(ident)
port_param_index += 1
elif action == 'end_module':
# Finish any pending ports
for i in param_items:
ports[i] = VerilogParameter(i, mode, ptype)
vobj = VerilogModule(name, ports.values(), generics, dict(sections), metacomments)
objects.append(vobj)
last_item = None
metacomments = []
return objects
def is_verilog(fname):
'''Identify file as Verilog by its extension
Args:
fname (str): File name to check
Returns:
True when file has a Verilog extension.
'''
return os.path.splitext(fname)[1].lower() in ('.vlog', '.v')
class VerilogExtractor(object):
'''Utility class that caches parsed objects'''
def __init__(self):
self.object_cache = {}
def extract_objects(self, fname, type_filter=None):
'''Extract objects from a source file
Args:
fname(str): Name of file to read from
type_filter (class, optional): Object class to filter results
Returns:
List of objects extracted from the file.
'''
objects = []
if fname in self.object_cache:
objects = self.object_cache[fname]
else:
with io.open(fname, 'rt', encoding='utf-8') as fh:
text = fh.read()
objects = parse_verilog(text)
self.object_cache[fname] = objects
if type_filter:
objects = [o for o in objects if isinstance(o, type_filter)]
return objects
def extract_objects_from_source(self, text, type_filter=None):
'''Extract object declarations from a text buffer
Args:
text (str): Source code to parse
type_filter (class, optional): Object class to filter results
Returns:
List of parsed objects.
'''
objects = parse_verilog(text)
if type_filter:
objects = [o for o in objects if isinstance(o, type_filter)]
return objects
def is_array(self, data_type):
'''Check if a type is an array type
Args:
data_type (str): Data type
Returns:
True when a data type is an array.
'''
return '[' in data_type
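# Illustrative usage sketch (added here, not part of the original module; the
# command-line driver below is a made-up example of using the extractor API
# defined above):
if __name__ == '__main__':
    import sys
    vex = VerilogExtractor()
    for fname in sys.argv[1:]:
        if is_verilog(fname):
            # list each parsed module with its port and parameter counts
            for m in vex.extract_objects(fname, type_filter=VerilogModule):
                print('{}: {} ports, {} parameters'.format(m.name, len(m.ports), len(m.generics)))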
| mit |
malin1993ml/h-store | third_party/python/fabric/tasks.py | 9 | 1652 | from functools import wraps
class Task(object):
"""
Abstract base class for objects wishing to be picked up as Fabric tasks.
Instances of subclasses will be treated as valid tasks when present in
fabfiles loaded by the :doc:`fab </usage/fab>` tool.
For details on how to implement and use `~fabric.tasks.Task` subclasses,
please see the usage documentation on :ref:`new-style tasks
<new-style-tasks>`.
.. versionadded:: 1.1
"""
name = 'undefined'
use_task_objects = True
aliases = None
is_default = False
# TODO: make it so that this wraps other decorators as expected
def __init__(self, alias=None, aliases=None, default=False,
*args, **kwargs):
if alias is not None:
self.aliases = [alias, ]
if aliases is not None:
self.aliases = aliases
self.is_default = default
def run(self):
raise NotImplementedError
class WrappedCallableTask(Task):
"""
Wraps a given callable transparently, while marking it as a valid Task.
Generally used via `@task <~fabric.decorators.task>` and not directly.
.. versionadded:: 1.1
"""
def __init__(self, callable, *args, **kwargs):
super(WrappedCallableTask, self).__init__(*args, **kwargs)
self.wrapped = callable
self.__name__ = self.name = callable.__name__
self.__doc__ = callable.__doc__
def __call__(self, *args, **kwargs):
return self.run(*args, **kwargs)
def run(self, *args, **kwargs):
return self.wrapped(*args, **kwargs)
def __getattr__(self, k):
return getattr(self.wrapped, k)
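if __name__ == '__main__':
    # Illustrative sketch (added here, not part of the original module): wrapping a
    # plain function by hand; in normal use the @task decorator from
    # fabric.decorators builds the WrappedCallableTask for you.
    def greet(name):
        return 'Hello, %s' % name
    greet_task = WrappedCallableTask(greet, alias='hi')
    print(greet_task.name)       # -> 'greet' (taken from the wrapped callable)
    print(greet_task.aliases)    # -> ['hi']
    print(greet_task('world'))   # -> 'Hello, world' (run() calls the wrapped callable)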
| gpl-3.0 |
arielalmendral/ert | python/python/ert/config/config_content.py | 2 | 7090 | # Copyright (C) 2015 Statoil ASA, Norway.
#
# The file 'config_content.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
import os.path
from ert.config import UnrecognizedEnum, ContentTypeEnum, ConfigError, ConfigPrototype, SchemaItem
from cwrap import BaseCClass
class ContentNode(BaseCClass):
TYPE_NAME = "content_node"
_iget = ConfigPrototype("char* config_content_node_iget( content_node , int)")
_size = ConfigPrototype("int config_content_node_get_size( content_node )")
_get_full_string = ConfigPrototype("char* config_content_node_get_full_string( content_node , char* )")
_iget_type = ConfigPrototype("config_content_type_enum config_content_node_iget_type( content_node , int)")
_iget_as_abspath = ConfigPrototype("char* config_content_node_iget_as_abspath( content_node , int)")
_iget_as_relpath = ConfigPrototype("char* config_content_node_iget_as_relpath( content_node , int)")
_iget_as_string = ConfigPrototype("char* config_content_node_iget( content_node , int)")
_iget_as_int = ConfigPrototype("int config_content_node_iget_as_int( content_node , int)")
_iget_as_double = ConfigPrototype("double config_content_node_iget_as_double( content_node , int)")
_iget_as_path = ConfigPrototype("char* config_content_node_iget_as_path( content_node , int)")
_iget_as_bool = ConfigPrototype("bool config_content_node_iget_as_bool( content_node , int)")
_iget_as_isodate = ConfigPrototype("time_t config_content_node_iget_as_isodate( content_node , int)")
typed_get = {
ContentTypeEnum.CONFIG_STRING: _iget_as_string,
ContentTypeEnum.CONFIG_INT: _iget_as_int,
ContentTypeEnum.CONFIG_FLOAT: _iget_as_double,
ContentTypeEnum.CONFIG_PATH: _iget_as_path,
ContentTypeEnum.CONFIG_EXISTING_PATH: _iget_as_path,
ContentTypeEnum.CONFIG_BOOL: _iget_as_bool,
ContentTypeEnum.CONFIG_ISODATE: _iget_as_isodate
}
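    # Note added for clarity: __getitem__ below looks up the node's declared
    # content type and dispatches through this table, so node[i] already returns
    # a value converted to the schema's type (string, int, double, bool, date or
    # path string) rather than the raw text.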
def __init__(self):
raise NotImplementedError("Class can not be instantiated directly!")
def __len__(self):
return self._size()
def __assertIndex(self , index):
if isinstance(index, int):
if index < 0:
index += len(self)
if not 0 <= index < len(self):
raise IndexError
return index
else:
raise TypeError("Invalid argument type: %s" % index)
def __getitem__(self, index):
index = self.__assertIndex(index)
content_type = self._iget_type(index)
typed_get = self.typed_get[content_type]
return typed_get(self, index)
def getPath(self , index = 0, absolute = True , relative_start = None):
index = self.__assertIndex(index)
content_type = self._iget_type(index)
if content_type in [ContentTypeEnum.CONFIG_EXISTING_PATH , ContentTypeEnum.CONFIG_PATH]:
if absolute:
return self._iget_as_abspath(index)
else:
if relative_start is None:
return self._iget_as_relpath(index)
else:
abs_path = self._iget_as_abspath(index)
return os.path.relpath( abs_path , relative_start )
else:
raise TypeError("The getPath() method can only be called on PATH items")
def content(self, sep=" "):
return self._get_full_string(sep)
def igetString(self , index):
index = self.__assertIndex(index)
return self._iget(index )
def asList(self):
return [x for x in self]
class ContentItem(BaseCClass):
TYPE_NAME = "content_item"
_alloc = ConfigPrototype("void* config_content_item_alloc( schema_item , void* )" , bind = False )
_size = ConfigPrototype("int config_content_item_get_size( content_item )")
_iget_content_node = ConfigPrototype("content_node_ref config_content_item_iget_node( content_item , int)")
_free = ConfigPrototype("void config_content_item_free( content_item )")
def __init__(self , schema_item):
path_elm = None
c_ptr = self._alloc( schema_item , path_elm)
super( ContentItem, self).__init__(c_ptr)
def __len__(self):
return self._size()
def __getitem__(self, index):
if isinstance(index, int):
if index < 0:
index += len(self)
if (index >= 0) and (index < len(self)):
return self._iget_content_node(index).setParent(self)
else:
raise IndexError
else:
raise TypeError("[] operator must have integer index")
def last(self):
return self[-1]
def getValue(self , item_index = -1 , node_index = 0):
node = self[item_index]
return node[node_index]
def free(self):
self._free( )
class ConfigContent(BaseCClass):
TYPE_NAME = "config_content"
_free = ConfigPrototype("void config_content_free( config_content )")
_is_valid = ConfigPrototype("bool config_content_is_valid( config_content )")
_has_key = ConfigPrototype("bool config_content_has_item( config_content , char*)")
_get_item = ConfigPrototype("content_item_ref config_content_get_item( config_content , char*)")
_get_errors = ConfigPrototype("config_error_ref config_content_get_errors( config_content )")
_get_warnings = ConfigPrototype("stringlist_ref config_content_get_warnings( config_content )")
def __init__(self):
raise NotImplementedError("Class can not be instantiated directly!")
def __contains__(self , key):
return self._has_key(key)
def setParser(self , parser):
self._parser = parser
def __getitem__(self , key):
if key in self:
item = self._get_item(key)
item.setParent( self )
return item
else:
if key in self._parser:
schema_item = SchemaItem( key )
return ContentItem( schema_item )
else:
raise KeyError("No such key: %s" % key)
def hasKey(self,key):
return key in self
def getValue(self , key , item_index = -1 , node_index = 0):
item = self[key]
return item.getValue( item_index , node_index )
def isValid(self):
return self._is_valid()
def free(self):
self._free()
def getErrors(self):
""" @rtype: ConfigError """
return self._get_errors()
def getWarnings(self):
""" @rtype: ConfigError """
return self._get_warnings( )
| gpl-3.0 |
SteveG/tracks-queue | src/widgetActionEditor.py | 1 | 27462 | #!/usr/bin/env python
"""
Copyright (C) 2010 Stephen Georg
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
For Questions about this program please contact
Stephen Georg at [email protected]
A copy of the license should be included in the file LICENSE.txt
"""
#NOTE PyQt4.QtCore.QDateTime.currentDateTime().toUTC().toLocalTime()
#>>> foo = PyQt4.QtCore.QDateTime.fromString("2010-05-27 11:52:14", "yyyy-MM-dd HH:mm:ss")
#>>> foo.setTimeSpec(1)
#>>> foo.toLocalTime()
#PyQt4.QtCore.QDateTime(2010, 5, 27, 21, 22, 14)
"""
Provides an editor side pane for GTD actions
"""
from PyQt4 import QtCore, QtGui
import logging
class WidgetActionEditor(QtGui.QGroupBox):
"""Provides a sidebar widget for editing/creating actions"""
# TODO define signals emitted by this widget
__pyqtSignals__ = ("actionModified()"
)
actionModified = QtCore.pyqtSignal()
def __init__(self, dbCon):
logging.info("TracksActionEditor initiated...")
# The current item id
self.current_id = None
self.databaseCon = dbCon
self.current_user_id = None
# default values
self.defaultContext = None
self.defaultProject = None
self.defaultTags = None
QtGui.QGroupBox.__init__(self)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sizePolicy().hasHeightForWidth())
self.setSizePolicy(sizePolicy)
self.setMaximumSize(QtCore.QSize(250, 16777215))
self.setMinimumSize(QtCore.QSize(250, 0))
#self.setTitle("Johnny")
self.verticalLayout = QtGui.QVBoxLayout(self)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setMargin(4)
# Description line edit
self.descriptionLabel = QtGui.QLabel(self)
self.descriptionLabel.setText("Description")
self.verticalLayout.addWidget(self.descriptionLabel)
self.descriptionEdit = QtGui.QLineEdit(self)
self.verticalLayout.addWidget(self.descriptionEdit)
# Notes edit
self.notesLabel = QtGui.QLabel(self)
self.notesLabel.setText("Notes")
self.verticalLayout.addWidget(self.notesLabel)
self.notesEdit = QtGui.QPlainTextEdit(self)
self.notesEdit.setTabChangesFocus(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.notesEdit.sizePolicy().hasHeightForWidth())
self.notesEdit.setSizePolicy(sizePolicy)
self.notesEdit.setMinimumSize(QtCore.QSize(0, 120))
self.notesEdit.setMaximumSize(QtCore.QSize(16777215, 120))
self.verticalLayout.addWidget(self.notesEdit)
# Project Line Edit
self.projectLabel = QtGui.QLabel(self)
self.projectLabel.setText("Project")
self.verticalLayout.addWidget(self.projectLabel)
self.projectEdit = QtGui.QLineEdit(self)
self.verticalLayout.addWidget(self.projectEdit)
# Add string list completer
# TODO get projects from database
projectList = []
for row in self.databaseCon.execute("select name FROM projects ORDER BY name"):
projectList.append(row[0])
projectCompleter = QtGui.QCompleter(projectList)
projectCompleter.setCompletionMode(1)
self.projectEdit.setCompleter(projectCompleter)
# Context Line Edit
self.contextLabel = QtGui.QLabel(self)
self.contextLabel.setText("Context")
self.verticalLayout.addWidget(self.contextLabel)
self.contextEdit = QtGui.QLineEdit(self)
self.verticalLayout.addWidget(self.contextEdit)
# Add string list completer
# TODO get contexts from database
contextList = []
for row in self.databaseCon.execute("select name FROM contexts ORDER BY name"):
contextList.append(row[0])
contextStringList = QtCore.QStringList(contextList)
contextCompleter = QtGui.QCompleter(contextStringList)
contextCompleter.setCompletionMode(1) # This displays all possible options, but pressing
# down goes to the best match
self.contextEdit.setCompleter(contextCompleter)
# Tags Line Edit
# TODO find existing tags from database
self.existingTags = []
for row in self.databaseCon.execute("select name FROM tags"):
self.existingTags.append(row[0])
self.existingTags.append("FAKE-TAG")
#
self.tagsLabel = QtGui.QLabel(self)
self.tagsLabel.setText("Tags (Separate with commas)")
self.verticalLayout.addWidget(self.tagsLabel)
self.tagsEdit = QtGui.QLineEdit(self)
self.verticalLayout.addWidget(self.tagsEdit)
# TODO add completion. Consider this: http://john.nachtimwald.com/2009/07/04/qcompleter-and-comma-separated-tags/
# make tags all lower case
# use set(list of strings) and set.diffence
#QObject.connect(self, SIGNAL('textChanged(QString)'), self.text_changed)
self.tagsEdit.textChanged.connect(self.tagsEditChanged)
self.tagCompleter = QtGui.QCompleter(QtCore.QStringList(self.existingTags))
self.tagCompleter.setWidget(self.tagsEdit)
self.tagCompleter.setCompletionMode(1)
self.tagCompleter.activated.connect(self.tagsCompleterSelect)
# make tags invisible
self.tagsLabel.setVisible(False)
self.tagsEdit.setVisible(False)
# Date fields
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.verticalLayout_1 = QtGui.QVBoxLayout()
self.verticalLayout_1.setSpacing(0)
self.dueEdit = QtGui.QDateEdit(QtCore.QDate.currentDate())
self.dueEdit.setCalendarPopup(True)
self.dueCheckBox = QtGui.QCheckBox()
self.dueCheckBox.setText("Due")
self.dueCheckBox.stateChanged.connect(self.dueDateCheckChanged)
self.dueEdit.setDisabled(True)
self.verticalLayout_1.addWidget(self.dueCheckBox)
self.verticalLayout_1.addWidget(self.dueEdit)
self.horizontalLayout_2.addLayout(self.verticalLayout_1)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.showFromEdit = QtGui.QDateEdit(QtCore.QDate.currentDate())
self.showFromEdit.setCalendarPopup(True)
self.showFromCheckBox = QtGui.QCheckBox()
self.showFromCheckBox.setText("Show from")
self.showFromCheckBox.stateChanged.connect(self.showFromCheckChanged)
self.showFromEdit.setDisabled(True)
self.verticalLayout_2.addWidget(self.showFromCheckBox)
self.verticalLayout_2.addWidget(self.showFromEdit)
self.horizontalLayout_2.addLayout(self.verticalLayout_2)
self.verticalLayout.addLayout(self.horizontalLayout_2)
# Depends on
self.existingActions = []
for row in self.databaseCon.execute("select description FROM todos where state='active'"):
self.existingActions.append(row[0])
#self.existingActions.append("FAKE-TAG")
#
self.dependsLabel = QtGui.QLabel(self)
self.dependsLabel.setText("Depends on (Separate with ;)")
self.verticalLayout.addWidget(self.dependsLabel)
self.dependsEdit = QtGui.QLineEdit(self)
self.verticalLayout.addWidget(self.dependsEdit)
# TODO add completion. Consider this: http://john.nachtimwald.com/2009/07/04/qcompleter-and-comma-separated-tags/
# make tags all lower case
# use set(list of strings) and set.diffence
self.dependsEdit.textChanged.connect(self.dependsEditChanged)
self.dependsCompleter = QtGui.QCompleter(QtCore.QStringList(self.existingActions))
self.dependsCompleter.setWidget(self.dependsEdit)
self.dependsCompleter.setCompletionMode(1)
self.dependsCompleter.activated.connect(self.dependsCompleterSelect)
# Commit and Cancel button
# TODO hide cancel button by default??? only show when editing an existing item
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.addActionButton = QtGui.QPushButton(self)
self.addActionButton.setText("Add action")
self.horizontalLayout_5.addWidget(self.addActionButton)
self.cancelEditButton = QtGui.QPushButton(self)
self.cancelEditButton.setText("Cancel edit")
self.horizontalLayout_5.addWidget(self.cancelEditButton)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_5.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout_5)
# connect buttons
self.cancelEditButton.clicked.connect(self.cancelButtonClicked)
self.addActionButton.clicked.connect(self.addActionButtonClicked)
#self.cancelEditButton.setVisible(self.current_id != None)
# Add a vertical spacer
spacerItem = QtGui.QSpacerItem(
1, 1, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
# Settings
self.settings = QtCore.QSettings("tracks-queue", "tracks-queue")
# Set up keyboard shortcuts
shortcut = QtGui.QShortcut(self)
shortcut.setKey(QtGui.QKeySequence("Esc"))
shortcut.setContext(QtCore.Qt.WidgetWithChildrenShortcut)
shortcut.activated.connect(self.cancelButtonClicked)
def dueDateCheckChanged(self):
"""Check box enabling the due date has been clicked"""
logging.info("TracksActionEditor->dueDateCheckChanged")
self.dueEdit.setDisabled(not self.dueCheckBox.isChecked())
def showFromCheckChanged(self):
"""Check box enabling the show from date has been clicked"""
logging.info("TracksActionEditor->showFromCheckChanged")
self.showFromEdit.setDisabled(not self.showFromCheckBox.isChecked())
#def hideButtonClicked(self):
#logging.info("TracksActionEditor->hideButtonClicked")
#self.formVisible = not self.formVisible
#self.settings.setValue("editor/visible", QtCore.QVariant(self.formVisible))
#self.updateHidden()
#def updateHidden(self):
#logging.info("TracksActionEditor->updateHidden")
#if self.formVisible:
#self.hideFormButton.setText(">> Hide Form")
#self.setMaximumSize(QtCore.QSize(250, 16777215))
#self.setMinimumSize(QtCore.QSize(250, 0))
#self.verticalLayout.setMargin(4)
#self.descriptionEdit.setFocus()
#else:
#self.hideFormButton.setText("<<")
#self.setMaximumSize(QtCore.QSize(30, 16777215))
#self.setMinimumSize(QtCore.QSize(30, 0))
#self.verticalLayout.setMargin(0)
## Hide or show all of the form elements
#self.descriptionLabel.setVisible(self.formVisible)
#self.descriptionEdit.setVisible(self.formVisible)
#self.notesLabel.setVisible(self.formVisible)
#self.notesEdit.setVisible(self.formVisible)
#self.projectLabel.setVisible(self.formVisible)
#self.projectEdit.setVisible(self.formVisible)
#self.contextLabel.setVisible(self.formVisible)
#self.contextEdit.setVisible(self.formVisible)
#self.tagsLabel.setVisible(False)#self.formVisible)
#self.tagsEdit.setVisible(False)#self.formVisible)
#self.dueEdit.setVisible(self.formVisible)
#self.dueCheckBox.setVisible(self.formVisible)
#self.showFromEdit.setVisible(self.formVisible)
#self.showFromCheckBox.setVisible(self.formVisible)
#self.dependsLabel.setVisible(self.formVisible)
#self.dependsEdit.setVisible(self.formVisible)
#self.addActionButton.setVisible(self.formVisible)
##TODO only reshow cancel button when editing existing item
#self.cancelEditButton.setVisible(self.formVisible and self.current_id != None)
def tagsEditChanged(self, theText):
# refer to this example:
# http://john.nachtimwald.com/2009/07/04/qcompleter-and-comma-separated-tags/
#logging.info("TracksActionEditor->tagsEditChanged - "+str(theText))
tagText = str(theText).lower().split(",")
theTags = []
for tag in tagText:
tag = tag.strip()
if len(tag) > 0:
theTags.append(tag)
theSet = list(set(theTags))
currentText = str(theText[:self.tagsEdit.cursorPosition()])
prefix = currentText.split(',')[-1].strip()
tags = list(set(self.existingTags).difference(theSet))
model = QtGui.QStringListModel(QtCore.QStringList(tags), self.tagCompleter)
model.sort(0)
self.tagCompleter.setModel(model)
self.tagCompleter.setCompletionPrefix(prefix)
if prefix.strip() != '':
self.tagCompleter.complete()
self.tagCompleter.setModelSorting(2)
def tagsCompleterSelect(self, theText):
# refer to this example:
# http://john.nachtimwald.com/2009/07/04/qcompleter-and-comma-separated-tags/
#logging.info("TracksActionEditor->tagsCompleterSelect - " + str(theText))
cursor_pos = self.tagsEdit.cursorPosition()
before_text = unicode(self.tagsEdit.text())[:cursor_pos]
after_text = unicode(self.tagsEdit.text())[cursor_pos:]
prefix_len = len(before_text.split(',')[-1].strip())
self.tagsEdit.setText('%s%s, %s' % (before_text[:cursor_pos - prefix_len], theText, after_text))
self.tagsEdit.setCursorPosition(cursor_pos - prefix_len + len(theText) + 2)
def dependsEditChanged(self, theText):
# refer to this example:
# http://john.nachtimwald.com/2009/07/04/qcompleter-and-comma-separated-tags/
#logging.info("TracksActionEditor->tagsEditChanged - "+str(theText))
tagText = str(theText).split(";")
theTags = []
for tag in tagText:
tag = tag.strip()
if len(tag) > 0:
theTags.append(tag)
theSet = list(set(theTags))
currentText = str(theText[:self.dependsEdit.cursorPosition()])
prefix = currentText.split(';')[-1].strip()
tags = list(set(self.existingActions).difference(theSet))
model = QtGui.QStringListModel(QtCore.QStringList(tags), self.dependsCompleter)
model.sort(0)
self.dependsCompleter.setModel(model)
self.dependsCompleter.setCompletionPrefix(prefix)
if prefix.strip() != '':
self.dependsCompleter.complete()
self.dependsCompleter.setModelSorting(1)
def dependsCompleterSelect(self, theText):
# refer to this example:
# http://john.nachtimwald.com/2009/07/04/qcompleter-and-comma-separated-tags/
#logging.info("TracksActionEditor->tagsCompleterSelect - " + str(theText))
cursor_pos = self.dependsEdit.cursorPosition()
before_text = unicode(self.dependsEdit.text())[:cursor_pos]
after_text = unicode(self.dependsEdit.text())[cursor_pos:]
prefix_len = len(before_text.split(';')[-1].strip())
self.dependsEdit.setText('%s%s; %s' % (before_text[:cursor_pos - prefix_len], theText, after_text))
self.dependsEdit.setCursorPosition(cursor_pos - prefix_len + len(theText) + 2)
def cancelButtonClicked(self):
logging.info("TracksActionEditor->cancelButtonClicked")
# Clear all the widgets
# TODO also clear internal data reflecting the database item we are editing
self.descriptionEdit.clear()
self.notesEdit.clear()
self.projectEdit.clear()
if self.defaultProject:
self.projectEdit.setText(self.defaultProject)
self.contextEdit.clear()
if self.defaultContext:
self.contextEdit.setText(self.defaultContext)
self.tagsEdit.clear()
if self.defaultTags:
self.tagsEdit.setText(self.defaultTags)
self.dueEdit.setDate(QtCore.QDate.currentDate())
self.dueEdit.setDisabled(True)
self.dueCheckBox.setChecked(False)
self.showFromEdit.setDate(QtCore.QDate.currentDate())
self.showFromEdit.setDisabled(True)
self.showFromCheckBox.setChecked(False)
self.dependsEdit.clear()
self.current_id = None
#self.cancelEditButton.setVisible(False)
self.setVisible(False)
self.addActionButton.setText("Add Action")
def addActionButtonClicked(self):
"""Add a new action to db, or modify an existing one"""
logging.info("TracksActionEditor->addActionButtonClicked")
if self.descriptionEdit.text() == "" or self.contextEdit.text()== "":
QtGui.QMessageBox.critical(self,
"Error",
"An action must have a description and a context\n\nNo data has been inserted or modified")
return
if self.current_user_id==None:
QtGui.QMessageBox.critical(self,
"Error",
"Editor doesn't know the user?\n\nNo data has been inserted or modified")
return
desc = str(self.descriptionEdit.text())
notes = str(self.notesEdit.toPlainText())
# Context
context = None
try:
context = self.databaseCon.execute("select id from contexts where user_id=? AND name=?",[self.current_user_id, str(self.contextEdit.text()),]).fetchall()[0][0]
except:
QtGui.QMessageBox.critical(self,
"Error",
"Context doesn't exist\n\nThis may provide an option to create the context in the future\n\nNothing added")
return
# Project
project = None
try:
project = self.databaseCon.execute("select id from projects where user_id=? AND name=?",[self.current_user_id,str(self.projectEdit.text()),]).fetchall()[0][0]
except:
QtGui.QMessageBox.critical(self,
"Error",
"Project doesn't exist\n\nThis may provide an option to create the context in the future\n\nNothing added")
return
# Due Date
due = None
if self.dueCheckBox.isChecked():
due = str(self.dueEdit.date().toString("yyyy-MM-dd"))
# Show from Date
show = None
if self.showFromCheckBox.isChecked():
show = str(self.showFromEdit.date().toString("yyyy-MM-dd"))
# Depends on
tagText = str(self.dependsEdit.text()).split(";")
theTags = []
dependsIDs = []
if tagText:
for tag in tagText:
tag = tag.strip()
if len(tag) > 0:
theTags.append(tag)
try:
for tag in theTags:
id = self.databaseCon.execute("select id from todos where description=?",[tag,]).fetchone()[0]
dependsIDs.append(id)
except:
QtGui.QMessageBox.critical(self,
"Error",
"Action doesn't exist\n\nWhat does this depend on?\n\nNothing added")
return
# Insert the data
if self.current_id == None:
q = "insert into todos values(NULL,?,?,?,?,DATETIME('now'),?,NULL,?,?,'active',NULL,DATETIME('now'))"
self.databaseCon.execute(q,[context,project,desc,notes,due,self.current_user_id,show])
self.databaseCon.commit()
if len(dependsIDs) > 0:
currentID = self.databaseCon.execute("SELECT last_insert_rowid()").fetchone()[0]
for id in dependsIDs:
logging.debug("TracksActionEditor->addActionButtonClicked - Inserting a dependancy")
q = "insert into dependencies values(NULL,?,?,'depends')"
self.databaseCon.execute(q,[currentID,id])
self.databaseCon.commit()
else:
q = "UPDATE todos SET context_id=?, project_id=?, description=?, notes=?, due=?, show_from=?, updated_at=DATETIME('now') where id=?"
self.databaseCon.execute(q,[context,project,desc,notes,due,show,self.current_id])
self.databaseCon.commit()
# remove all the old dependancies
self.databaseCon.execute("DELETE FROM dependencies WHERE successor_id=?", [self.current_id,])
if len(dependsIDs) > 0:
currentID = self.current_id
for id in dependsIDs:
logging.debug("TracksActionEditor->addActionButtonClicked - Inserting a dependancy")
q = "insert into dependencies values(NULL,?,?,'depends')"
self.databaseCon.execute(q,[currentID,id])
self.databaseCon.commit()
self.cancelButtonClicked()
self.emit(QtCore.SIGNAL("actionModified()"))
def setCurrentActionID(self, actionID):
logging.info("TracksActionEditor->setCurrentActionID")
self.addActionButton.setText("Save Action")
self.current_id = actionID
self.cancelEditButton.setVisible(True)
# The General stuff
for row in self.databaseCon.execute("select description, notes, due, show_from from todos WHERE id="+str(actionID)):
self.descriptionEdit.setText(row[0])
if row[1]:
self.notesEdit.setPlainText(row[1])
else:
self.notesEdit.clear()
if row[2]:
# row[2] will be string in format yyyy-MM-dd
self.dueCheckBox.setChecked(True)
self.dueEdit.setDisabled(False)
self.dueEdit.setDate(QtCore.QDate.fromString(row[2][0:10],"yyyy-MM-dd"))
else:
self.dueEdit.clear()
self.dueCheckBox.setChecked(False)
self.dueEdit.setDisabled(True)
if row[3]:
# row[3] will be string in format yyyy-MM-dd
self.showFromCheckBox.setChecked(True)
self.showFromEdit.setDisabled(False)
self.showFromEdit.setDate(QtCore.QDate.fromString(row[3][0:10],"yyyy-MM-dd"))
logging.debug("TracksActionEditor->setCurrentActionID - show_from=" +str(row[3][0:10]))
else:
self.showFromEdit.clear()
self.showFromCheckBox.setChecked(False)
self.showFromEdit.setDisabled(True)
# The Project
self.projectEdit.clear()
for row in self.databaseCon.execute("select projects.name FROM todos, projects WHERE todos.project_id=projects.id and todos.id="+str(actionID)):
self.projectEdit.setText(row[0])
# The context
self.contextEdit.clear()
for row in self.databaseCon.execute("select contexts.name from todos, contexts where todos.context_id=contexts.id and todos.id="+str(actionID)):
self.contextEdit.setText(row[0])
# The tags
tagText = ""
#for row in self.databaseCon.execute("select tags.name from todos, taggings, tags where todos.id=taggings.taggable_id and tags.id=taggings.tag_id and todos.id="+str(actionID)):
# if tagText == "":
# tagText.append(row[0])
# else:
# tagText.append(row[0])
#self.nameEdit.setText(row[1])
#if row[2] == "f":
# self.statusRadio1.setChecked(True)
#else:
# self.statusRadio2.setChecked(True)
self.tagsEdit.setText(tagText)
        # The dependencies
dependText = ""
for row in self.databaseCon.execute("SELECT todos.description FROM dependencies, todos WHERE todos.id=dependencies.predecessor_id and dependencies.successor_id=?",[actionID,]):
dependText = dependText + str(row[0]+"; ")
self.dependsEdit.setText(dependText)
# Make the editor visible if not already and focus it
self.setVisible(True)
self.setFocus()
def setFocus(self):
logging.info("tracksActionEditor->setFocus")
self.descriptionEdit.setFocus()
def refresh(self):
"""This will refresh the action editor, ie update available projects/contexts/tags"""
logging.info("tracksActionEditor->refresh")
# Update list of available projects
projectList = []
for row in self.databaseCon.execute("select name FROM projects where user_id=? ORDER BY UPPER(name)", (self.current_user_id,)):
projectList.append(row[0])
projectCompleter = QtGui.QCompleter(projectList)
projectCompleter.setCompletionMode(1)
self.projectEdit.setCompleter(projectCompleter)
# Update list of available contexts
contextList = []
for row in self.databaseCon.execute("select name FROM contexts where user_id=? ORDER BY UPPER(name)", (self.current_user_id,)):
contextList.append(row[0])
contextStringList = QtCore.QStringList(contextList)
contextCompleter = QtGui.QCompleter(contextStringList)
contextCompleter.setCompletionMode(1)
self.contextEdit.setCompleter(contextCompleter)
# TODO refresh the list of available actions for 'depends on'
self.existingActions = []
for row in self.databaseCon.execute("select description FROM todos WHERE state='active' AND user_id=? ORDER BY UPPER(description)", (self.current_user_id,)):
self.existingActions.append(row[0])
# What is the setting re form visibility?
#if self.settings.contains("editor/visible"):
# self.setVisible(bool(self.settings.value("editor/visible").toBool()))
def setDefaultProject(self, projectName):
self.defaultProject = projectName
def setDefaultContext(self, contextName):
self.defaultContext = contextName
def setDefaultTags(self, tags):
self.defaultTags = tags
def setCurrentUser(self, user):
self.current_user_id = user
self.refresh()
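# Illustrative usage sketch (added here, not part of the original module; the
# database file name and user id are made-up examples -- the sqlite database is
# expected to already contain the Tracks schema queried above: todos, projects,
# contexts, tags, dependencies, ...):
#
#   import sqlite3
#   from PyQt4 import QtGui
#   app = QtGui.QApplication([])
#   editor = WidgetActionEditor(sqlite3.connect('tracks.db'))
#   editor.setCurrentUser(1)
#   editor.show()
#   app.exec_()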
| gpl-2.0 |
sesuncedu/sdhash | sdhash-ui/jinja2/testsuite/regression.py | 90 | 7583 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.regression
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests corner cases and bugs.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Template, Environment, DictLoader, TemplateSyntaxError, \
TemplateNotFound, PrefixLoader
env = Environment()
class CornerTestCase(JinjaTestCase):
def test_assigned_scoping(self):
t = env.from_string('''
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{{- item -}}
''')
assert t.render(item=42) == '[1][2][3][4]42'
t = env.from_string('''
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{%- set item = 42 %}
{{- item -}}
''')
assert t.render() == '[1][2][3][4]42'
t = env.from_string('''
{%- set item = 42 %}
{%- for item in (1, 2, 3, 4) -%}
[{{ item }}]
{%- endfor %}
{{- item -}}
''')
assert t.render() == '[1][2][3][4]42'
def test_closure_scoping(self):
t = env.from_string('''
{%- set wrapper = "<FOO>" %}
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{{- wrapper -}}
''')
assert t.render() == '[1][2][3][4]<FOO>'
t = env.from_string('''
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{%- set wrapper = "<FOO>" %}
{{- wrapper -}}
''')
assert t.render() == '[1][2][3][4]<FOO>'
t = env.from_string('''
{%- for item in (1, 2, 3, 4) %}
{%- macro wrapper() %}[{{ item }}]{% endmacro %}
{{- wrapper() }}
{%- endfor %}
{{- wrapper -}}
''')
assert t.render(wrapper=23) == '[1][2][3][4]23'
class BugTestCase(JinjaTestCase):
def test_keyword_folding(self):
env = Environment()
env.filters['testing'] = lambda value, some: value + some
assert env.from_string("{{ 'test'|testing(some='stuff') }}") \
.render() == 'teststuff'
def test_extends_output_bugs(self):
env = Environment(loader=DictLoader({
'parent.html': '(({% block title %}{% endblock %}))'
}))
t = env.from_string('{% if expr %}{% extends "parent.html" %}{% endif %}'
'[[{% block title %}title{% endblock %}]]'
'{% for item in [1, 2, 3] %}({{ item }}){% endfor %}')
assert t.render(expr=False) == '[[title]](1)(2)(3)'
assert t.render(expr=True) == '((title))'
def test_urlize_filter_escaping(self):
tmpl = env.from_string('{{ "http://www.example.org/<foo"|urlize }}')
assert tmpl.render() == '<a href="http://www.example.org/<foo">http://www.example.org/<foo</a>'
def test_loop_call_loop(self):
tmpl = env.from_string('''
{% macro test() %}
{{ caller() }}
{% endmacro %}
{% for num1 in range(5) %}
{% call test() %}
{% for num2 in range(10) %}
{{ loop.index }}
{% endfor %}
{% endcall %}
{% endfor %}
''')
assert tmpl.render().split() == map(unicode, range(1, 11)) * 5
def test_weird_inline_comment(self):
env = Environment(line_statement_prefix='%')
self.assert_raises(TemplateSyntaxError, env.from_string,
'% for item in seq {# missing #}\n...% endfor')
def test_old_macro_loop_scoping_bug(self):
tmpl = env.from_string('{% for i in (1, 2) %}{{ i }}{% endfor %}'
'{% macro i() %}3{% endmacro %}{{ i() }}')
assert tmpl.render() == '123'
def test_partial_conditional_assignments(self):
tmpl = env.from_string('{% if b %}{% set a = 42 %}{% endif %}{{ a }}')
assert tmpl.render(a=23) == '23'
assert tmpl.render(b=True) == '42'
def test_stacked_locals_scoping_bug(self):
env = Environment(line_statement_prefix='#')
t = env.from_string('''\
# for j in [1, 2]:
# set x = 1
# for i in [1, 2]:
# print x
# if i % 2 == 0:
# set x = x + 1
# endif
# endfor
# endfor
# if a
# print 'A'
# elif b
# print 'B'
# elif c == d
# print 'C'
# else
# print 'D'
# endif
''')
assert t.render(a=0, b=False, c=42, d=42.0) == '1111C'
def test_stacked_locals_scoping_bug_twoframe(self):
t = Template('''
{% set x = 1 %}
{% for item in foo %}
{% if item == 1 %}
{% set x = 2 %}
{% endif %}
{% endfor %}
{{ x }}
''')
rv = t.render(foo=[1]).strip()
assert rv == u'1'
def test_call_with_args(self):
t = Template("""{% macro dump_users(users) -%}
<ul>
{%- for user in users -%}
<li><p>{{ user.username|e }}</p>{{ caller(user) }}</li>
{%- endfor -%}
</ul>
{%- endmacro -%}
{% call(user) dump_users(list_of_user) -%}
<dl>
<dl>Realname</dl>
<dd>{{ user.realname|e }}</dd>
<dl>Description</dl>
<dd>{{ user.description }}</dd>
</dl>
{% endcall %}""")
assert [x.strip() for x in t.render(list_of_user=[{
'username':'apo',
'realname':'something else',
'description':'test'
}]).splitlines()] == [
u'<ul><li><p>apo</p><dl>',
u'<dl>Realname</dl>',
u'<dd>something else</dd>',
u'<dl>Description</dl>',
u'<dd>test</dd>',
u'</dl>',
u'</li></ul>'
]
def test_empty_if_condition_fails(self):
self.assert_raises(TemplateSyntaxError, Template, '{% if %}....{% endif %}')
self.assert_raises(TemplateSyntaxError, Template, '{% if foo %}...{% elif %}...{% endif %}')
self.assert_raises(TemplateSyntaxError, Template, '{% for x in %}..{% endfor %}')
def test_recursive_loop_bug(self):
tpl1 = Template("""
{% for p in foo recursive%}
{{p.bar}}
{% for f in p.fields recursive%}
{{f.baz}}
{{p.bar}}
{% if f.rec %}
{{ loop(f.sub) }}
{% endif %}
{% endfor %}
{% endfor %}
""")
tpl2 = Template("""
{% for p in foo%}
{{p.bar}}
{% for f in p.fields recursive%}
{{f.baz}}
{{p.bar}}
{% if f.rec %}
{{ loop(f.sub) }}
{% endif %}
{% endfor %}
{% endfor %}
""")
def test_correct_prefix_loader_name(self):
env = Environment(loader=PrefixLoader({
'foo': DictLoader({})
}))
try:
env.get_template('foo/bar.html')
except TemplateNotFound, e:
assert e.name == 'foo/bar.html'
else:
assert False, 'expected error here'
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(CornerTestCase))
suite.addTest(unittest.makeSuite(BugTestCase))
return suite
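if __name__ == '__main__':
    # Illustrative sketch (added here, not part of the original file): run this
    # module's regression suite directly with the stdlib test runner.
    unittest.TextTestRunner(verbosity=2).run(suite())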
| apache-2.0 |
jemucino/pybilliards | src/billiards/settings.py | 2 | 1523 | # -*- coding: utf-8 -*-
import logging
import os
import settings_default
# TODO: replace platform specific paths
class Settings(object):
def __init__(self):
self._settings = {}
#self.add_from_file(os.path.join('settings_default.py'))
self._settings.update(settings_default.settings)
self.userdir = self._settings['userdir']
self.add_from_file(os.path.join(self.userdir,'user_settings.py'))
logging.basicConfig(level=getattr(logging,self._settings['loglevel'].upper()))
def get(self, key):
keys = key.split('.')
value = self._settings
try:
for k in keys:
value = value[k]
except KeyError:
logging.info('No setting found for key: %s'%key)
value = None
return value
def set(self, key, value):
keys = key.split('.')
v = self._settings
for k in keys[:-1]:
try:
v = v[k]
except KeyError:
v[k] = {}
v = v[k]
v[keys[-1]] = value
def add_from_file(self, filename):
d = {}
try:
execfile(filename, d)
data = d['settings']
self._settings.update(data)
except Exception, exc:
# any log call before basicConfig results in failure to set the log level
#logging.warn(repr(exc))
#logging.warn('Unable to load settings from: %s'%filename)
pass
settings = Settings()
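if __name__ == '__main__':
    # Illustrative sketch (added here, not part of the original module): the keys
    # below are made-up examples showing how get()/set() walk the nested settings
    # dict with dot-separated keys.
    settings.set('video.resolution.width', 1280)
    print(settings.get('video.resolution.width'))   # -> 1280
    print(settings.get('video.no_such_key'))        # -> None (missing key is logged)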
| gpl-3.0 |
theodoregoetz/clas12-dc-wiremap | clas12_wiremap/ui/main_window.py | 1 | 5268 | from __future__ import print_function, division
import sys
import os
import numpy as np
from clas12_wiremap.ui import QtGui, uic
from clas12_wiremap.ui import Sidebar, CrateTab, DBTab, TBTab, DCWireStack, SetRunDialogue
from clas12_wiremap.ui.dcrb_tab import DCRB
from clas12_wiremap.ui.stb_tab import STBTab
from clas12_wiremap import initialize_session, DCWires
class MainWindow(QtGui.QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
curdir = os.path.dirname(os.path.realpath(__file__))
uic.loadUi(os.path.join(curdir,'MainWindow.ui'), self)
self.dcwires = DCWires()
self.loadRun(1)
#self.dcwires.initialize_session()
#self.run_number.setValue(int(DCWires.runnum))
#self.run_number.valueChanged.connect(self.run_number.show)
#if (self.run_number.value.Changed() :
#print(self.run_number.value())
### Explorer Tabs
self.explorer_tabs = QtGui.QTabWidget()
TBTab.stateChanged = self.sendTBArray
DBTab.stateChanged = self.sendDBArray
STBTab.stateChanged = self.sendSTBArray
DCRB.stateChanged = self.sendDCRBArray
self.crate = CrateTab(self)
self.crate.setMinimumWidth(750)
self.crate.setMaximumHeight(1000)
crate_vbox = QtGui.QVBoxLayout(self.crate)
self.explorer_tabs.addTab(self.crate, 'Crates')
self.dboard = DBTab()
self.dboard.setMinimumWidth(750)
dboard_vbox = QtGui.QVBoxLayout(self.dboard)
self.explorer_tabs.addTab(self.dboard, 'Distribution Boards')
self.tboard = TBTab()
self.tboard.setMinimumWidth(750)
tboard_vbox = QtGui.QVBoxLayout(self.tboard)
self.explorer_tabs.addTab(self.tboard, 'Translation Boards')
self.dcrb = DCRB()
self.dcrb.setMinimumWidth(750)
dcrb_vbox = QtGui.QVBoxLayout(self.dcrb)
self.explorer_tabs.addTab(self.dcrb, 'Drift Chamber Readout Board')
self.stb = STBTab()
self.stb.setMinimumWidth(750)
stb_vbox = QtGui.QVBoxLayout(self.stb)
self.explorer_tabs.addTab(self.stb, 'Signal Translation Board')
self.explorer_tabs.setMinimumWidth(750)
self.explorer_tabs.setSizePolicy(
QtGui.QSizePolicy.Fixed,
QtGui.QSizePolicy.Expanding)
explorer_vbox = QtGui.QVBoxLayout()
explorer_vbox.addWidget(self.explorer_tabs)
self.explorer_holder.setLayout(explorer_vbox)
### Chooser Sidebar
#self.sidebar = Sidebar(self.session)
#sidebar_vbox = QtGui.QVBoxLayout()
#sidebar_vbox.addWidget(self.sidebar)
#self.chooser_holder.setLayout(sidebar_vbox)
### Wiremap
self.wiremaps = DCWireStack(self)
wmap_vbox = QtGui.QVBoxLayout()
wmap_vbox.addWidget(self.wiremaps)
self.wiremap_holder.setLayout(wmap_vbox)
def update_wiremap(sec,data):
if sec is not None:
self.wiremaps.setCurrentIndex(sec+1)
else:
self.wiremaps.setCurrentIndex(0)
self.wiremaps.data = data
#self.sidebar.post_update = update_wiremap
for i in [self.dboard, self.tboard, self.dcrb, self.stb]:
i.currentChanged.connect(lambda x: self.wiremaps.setCurrentIndex(x+1))
def f(i):
if (i == 0):
self.wiremaps.setCurrentIndex(0)
else:
self.wiremaps.setCurrentIndex(self.explorer_tabs.currentWidget().currentIndex() + 1)
self.explorer_tabs.currentChanged.connect(f)
self.setModeExplorer()
self.show()
def setModeExplorer(self):
self.actionExplorer.setChecked(True)
self.actionChooser.setChecked(False)
self.left_stacked_widget.setCurrentIndex(0)
def setModeChooser(self):
self.actionExplorer.setChecked(False)
self.actionChooser.setChecked(True)
self.left_stacked_widget.setCurrentIndex(1)
def setRunDialogue(self):
run,ok = SetRunDialogue.getRunNum()
if ok:
self.loadRun(run)
def loadRun(self, runnumber):
self.rundisplay.setNum(runnumber)
self.dcwires.run = runnumber
self.dcwires.fetch_data()
def sendTBArray(*args):
    return (main_window.tboard.get_sectors(),
            main_window.tboard.get_superlayers(),
            main_window.tboard.get_boards(),
            main_window.tboard.get_halfs())
def sendDBArray(*args):
    return (main_window.dboard.get_sector(),
            main_window.dboard.get_super_layer(),
            main_window.dboard.get_direction(),
            main_window.dboard.get_box(),
            main_window.dboard.get_quad(),
            main_window.dboard.get_doublet())
def sendSTBArray(*args):
    return (main_window.stb.get_board(),
            main_window.stb.get_superlayer(),
            main_window.stb.get_sector())
def sendDCRBArray(*args):
print(main_window.dcrb.get_board())
print(main_window.dcrb.get_superlayer())
print(main_window.dcrb.get_sector())
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
main_window = MainWindow()
sys.exit(app.exec_())
| gpl-3.0 |
noelbk/neutron-juniper | neutron/plugins/hyperv/rpc_callbacks.py | 18 | 3977 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Alessandro Pilotti, Cloudbase Solutions Srl
from neutron.common import constants as q_const
from neutron.common import rpc as q_rpc
from neutron.db import agents_db
from neutron.db import dhcp_rpc_base
from neutron.db import l3_rpc_base
from neutron.openstack.common import log as logging
from neutron.plugins.hyperv import db as hyperv_db
LOG = logging.getLogger(__name__)
class HyperVRpcCallbacks(
dhcp_rpc_base.DhcpRpcCallbackMixin,
l3_rpc_base.L3RpcCallbackMixin):
# Set RPC API version to 1.0 by default.
RPC_API_VERSION = '1.1'
def __init__(self, notifier):
self.notifier = notifier
self._db = hyperv_db.HyperVPluginDB()
def create_rpc_dispatcher(self):
'''Get the rpc dispatcher for this manager.
If a manager would like to set an rpc API version, or support more than
one class as the target of rpc messages, override this method.
'''
return q_rpc.PluginRpcDispatcher([self,
agents_db.AgentExtRpcCallback()])
def get_device_details(self, rpc_context, **kwargs):
"""Agent requests device details."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s details requested from %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port = self._db.get_port(device)
if port:
binding = self._db.get_network_binding(None, port['network_id'])
entry = {'device': device,
'network_id': port['network_id'],
'port_id': port['id'],
'admin_state_up': port['admin_state_up'],
'network_type': binding.network_type,
'segmentation_id': binding.segmentation_id,
'physical_network': binding.physical_network}
# Set the port status to UP
self._db.set_port_status(port['id'], q_const.PORT_STATUS_ACTIVE)
else:
entry = {'device': device}
LOG.debug(_("%s can not be found in database"), device)
return entry
def update_device_down(self, rpc_context, **kwargs):
"""Device no longer exists on agent."""
# TODO(garyk) - live migration and port status
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s no longer exists on %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port = self._db.get_port(device)
if port:
entry = {'device': device,
'exists': True}
# Set port status to DOWN
self._db.set_port_status(port['id'], q_const.PORT_STATUS_DOWN)
else:
entry = {'device': device,
'exists': False}
LOG.debug(_("%s can not be found in database"), device)
return entry
def tunnel_sync(self, rpc_context, **kwargs):
"""Tunnel sync.
Dummy function for ovs agent running on Linux to
work with Hyper-V plugin and agent.
"""
entry = dict()
entry['tunnels'] = {}
# Return the list of tunnels IP's to the agent
return entry
| apache-2.0 |
PyMySQL/mysqlclient-python | setup_windows.py | 1 | 1765 | import os
import sys
from distutils.msvccompiler import get_build_version
def get_config():
from setup_common import get_metadata_and_options, create_release_file
metadata, options = get_metadata_and_options()
connector = options["connector"]
extra_objects = []
# client = "mysqlclient"
client = "mariadbclient"
vcversion = int(get_build_version())
if client == "mariadbclient":
library_dirs = [os.path.join(connector, "lib", "mariadb")]
libraries = [
"kernel32",
"advapi32",
"wsock32",
"shlwapi",
"Ws2_32",
"crypt32",
"secur32",
"bcrypt",
client,
]
include_dirs = [os.path.join(connector, "include", "mariadb")]
else:
library_dirs = [
os.path.join(connector, r"lib\vs%d" % vcversion),
os.path.join(connector, "lib"),
]
libraries = ["kernel32", "advapi32", "wsock32", client]
include_dirs = [os.path.join(connector, r"include")]
extra_link_args = ["/MANIFEST"]
name = "mysqlclient"
metadata["name"] = name
define_macros = [
("version_info", metadata["version_info"]),
("__version__", metadata["version"]),
]
create_release_file(metadata)
del metadata["version_info"]
ext_options = dict(
library_dirs=library_dirs,
libraries=libraries,
extra_link_args=extra_link_args,
include_dirs=include_dirs,
extra_objects=extra_objects,
define_macros=define_macros,
)
return metadata, ext_options
if __name__ == "__main__":
sys.stderr.write(
"""You shouldn't be running this directly; it is used by setup.py."""
)
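# Illustrative sketch (added here, not part of the original helper; the extension
# and source file names below are assumptions about how setup.py consumes
# get_config()):
#
#   from setuptools import Extension, setup
#   metadata, ext_options = get_config()
#   setup(ext_modules=[Extension("MySQLdb._mysql",
#                                sources=["MySQLdb/_mysql.c"],
#                                **ext_options)],
#         **metadata)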
| gpl-2.0 |
alexandrucoman/vbox-nova-driver | nova/tests/unit/virt/xenapi/image/test_bittorrent.py | 21 | 5714 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mox3 import mox
import pkg_resources
import six
from nova import context
from nova import test
from nova.tests.unit.virt.xenapi import stubs
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import fake
from nova.virt.xenapi.image import bittorrent
from nova.virt.xenapi import vm_utils
class TestBittorrentStore(stubs.XenAPITestBaseNoDB):
def setUp(self):
super(TestBittorrentStore, self).setUp()
self.store = bittorrent.BittorrentStore()
self.mox = mox.Mox()
self.flags(torrent_base_url='http://foo',
connection_url='test_url',
connection_password='test_pass',
group='xenserver')
self.context = context.RequestContext(
'user', 'project', auth_token='foobar')
fake.reset()
stubs.stubout_session(self.stubs, fake.SessionBase)
def mock_iter_eps(namespace):
return []
self.stubs.Set(pkg_resources, 'iter_entry_points', mock_iter_eps)
driver = xenapi_conn.XenAPIDriver(False)
self.session = driver._session
self.stubs.Set(
vm_utils, 'get_sr_path', lambda *a, **kw: '/fake/sr/path')
def test_download_image(self):
instance = {'uuid': '00000000-0000-0000-0000-000000007357'}
params = {'image_id': 'fake_image_uuid',
'sr_path': '/fake/sr/path',
'torrent_download_stall_cutoff': 600,
'torrent_listen_port_end': 6891,
'torrent_listen_port_start': 6881,
'torrent_max_last_accessed': 86400,
'torrent_max_seeder_processes_per_host': 1,
'torrent_seed_chance': 1.0,
'torrent_seed_duration': 3600,
'torrent_url': 'http://foo/fake_image_uuid.torrent',
'uuid_stack': ['uuid1']}
self.stubs.Set(vm_utils, '_make_uuid_stack',
lambda *a, **kw: ['uuid1'])
self.mox.StubOutWithMock(self.session, 'call_plugin_serialized')
self.session.call_plugin_serialized(
'bittorrent', 'download_vhd', **params)
self.mox.ReplayAll()
self.store.download_image(self.context, self.session,
instance, 'fake_image_uuid')
self.mox.VerifyAll()
def test_upload_image(self):
self.assertRaises(NotImplementedError, self.store.upload_image,
self.context, self.session, mox.IgnoreArg, 'fake_image_uuid',
['fake_vdi_uuid'])
def bad_fetcher(image_id):
raise test.TestingException("just plain bad.")
def another_fetcher(image_id):
return "http://www.foobar.com/%s" % image_id
class MockEntryPoint(object):
name = "torrent_url"
def load(self):
return another_fetcher
class LookupTorrentURLTestCase(test.NoDBTestCase):
def setUp(self):
super(LookupTorrentURLTestCase, self).setUp()
self.store = bittorrent.BittorrentStore()
self.image_id = 'fakeimageid'
def _mock_iter_none(self, namespace):
return []
def _mock_iter_single(self, namespace):
return [MockEntryPoint()]
def test_default_fetch_url_no_base_url_set(self):
self.flags(torrent_base_url=None,
group='xenserver')
self.stubs.Set(pkg_resources, 'iter_entry_points',
self._mock_iter_none)
exc = self.assertRaises(
RuntimeError, self.store._lookup_torrent_url_fn)
self.assertEqual('Cannot create default bittorrent URL without'
' torrent_base_url set'
' or torrent URL fetcher extension',
six.text_type(exc))
def test_default_fetch_url_base_url_is_set(self):
self.flags(torrent_base_url='http://foo',
group='xenserver')
self.stubs.Set(pkg_resources, 'iter_entry_points',
self._mock_iter_single)
lookup_fn = self.store._lookup_torrent_url_fn()
self.assertEqual('http://foo/fakeimageid.torrent',
lookup_fn(self.image_id))
def test_with_extension(self):
self.stubs.Set(pkg_resources, 'iter_entry_points',
self._mock_iter_single)
lookup_fn = self.store._lookup_torrent_url_fn()
self.assertEqual("http://www.foobar.com/%s" % self.image_id,
lookup_fn(self.image_id))
def test_multiple_extensions_found(self):
self.flags(torrent_base_url=None,
group='xenserver')
def mock_iter_multiple(namespace):
return [MockEntryPoint(), MockEntryPoint()]
self.stubs.Set(pkg_resources, 'iter_entry_points', mock_iter_multiple)
exc = self.assertRaises(
RuntimeError, self.store._lookup_torrent_url_fn)
self.assertEqual('Multiple torrent URL fetcher extensions found.'
' Failing.',
six.text_type(exc))
| apache-2.0 |
willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/Fitbit/Statistics/GetActivityStats.py | 5 | 4462 | # -*- coding: utf-8 -*-
###############################################################################
#
# GetActivityStats
# Gets user's activity statistics.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetActivityStats(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetActivityStats Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetActivityStats, self).__init__(temboo_session, '/Library/Fitbit/Statistics/GetActivityStats')
def new_input_set(self):
return GetActivityStatsInputSet()
def _make_result_set(self, result, path):
return GetActivityStatsResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetActivityStatsChoreographyExecution(session, exec_id, path)
class GetActivityStatsInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetActivityStats
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessTokenSecret(self, value):
"""
Set the value of the AccessTokenSecret input for this Choreo. ((required, string) The Access Token Secret retrieved during the OAuth process.)
"""
super(GetActivityStatsInputSet, self)._set_input('AccessTokenSecret', value)
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The Access Token retrieved during the OAuth process.)
"""
super(GetActivityStatsInputSet, self)._set_input('AccessToken', value)
def set_ConsumerKey(self, value):
"""
Set the value of the ConsumerKey input for this Choreo. ((required, string) The Consumer Key provided by Fitbit.)
"""
super(GetActivityStatsInputSet, self)._set_input('ConsumerKey', value)
def set_ConsumerSecret(self, value):
"""
Set the value of the ConsumerSecret input for this Choreo. ((required, string) The Consumer Secret provided by Fitbit.)
"""
super(GetActivityStatsInputSet, self)._set_input('ConsumerSecret', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that you want the response to be in: xml or json. Defaults to json.)
"""
super(GetActivityStatsInputSet, self)._set_input('ResponseFormat', value)
def set_UserID(self, value):
"""
Set the value of the UserID input for this Choreo. ((optional, string) The user's encoded id. Defaults to "-" (dash) which will return data for the user associated with the token credentials provided.)
"""
super(GetActivityStatsInputSet, self)._set_input('UserID', value)
class GetActivityStatsResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetActivityStats Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Fitbit.)
"""
return self._output.get('Response', None)
class GetActivityStatsChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetActivityStatsResultSet(response, path)
| gpl-2.0 |
marco-lancini/Showcase | social_auth/backends/contrib/live.py | 4 | 2698 | """
MSN Live Connect oAuth 2.0
Settings:
LIVE_CLIENT_ID
LIVE_CLIENT_SECRET
LIVE_EXTENDED_PERMISSIONS (defaults are: wl.basic, wl.emails)
References:
* oAuth http://msdn.microsoft.com/en-us/library/live/hh243649.aspx
* Scopes http://msdn.microsoft.com/en-us/library/live/hh243646.aspx
* REST http://msdn.microsoft.com/en-us/library/live/hh243648.aspx
Throws:
AuthUnknownError - if user data retrieval fails
"""
from urllib import urlencode, urlopen
from django.utils import simplejson
from social_auth.utils import setting
from social_auth.backends import BaseOAuth2, OAuthBackend, USERNAME
from social_auth.backends.exceptions import AuthUnknownError
# Live Connect configuration
LIVE_AUTHORIZATION_URL = 'https://login.live.com/oauth20_authorize.srf'
LIVE_ACCESS_TOKEN_URL = 'https://login.live.com/oauth20_token.srf'
LIVE_USER_DATA_URL = 'https://apis.live.net/v5.0/me'
LIVE_SERVER = 'live.com'
LIVE_DEFAULT_PERMISSIONS = ['wl.basic', 'wl.emails']
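# A minimal settings sketch for the options listed in the module docstring
# (illustrative values only; substitute the credentials of your own Live
# Connect application in your Django settings module):
#
#   LIVE_CLIENT_ID = '0000000012345678'
#   LIVE_CLIENT_SECRET = 'your-client-secret'
#   LIVE_EXTENDED_PERMISSIONS = ['wl.basic', 'wl.emails']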
class LiveBackend(OAuthBackend):
name = 'live'
EXTRA_DATA = [
('id', 'id'),
('access_token', 'access_token'),
('reset_token', 'reset_token'),
('expires', setting('SOCIAL_AUTH_EXPIRATION', 'expires')),
('email', 'email'),
('first_name', 'first_name'),
('last_name', 'last_name'),
]
def get_user_id(self, details, response):
return response['id']
def get_user_details(self, response):
"""Return user details from Live Connect account"""
try:
email = response['emails']['account']
except KeyError:
email = ''
return {USERNAME: response.get('name'),
'email': email,
'first_name': response.get('first_name'),
'last_name': response.get('last_name')}
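# For illustration (all field values made up): a Live Connect response shaped
# like {'id': '8c8ce076ca27823f', 'name': 'Jane Doe', 'first_name': 'Jane',
# 'last_name': 'Doe', 'emails': {'account': '[email protected]'}} would yield
# {USERNAME: 'Jane Doe', 'email': '[email protected]', 'first_name': 'Jane',
# 'last_name': 'Doe'} from get_user_details above.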
class LiveAuth(BaseOAuth2):
AUTHORIZATION_URL = LIVE_AUTHORIZATION_URL
ACCESS_TOKEN_URL = LIVE_ACCESS_TOKEN_URL
SERVER_URL = LIVE_SERVER
AUTH_BACKEND = LiveBackend
SETTINGS_KEY_NAME = 'LIVE_CLIENT_ID'
SETTINGS_SECRET_NAME = 'LIVE_CLIENT_SECRET'
SCOPE_SEPARATOR = ','
SCOPE_VAR_NAME = 'LIVE_EXTENDED_PERMISSIONS'
DEFAULT_SCOPE = LIVE_DEFAULT_PERMISSIONS
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
url = LIVE_USER_DATA_URL + '?' + urlencode({
'access_token': access_token
})
try:
return simplejson.load(urlopen(url))
except (ValueError, IOError):
            raise AuthUnknownError('Error during profile retrieval, '
                                   'please try again later')
# Backend definition
BACKENDS = {
'live': LiveAuth,
}
| mit |
Stanford-Online/edx-platform | openedx/core/djangoapps/programs/tests/test_backpopulate_program_credentials.py | 9 | 12157 | """Tests for the backpopulate_program_credentials management command."""
import ddt
import mock
from django.core.management import call_command
from django.test import TestCase
from lms.djangoapps.certificates.models import CertificateStatuses # pylint: disable=import-error
from course_modes.models import CourseMode
from lms.djangoapps.certificates.api import MODES
from lms.djangoapps.certificates.tests.factories import GeneratedCertificateFactory
from openedx.core.djangoapps.catalog.tests.factories import (
generate_course_run_key,
ProgramFactory,
CourseFactory,
CourseRunFactory,
)
from openedx.core.djangoapps.catalog.tests.mixins import CatalogIntegrationMixin
from openedx.core.djangoapps.credentials.tests.mixins import CredentialsApiConfigMixin
from openedx.core.djangolib.testing.utils import skip_unless_lms
from student.tests.factories import UserFactory
COMMAND_MODULE = 'openedx.core.djangoapps.programs.management.commands.backpopulate_program_credentials'
@ddt.ddt
@mock.patch(COMMAND_MODULE + '.get_programs')
@mock.patch(COMMAND_MODULE + '.award_program_certificates.delay')
@skip_unless_lms
class BackpopulateProgramCredentialsTests(CatalogIntegrationMixin, CredentialsApiConfigMixin, TestCase):
"""Tests for the backpopulate_program_credentials management command."""
course_run_key, alternate_course_run_key = (generate_course_run_key() for __ in range(2))
# Constants for the _get_programs_data hierarchy types used in test_flatten()
SEPARATE_PROGRAMS = 'separate_programs'
SEPARATE_COURSES = 'separate_courses'
SAME_COURSE = 'same_course'
def setUp(self):
super(BackpopulateProgramCredentialsTests, self).setUp()
self.alice = UserFactory()
self.bob = UserFactory()
# Disable certification to prevent the task from being triggered when
# setting up test data (i.e., certificates with a passing status), thereby
# skewing mock call counts.
self.create_credentials_config(enable_learner_issuance=False)
catalog_integration = self.create_catalog_integration()
UserFactory(username=catalog_integration.service_username)
def _get_programs_data(self, hierarchy_type):
"""
Generate a mock response for get_programs() with the given type of
course hierarchy. Dramatically simplifies (and makes consistent
between test runs) the ddt-generated test_flatten methods.
"""
if hierarchy_type == self.SEPARATE_PROGRAMS:
return [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key),
]),
]
),
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.alternate_course_run_key),
]),
]
),
]
elif hierarchy_type == self.SEPARATE_COURSES:
return [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key),
]),
CourseFactory(course_runs=[
CourseRunFactory(key=self.alternate_course_run_key),
]),
]
),
]
else: # SAME_COURSE
return [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key),
CourseRunFactory(key=self.alternate_course_run_key),
]),
]
),
]
@ddt.data(True, False)
def test_handle(self, commit, mock_task, mock_get_programs):
"""
Verify that relevant tasks are only enqueued when the commit option is passed.
"""
data = [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key),
]),
]
),
]
mock_get_programs.return_value = data
GeneratedCertificateFactory(
user=self.alice,
course_id=self.course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
GeneratedCertificateFactory(
user=self.bob,
course_id=self.alternate_course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
call_command('backpopulate_program_credentials', commit=commit)
if commit:
mock_task.assert_called_once_with(self.alice.username)
else:
mock_task.assert_not_called()
def test_handle_professional(self, mock_task, mock_get_programs):
""" Verify the task can handle both professional and no-id-professional modes. """
mock_get_programs.return_value = [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key, type='professional'),
]),
]
),
]
GeneratedCertificateFactory(
user=self.alice,
course_id=self.course_run_key,
mode=CourseMode.PROFESSIONAL,
status=CertificateStatuses.downloadable,
)
GeneratedCertificateFactory(
user=self.bob,
course_id=self.course_run_key,
mode=CourseMode.NO_ID_PROFESSIONAL_MODE,
status=CertificateStatuses.downloadable,
)
call_command('backpopulate_program_credentials', commit=True)
# The task should be called for both users since professional and no-id-professional are equivalent.
mock_task.assert_has_calls([mock.call(self.alice.username), mock.call(self.bob.username)], any_order=True)
@ddt.data(SEPARATE_PROGRAMS, SEPARATE_COURSES, SAME_COURSE)
def test_handle_flatten(self, hierarchy_type, mock_task, mock_get_programs):
"""Verify that program structures are flattened correctly."""
mock_get_programs.return_value = self._get_programs_data(hierarchy_type)
GeneratedCertificateFactory(
user=self.alice,
course_id=self.course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
GeneratedCertificateFactory(
user=self.bob,
course_id=self.alternate_course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
call_command('backpopulate_program_credentials', commit=True)
calls = [
mock.call(self.alice.username),
mock.call(self.bob.username)
]
mock_task.assert_has_calls(calls, any_order=True)
def test_handle_username_dedup(self, mock_task, mock_get_programs):
"""
Verify that only one task is enqueued for a user with multiple eligible
course run certificates.
"""
data = [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key),
CourseRunFactory(key=self.alternate_course_run_key),
]),
]
),
]
mock_get_programs.return_value = data
GeneratedCertificateFactory(
user=self.alice,
course_id=self.course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
GeneratedCertificateFactory(
user=self.alice,
course_id=self.alternate_course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
call_command('backpopulate_program_credentials', commit=True)
mock_task.assert_called_once_with(self.alice.username)
def test_handle_mode_slugs(self, mock_task, mock_get_programs):
"""
Verify that course run types are taken into account when identifying
qualifying course run certificates.
"""
data = [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key, type='honor'),
]),
]
),
]
mock_get_programs.return_value = data
GeneratedCertificateFactory(
user=self.alice,
course_id=self.course_run_key,
mode=MODES.honor,
status=CertificateStatuses.downloadable,
)
GeneratedCertificateFactory(
user=self.bob,
course_id=self.course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
call_command('backpopulate_program_credentials', commit=True)
mock_task.assert_called_once_with(self.alice.username)
def test_handle_passing_status(self, mock_task, mock_get_programs):
"""
Verify that only course run certificates with a passing status are selected.
"""
data = [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key),
]),
]
),
]
mock_get_programs.return_value = data
passing_status = CertificateStatuses.downloadable
failing_status = CertificateStatuses.notpassing
self.assertIn(passing_status, CertificateStatuses.PASSED_STATUSES)
self.assertNotIn(failing_status, CertificateStatuses.PASSED_STATUSES)
GeneratedCertificateFactory(
user=self.alice,
course_id=self.course_run_key,
mode=MODES.verified,
status=passing_status,
)
GeneratedCertificateFactory(
user=self.bob,
course_id=self.course_run_key,
mode=MODES.verified,
status=failing_status,
)
call_command('backpopulate_program_credentials', commit=True)
mock_task.assert_called_once_with(self.alice.username)
@mock.patch(COMMAND_MODULE + '.logger.exception')
def test_handle_enqueue_failure(self, mock_log, mock_task, mock_get_programs):
"""Verify that failure to enqueue a task doesn't halt execution."""
def side_effect(username):
"""Simulate failure to enqueue a task."""
if username == self.alice.username:
raise Exception
mock_task.side_effect = side_effect
data = [
ProgramFactory(
courses=[
CourseFactory(course_runs=[
CourseRunFactory(key=self.course_run_key),
]),
]
),
]
mock_get_programs.return_value = data
GeneratedCertificateFactory(
user=self.alice,
course_id=self.course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
GeneratedCertificateFactory(
user=self.bob,
course_id=self.course_run_key,
mode=MODES.verified,
status=CertificateStatuses.downloadable,
)
call_command('backpopulate_program_credentials', commit=True)
self.assertTrue(mock_log.called)
calls = [
mock.call(self.alice.username),
mock.call(self.bob.username)
]
mock_task.assert_has_calls(calls, any_order=True)
| agpl-3.0 |
0359xiaodong/fb-adb | mk-fingerprint.py | 5 | 2543 | #!/usr/bin/env python3
# -*- python-indent-offset: 2 -*-
# Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in
# the LICENSE file in the root directory of this source tree. An
# additional grant of patent rights can be found in the PATENTS file
# in the same directory.
# This program reads all the ar-archives (.a files, in practice) on
# the command line and prints the SHA256 digest (base64-encoded for
# brevity) of all the contents of all the archives, considered in the
# order they're given in the archive files and on the command
# line, respectively.
#
# We do _not_ include archive metadata, like UIDs, modtimes, and so on
# in the digest, so we can use the hash we compute for deterministic
# build identification. (We do include the file names
# themselves, however.)
#
# Why not "ar D"? Because we don't always use GNU ar.
#
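# Illustrative invocation (the archive name and the digest shown are made up):
#
#   $ ./mk-fingerprint.py out/libfbadb.a
#   Qm9ndXNGaW5nZXJwcmludA
#
# The printed fingerprint is the first 16 bytes of the SHA256 digest,
# base64-encoded with '@' and '_' as the two extra alphabet characters and
# with the trailing '=' padding stripped, i.e. 22 characters.
#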
import sys
import hashlib
import logging
from os.path import basename
from argparse import ArgumentParser
import arpy
from base64 import b64encode
log = logging.getLogger(basename(sys.argv[0]))
def main(argv):
p = ArgumentParser(
prog=basename(argv[0]),
description="Hash the contents of archives")
p.add_argument("--debug", action="store_true",
help="Enable debugging output")
p.add_argument("archives", metavar="ARCHIVES", nargs="*")
args = p.parse_args(argv[1:])
root_logger = logging.getLogger()
logging.basicConfig()
if args.debug:
root_logger.setLevel(logging.DEBUG)
else:
root_logger.setLevel(logging.INFO)
hash = hashlib.sha256()
for archive_filename in args.archives:
with open(archive_filename, "rb") as archive_file:
archive = arpy.Archive(fileobj=archive_file)
log.debug("opened archive %r", archive_filename)
for arfile in archive:
hash.update(arfile.header.name)
nbytes = 0
filehash = hashlib.sha256()
while True:
buf = arfile.read(32768)
if not buf:
break
hash.update(buf)
filehash.update(buf)
nbytes += len(buf)
log.debug("hashed %s/%s %r %s bytes",
archive_filename,
arfile.header.name.decode("utf-8"),
filehash.hexdigest(),
nbytes)
# 128 bits of entropy is enough for anyone
digest = hash.digest()[:16]
log.debug("digest %r", digest)
print(b64encode(digest, b"@_").decode("ascii").rstrip("="))
if __name__ == "__main__":
sys.exit(main(sys.argv))
| bsd-3-clause |
andrewleech/SickRage | lib/tvdb_api/tvdb_ui.py | 92 | 5494 | #!/usr/bin/env python2
#encoding:utf-8
#author:dbr/Ben
#project:tvdb_api
#repository:http://github.com/dbr/tvdb_api
#license:unlicense (http://unlicense.org/)
"""Contains included user interfaces for Tvdb show selection.
A UI is a callback class; its __init__ function takes two arguments:
- config, which is the Tvdb config dict, set up in tvdb_api.py
- log, which is Tvdb's logger instance (which uses the logging module). You can
call log.info(), log.warning(), etc.
It must have a method "selectSeries"; this is passed a list of dicts, each dict
containing the keys "name" (the human-readable show name) and "sid" (the show's
ID as on thetvdb.com). For example:
[{'name': u'Lost', 'sid': u'73739'},
{'name': u'Lost Universe', 'sid': u'73181'}]
The "selectSeries" method must return the appropriate dict, or it can raise
tvdb_userabort (if the selection is aborted), tvdb_shownotfound (if the show
cannot be found).
A simple example callback, which returns a random series:
>>> import random
>>> from tvdb_ui import BaseUI
>>> class RandomUI(BaseUI):
... def selectSeries(self, allSeries):
... import random
... return random.choice(allSeries)
Then to use it..
>>> from tvdb_api import Tvdb
>>> t = Tvdb(custom_ui = RandomUI)
>>> random_matching_series = t['Lost']
>>> type(random_matching_series)
<class 'tvdb_api.Show'>
"""
__author__ = "dbr/Ben"
__version__ = "1.9"
import logging
import warnings
from tvdb_exceptions import tvdb_userabort
def log():
return logging.getLogger(__name__)
class BaseUI:
    """Default non-interactive UI, which auto-selects the first result
"""
def __init__(self, config, log = None):
self.config = config
if log is not None:
            warnings.warn("the UI's log parameter is deprecated, instead\n"
                "use import logging; logging.getLogger('ui').info('blah')\n"
"The self.log attribute will be removed in the next version")
self.log = logging.getLogger(__name__)
def selectSeries(self, allSeries):
return allSeries[0]
class ConsoleUI(BaseUI):
    """Interactively allows the user to select a show from a console-based UI
"""
def _displaySeries(self, allSeries, limit = 6):
"""Helper function, lists series with corresponding ID
"""
if limit is not None:
toshow = allSeries[:limit]
else:
toshow = allSeries
print "TVDB Search Results:"
for i, cshow in enumerate(toshow):
i_show = i + 1 # Start at more human readable number 1 (not 0)
            log().debug('Showing allSeries[%s], series %s' % (i_show, allSeries[i]['seriesname']))
if i == 0:
extra = " (default)"
else:
extra = ""
print "%s -> %s [%s] # http://thetvdb.com/?tab=series&id=%s&lid=%s%s" % (
i_show,
cshow['seriesname'].encode("UTF-8", "ignore"),
cshow['language'].encode("UTF-8", "ignore"),
str(cshow['id']),
cshow['lid'],
extra
)
def selectSeries(self, allSeries):
self._displaySeries(allSeries)
if len(allSeries) == 1:
# Single result, return it!
print "Automatically selecting only result"
return allSeries[0]
if self.config['select_first'] is True:
print "Automatically returning first search result"
return allSeries[0]
while True: # return breaks this loop
try:
print "Enter choice (first number, return for default, 'all', ? for help):"
ans = raw_input()
except KeyboardInterrupt:
                raise tvdb_userabort("User aborted (^c keyboard interrupt)")
except EOFError:
raise tvdb_userabort("User aborted (EOF received)")
log().debug('Got choice of: %s' % (ans))
try:
selected_id = int(ans) - 1 # The human entered 1 as first result, not zero
except ValueError: # Input was not number
if len(ans.strip()) == 0:
# Default option
log().debug('Default option, returning first series')
return allSeries[0]
if ans == "q":
log().debug('Got quit command (q)')
raise tvdb_userabort("User aborted ('q' quit command)")
elif ans == "?":
print "## Help"
print "# Enter the number that corresponds to the correct show."
print "# a - display all results"
print "# all - display all results"
print "# ? - this help"
print "# q - abort tvnamer"
print "# Press return with no input to select first result"
elif ans.lower() in ["a", "all"]:
self._displaySeries(allSeries, limit = None)
else:
log().debug('Unknown keypress %s' % (ans))
else:
log().debug('Trying to return ID: %d' % (selected_id))
try:
return allSeries[selected_id]
except IndexError:
log().debug('Invalid show number entered!')
                    print "Invalid number (%s) selected!" % ans
self._displaySeries(allSeries)
| gpl-3.0 |
Bulochkin/tensorflow_pack | tensorflow/contrib/makefile/downloads/protobuf/python/google/protobuf/internal/service_reflection_test.py | 75 | 5170 | #! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.internal.service_reflection."""
__author__ = '[email protected] (Petar Petrov)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import unittest_pb2
from google.protobuf import service_reflection
from google.protobuf import service
class FooUnitTest(unittest.TestCase):
def testService(self):
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request, response, callback):
self.method = method
self.controller = controller
self.request = request
callback(response)
class MockRpcController(service.RpcController):
def SetFailed(self, msg):
self.failure_message = msg
self.callback_response = None
class MyService(unittest_pb2.TestService):
pass
self.callback_response = None
def MyCallback(response):
self.callback_response = response
rpc_controller = MockRpcController()
channel = MockRpcChannel()
srvc = MyService()
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual('Method Foo not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
rpc_controller.failure_message = None
service_descriptor = unittest_pb2.TestService.GetDescriptor()
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertEqual('Method Bar not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
class MyServiceImpl(unittest_pb2.TestService):
def Foo(self, rpc_controller, request, done):
self.foo_called = True
def Bar(self, rpc_controller, request, done):
self.bar_called = True
srvc = MyServiceImpl()
rpc_controller.failure_message = None
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.foo_called)
rpc_controller.failure_message = None
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.bar_called)
def testServiceStub(self):
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request,
response_class, callback):
self.method = method
self.controller = controller
self.request = request
callback(response_class())
self.callback_response = None
def MyCallback(response):
self.callback_response = response
channel = MockRpcChannel()
stub = unittest_pb2.TestService_Stub(channel)
rpc_controller = 'controller'
request = 'request'
# GetDescriptor now static, still works as instance method for compatibility
self.assertEqual(unittest_pb2.TestService_Stub.GetDescriptor(),
stub.GetDescriptor())
# Invoke method.
stub.Foo(rpc_controller, request, MyCallback)
self.assertIsInstance(self.callback_response, unittest_pb2.FooResponse)
self.assertEqual(request, channel.request)
self.assertEqual(rpc_controller, channel.controller)
self.assertEqual(stub.GetDescriptor().methods[0], channel.method)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
liuyang1/vimwiki_utils | dfsvw.py | 1 | 2418 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""dfsvw.py
Create site map for vimwiki.
Must run script under vimwiki directory.
Example:
output readable site map to stdout
dfsvw.py
output vimwiki-format sitemap to file
dfsvw.py > map.wiki
Python version: 2"""
import sys
import re
linkRE = re.compile(r'(?<=\[\[)\w+(?=[|\]])', re.UNICODE)
nameRE = re.compile(r'(?<=[|\[])[^|\[\]]+(?=\]\])', re.UNICODE)
def probeLink(s):
"""
>>> probeLink(' abc')
[]
>>> probeLink(' [[]]')
[]
>>> probeLink(' [[abc]] [[abc|123]] ')
[('abc', 'abc'), ('abc', '123')]
>>> a, b = probeLink(' [[中文1|中文2]]')[0]; print a, b
中文1 中文2
>>> probeLink(' [[abc|abc 123]]')
[('abc', 'abc 123')]
"""
try:
ns = s.decode('utf8')
except UnicodeDecodeError:
return []
ret = zip(linkRE.findall(ns), nameRE.findall(ns))
return [(a.encode('utf8'), b.encode('utf8')) for a, b in ret]
def probeAllLink(fn):
    """Extract all links in a wiki file."""
try:
lst = []
fp = open(fn)
for line in fp.readlines():
l = probeLink(line)
lst.extend(l)
except IOError:
print >> sys.stderr, "cannot open", fn
return lst
def outputNode(link, name, level):
"""output one wiki node, level for vimwiki tree hierarchy"""
prefix = " " * 4 * level
dct = {"prefix":prefix, "link":link, "name":name}
if sys.stdout.isatty():
fmt = "{prefix}{name}"
else:
if link == name:
fmt = "{prefix}[[{name}]]"
else:
fmt = "{prefix}[[{link}|{name}]]"
print fmt.format(**dct)
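# For illustration (wiki names made up): a node with link 'todo', name
# 'My TODO list' and level 2 is printed by outputNode as
#   '        My TODO list'                when stdout is a terminal, and
#   '        [[todo|My TODO list]]'       when output is redirected to a file.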
def dfs(index):
    """Depth-first search over the vimwiki.
    Skips already-visited wiki files to avoid an infinite loop."""
index = index.decode('utf8')
lst = [(index, index, 0)]
hist = []
while 1:
try:
link, name, level = lst.pop(0)
except IndexError:
break
if link in hist:
print >> sys.stderr, "link {} already in map".format(link)
continue
hist.append(link)
outputNode(link, name, level)
ret = probeAllLink(link + '.wiki')
level += 1
levelret = [(node[0], node[1], level) for node in ret]
lst = levelret + lst
if __name__ == "__main__":
if len(sys.argv) > 1:
print __doc__
sys.exit(1)
dfs('index')
| mit |
JumpLink/jade2html2jade | ThirdParty/js-beautify/python/jsbeautifier/tests/test-perf-jsbeautifier.py | 7 | 1142 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import copy
import jsbeautifier
options = jsbeautifier.default_options()
options.wrap_line_length = 80
data = ''
data_min = ''
def beautifier_test_underscore():
jsbeautifier.beautify(data, options)
def beautifier_test_underscore_min():
jsbeautifier.beautify(data_min, options)
def report_perf(fn):
import timeit
iter = 50
time = timeit.timeit(fn + "()", setup="from __main__ import " + fn + "; gc.enable()", number=iter)
print(fn + ": " + str(iter/time) + " cycles/sec")
if __name__ == '__main__':
dirname = os.path.dirname(os.path.abspath(__file__))
underscore_file = os.path.join(dirname, "../../../", "test/underscore.js")
underscore_min_file = os.path.join(dirname, "../../../", "test/underscore-min.js")
data = copy.copy(''.join(open(underscore_file).readlines()))
data_min = copy.copy(''.join(open(underscore_min_file).readlines()))
# warm up
beautifier_test_underscore()
beautifier_test_underscore_min()
report_perf("beautifier_test_underscore")
report_perf("beautifier_test_underscore_min")
| mit |
PearsonIOKI/compose-forum | askbot/migrations/0121_auto__add_field_groupprofile_is_open__add_field_groupprofile_preapprov.py | 17 | 29105 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'GroupProfile.is_open'
db.add_column('askbot_groupprofile', 'is_open',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
# Adding field 'GroupProfile.preapproved_emails'
db.add_column('askbot_groupprofile', 'preapproved_emails',
self.gf('django.db.models.fields.TextField')(default='', null=True, blank=True),
keep_default=False)
# Adding field 'GroupProfile.preapproved_email_domains'
db.add_column('askbot_groupprofile', 'preapproved_email_domains',
self.gf('django.db.models.fields.TextField')(default='', null=True, blank=True),
keep_default=False)
# Adding unique constraint on 'GroupMembership', fields ['group', 'user']
db.create_unique('askbot_groupmembership', ['group_id', 'user_id'])
def backwards(self, orm):
# Removing unique constraint on 'GroupMembership', fields ['group', 'user']
db.delete_unique('askbot_groupmembership', ['group_id', 'user_id'])
# Deleting field 'GroupProfile.is_open'
db.delete_column('askbot_groupprofile', 'is_open')
# Deleting field 'GroupProfile.preapproved_emails'
db.delete_column('askbot_groupprofile', 'preapproved_emails')
# Deleting field 'GroupProfile.preapproved_email_domains'
db.delete_column('askbot_groupprofile', 'preapproved_email_domains')
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Post']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'askbot.emailfeedsetting': {
'Meta': {'unique_together': "(('subscriber', 'feed_type'),)", 'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.groupmembership': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupMembership'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_memberships'", 'to': "orm['askbot.Tag']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'group_memberships'", 'to': "orm['auth.User']"})
},
'askbot.groupprofile': {
'Meta': {'object_name': 'GroupProfile'},
'group_tag': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'group_profile'", 'unique': 'True', 'to': "orm['askbot.Tag']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'logo_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'moderate_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'preapproved_email_domains': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'preapproved_emails': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.post': {
'Meta': {'object_name': 'Post'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_posts'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'old_answer_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_comment_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'old_question_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'post_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'posts'", 'null': 'True', 'blank': 'True', 'to': "orm['askbot.Thread']"}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.postflagreason': {
'Meta': {'object_name': 'PostFlagReason'},
'added_at': ('django.db.models.fields.DateTimeField', [], {}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'details': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post_reject_reasons'", 'to': "orm['askbot.Post']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'askbot.postrevision': {
'Meta': {'ordering': "('-revision',)", 'unique_together': "(('post', 'revision'),)", 'object_name': 'PostRevision'},
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'approved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'postrevisions'", 'to': "orm['auth.User']"}),
'by_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'revision_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '125', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Post']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.replyaddress': {
'Meta': {'object_name': 'ReplyAddress'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'allowed_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '150'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reply_addresses'", 'to': "orm['askbot.Post']"}),
'response_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'edit_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'tag_wiki': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'described_tag'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.thread': {
'Meta': {'object_name': 'Thread'},
'accepted_answer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['askbot.Post']"}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'unused_favorite_threads'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_threads'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'unused_last_active_in_threads'", 'to': "orm['auth.User']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'threads'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('user', 'voted_post'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'voted_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['askbot.Post']"})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot'] | gpl-3.0 |
wrouesnel/ansible | test/units/modules/network/f5/test_bigip_irule.py | 24 | 8316 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.compat.tests.mock import mock_open
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import PY3
try:
from library.bigip_irule import Parameters
from library.bigip_irule import ModuleManager
from library.bigip_irule import ArgumentSpec
from library.bigip_irule import GtmManager
from library.bigip_irule import LtmManager
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_irule import Parameters
from ansible.modules.network.f5.bigip_irule import ModuleManager
from ansible.modules.network.f5.bigip_irule import ArgumentSpec
from ansible.modules.network.f5.bigip_irule import GtmManager
from ansible.modules.network.f5.bigip_irule import LtmManager
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class BigIpObj(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class TestParameters(unittest.TestCase):
def test_module_parameters_ltm(self):
content = load_fixture('create_ltm_irule.tcl')
args = dict(
content=content,
module='ltm',
name='foo',
state='present'
)
p = Parameters(params=args)
assert p.content == content.strip()
def test_module_parameters_gtm(self):
content = load_fixture('create_gtm_irule.tcl')
args = dict(
content=content,
module='gtm',
name='foo',
state='present'
)
p = Parameters(params=args)
assert p.content == content.strip()
def test_api_parameters_ltm(self):
content = load_fixture('create_ltm_irule.tcl')
args = dict(
apiAnonymous=content
)
p = Parameters(params=args)
assert p.content == content.strip()
def test_return_api_params(self):
content = load_fixture('create_ltm_irule.tcl')
args = dict(
content=content,
module='ltm',
name='foo',
state='present'
)
p = Parameters(params=args)
params = p.api_params()
assert 'apiAnonymous' in params
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.ltm_irules = []
self.gtm_irules = []
members = load_fixture('load_ltm_irules.json')
for item in members:
self.ltm_irules.append(BigIpObj(**item))
members = load_fixture('load_gtm_irules.json')
for item in members:
self.gtm_irules.append(BigIpObj(**item))
def test_create_ltm_irule(self, *args):
set_module_args(dict(
name='foo',
module='ltm',
content='this is my content',
partition='Common',
server='localhost',
password='password',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode,
mutually_exclusive=self.spec.mutually_exclusive,
)
# Override methods in the specific type of manager
tm = LtmManager(module=module, params=module.params)
tm.exists = Mock(side_effect=[False, True])
tm.create_on_device = Mock(return_value=True)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.get_manager = Mock(return_value=tm)
results = mm.exec_module()
assert results['changed'] is True
assert results['content'] == 'this is my content'
def test_create_gtm_irule(self, *args):
set_module_args(dict(
name='foo',
module='gtm',
content='this is my content',
partition='Common',
server='localhost',
password='password',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode,
mutually_exclusive=self.spec.mutually_exclusive,
)
# Override methods in the specific type of manager
tm = GtmManager(module=module, params=module.params)
tm.exists = Mock(side_effect=[False, True])
tm.create_on_device = Mock(return_value=True)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.get_manager = Mock(return_value=tm)
results = mm.exec_module()
assert results['changed'] is True
assert results['content'] == 'this is my content'
def test_create_gtm_irule_src(self, *args):
set_module_args(dict(
name='foo',
module='gtm',
src='{0}/create_ltm_irule.tcl'.format(fixture_path),
partition='Common',
server='localhost',
password='password',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode,
mutually_exclusive=self.spec.mutually_exclusive,
)
if PY3:
builtins_name = 'builtins'
else:
builtins_name = '__builtin__'
with patch(builtins_name + '.open', mock_open(read_data='this is my content'), create=True):
# Override methods in the specific type of manager
tm = GtmManager(module=module, params=module.params)
tm.exists = Mock(side_effect=[False, True])
tm.create_on_device = Mock(return_value=True)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.get_manager = Mock(return_value=tm)
results = mm.exec_module()
assert results['changed'] is True
assert results['content'] == 'this is my content'
assert results['module'] == 'gtm'
assert results['src'] == '{0}/create_ltm_irule.tcl'.format(fixture_path)
assert len(results.keys()) == 4
def test_module_mutual_exclusion(self, *args):
set_module_args(dict(
content='foo',
module='ltm',
name='foo',
state='present',
src='/path/to/irules/foo.tcl',
partition='Common',
server='localhost',
password='password',
user='admin'
))
with patch('ansible.module_utils.basic.AnsibleModule.fail_json', unsafe=True) as mo:
AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode,
mutually_exclusive=self.spec.mutually_exclusive,
)
mo.assert_called_once()
| gpl-3.0 |
jhg/django | django/views/generic/edit.py | 19 | 11221 | import inspect
import re
import warnings
from django.core.exceptions import ImproperlyConfigured
from django.forms import models as model_forms
from django.http import HttpResponseRedirect
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
from django.views.generic.base import ContextMixin, TemplateResponseMixin, View
from django.views.generic.detail import (
BaseDetailView, SingleObjectMixin, SingleObjectTemplateResponseMixin,
)
PERCENT_PLACEHOLDER_REGEX = re.compile(r'%\([^\)]+\)') # RemovedInDjango20Warning
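# Deprecation shim (RemovedInDjango20Warning): wraps get_form() overrides that
# still require a form_class argument, so calls without one keep working until
# the old signature is dropped in Django 2.0.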
class FormMixinBase(type):
def __new__(cls, name, bases, attrs):
get_form = attrs.get('get_form')
if get_form and inspect.isfunction(get_form):
try:
inspect.getcallargs(get_form, None)
except TypeError:
warnings.warn(
"`%s.%s.get_form` method must define a default value for "
"its `form_class` argument." % (attrs['__module__'], name),
RemovedInDjango20Warning, stacklevel=2
)
def get_form_with_form_class(self, form_class=None):
if form_class is None:
form_class = self.get_form_class()
return get_form(self, form_class=form_class)
attrs['get_form'] = get_form_with_form_class
return super(FormMixinBase, cls).__new__(cls, name, bases, attrs)
class FormMixin(six.with_metaclass(FormMixinBase, ContextMixin)):
"""
A mixin that provides a way to show and handle a form in a request.
"""
initial = {}
form_class = None
success_url = None
prefix = None
def get_initial(self):
"""
Returns the initial data to use for forms on this view.
"""
return self.initial.copy()
def get_prefix(self):
"""
Returns the prefix to use for forms on this view
"""
return self.prefix
def get_form_class(self):
"""
Returns the form class to use in this view
"""
return self.form_class
def get_form(self, form_class=None):
"""
Returns an instance of the form to be used in this view.
"""
if form_class is None:
form_class = self.get_form_class()
return form_class(**self.get_form_kwargs())
def get_form_kwargs(self):
"""
Returns the keyword arguments for instantiating the form.
"""
kwargs = {
'initial': self.get_initial(),
'prefix': self.get_prefix(),
}
if self.request.method in ('POST', 'PUT'):
kwargs.update({
'data': self.request.POST,
'files': self.request.FILES,
})
return kwargs
def get_success_url(self):
"""
Returns the supplied success URL.
"""
if self.success_url:
# Forcing possible reverse_lazy evaluation
url = force_text(self.success_url)
else:
raise ImproperlyConfigured(
"No URL to redirect to. Provide a success_url.")
return url
def form_valid(self, form):
"""
If the form is valid, redirect to the supplied URL.
"""
return HttpResponseRedirect(self.get_success_url())
def form_invalid(self, form):
"""
If the form is invalid, re-render the context data with the
data-filled form and errors.
"""
return self.render_to_response(self.get_context_data())
def get_context_data(self, **kwargs):
"""
Insert the form into the context dict.
"""
kwargs.setdefault('form', self.get_form())
return super(FormMixin, self).get_context_data(**kwargs)
class ModelFormMixin(FormMixin, SingleObjectMixin):
"""
A mixin that provides a way to show and handle a modelform in a request.
"""
fields = None
def get_form_class(self):
"""
Returns the form class to use in this view.
"""
if self.fields is not None and self.form_class:
raise ImproperlyConfigured(
"Specifying both 'fields' and 'form_class' is not permitted."
)
if self.form_class:
return self.form_class
else:
if self.model is not None:
# If a model has been explicitly provided, use it
model = self.model
elif hasattr(self, 'object') and self.object is not None:
# If this view is operating on a single object, use
# the class of that object
model = self.object.__class__
else:
# Try to get a queryset and extract the model class
# from that
model = self.get_queryset().model
if self.fields is None:
raise ImproperlyConfigured(
"Using ModelFormMixin (base class of %s) without "
"the 'fields' attribute is prohibited." % self.__class__.__name__
)
return model_forms.modelform_factory(model, fields=self.fields)
def get_form_kwargs(self):
"""
Returns the keyword arguments for instantiating the form.
"""
kwargs = super(ModelFormMixin, self).get_form_kwargs()
if hasattr(self, 'object'):
kwargs.update({'instance': self.object})
return kwargs
def get_success_url(self):
"""
        Returns the supplied URL, or the object's get_absolute_url() if no success_url is given.
"""
if self.success_url:
# force_text can be removed with deprecation warning
self.success_url = force_text(self.success_url)
if PERCENT_PLACEHOLDER_REGEX.search(self.success_url):
warnings.warn(
"%()s placeholder style in success_url is deprecated. "
"Please replace them by the {} Python format syntax.",
RemovedInDjango20Warning, stacklevel=2
)
url = self.success_url % self.object.__dict__
else:
url = self.success_url.format(**self.object.__dict__)
else:
try:
url = self.object.get_absolute_url()
except AttributeError:
raise ImproperlyConfigured(
"No URL to redirect to. Either provide a url or define"
" a get_absolute_url method on the Model.")
return url
def form_valid(self, form):
"""
If the form is valid, save the associated model.
"""
self.object = form.save()
return super(ModelFormMixin, self).form_valid(form)
class ProcessFormView(View):
"""
A mixin that renders a form on GET and processes it on POST.
"""
def get(self, request, *args, **kwargs):
"""
Handles GET requests and instantiates a blank version of the form.
"""
return self.render_to_response(self.get_context_data())
def post(self, request, *args, **kwargs):
"""
Handles POST requests, instantiating a form instance with the passed
        POST variables and then checking it for validity.
"""
form = self.get_form()
if form.is_valid():
return self.form_valid(form)
else:
return self.form_invalid(form)
# PUT is a valid HTTP verb for creating (with a known URL) or editing an
# object, note that browsers only support POST for now.
def put(self, *args, **kwargs):
return self.post(*args, **kwargs)
class BaseFormView(FormMixin, ProcessFormView):
"""
A base view for displaying a form
"""
class FormView(TemplateResponseMixin, BaseFormView):
"""
A view for displaying a form, and rendering a template response.
"""
class BaseCreateView(ModelFormMixin, ProcessFormView):
"""
    Base view for creating a new object instance.
Using this base class requires subclassing to provide a response mixin.
"""
def get(self, request, *args, **kwargs):
self.object = None
return super(BaseCreateView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = None
return super(BaseCreateView, self).post(request, *args, **kwargs)
class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView):
"""
View for creating a new object instance,
with a response rendered by template.
"""
template_name_suffix = '_form'
class BaseUpdateView(ModelFormMixin, ProcessFormView):
"""
Base view for updating an existing object.
Using this base class requires subclassing to provide a response mixin.
"""
def get(self, request, *args, **kwargs):
self.object = self.get_object()
return super(BaseUpdateView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = self.get_object()
return super(BaseUpdateView, self).post(request, *args, **kwargs)
class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView):
"""
View for updating an object,
with a response rendered by template.
"""
template_name_suffix = '_form'
class DeletionMixin(object):
"""
A mixin providing the ability to delete objects
"""
success_url = None
def delete(self, request, *args, **kwargs):
"""
Calls the delete() method on the fetched object and then
redirects to the success URL.
"""
self.object = self.get_object()
success_url = self.get_success_url()
self.object.delete()
return HttpResponseRedirect(success_url)
# Add support for browsers which only accept GET and POST for now.
def post(self, request, *args, **kwargs):
return self.delete(request, *args, **kwargs)
def get_success_url(self):
if self.success_url:
# force_text can be removed with deprecation warning
self.success_url = force_text(self.success_url)
if PERCENT_PLACEHOLDER_REGEX.search(self.success_url):
warnings.warn(
"%()s placeholder style in success_url is deprecated. "
"Please replace them by the {} Python format syntax.",
RemovedInDjango20Warning, stacklevel=2
)
return self.success_url % self.object.__dict__
else:
return self.success_url.format(**self.object.__dict__)
else:
raise ImproperlyConfigured(
"No URL to redirect to. Provide a success_url.")
class BaseDeleteView(DeletionMixin, BaseDetailView):
"""
Base view for deleting an object.
Using this base class requires subclassing to provide a response mixin.
"""
class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView):
"""
View for deleting an object retrieved with `self.get_object()`,
with a response rendered by template.
"""
template_name_suffix = '_confirm_delete'
| bsd-3-clause |
code-wukong/lolcomp | ws/views.py | 1 | 14685 | # ws/views.py
from django.http import HttpResponse
from django.http import HttpResponseRedirect
import json
from lolcomp.helpers import *
import os
import requests
from ws.models import *
# retrieve constants for app sitedown
def cst_sitedown(request):
if request.method == 'POST':
post = json.loads(request.body)
data = {
'static_url': os.environ.get('DJANGO_STATIC_HOST', False),
'jarvan': im_jarvan(),
}
if(post['test'] == 'wuju'):
data['compromised'] = True
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/")
# retrieve constants for app main
def cst_main(request):
if request.method == 'POST':
# Get Version
query_set = Static.objects.filter(label=CST['config'])
if(query_set):
version = json.loads(query_set[0].definition)['current_patch']
else:
version = 'error'
# Get Champ List
query_set = Static.objects.filter(label=CST['champ_list'])
if(query_set):
champ_list = json.loads(query_set[0].definition)
else:
champ_list = 'error'
data = {
'static_url': os.environ.get('DJANGO_STATIC_HOST', False),
'champ_list': champ_list,
'version': version
}
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/")
# retrieve constants for app internal
def cst_internal(request):
if request.method == 'POST':
post = json.loads(request.body)
data = {
'static_url': os.environ.get('DJANGO_STATIC_HOST', False),
'type': {
'synergy': CST['synergy'],
'counter': CST['counter']
}
}
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/")
# get/set config data singleton
def rw_static_def(request):
if request.method == 'POST':
'''
expected_object = {
label: "config",
mode: "write",
data: data_to_write
}
'''
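        # For mode == 'read' the same payload shape applies, but 'data' is
        # optional and only used as the initial definition when the singleton
        # does not exist yet.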
post = json.loads(request.body)
singleton = post['label']
data_to_write = post.get('data', {})
create_new_flag = False
if(post['mode'] == 'read'):
try:
query_set = Static.objects.filter(label=CST[singleton])
data = json.loads(query_set[0].definition)
except:
create_new_flag = True
data = data_to_write
elif(post['mode'] == 'write'):
# overwrite the previous data
try:
query_set = Static.objects.filter(label=CST[singleton])
obj = query_set[0]
obj.definition = json.dumps(data_to_write)
obj.save()
except:
create_new_flag = True
data = {
'status': "success - wrote " + CST[singleton],
}
if(create_new_flag == True):
static_create({
'label': CST[singleton],
'definition': json.dumps(data_to_write)
})
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/")
# send a request to riot api
def riot_api_request(request):
if request.method == 'POST':
post = json.loads(request.body)
settings = {
'api_key': os.environ.get('RIOT_API_KEY', '')
}
for p in post['params']:
settings[p] = post['params'][p]
r = requests.get(API[post['url']], params=settings)
data = {
'status': r.status_code,
'data': r.json()
}
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/")
# get champ_obj definitions for each champ in list
def get_champs_data(request):
if request.method == 'POST':
post = json.loads(request.body)
champs_to_retrieve = post['champs']
list_to_return = []
for i in champs_to_retrieve:
champ_obj = Champion.objects.get(label=i)
info = json.loads(champ_obj.definition)
list_to_return.append(info)
data = {
'data': list_to_return
}
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/")
# update the static data champ singleton
def update_champs_data(request):
if request.method == 'POST':
# Make call to Riot API for all champ data
settings = {
'champData': 'all',
'api_key': os.environ.get('RIOT_API_KEY', '')
}
url = API['static_data']
r = requests.get(url, params=settings)
static = r.json()
data = {
'status': r.status_code,
'patch': static.get('version', 'error'),
}
if data['patch'] == 'error':
data['response'] = r.json()
else:
# Record to DB
champ_list = []
for i in static['data']:
champ_list.append(static['data'][i]['name'])
query_set = Static.objects.filter(label=CST['champ_list'])
if(query_set):
static_obj = query_set[0]
static_obj.definition = json.dumps(champ_list)
static_obj.save()
else:
static_obj = Static(label=CST['champ_list'], definition=json.dumps(champ_list))
static_obj.save()
# overwrite the previous data
champion_db = Champion.objects.all()
if champion_db:
champion_db.delete()
skill_db = Skill.objects.all()
if skill_db:
skill_db.delete()
for info in static['data'].itervalues():
definition = {
CST['static']:{
'name': info['name'],
'key': info['key'],
'image': info['image'],
'skins': info['skins'],
'spells': info['spells'],
'passive': info['passive'],
'partype': info['partype'],
},
CST['synergy']: {},
CST['counter']: {}
}
obj = Champion(label=info['name'], definition=json.dumps(definition))
obj.save()
map = ["Q", "W", "E", "R"]
for i, skill in enumerate(info['spells']):
if(i<4):
obj_skill = Skill(label=skill['name'], definition=json.dumps(skill))
obj_skill.key = map[i]
obj_skill.champ = obj
obj_skill.save()
obj_passive = Skill(label=info['passive']['name'], definition=json.dumps(info['passive']))
obj_passive.champ = obj
obj_passive.key = "Passive"
obj_passive.save()
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/")
# generate relation objects for each relation rule
def apply_rules_to_db(request):
if request.method == 'POST':
data = {
"status": 200
}
# Apply Tag Rules
Tag.objects.all().delete()
obj = Static.objects.filter(label=CST['tag_defs'])
if(obj):
# create a tag obj in db for each rule
tag_defs = json.loads(obj[0].definition)
for rule in tag_defs:
new_tag = Tag(label=rule['label'], definition=json.dumps(rule))
new_tag.save()
# loop over each champion's skill and apply the tags accordingly
for champ_obj in Champion.objects.all():
for skill in champ_obj.skill_set.all():
skill_def = json.loads(skill.definition)
if(skill.key == 'Passive'):
info = skill_def['sanitizedDescription']
else:
info = skill_def['sanitizedTooltip']
for rule in tag_defs:
status = check_if_tag_qualifies(rule['key_words'], info)
if(status == True):
new_obj = Tag.objects.filter(label=rule['label'])
skill.tags.add(new_obj[0])
# Apply Relation Rules
Relation.objects.all().delete()
obj = Static.objects.filter(label=CST['relation_defs'])
if(obj):
relation_defs = json.loads(obj[0].definition)
for rule in relation_defs:
# check if tag or exception
type_k1 = rule['k1']['obj_type']
type_k2 = rule['k2']['obj_type']
skill_obj_k1 = [] # array of champ_objs
skill_obj_k2 = []
rel_def = {} # { champ : skill }
# case 1: k1=exception to k2=exception
if (type_k1 == 'exception') and (type_k2 == 'exception'):
# k1
champ_obj = Champion.objects.filter(label=rule['k1']['data']['champ'])
skill_obj = Skill.objects.filter(key=rule['k1']['data']['skill'], champ=champ_obj)[0]
skill_obj_k1.append(skill_obj)
# k2
champ_obj = Champion.objects.filter(label=rule['k2']['data']['champ'])
skill_obj = Skill.objects.filter(key=rule['k2']['data']['skill'], champ=champ_obj)[0]
skill_obj_k2.append(skill_obj)
# case 2: k1=exception to k2=tag
if (type_k1 == 'exception') and (type_k2 == 'tag'):
# k1
champ_obj = Champion.objects.filter(label=rule['k1']['data']['champ'])
skill_obj = Skill.objects.filter(key=rule['k1']['data']['skill'], champ=champ_obj)[0]
skill_obj_k1.append(skill_obj)
# k2
tag_obj = Tag.objects.filter(label=rule['k2']['data'])[0]
for skill_obj in tag_obj.skill_set.all():
skill_obj_k2.append(skill_obj)
# case 3: k1=tag to k2=exception
if (type_k1 == 'tag') and (type_k2 == 'exception'):
# k1
tag_obj = Tag.objects.filter(label=rule['k1']['data'])[0]
for skill_obj in tag_obj.skill_set.all():
skill_obj_k1.append(skill_obj)
# k2
champ_obj = Champion.objects.filter(label=rule['k2']['data']['champ'])
skill_obj = Skill.objects.filter(key=rule['k2']['data']['skill'], champ=champ_obj)[0]
skill_obj_k2.append(skill_obj)
# case 4: k1=tag to k2=tag
if (type_k1 == 'tag') and (type_k2 == 'tag'):
# k1
tag_obj = Tag.objects.filter(label=rule['k1']['data'])[0]
for skill_obj in tag_obj.skill_set.all():
skill_obj_k1.append(skill_obj)
# k2
tag_obj = Tag.objects.filter(label=rule['k2']['data'])[0]
for skill_obj in tag_obj.skill_set.all():
skill_obj_k2.append(skill_obj)
                # create a new relation between the champions owning skill_k1 and skill_k2
for skill_k1 in skill_obj_k1:
for skill_k2 in skill_obj_k2:
rel_def = {
"label": rule['label'],
"k1": {
"name": skill_k1.champ.label,
"key": skill_k1.key
},
"k2": {
"name": skill_k2.champ.label,
"key": skill_k2.key
},
'description': rule['description']
}
new_rule = Relation(definition=json.dumps(rel_def), type=rule['type'])
new_rule.save()
new_rule.champs.add(skill_k1.champ)
new_rule.champs.add(skill_k2.champ)
# Clear the old synergy and counter maps
for champ_obj in Champion.objects.all():
definition = json.loads(champ_obj.definition)
definition[CST['synergy']] = {}
definition[CST['counter']] = {}
champ_obj.definition = json.dumps(definition)
champ_obj.save()
# Build the relation map for each champ
for champ_obj in Champion.objects.all():
# create a synergy and counter tree for each champion
definition = json.loads(champ_obj.definition)
for rel_obj in Relation.objects.filter(champs=champ_obj):
# add each relation to the correct type tree under the partner's name
info = json.loads(rel_obj.definition)
if champ_obj.label == info['k1']['name']:
other_champ_name = info['k2']['name']
else:
other_champ_name = info['k1']['name']
relation_list = definition[rel_obj.type].get(other_champ_name, False)
if (not relation_list):
definition[rel_obj.type][other_champ_name] = []
definition[rel_obj.type][other_champ_name].append(info)
# record to champ_obj definition
champ_obj.definition = json.dumps(definition)
champ_obj.save()
return HttpResponse(json.dumps(data), content_type='application/json')
else:
return HttpResponseRedirect("/") | gpl-2.0 |
jungla/ICOM-fluidity-toolbox | 2D/RST/plot_T_spec_res.py | 1 | 8498 | import os, sys
import myfun
import numpy as np
import matplotlib as mpl
mpl.use('ps')
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from scipy import interpolate
import lagrangian_stats
import scipy.fftpack
## READ archive (too many points... somehow)
# args: name, dayi, dayf, days
label = 'm_50_6f'
label_50 = 'm_50_6f'
label_25 = 'm_25_1'
label_10 = 'm_10_1'
basename = 'mli'
dayi = 36
dayf = 49
days = 1
#label = sys.argv[1]
#basename = sys.argv[2]
#dayi = int(sys.argv[3])
#dayf = int(sys.argv[4])
#days = int(sys.argv[5])
path = './Temperature_CG/'
try: os.stat('./plot/'+label)
except OSError: os.mkdir('./plot/'+label)
# dimensions archives
# ML exp
Xlist_50 = np.linspace(0,2000,41)
Ylist_50 = np.linspace(0,2000,41)
Xlist_25 = np.linspace(0,2000,81)
Ylist_25 = np.linspace(0,2000,81)
Xlist_10 = np.linspace(0,2000,161)
Ylist_10 = np.linspace(0,2000,161)
Zlist = np.linspace(0,-50,51)
dl = [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1]
Zlist = np.cumsum(dl)
xn_50 = len(Xlist_50)
yn_50 = len(Ylist_50)
xn_25 = len(Xlist_25)
yn_25 = len(Ylist_25)
xn_10 = len(Xlist_10)
yn_10 = len(Ylist_10)
zn = len(Zlist)
dx_50 = np.diff(Xlist_50)
dx_25 = np.diff(Xlist_25)
dx_10 = np.diff(Xlist_10)
for time in range(dayi,dayf,days):
print 'time:', time
tlabel = str(time)
while len(tlabel) < 3: tlabel = '0'+tlabel
#Temperature_CG_m_50_6e_9.csv
file0_50 = path+'Temperature_CG_'+label_50+'_'+str(time)+'.csv'
file0_25 = path+'Temperature_CG_'+label_25+'_'+str(time)+'.csv'
file0_10 = path+'Temperature_CG_'+label_10+'_'+str(time)+'.csv'
file1 = 'Temperature_CG_'+label+'_'+str(time)
file1_50 = 'Temperature_CG_'+label_50+'_'+str(time)
file1_25 = 'Temperature_CG_'+label_25+'_'+str(time)
file1_10 = 'Temperature_CG_'+label_10+'_'+str(time)
#
# xn_50 = 101
# yn_50 = 101
# xn_25 = 101
# yn_25 = 101
T_50 = lagrangian_stats.read_Scalar(file0_50,zn,xn_50,yn_50)
T_25 = lagrangian_stats.read_Scalar(file0_25,zn,xn_25,yn_25)
T_10 = lagrangian_stats.read_Scalar(file0_10,zn,xn_10,yn_10)
# xn_50 = 41
# yn_50 = 41
# xn_25 = 81
# yn_25 = 81
# T_50 = T_50[:,0:xn_50,0:yn_50]
# T_25 = T_25[:,0:xn_25,0:yn_25]
# Xlist_50 = np.linspace(0,2000,xn_50)
# Ylist_50 = np.linspace(0,2000,yn_50)
# Xlist_25 = np.linspace(0,2000,xn_25)
# Ylist_25 = np.linspace(0,2000,yn_25)
FT_50 = np.zeros((xn_50/1,yn_50))
FT_25 = np.zeros((xn_25/1,yn_25))
FT_10 = np.zeros((xn_10/1,yn_10))
#
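  # For the chosen vertical level, FFT each y-row of temperature along x, take
  # the squared magnitude, and average over all rows (normalised by the number
  # of x points) to get a mean power spectrum for each of the three resolutions.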
for k in range(1):
for j in range(len(Ylist_50)):
tempfft = scipy.fftpack.fft(T_50[k,j,:],xn_50)
FT_50[:,j] = abs(tempfft)**2
w_50 = scipy.fftpack.fftfreq(xn_50, dx_50[1])
# w_50 = scipy.fftpack.fftshift(w_50)
FTp_50 = np.mean(FT_50,1)/xn_50
for j in range(len(Ylist_25)):
tempfft = scipy.fftpack.fft(T_25[k,j,:],xn_25)
FT_25[:,j] = abs(tempfft)**2
w_25 = scipy.fftpack.fftfreq(xn_25, dx_25[1])
# w_25 = scipy.fftpack.fftshift(w_25)
FTp_25 = np.mean(FT_25,1)/xn_25
for j in range(len(Ylist_10)):
tempfft = scipy.fftpack.fft(T_10[k,j,:],xn_10)
FT_10[:,j] = abs(tempfft)**2
w_10 = scipy.fftpack.fftfreq(xn_10, dx_10[1])
# w_10 = scipy.fftpack.fftshift(w_10)
FTp_10 = np.mean(FT_10,1)/xn_10
fig = plt.figure(figsize=(10,8))
p50, = plt.loglog(w_50[w_50>0], FTp_50[w_50>0],'b',linewidth=2)
p25, = plt.loglog(w_25[w_25>0], FTp_25[w_25>0],'r',linewidth=2)
p10, = plt.loglog(w_10[w_10>0], FTp_10[w_10>0],'k',linewidth=2)
plt.legend([p50,p25,p10],['$B50_m$','$B25_m$','$B10_m$'],fontsize=24,loc=3)
# pU, = plt.plot(w_50, FTp_50,'b',linewidth=2)
# pU, = plt.plot(w_25, FTp_25,'r',linewidth=2)
# plt.ylim(0,1)
# plt.plot([0.5*10**-3, 4*10**-3],[4*10**-3, 0.5*10**-3],'k',linewidth=1.5)
# plt.plot([0.5*10**-3, 4*10**-3],[3.*4*10**-3, 0.5*10**-3],'k',linewidth=1.5)
plt.plot([4*10**-3, 4*10**-2],[4*10**-1, 4*10**-(1+5/3.)],'k',linewidth=1.5)
plt.plot([4*10**-3, 4*10**-2],[4*10**-1, 4*10**-(1+3.)],'k',linewidth=1.5)
# plt.plot([4*10**-3, 4*10**-2],[4*10**-1, 4*10**-(1+1.)],'k',linewidth=1.5)
plt.text(5*10**-2, 4*10**-(1+5/3.), '-5/3',fontsize=24)
plt.text(5*10**-2, 4*10**-(1+3.), '-3',fontsize=24)
# plt.text(5*10**-2, 4*10**-(1+1.), '-1',fontsize=24)
# plt.text(0.3*10**-3, 3.*4*10**-3, '-3')
# plt.text(0.3*10**-3, 5./3.*4*10**-3, '-5/3')
plt.xscale('log')
# pU, = plt.loglog(w_10[w_10>0], FTp_10[w_10>0],'k.',linewidth=2)
plt.xlabel(r'k $[m^{-1}]$',fontsize=26)
plt.ylabel('Temperature PSD',fontsize=24)
# plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(1/np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)),fontsize=16)
#plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)*360000)/100,fontsize=16)
plt.yticks(fontsize=24)
# plt.xticks(fontsize=24)
plt.xticks([0.1,0.01,0.001,1/500.],[10**-1,10**-2,10**-3,1/500.],fontsize=24)
plt.xlim([1/2000.,1/10.])
plt.savefig('./plot/'+label+'/'+file1+'_'+str(Zlist[k])+'_spec.eps',bbox_inches='tight')
print './plot/'+label+'/'+file1+'_'+str(Zlist[k])+'_spec.eps'
plt.close()
#
# PDF
vals50,bins50 = np.histogram(T_50[k,:,:],50,(18.6,20.1),normed=True)
vals25,bins25 = np.histogram(T_25[k,:,:],50,(18.6,20.1),normed=True)
vals10,bins10 = np.histogram(T_10[k,:,:],50,(18.6,20.1),normed=True)
bins = np.linspace(18.6,19.8,50)
fig = plt.figure(figsize=(8,8))
ph50, = plt.plot(bins,vals50,'k--')
ph25, = plt.plot(bins,vals25,'k.-')
ph10, = plt.plot(bins,vals10,'k',linewidth=2)
plt.ylabel(r'PDF',fontsize=22)
plt.xlabel('Temperature $[^\circ C]$',fontsize=22)
# plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(1/np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)),fontsize=16)
#plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)*360000)/100,fontsize=16)
plt.yticks(fontsize=20)
plt.xticks(np.linspace(18.6,20.1,7),np.linspace(18.6,20.1,7),fontsize=20)
plt.tight_layout()
plt.legend([ph50,ph25,ph10],['$B50_m$','$B25_m$','$B10_m$'],loc=2,fontsize=20)
plt.savefig('./plot/'+label+'/'+file1+'_'+str(Zlist[k])+'_hist.eps')
print './plot/'+label+'/'+file1+'_'+str(Zlist[k])+'_hist.eps'
plt.close()
Tm = 18.6 #min(np.min(T_10[k,:,:]),np.min(T_25[k,:,:]),np.min(T_50[k,:,:]))
TM = 19.8 #max(np.max(T_10[k,:,:]),np.max(T_25[k,:,:]),np.max(T_50[k,:,:]))
# print Tm,TM
plt.contourf(Xlist_50/1000,Ylist_50/1000,T_50[k,:,:],np.linspace(Tm,TM,30),extend='both')
cb = plt.colorbar(ticks=np.linspace(Tm,TM,5))
cb.ax.tick_params(labelsize=22)
plt.xlabel('X [km]',fontsize=24)
plt.ylabel('Y [km]',fontsize=24)
plt.xticks(fontsize=22)
plt.yticks(fontsize=22)
plt.axes().set_aspect('equal')
plt.xlim(0,2)
plt.ylim(0,2)
#plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)*360000)/100,fontsize=16)
#plt.yticks(fontsize=16)
plt.savefig('./plot/'+label+'/'+file1_50+'_'+str(Zlist[k])+'.eps',bbox_inches='tight')
print './plot/'+label+'/'+file1_50+'_'+str(Zlist[k])+'.eps'
plt.close()
###
plt.contourf(Xlist_25/1000,Ylist_25/1000,T_25[k,:,:],np.linspace(Tm,TM,30),extend='both')
cb = plt.colorbar(ticks=np.linspace(Tm,TM,5))
cb.ax.tick_params(labelsize=22)
plt.xlabel('X [km]',fontsize=24)
plt.ylabel('Y [km]',fontsize=24)
plt.xticks(fontsize=22)
plt.yticks(fontsize=22)
plt.axes().set_aspect('equal')
plt.xlim(0,2)
plt.ylim(0,2)
#plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)*360000)/100,fontsize=16)
#plt.yticks(fontsize=16)
plt.savefig('./plot/'+label+'/'+file1_25+'_'+str(Zlist[k])+'.eps',bbox_inches='tight')
print './plot/'+label+'/'+file1_25+'_'+str(Zlist[k])+'.eps'
plt.close()
##
plt.contourf(Xlist_10/1000,Ylist_10/1000,T_10[k,:,:],np.linspace(Tm,TM,30),extend='both')
cb = plt.colorbar(ticks=np.linspace(Tm,TM,5))
cb.ax.tick_params(labelsize=22)
plt.xlabel('X [km]',fontsize=24)
plt.ylabel('Y [km]',fontsize=24)
plt.xticks(fontsize=22)
plt.yticks(fontsize=22)
plt.axes().set_aspect('equal')
plt.xlim(0,2)
plt.ylim(0,2)
#plt.xticks(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7),np.round(np.linspace(np.min(w[w>0]),np.max(w[w>0]),7)*360000)/100,fontsize=16)
#plt.yticks(fontsize=16)
plt.savefig('./plot/'+label+'/'+file1_10+'_'+str(Zlist[k])+'.eps',bbox_inches='tight')
print './plot/'+label+'/'+file1_10+'_'+str(Zlist[k])+'.eps'
plt.close()
###
##
| gpl-2.0 |
halvertoluke/edx-platform | common/lib/xmodule/xmodule/tests/test_import.py | 27 | 26954 | # -*- coding: utf-8 -*-
import datetime
import ddt
import unittest
from fs.memoryfs import MemoryFS
from lxml import etree
from mock import Mock, patch
from django.utils.timezone import UTC
from xmodule.xml_module import is_pointer_tag
from opaque_keys.edx.locations import Location
from xmodule.modulestore import only_xmodules
from xmodule.modulestore.xml import ImportSystem, XMLModuleStore, LibraryXMLModuleStore
from xmodule.modulestore.inheritance import compute_inherited_metadata
from xmodule.x_module import XModuleMixin
from xmodule.fields import Date
from xmodule.tests import DATA_DIR
from xmodule.modulestore.inheritance import InheritanceMixin
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xblock.core import XBlock
from xblock.fields import Scope, String, Integer
from xblock.runtime import KvsFieldData, DictKeyValueStore
ORG = 'test_org'
COURSE = 'test_course'
class DummySystem(ImportSystem):
@patch('xmodule.modulestore.xml.OSFS', lambda dir: MemoryFS())
def __init__(self, load_error_modules, library=False):
if library:
xmlstore = LibraryXMLModuleStore("data_dir", source_dirs=[], load_error_modules=load_error_modules)
else:
xmlstore = XMLModuleStore("data_dir", source_dirs=[], load_error_modules=load_error_modules)
course_id = SlashSeparatedCourseKey(ORG, COURSE, 'test_run')
course_dir = "test_dir"
error_tracker = Mock()
super(DummySystem, self).__init__(
xmlstore=xmlstore,
course_id=course_id,
course_dir=course_dir,
error_tracker=error_tracker,
load_error_modules=load_error_modules,
mixins=(InheritanceMixin, XModuleMixin),
field_data=KvsFieldData(DictKeyValueStore()),
)
def render_template(self, _template, _context):
raise Exception("Shouldn't be called")
class BaseCourseTestCase(unittest.TestCase):
'''Make sure module imports work properly, including for malformed inputs'''
@staticmethod
def get_system(load_error_modules=True, library=False):
'''Get a dummy system'''
return DummySystem(load_error_modules, library=library)
def get_course(self, name):
"""Get a test course by directory name. If there's more than one, error."""
print "Importing {0}".format(name)
modulestore = XMLModuleStore(
DATA_DIR,
source_dirs=[name],
xblock_mixins=(InheritanceMixin,),
xblock_select=only_xmodules,
)
courses = modulestore.get_courses()
self.assertEquals(len(courses), 1)
return courses[0]
class GenericXBlock(XBlock):
"""XBlock for testing pure xblock xml import"""
has_children = True
field1 = String(default="something", scope=Scope.user_state)
field2 = Integer(scope=Scope.user_state)
@ddt.ddt
class PureXBlockImportTest(BaseCourseTestCase):
"""
Tests of import pure XBlocks (not XModules) from xml
"""
def assert_xblocks_are_good(self, block):
"""Assert a number of conditions that must be true for `block` to be good."""
scope_ids = block.scope_ids
self.assertIsNotNone(scope_ids.usage_id)
self.assertIsNotNone(scope_ids.def_id)
for child_id in block.children:
child = block.runtime.get_block(child_id)
self.assert_xblocks_are_good(child)
@XBlock.register_temp_plugin(GenericXBlock)
@ddt.data(
"<genericxblock/>",
"<genericxblock field1='abc' field2='23' />",
"<genericxblock field1='abc' field2='23'><genericxblock/></genericxblock>",
)
@patch('xmodule.x_module.XModuleMixin.location')
def test_parsing_pure_xblock(self, xml, mock_location):
system = self.get_system(load_error_modules=False)
descriptor = system.process_xml(xml)
self.assertIsInstance(descriptor, GenericXBlock)
self.assert_xblocks_are_good(descriptor)
self.assertFalse(mock_location.called)
class ImportTestCase(BaseCourseTestCase):
date = Date()
def test_fallback(self):
'''Check that malformed xml loads as an ErrorDescriptor.'''
# Use an exotic character to also flush out Unicode issues.
bad_xml = u'''<sequential display_name="oops\N{SNOWMAN}"><video url="hi"></sequential>'''
system = self.get_system()
descriptor = system.process_xml(bad_xml)
self.assertEqual(descriptor.__class__.__name__, 'ErrorDescriptorWithMixins')
def test_unique_url_names(self):
'''Check that each error gets its very own url_name'''
bad_xml = '''<sequential display_name="oops"><video url="hi"></sequential>'''
bad_xml2 = '''<sequential url_name="oops"><video url="hi"></sequential>'''
system = self.get_system()
descriptor1 = system.process_xml(bad_xml)
descriptor2 = system.process_xml(bad_xml2)
self.assertNotEqual(descriptor1.location, descriptor2.location)
# Check that each vertical gets its very own url_name
bad_xml = '''<vertical display_name="abc"><problem url_name="exam1:2013_Spring:abc"/></vertical>'''
bad_xml2 = '''<vertical display_name="abc"><problem url_name="exam2:2013_Spring:abc"/></vertical>'''
descriptor1 = system.process_xml(bad_xml)
descriptor2 = system.process_xml(bad_xml2)
self.assertNotEqual(descriptor1.location, descriptor2.location)
def test_reimport(self):
'''Make sure an already-exported error xml tag loads properly'''
self.maxDiff = None
bad_xml = '''<sequential display_name="oops"><video url="hi"></sequential>'''
system = self.get_system()
descriptor = system.process_xml(bad_xml)
node = etree.Element('unknown')
descriptor.add_xml_to_node(node)
re_import_descriptor = system.process_xml(etree.tostring(node))
self.assertEqual(re_import_descriptor.__class__.__name__, 'ErrorDescriptorWithMixins')
self.assertEqual(descriptor.contents, re_import_descriptor.contents)
self.assertEqual(descriptor.error_msg, re_import_descriptor.error_msg)
def test_fixed_xml_tag(self):
"""Make sure a tag that's been fixed exports as the original tag type"""
# create a error tag with valid xml contents
root = etree.Element('error')
good_xml = '''<sequential display_name="fixed"><video url="hi"/></sequential>'''
root.text = good_xml
xml_str_in = etree.tostring(root)
# load it
system = self.get_system()
descriptor = system.process_xml(xml_str_in)
# export it
node = etree.Element('unknown')
descriptor.add_xml_to_node(node)
# Now make sure the exported xml is a sequential
self.assertEqual(node.tag, 'sequential')
def course_descriptor_inheritance_check(self, descriptor, from_date_string, unicorn_color, url_name):
"""
Checks to make sure that metadata inheritance on a course descriptor is respected.
"""
# pylint: disable=protected-access
print(descriptor, descriptor._field_data)
self.assertEqual(descriptor.due, ImportTestCase.date.from_json(from_date_string))
# Check that the child inherits due correctly
child = descriptor.get_children()[0]
self.assertEqual(child.due, ImportTestCase.date.from_json(from_date_string))
# need to convert v to canonical json b4 comparing
self.assertEqual(
ImportTestCase.date.to_json(ImportTestCase.date.from_json(from_date_string)),
child.xblock_kvs.inherited_settings['due']
)
# Now export and check things
descriptor.runtime.export_fs = MemoryFS()
node = etree.Element('unknown')
descriptor.add_xml_to_node(node)
# Check that the exported xml is just a pointer
print("Exported xml:", etree.tostring(node))
self.assertTrue(is_pointer_tag(node))
# but it's a special case course pointer
self.assertEqual(node.attrib['course'], COURSE)
self.assertEqual(node.attrib['org'], ORG)
# Does the course still have unicorns?
with descriptor.runtime.export_fs.open('course/{url_name}.xml'.format(url_name=url_name)) as f:
course_xml = etree.fromstring(f.read())
self.assertEqual(course_xml.attrib['unicorn'], unicorn_color)
# the course and org tags should be _only_ in the pointer
self.assertNotIn('course', course_xml.attrib)
self.assertNotIn('org', course_xml.attrib)
# did we successfully strip the url_name from the definition contents?
self.assertNotIn('url_name', course_xml.attrib)
# Does the chapter tag now have a due attribute?
# hardcoded path to child
with descriptor.runtime.export_fs.open('chapter/ch.xml') as f:
chapter_xml = etree.fromstring(f.read())
self.assertEqual(chapter_xml.tag, 'chapter')
self.assertNotIn('due', chapter_xml.attrib)
def test_metadata_import_export(self):
"""Two checks:
- unknown metadata is preserved across import-export
- inherited metadata doesn't leak to children.
"""
system = self.get_system()
from_date_string = 'March 20 17:00'
url_name = 'test1'
unicorn_color = 'purple'
start_xml = '''
<course org="{org}" course="{course}"
due="{due}" url_name="{url_name}" unicorn="{unicorn_color}">
<chapter url="hi" url_name="ch" display_name="CH">
<html url_name="h" display_name="H">Two houses, ...</html>
</chapter>
</course>'''.format(
due=from_date_string, org=ORG, course=COURSE, url_name=url_name, unicorn_color=unicorn_color
)
descriptor = system.process_xml(start_xml)
compute_inherited_metadata(descriptor)
self.course_descriptor_inheritance_check(descriptor, from_date_string, unicorn_color, url_name)
def test_library_metadata_import_export(self):
"""Two checks:
- unknown metadata is preserved across import-export
- inherited metadata doesn't leak to children.
"""
system = self.get_system(library=True)
from_date_string = 'March 26 17:00'
url_name = 'test2'
unicorn_color = 'rainbow'
start_xml = '''
<library org="TestOrg" library="TestLib" display_name="stuff">
<course org="{org}" course="{course}"
due="{due}" url_name="{url_name}" unicorn="{unicorn_color}">
<chapter url="hi" url_name="ch" display_name="CH">
<html url_name="h" display_name="H">Two houses, ...</html>
</chapter>
</course>
</library>'''.format(
due=from_date_string, org=ORG, course=COURSE, url_name=url_name, unicorn_color=unicorn_color
)
descriptor = system.process_xml(start_xml)
# pylint: disable=protected-access
original_unwrapped = descriptor._unwrapped_field_data
LibraryXMLModuleStore.patch_descriptor_kvs(descriptor)
# '_unwrapped_field_data' is reset in `patch_descriptor_kvs`
# pylint: disable=protected-access
self.assertIsNot(original_unwrapped, descriptor._unwrapped_field_data)
compute_inherited_metadata(descriptor)
# Check the course module, since it has inheritance
descriptor = descriptor.get_children()[0]
self.course_descriptor_inheritance_check(descriptor, from_date_string, unicorn_color, url_name)
def test_metadata_no_inheritance(self):
"""
        Checks that the default value of None (for due) does not get marked as inherited when a
course is the root block.
"""
system = self.get_system()
url_name = 'test1'
start_xml = '''
<course org="{org}" course="{course}"
url_name="{url_name}" unicorn="purple">
<chapter url="hi" url_name="ch" display_name="CH">
<html url_name="h" display_name="H">Two houses, ...</html>
</chapter>
</course>'''.format(org=ORG, course=COURSE, url_name=url_name)
descriptor = system.process_xml(start_xml)
compute_inherited_metadata(descriptor)
self.course_descriptor_no_inheritance_check(descriptor)
def test_library_metadata_no_inheritance(self):
"""
Checks that the default value of None (for due) does not get marked as inherited when a
library is the root block.
"""
system = self.get_system()
url_name = 'test1'
start_xml = '''
<library org="TestOrg" library="TestLib" display_name="stuff">
<course org="{org}" course="{course}"
url_name="{url_name}" unicorn="purple">
<chapter url="hi" url_name="ch" display_name="CH">
<html url_name="h" display_name="H">Two houses, ...</html>
</chapter>
</course>
</library>'''.format(org=ORG, course=COURSE, url_name=url_name)
descriptor = system.process_xml(start_xml)
LibraryXMLModuleStore.patch_descriptor_kvs(descriptor)
compute_inherited_metadata(descriptor)
# Run the checks on the course node instead.
descriptor = descriptor.get_children()[0]
self.course_descriptor_no_inheritance_check(descriptor)
def course_descriptor_no_inheritance_check(self, descriptor):
"""
Verifies that a default value of None (for due) does not get marked as inherited.
"""
self.assertEqual(descriptor.due, None)
# Check that the child does not inherit a value for due
child = descriptor.get_children()[0]
self.assertEqual(child.due, None)
# Check that the child hasn't started yet
self.assertLessEqual(
datetime.datetime.now(UTC()),
child.start
)
def override_metadata_check(self, descriptor, child, course_due, child_due):
"""
        Verifies that due date can be overridden at child level.
"""
self.assertEqual(descriptor.due, ImportTestCase.date.from_json(course_due))
self.assertEqual(child.due, ImportTestCase.date.from_json(child_due))
# Test inherited metadata. Due does not appear here (because explicitly set on child).
self.assertEqual(
ImportTestCase.date.to_json(ImportTestCase.date.from_json(course_due)),
child.xblock_kvs.inherited_settings['due']
)
def test_metadata_override_default(self):
"""
        Checks that due date can be overridden at child level when a course is the root.
"""
system = self.get_system()
course_due = 'March 20 17:00'
child_due = 'April 10 00:00'
url_name = 'test1'
start_xml = '''
<course org="{org}" course="{course}"
due="{due}" url_name="{url_name}" unicorn="purple">
<chapter url="hi" url_name="ch" display_name="CH">
<html url_name="h" display_name="H">Two houses, ...</html>
</chapter>
</course>'''.format(due=course_due, org=ORG, course=COURSE, url_name=url_name)
descriptor = system.process_xml(start_xml)
child = descriptor.get_children()[0]
# pylint: disable=protected-access
child._field_data.set(child, 'due', child_due)
compute_inherited_metadata(descriptor)
self.override_metadata_check(descriptor, child, course_due, child_due)
def test_library_metadata_override_default(self):
"""
        Checks that due date can be overridden at child level when a library is the root.
"""
system = self.get_system()
course_due = 'March 20 17:00'
child_due = 'April 10 00:00'
url_name = 'test1'
start_xml = '''
<library org="TestOrg" library="TestLib" display_name="stuff">
<course org="{org}" course="{course}"
due="{due}" url_name="{url_name}" unicorn="purple">
<chapter url="hi" url_name="ch" display_name="CH">
<html url_name="h" display_name="H">Two houses, ...</html>
</chapter>
</course>
</library>'''.format(due=course_due, org=ORG, course=COURSE, url_name=url_name)
descriptor = system.process_xml(start_xml)
LibraryXMLModuleStore.patch_descriptor_kvs(descriptor)
# Chapter is two levels down here.
child = descriptor.get_children()[0].get_children()[0]
# pylint: disable=protected-access
child._field_data.set(child, 'due', child_due)
compute_inherited_metadata(descriptor)
descriptor = descriptor.get_children()[0]
self.override_metadata_check(descriptor, child, course_due, child_due)
def test_is_pointer_tag(self):
"""
Check that is_pointer_tag works properly.
"""
yes = ["""<html url_name="blah"/>""",
"""<html url_name="blah"></html>""",
"""<html url_name="blah"> </html>""",
"""<problem url_name="blah"/>""",
"""<course org="HogwartsX" course="Mathemagics" url_name="3.14159"/>"""]
no = ["""<html url_name="blah" also="this"/>""",
"""<html url_name="blah">some text</html>""",
"""<problem url_name="blah"><sub>tree</sub></problem>""",
"""<course org="HogwartsX" course="Mathemagics" url_name="3.14159">
<chapter>3</chapter>
</course>
"""]
for xml_str in yes:
print "should be True for {0}".format(xml_str)
self.assertTrue(is_pointer_tag(etree.fromstring(xml_str)))
for xml_str in no:
print "should be False for {0}".format(xml_str)
self.assertFalse(is_pointer_tag(etree.fromstring(xml_str)))
def test_metadata_inherit(self):
"""Make sure that metadata is inherited properly"""
print "Starting import"
course = self.get_course('toy')
def check_for_key(key, node, value):
"recursive check for presence of key"
print "Checking {0}".format(node.location.to_deprecated_string())
self.assertEqual(getattr(node, key), value)
for c in node.get_children():
check_for_key(key, c, value)
check_for_key('graceperiod', course, course.graceperiod)
def test_policy_loading(self):
"""Make sure that when two courses share content with the same
org and course names, policy applies to the right one."""
toy = self.get_course('toy')
two_toys = self.get_course('two_toys')
self.assertEqual(toy.url_name, "2012_Fall")
self.assertEqual(two_toys.url_name, "TT_2012_Fall")
toy_ch = toy.get_children()[0]
two_toys_ch = two_toys.get_children()[0]
self.assertEqual(toy_ch.display_name, "Overview")
self.assertEqual(two_toys_ch.display_name, "Two Toy Overview")
# Also check that the grading policy loaded
self.assertEqual(two_toys.grade_cutoffs['C'], 0.5999)
# Also check that keys from policy are run through the
# appropriate attribute maps -- 'graded' should be True, not 'true'
self.assertEqual(toy.graded, True)
def test_definition_loading(self):
"""When two courses share the same org and course name and
both have a module with the same url_name, the definitions shouldn't clash.
TODO (vshnayder): once we have a CMS, this shouldn't
happen--locations should uniquely name definitions. But in
our imperfect XML world, it can (and likely will) happen."""
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy', 'two_toys'])
location = Location("edX", "toy", "2012_Fall", "video", "Welcome", None)
toy_video = modulestore.get_item(location)
location_two = Location("edX", "toy", "TT_2012_Fall", "video", "Welcome", None)
two_toy_video = modulestore.get_item(location_two)
self.assertEqual(toy_video.youtube_id_1_0, "p2Q6BrNhdh8")
self.assertEqual(two_toy_video.youtube_id_1_0, "p2Q6BrNhdh9")
def test_colon_in_url_name(self):
"""Ensure that colons in url_names convert to file paths properly"""
print "Starting import"
# Not using get_courses because we need the modulestore object too afterward
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy'])
courses = modulestore.get_courses()
self.assertEquals(len(courses), 1)
course = courses[0]
print "course errors:"
for (msg, err) in modulestore.get_course_errors(course.id):
print msg
print err
chapters = course.get_children()
self.assertEquals(len(chapters), 5)
ch2 = chapters[1]
self.assertEquals(ch2.url_name, "secret:magic")
print "Ch2 location: ", ch2.location
also_ch2 = modulestore.get_item(ch2.location)
self.assertEquals(ch2, also_ch2)
print "making sure html loaded"
loc = course.id.make_usage_key('html', 'secret:toylab')
html = modulestore.get_item(loc)
self.assertEquals(html.display_name, "Toy lab")
def test_unicode(self):
"""Check that courses with unicode characters in filenames and in
org/course/name import properly. Currently, this means: (a) Having
files with unicode names does not prevent import; (b) if files are not
loaded because of unicode filenames, there are appropriate
exceptions/errors to that effect."""
print "Starting import"
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['test_unicode'])
courses = modulestore.get_courses()
self.assertEquals(len(courses), 1)
course = courses[0]
print "course errors:"
# Expect to find an error/exception about characters in "®esources"
expect = "InvalidKeyError"
errors = [
(msg.encode("utf-8"), err.encode("utf-8"))
for msg, err
in modulestore.get_course_errors(course.id)
]
self.assertTrue(any(
expect in msg or expect in err
for msg, err in errors
))
chapters = course.get_children()
self.assertEqual(len(chapters), 4)
def test_url_name_mangling(self):
"""
Make sure that url_names are only mangled once.
"""
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy'])
toy_id = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
course = modulestore.get_course(toy_id)
chapters = course.get_children()
ch1 = chapters[0]
sections = ch1.get_children()
self.assertEqual(len(sections), 4)
for i in (2, 3):
video = sections[i]
# Name should be 'video_{hash}'
print "video {0} url_name: {1}".format(i, video.url_name)
self.assertEqual(len(video.url_name), len('video_') + 12)
def test_poll_and_conditional_import(self):
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['conditional_and_poll'])
course = modulestore.get_courses()[0]
chapters = course.get_children()
ch1 = chapters[0]
sections = ch1.get_children()
self.assertEqual(len(sections), 1)
conditional_location = course.id.make_usage_key('conditional', 'condone')
module = modulestore.get_item(conditional_location)
self.assertEqual(len(module.children), 1)
poll_location = course.id.make_usage_key('poll_question', 'first_poll')
module = modulestore.get_item(poll_location)
self.assertEqual(len(module.get_children()), 0)
self.assertEqual(module.voted, False)
self.assertEqual(module.poll_answer, '')
self.assertEqual(module.poll_answers, {})
self.assertEqual(
module.answers,
[
{'text': u'Yes', 'id': 'Yes'},
{'text': u'No', 'id': 'No'},
{'text': u"Don't know", 'id': 'Dont_know'}
]
)
def test_error_on_import(self):
'''Check that when load_error_module is false, an exception is raised, rather than returning an ErrorModule'''
bad_xml = '''<sequential display_name="oops"><video url="hi"></sequential>'''
system = self.get_system(False)
self.assertRaises(etree.XMLSyntaxError, system.process_xml, bad_xml)
def test_graphicslidertool_import(self):
'''
Check to see if definition_from_xml in gst_module.py
works properly. Pulls data from the graphic_slider_tool directory
in the test data directory.
'''
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['graphic_slider_tool'])
sa_id = SlashSeparatedCourseKey("edX", "gst_test", "2012_Fall")
location = sa_id.make_usage_key("graphical_slider_tool", "sample_gst")
gst_sample = modulestore.get_item(location)
render_string_from_sample_gst_xml = """
<slider var="a" style="width:400px;float:left;"/>\
<plot style="margin-top:15px;margin-bottom:15px;"/>""".strip()
self.assertIn(render_string_from_sample_gst_xml, gst_sample.data)
def test_word_cloud_import(self):
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['word_cloud'])
course = modulestore.get_courses()[0]
chapters = course.get_children()
ch1 = chapters[0]
sections = ch1.get_children()
self.assertEqual(len(sections), 1)
location = course.id.make_usage_key('word_cloud', 'cloud1')
module = modulestore.get_item(location)
self.assertEqual(len(module.get_children()), 0)
self.assertEqual(module.num_inputs, 5)
self.assertEqual(module.num_top_words, 250)
def test_cohort_config(self):
"""
Check that cohort config parsing works right.
Note: The cohort config on the CourseModule is no longer used.
See openedx.core.djangoapps.course_groups.models.CourseCohortSettings.
"""
modulestore = XMLModuleStore(DATA_DIR, source_dirs=['toy'])
toy_id = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
course = modulestore.get_course(toy_id)
# No config -> False
self.assertFalse(course.is_cohorted)
# empty config -> False
course.cohort_config = {}
self.assertFalse(course.is_cohorted)
# false config -> False
course.cohort_config = {'cohorted': False}
self.assertFalse(course.is_cohorted)
# and finally...
course.cohort_config = {'cohorted': True}
self.assertTrue(course.is_cohorted)
| agpl-3.0 |
joachimmetz/dfvfs | dfvfs/path/encoded_stream_path_spec.py | 2 | 1389 | # -*- coding: utf-8 -*-
"""The encoded stream path specification implementation."""
from dfvfs.lib import definitions
from dfvfs.path import factory
from dfvfs.path import path_spec
class EncodedStreamPathSpec(path_spec.PathSpec):
"""Encoded stream path specification.
Attributes:
encoding_method (str): method used to the encode the data.
"""
TYPE_INDICATOR = definitions.TYPE_INDICATOR_ENCODED_STREAM
def __init__(self, encoding_method=None, parent=None, **kwargs):
"""Initializes a path specification.
Note that the encoded stream path specification must have a parent.
Args:
encoding_method (Optional[str]): method used to the encode the data.
parent (Optional[PathSpec]): parent path specification.
Raises:
ValueError: when encoding method or parent are not set.
"""
if not encoding_method or not parent:
raise ValueError('Missing encoding method or parent value.')
super(EncodedStreamPathSpec, self).__init__(parent=parent, **kwargs)
self.encoding_method = encoding_method
@property
def comparable(self):
"""str: comparable representation of the path specification."""
sub_comparable_string = 'encoding_method: {0:s}'.format(
self.encoding_method)
return self._GetComparable(sub_comparable_string=sub_comparable_string)
factory.Factory.RegisterPathSpec(EncodedStreamPathSpec)
| apache-2.0 |
Pablo1990/BioinfoTeam | plugins/ti.alloy/plugin.py | 1729 | 5251 | import os, sys, subprocess, hashlib
import subprocess
def check_output(*popenargs, **kwargs):
r"""Run command with arguments and return its output as a byte string.
Backported from Python 2.7 as it's implemented as pure python on stdlib.
>>> check_output(['/usr/bin/python', '--version'])
Python 2.6.2
"""
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
error = subprocess.CalledProcessError(retcode, cmd)
error.output = output
raise error
return output
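# Titanium CLI build hook: locate the alloy and node binaries (via the
# ALLOY_PATH / ALLOY_NODE_PATH environment variables, the PATH, or a few common
# install locations), build a platform/version/deploytype config string, and run
# `alloy compile` on the project's app/ directory. The step is skipped when
# build/.alloynewcli exists, i.e. the new Alloy CLI has already compiled the app.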
def compile(config):
paths = {}
binaries = ["alloy","node"]
dotAlloy = os.path.abspath(os.path.join(config['project_dir'], 'build', '.alloynewcli'))
if os.path.exists(dotAlloy):
print "[DEBUG] build/.alloynewcli file found, skipping plugin..."
os.remove(dotAlloy)
else:
for binary in binaries:
try:
# see if the environment variable is defined
paths[binary] = os.environ["ALLOY_" + ("NODE_" if binary == "node" else "") + "PATH"]
except KeyError as ex:
# next try PATH, and then our guess paths
if sys.platform == "darwin" or sys.platform.startswith('linux'):
userPath = os.environ["HOME"]
guessPaths = [
"/usr/local/bin/"+binary,
"/opt/local/bin/"+binary,
userPath+"/local/bin/"+binary,
"/opt/bin/"+binary,
"/usr/bin/"+binary,
"/usr/local/share/npm/bin/"+binary
]
try:
binaryPath = check_output(["which",binary], stderr=subprocess.STDOUT).strip()
print "[DEBUG] %s installed at '%s'" % (binary,binaryPath)
except:
print "[WARN] Couldn't find %s on your PATH:" % binary
print "[WARN] %s" % os.environ["PATH"]
print "[WARN]"
print "[WARN] Checking for %s in a few default locations:" % binary
for p in guessPaths:
sys.stdout.write("[WARN] %s -> " % p)
if os.path.exists(p):
binaryPath = p
print "FOUND"
break
else:
print "not found"
binaryPath = None
if binaryPath is None:
print "[ERROR] Couldn't find %s" % binary
sys.exit(1)
else:
paths[binary] = binaryPath
# no guesses on windows, just use the PATH
elif sys.platform == "win32":
paths["alloy"] = "alloy.cmd"
f = os.path.abspath(os.path.join(config['project_dir'], 'app'))
if os.path.exists(f):
print "[INFO] alloy app found at %s" % f
rd = os.path.abspath(os.path.join(config['project_dir'], 'Resources'))
devicefamily = 'none'
simtype = 'none'
version = '0'
deploytype = 'development'
if config['platform']==u'ios':
version = config['iphone_version']
devicefamily = config['devicefamily']
deploytype = config['deploytype']
if config['platform']==u'android':
builder = config['android_builder']
version = builder.tool_api_level
deploytype = config['deploy_type']
if config['platform']==u'mobileweb':
builder = config['mobileweb_builder']
deploytype = config['deploytype']
cfg = "platform=%s,version=%s,simtype=%s,devicefamily=%s,deploytype=%s," % (config['platform'],version,simtype,devicefamily,deploytype)
if sys.platform == "win32":
cmd = [paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
else:
cmd = [paths["node"], paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
print "[INFO] Executing Alloy compile:"
print "[INFO] %s" % " ".join(cmd)
try:
print check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as ex:
if hasattr(ex, 'output'):
print ex.output
print "[ERROR] Alloy compile failed"
retcode = 1
if hasattr(ex, 'returncode'):
retcode = ex.returncode
sys.exit(retcode)
except EnvironmentError as ex:
print "[ERROR] Unexpected error with Alloy compiler plugin: %s" % ex.strerror
sys.exit(2)
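# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original plugin): compile() is normally
# invoked by the Titanium build system, which supplies the config dictionary.
# The keys below are the ones this module reads for an iOS build; the values
# are made up for illustration.
# ---------------------------------------------------------------------------
# example_config = {
#     'project_dir': '/path/to/project',
#     'platform': u'ios',
#     'iphone_version': '7.0',
#     'devicefamily': 'universal',
#     'deploytype': 'development',
# }
# compile(example_config)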
| apache-2.0 |
tectronics/amselections | pr.py | 1 | 4584 | import sys, os, math, random
class party:
def __init__(self, names, votess, seat=0):
self.name = names
self.votes = votess
self.readyseats = seat
if self.readyseats == None:
self.readyseats = int(0)
self.seats = self.readyseats
if self.seats == None:
self.seats = int(0)
def getquota(self):
return self.votes/(self.seats + 1)
def bumpseats(self):
self.seats += 1
def reset(self): #Sets seats counter back to readyseats
self.seats = self.readyseats
def dhondt(parties, topups):
i = 0
elected = []
while i < topups:
elect = sorted(parties, key=lambda party: party.getquota())
elect.reverse()
elected.append(elect[0])
elect[0].bumpseats()
i += 1
return elected
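#Editor's example (not in the original file): with party A on 100 votes and
#party B on 60, dhondt([A, B], 3) hands out seats in the order A (100/1),
#B (60/1), A (100/2 = 50), because each round elects the party with the
#largest votes/(seats + 1) quotient.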
def gallagher(diffs): #Used to describe proportionality of an election
sqdiffs = []
for i in diffs:
sqdiffs.append(i*i)
#print sqdiffs
totsq = sum(sqdiffs)
#print totsq
return math.sqrt(totsq/2)
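#Editor's example (not in the original file): the Gallagher index is normally
#fed the per-party differences between vote share and seat share in
#percentage points, e.g. gallagher([5.0, -3.0, -2.0]) gives
#sqrt((25 + 9 + 4)/2), roughly 4.36.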
class constituency:
def __init__(self, name, parties, region = None):
self.parties = sorted(parties, key=lambda party: party.votes)
self.parties.reverse()
self.name = name
self.region = region
self.totalvotes = int(0)
for i in self.parties:
self.totalvotes += i.votes
def get_elected(self):
return self.parties[0]
def get_region(self):
return self.region
def set_region(self, region):
self.region = region
def set_results(self, parties):
self.parties = sorted(parties, key=lambda party: party.votes)
self.parties.reverse()
self.totalvotes = int(0)
for i in self.parties:
self.totalvotes += i.votes
def uniform_swing(self, swing, from_party, to_party):
for i in self.parties:
#print i.name
if i.name == from_party:
i.votes -= int(((float(swing)/2.00)/100.00)*float(self.totalvotes))
#print i.votes
if i.name == to_party:
i.votes += int(((float(swing)/2.00)/100.00)*float(self.totalvotes))
#print i.votes
self.update_parties()
def mod_votes(self, votes, party):
for i in self.parties:
if i.name == party:
i.votes += votes
self.update_parties()
def update_parties(self): #Makes sure the parties list is in the correct order; highest votes to lowest votes.
self.parties = sorted(self.parties, key=lambda party: party.votes)
self.parties.reverse()
class region(constituency):
def __init__(self, name, parties, constituencies, topups):
self.parties = sorted(parties, key=lambda party: party.votes)
self.parties.reverse()
self.name = name
self.constituencies = constituencies
self.topups = topups
self.totalvotes = int(0)
for i in self.parties:
self.totalvotes += i.votes
def set_results(self, parties):
self.parties = sorted(parties, key=lambda party: party.votes)
self.parties.reverse()
self.totalvotes = int(0)
for i in self.parties:
self.totalvotes += i.votes
def get_elected(self,top=0):
if top == 0:
top = self.topups
con_win_names = []
for i in self.constituencies:
x = i.get_elected().name
con_win_names.append(x)
for j in self.parties:
if x == j.name:
j.bumpseats()
d = dhondt(self.parties, top)
for k in self.parties:
k.reset()
return d
def constituency_uniform_swing(self, swing, from_party, to_party, const = None):
if const != None:
for i in self.constituencies:
if i.name == const:
i.uniform_swing(swing, from_party, to_party)
else:
for i in self.constituencies:
i.uniform_swing(swing, from_party, to_party)
#labour = party("Labour", 128818, 9)
#libdem = party("Liberal Democrats", 31403, 0)
#conser = party("Conservatives", 17432, 0)
#snp = party("SNP", 39702, 0)
#parties = [labour, libdem, conser, snp]
#print dhondt(parties, 7)
#if __name__ == "__main__":
# labour = party("labour", 10000,0)
# libdem = party("lib dem", 8000,0)
# a = constituency("A",[libdem,labour])
# #print a.get_elected().name
# glab = party("labour",100000,0)
# glib = party("lib dem",40000,0)
# b = region("Glasgow",[glib,glab],[a],1)
# for i in b.get_elected(6):
# print i.name
# print i.votes
#
# b.uniform_swing(20, "labour", "lib dem")
# for i in b.get_elected(6):
# print i.name
# print i.votes
# x = raw_input() | mit |
maiklos-mirrors/jfx78 | modules/web/src/main/native/Tools/Scripts/webkitpy/common/system/path.py | 191 | 5046 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""generic routines to convert platform-specific paths to URIs."""
import atexit
import subprocess
import sys
import threading
import urllib
def abspath_to_uri(platform, path):
"""Converts a platform-specific absolute path to a file: URL."""
return "file:" + _escape(_convert_path(platform, path))
def cygpath(path):
"""Converts an absolute cygwin path to an absolute Windows path."""
return _CygPath.convert_using_singleton(path)
# Note that this object is not threadsafe and must only be called
# from multiple threads under protection of a lock (as is done in cygpath())
class _CygPath(object):
"""Manages a long-running 'cygpath' process for file conversion."""
_lock = None
_singleton = None
@staticmethod
def stop_cygpath_subprocess():
if not _CygPath._lock:
return
with _CygPath._lock:
if _CygPath._singleton:
_CygPath._singleton.stop()
@staticmethod
def convert_using_singleton(path):
if not _CygPath._lock:
_CygPath._lock = threading.Lock()
with _CygPath._lock:
if not _CygPath._singleton:
_CygPath._singleton = _CygPath()
# Make sure the cygpath subprocess always gets shutdown cleanly.
atexit.register(_CygPath.stop_cygpath_subprocess)
return _CygPath._singleton.convert(path)
def __init__(self):
self._child_process = None
def start(self):
assert(self._child_process is None)
args = ['cygpath', '-f', '-', '-wa']
self._child_process = subprocess.Popen(args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
def is_running(self):
if not self._child_process:
return False
return self._child_process.returncode is None
def stop(self):
if self._child_process:
self._child_process.stdin.close()
self._child_process.wait()
self._child_process = None
def convert(self, path):
if not self.is_running():
self.start()
self._child_process.stdin.write("%s\r\n" % path)
self._child_process.stdin.flush()
windows_path = self._child_process.stdout.readline().rstrip()
# Some versions of cygpath use lowercase drive letters while others
# use uppercase. We always convert to uppercase for consistency.
windows_path = '%s%s' % (windows_path[0].upper(), windows_path[1:])
return windows_path
def _escape(path):
"""Handle any characters in the path that should be escaped."""
# FIXME: web browsers don't appear to blindly quote every character
# when converting filenames to files. Instead of using urllib's default
# rules, we allow a small list of other characters through un-escaped.
# It's unclear if this is the best possible solution.
return urllib.quote(path, safe='/+:')
def _convert_path(platform, path):
"""Handles any os-specific path separators, mappings, etc."""
if platform.is_cygwin():
return _winpath_to_uri(cygpath(path))
if platform.is_win():
return _winpath_to_uri(path)
return _unixypath_to_uri(path)
def _winpath_to_uri(path):
"""Converts a window absolute path to a file: URL."""
return "///" + path.replace("\\", "/")
def _unixypath_to_uri(path):
"""Converts a unix-style path to a file: URL."""
return "//" + path
| gpl-2.0 |
mathspace/django | tests/gis_tests/geo3d/tests.py | 26 | 17237 | from __future__ import unicode_literals
import os
import re
from unittest import skipUnless
from django.contrib.gis.db.models import Extent3D, Union
from django.contrib.gis.db.models.functions import (
AsGeoJSON, AsKML, Length, Perimeter, Scale, Translate,
)
from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.geos import GEOSGeometry, LineString, Point, Polygon
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango20Warning
from .models import (
City3D, Interstate2D, Interstate3D, InterstateProj2D, InterstateProj3D,
MultiPoint3D, Point2D, Point3D, Polygon2D, Polygon3D,
)
if HAS_GDAL:
from django.contrib.gis.utils import LayerMapping, LayerMapError
data_path = os.path.realpath(os.path.join(os.path.dirname(upath(__file__)), '..', 'data'))
city_file = os.path.join(data_path, 'cities', 'cities.shp')
vrt_file = os.path.join(data_path, 'test_vrt', 'test_vrt.vrt')
# The coordinates of each city, with Z values corresponding to their
# altitude in meters.
city_data = (
('Houston', (-95.363151, 29.763374, 18)),
('Dallas', (-96.801611, 32.782057, 147)),
('Oklahoma City', (-97.521157, 34.464642, 380)),
('Wellington', (174.783117, -41.315268, 14)),
('Pueblo', (-104.609252, 38.255001, 1433)),
('Lawrence', (-95.235060, 38.971823, 251)),
('Chicago', (-87.650175, 41.850385, 181)),
('Victoria', (-123.305196, 48.462611, 15)),
)
# Reference mapping of city name to its altitude (Z value).
city_dict = {name: coords for name, coords in city_data}
# 3D freeway data derived from the National Elevation Dataset:
# http://seamless.usgs.gov/products/9arc.php
interstate_data = (
('I-45',
'LINESTRING(-95.3708481 29.7765870 11.339,-95.3694580 29.7787980 4.536,'
'-95.3690305 29.7797359 9.762,-95.3691886 29.7812450 12.448,'
'-95.3696447 29.7850144 10.457,-95.3702511 29.7868518 9.418,'
'-95.3706724 29.7881286 14.858,-95.3711632 29.7896157 15.386,'
'-95.3714525 29.7936267 13.168,-95.3717848 29.7955007 15.104,'
'-95.3717719 29.7969804 16.516,-95.3717305 29.7982117 13.923,'
'-95.3717254 29.8000778 14.385,-95.3719875 29.8013539 15.160,'
'-95.3720575 29.8026785 15.544,-95.3721321 29.8040912 14.975,'
'-95.3722074 29.8050998 15.688,-95.3722779 29.8060430 16.099,'
'-95.3733818 29.8076750 15.197,-95.3741563 29.8103686 17.268,'
'-95.3749458 29.8129927 19.857,-95.3763564 29.8144557 15.435)',
(11.339, 4.536, 9.762, 12.448, 10.457, 9.418, 14.858,
15.386, 13.168, 15.104, 16.516, 13.923, 14.385, 15.16,
15.544, 14.975, 15.688, 16.099, 15.197, 17.268, 19.857,
15.435),
),
)
# Bounding box polygon for inner-loop of Houston (in projected coordinate
# system 32140), with elevation values from the National Elevation Dataset
# (see above).
bbox_data = (
'POLYGON((941527.97 4225693.20,962596.48 4226349.75,963152.57 4209023.95,'
'942051.75 4208366.38,941527.97 4225693.20))',
(21.71, 13.21, 9.12, 16.40, 21.71)
)
class Geo3DLoadingHelper(object):
def _load_interstate_data(self):
# Interstate (2D / 3D and Geographic/Projected variants)
for name, line, exp_z in interstate_data:
line_3d = GEOSGeometry(line, srid=4269)
line_2d = LineString([l[:2] for l in line_3d.coords], srid=4269)
# Creating a geographic and projected version of the
# interstate in both 2D and 3D.
Interstate3D.objects.create(name=name, line=line_3d)
InterstateProj3D.objects.create(name=name, line=line_3d)
Interstate2D.objects.create(name=name, line=line_2d)
InterstateProj2D.objects.create(name=name, line=line_2d)
def _load_city_data(self):
for name, pnt_data in city_data:
City3D.objects.create(name=name, point=Point(*pnt_data, srid=4326))
def _load_polygon_data(self):
bbox_wkt, bbox_z = bbox_data
bbox_2d = GEOSGeometry(bbox_wkt, srid=32140)
bbox_3d = Polygon(tuple((x, y, z) for (x, y), z in zip(bbox_2d[0].coords, bbox_z)), srid=32140)
Polygon2D.objects.create(name='2D BBox', poly=bbox_2d)
Polygon3D.objects.create(name='3D BBox', poly=bbox_3d)
@skipUnless(HAS_GDAL, "GDAL is required for Geo3DTest.")
@skipUnlessDBFeature("gis_enabled", "supports_3d_storage")
class Geo3DTest(Geo3DLoadingHelper, TestCase):
"""
Only a subset of the PostGIS routines are 3D-enabled, and this TestCase
tries to test the features that can handle 3D and that are also
available within GeoDjango. For more information, see the PostGIS docs
on the routines that support 3D:
http://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
"""
def test_3d_hasz(self):
"""
Make sure data is 3D and has expected Z values -- shouldn't change
because of coordinate system.
"""
self._load_interstate_data()
for name, line, exp_z in interstate_data:
interstate = Interstate3D.objects.get(name=name)
interstate_proj = InterstateProj3D.objects.get(name=name)
for i in [interstate, interstate_proj]:
self.assertTrue(i.line.hasz)
self.assertEqual(exp_z, tuple(i.line.z))
self._load_city_data()
for name, pnt_data in city_data:
city = City3D.objects.get(name=name)
z = pnt_data[2]
self.assertTrue(city.point.hasz)
self.assertEqual(z, city.point.z)
def test_3d_polygons(self):
"""
Test the creation of polygon 3D models.
"""
self._load_polygon_data()
p3d = Polygon3D.objects.get(name='3D BBox')
self.assertTrue(p3d.poly.hasz)
self.assertIsInstance(p3d.poly, Polygon)
self.assertEqual(p3d.poly.srid, 32140)
def test_3d_layermapping(self):
"""
Testing LayerMapping on 3D models.
"""
point_mapping = {'point': 'POINT'}
mpoint_mapping = {'mpoint': 'MULTIPOINT'}
# The VRT is 3D, but should still be able to map sans the Z.
lm = LayerMapping(Point2D, vrt_file, point_mapping, transform=False)
lm.save()
self.assertEqual(3, Point2D.objects.count())
# The city shapefile is 2D, and won't be able to fill the coordinates
# in the 3D model -- thus, a LayerMapError is raised.
with self.assertRaises(LayerMapError):
LayerMapping(Point3D, city_file, point_mapping, transform=False)
# 3D model should take 3D data just fine.
lm = LayerMapping(Point3D, vrt_file, point_mapping, transform=False)
lm.save()
self.assertEqual(3, Point3D.objects.count())
# Making sure LayerMapping.make_multi works right, by converting
# a Point25D into a MultiPoint25D.
lm = LayerMapping(MultiPoint3D, vrt_file, mpoint_mapping, transform=False)
lm.save()
self.assertEqual(3, MultiPoint3D.objects.count())
@ignore_warnings(category=RemovedInDjango20Warning)
def test_kml(self):
"""
Test GeoQuerySet.kml() with Z values.
"""
self._load_city_data()
h = City3D.objects.kml(precision=6).get(name='Houston')
# KML should be 3D.
# `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';`
ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$')
self.assertTrue(ref_kml_regex.match(h.kml))
@ignore_warnings(category=RemovedInDjango20Warning)
def test_geojson(self):
"""
Test GeoQuerySet.geojson() with Z values.
"""
self._load_city_data()
h = City3D.objects.geojson(precision=6).get(name='Houston')
# GeoJSON should be 3D
# `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';`
ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$')
self.assertTrue(ref_json_regex.match(h.geojson))
@skipUnlessDBFeature("supports_3d_functions")
def test_union(self):
"""
Testing the Union aggregate of 3D models.
"""
# PostGIS query that returned the reference EWKT for this test:
# `SELECT ST_AsText(ST_Union(point)) FROM geo3d_city3d;`
self._load_city_data()
ref_ewkt = (
'SRID=4326;MULTIPOINT(-123.305196 48.462611 15,-104.609252 38.255001 1433,'
'-97.521157 34.464642 380,-96.801611 32.782057 147,-95.363151 29.763374 18,'
'-95.23506 38.971823 251,-87.650175 41.850385 181,174.783117 -41.315268 14)'
)
ref_union = GEOSGeometry(ref_ewkt)
union = City3D.objects.aggregate(Union('point'))['point__union']
self.assertTrue(union.hasz)
# Ordering of points in the resulting geometry may vary between implementations
self.assertSetEqual({p.ewkt for p in ref_union}, {p.ewkt for p in union})
@skipUnlessDBFeature("supports_3d_functions")
def test_extent(self):
"""
Testing the Extent3D aggregate for 3D models.
"""
self._load_city_data()
# `SELECT ST_Extent3D(point) FROM geo3d_city3d;`
ref_extent3d = (-123.305196, -41.315268, 14, 174.783117, 48.462611, 1433)
extent = City3D.objects.aggregate(Extent3D('point'))['point__extent3d']
def check_extent3d(extent3d, tol=6):
for ref_val, ext_val in zip(ref_extent3d, extent3d):
self.assertAlmostEqual(ref_val, ext_val, tol)
check_extent3d(extent)
self.assertIsNone(City3D.objects.none().aggregate(Extent3D('point'))['point__extent3d'])
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_perimeter(self):
"""
Testing GeoQuerySet.perimeter() on 3D fields.
"""
self._load_polygon_data()
# Reference query for values below:
# `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;`
ref_perim_3d = 76859.2620451
ref_perim_2d = 76859.2577803
tol = 6
self.assertAlmostEqual(ref_perim_2d,
Polygon2D.objects.perimeter().get(name='2D BBox').perimeter.m,
tol)
self.assertAlmostEqual(ref_perim_3d,
Polygon3D.objects.perimeter().get(name='3D BBox').perimeter.m,
tol)
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_length(self):
"""
Testing GeoQuerySet.length() on 3D fields.
"""
        # ST_Length_Spheroid is Z-aware, and thus does not need to use
# a separate function internally.
# `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]')
# FROM geo3d_interstate[2d|3d];`
self._load_interstate_data()
tol = 3
ref_length_2d = 4368.1721949481
ref_length_3d = 4368.62547052088
self.assertAlmostEqual(ref_length_2d,
Interstate2D.objects.length().get(name='I-45').length.m,
tol)
self.assertAlmostEqual(ref_length_3d,
Interstate3D.objects.length().get(name='I-45').length.m,
tol)
        # Making sure `ST_Length3D` is used for a projected
# and 3D model rather than `ST_Length`.
# `SELECT ST_Length(line) FROM geo3d_interstateproj2d;`
ref_length_2d = 4367.71564892392
# `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;`
ref_length_3d = 4368.16897234101
self.assertAlmostEqual(ref_length_2d,
InterstateProj2D.objects.length().get(name='I-45').length.m,
tol)
self.assertAlmostEqual(ref_length_3d,
InterstateProj3D.objects.length().get(name='I-45').length.m,
tol)
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_scale(self):
"""
Testing GeoQuerySet.scale() on Z values.
"""
self._load_city_data()
# Mapping of City name to reference Z values.
zscales = (-3, 4, 23)
for zscale in zscales:
for city in City3D.objects.scale(1.0, 1.0, zscale):
self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z)
@ignore_warnings(category=RemovedInDjango20Warning)
@skipUnlessDBFeature("supports_3d_functions")
def test_translate(self):
"""
Testing GeoQuerySet.translate() on Z values.
"""
self._load_city_data()
ztranslations = (5.23, 23, -17)
for ztrans in ztranslations:
for city in City3D.objects.translate(0, 0, ztrans):
self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
@skipUnless(HAS_GDAL, "GDAL is required for Geo3DTest.")
@skipUnlessDBFeature("gis_enabled", "supports_3d_functions")
class Geo3DFunctionsTests(Geo3DLoadingHelper, TestCase):
def test_kml(self):
"""
Test KML() function with Z values.
"""
self._load_city_data()
h = City3D.objects.annotate(kml=AsKML('point', precision=6)).get(name='Houston')
# KML should be 3D.
# `SELECT ST_AsKML(point, 6) FROM geo3d_city3d WHERE name = 'Houston';`
ref_kml_regex = re.compile(r'^<Point><coordinates>-95.363\d+,29.763\d+,18</coordinates></Point>$')
self.assertTrue(ref_kml_regex.match(h.kml))
def test_geojson(self):
"""
Test GeoJSON() function with Z values.
"""
self._load_city_data()
h = City3D.objects.annotate(geojson=AsGeoJSON('point', precision=6)).get(name='Houston')
# GeoJSON should be 3D
# `SELECT ST_AsGeoJSON(point, 6) FROM geo3d_city3d WHERE name='Houston';`
ref_json_regex = re.compile(r'^{"type":"Point","coordinates":\[-95.363151,29.763374,18(\.0+)?\]}$')
self.assertTrue(ref_json_regex.match(h.geojson))
def test_perimeter(self):
"""
Testing Perimeter() function on 3D fields.
"""
self._load_polygon_data()
# Reference query for values below:
# `SELECT ST_Perimeter3D(poly), ST_Perimeter2D(poly) FROM geo3d_polygon3d;`
ref_perim_3d = 76859.2620451
ref_perim_2d = 76859.2577803
tol = 6
poly2d = Polygon2D.objects.annotate(perimeter=Perimeter('poly')).get(name='2D BBox')
self.assertAlmostEqual(ref_perim_2d, poly2d.perimeter.m, tol)
poly3d = Polygon3D.objects.annotate(perimeter=Perimeter('poly')).get(name='3D BBox')
self.assertAlmostEqual(ref_perim_3d, poly3d.perimeter.m, tol)
def test_length(self):
"""
Testing Length() function on 3D fields.
"""
        # ST_Length_Spheroid is Z-aware, and thus does not need to use
# a separate function internally.
# `SELECT ST_Length_Spheroid(line, 'SPHEROID["GRS 1980",6378137,298.257222101]')
# FROM geo3d_interstate[2d|3d];`
self._load_interstate_data()
tol = 3
ref_length_2d = 4368.1721949481
ref_length_3d = 4368.62547052088
inter2d = Interstate2D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_2d, inter2d.length.m, tol)
inter3d = Interstate3D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_3d, inter3d.length.m, tol)
        # Making sure `ST_Length3D` is used for a projected
# and 3D model rather than `ST_Length`.
# `SELECT ST_Length(line) FROM geo3d_interstateproj2d;`
ref_length_2d = 4367.71564892392
# `SELECT ST_Length3D(line) FROM geo3d_interstateproj3d;`
ref_length_3d = 4368.16897234101
inter2d = InterstateProj2D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_2d, inter2d.length.m, tol)
inter3d = InterstateProj3D.objects.annotate(length=Length('line')).get(name='I-45')
self.assertAlmostEqual(ref_length_3d, inter3d.length.m, tol)
def test_scale(self):
"""
Testing Scale() function on Z values.
"""
self._load_city_data()
# Mapping of City name to reference Z values.
zscales = (-3, 4, 23)
for zscale in zscales:
for city in City3D.objects.annotate(scale=Scale('point', 1.0, 1.0, zscale)):
self.assertEqual(city_dict[city.name][2] * zscale, city.scale.z)
def test_translate(self):
"""
Testing Translate() function on Z values.
"""
self._load_city_data()
ztranslations = (5.23, 23, -17)
for ztrans in ztranslations:
for city in City3D.objects.annotate(translate=Translate('point', 0, 0, ztrans)):
self.assertEqual(city_dict[city.name][2] + ztrans, city.translate.z)
| bsd-3-clause |
philipforget/python-oauth2 | example/server.py | 375 | 7669 | """
The MIT License
Copyright (c) 2007 Leah Culver
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import urllib
import oauth.oauth as oauth
# fake urls for the test server
REQUEST_TOKEN_URL = 'https://photos.example.net/request_token'
ACCESS_TOKEN_URL = 'https://photos.example.net/access_token'
AUTHORIZATION_URL = 'https://photos.example.net/authorize'
CALLBACK_URL = 'http://printer.example.com/request_token_ready'
RESOURCE_URL = 'http://photos.example.net/photos'
REALM = 'http://photos.example.net/'
VERIFIER = 'verifier'
# example store for one of each thing
class MockOAuthDataStore(oauth.OAuthDataStore):
def __init__(self):
self.consumer = oauth.OAuthConsumer('key', 'secret')
self.request_token = oauth.OAuthToken('requestkey', 'requestsecret')
self.access_token = oauth.OAuthToken('accesskey', 'accesssecret')
self.nonce = 'nonce'
self.verifier = VERIFIER
def lookup_consumer(self, key):
if key == self.consumer.key:
return self.consumer
return None
def lookup_token(self, token_type, token):
token_attrib = getattr(self, '%s_token' % token_type)
if token == token_attrib.key:
## HACK
token_attrib.set_callback(CALLBACK_URL)
return token_attrib
return None
def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
if oauth_token and oauth_consumer.key == self.consumer.key and (oauth_token.key == self.request_token.key or oauth_token.key == self.access_token.key) and nonce == self.nonce:
return self.nonce
return None
def fetch_request_token(self, oauth_consumer, oauth_callback):
if oauth_consumer.key == self.consumer.key:
if oauth_callback:
# want to check here if callback is sensible
# for mock store, we assume it is
self.request_token.set_callback(oauth_callback)
return self.request_token
return None
def fetch_access_token(self, oauth_consumer, oauth_token, oauth_verifier):
if oauth_consumer.key == self.consumer.key and oauth_token.key == self.request_token.key and oauth_verifier == self.verifier:
# want to check here if token is authorized
# for mock store, we assume it is
return self.access_token
return None
def authorize_request_token(self, oauth_token, user):
if oauth_token.key == self.request_token.key:
# authorize the request token in the store
# for mock store, do nothing
return self.request_token
return None
class RequestHandler(BaseHTTPRequestHandler):
def __init__(self, *args, **kwargs):
self.oauth_server = oauth.OAuthServer(MockOAuthDataStore())
self.oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
self.oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
# example way to send an oauth error
def send_oauth_error(self, err=None):
# send a 401 error
self.send_error(401, str(err.message))
# return the authenticate header
header = oauth.build_authenticate_header(realm=REALM)
for k, v in header.iteritems():
self.send_header(k, v)
def do_GET(self):
# debug info
#print self.command, self.path, self.headers
# get the post data (if any)
postdata = None
if self.command == 'POST':
try:
length = int(self.headers.getheader('content-length'))
postdata = self.rfile.read(length)
except:
pass
# construct the oauth request from the request parameters
oauth_request = oauth.OAuthRequest.from_request(self.command, self.path, headers=self.headers, query_string=postdata)
# request token
if self.path.startswith(REQUEST_TOKEN_URL):
try:
# create a request token
token = self.oauth_server.fetch_request_token(oauth_request)
# send okay response
self.send_response(200, 'OK')
self.end_headers()
# return the token
self.wfile.write(token.to_string())
except oauth.OAuthError, err:
self.send_oauth_error(err)
return
# user authorization
if self.path.startswith(AUTHORIZATION_URL):
try:
# get the request token
token = self.oauth_server.fetch_request_token(oauth_request)
# authorize the token (kind of does nothing for now)
token = self.oauth_server.authorize_token(token, None)
token.set_verifier(VERIFIER)
# send okay response
self.send_response(200, 'OK')
self.end_headers()
# return the callback url (to show server has it)
self.wfile.write(token.get_callback_url())
except oauth.OAuthError, err:
self.send_oauth_error(err)
return
# access token
if self.path.startswith(ACCESS_TOKEN_URL):
try:
# create an access token
token = self.oauth_server.fetch_access_token(oauth_request)
# send okay response
self.send_response(200, 'OK')
self.end_headers()
# return the token
self.wfile.write(token.to_string())
except oauth.OAuthError, err:
self.send_oauth_error(err)
return
# protected resources
if self.path.startswith(RESOURCE_URL):
try:
# verify the request has been oauth authorized
consumer, token, params = self.oauth_server.verify_request(oauth_request)
# send okay response
self.send_response(200, 'OK')
self.end_headers()
# return the extra parameters - just for something to return
self.wfile.write(str(params))
except oauth.OAuthError, err:
self.send_oauth_error(err)
return
def do_POST(self):
return self.do_GET()
def main():
try:
server = HTTPServer(('', 8080), RequestHandler)
print 'Test server running...'
server.serve_forever()
except KeyboardInterrupt:
server.socket.close()
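# ---------------------------------------------------------------------------
# Editor's sketch (illustrative only, not part of the original example): a
# client talking to this test server signs its requests with the consumer
# key/secret baked into MockOAuthDataStore. The helper names used below come
# from the same oauth.oauth module and are assumed here rather than taken
# from this file.
#
#   consumer = oauth.OAuthConsumer('key', 'secret')
#   request = oauth.OAuthRequest.from_consumer_and_token(
#       consumer, http_method='GET', http_url=REQUEST_TOKEN_URL)
#   request.sign_request(oauth.OAuthSignatureMethod_HMAC_SHA1(), consumer, None)
#
# The signed parameters (request.to_url() or request.to_header()) are what
# do_GET above hands to fetch_request_token() for verification.
# ---------------------------------------------------------------------------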
if __name__ == '__main__':
main() | mit |
trankmichael/scikit-learn | sklearn/neighbors/approximate.py | 128 | 22351 | """Approximate nearest neighbor search"""
# Author: Maheshakya Wijewardena <[email protected]>
# Joel Nothman <[email protected]>
import numpy as np
import warnings
from scipy import sparse
from .base import KNeighborsMixin, RadiusNeighborsMixin
from ..base import BaseEstimator
from ..utils.validation import check_array
from ..utils import check_random_state
from ..metrics.pairwise import pairwise_distances
from ..random_projection import GaussianRandomProjection
__all__ = ["LSHForest"]
HASH_DTYPE = '>u4'
MAX_HASH_SIZE = np.dtype(HASH_DTYPE).itemsize * 8
def _find_matching_indices(tree, bin_X, left_mask, right_mask):
"""Finds indices in sorted array of integers.
Most significant h bits in the binary representations of the
integers are matched with the items' most significant h bits.
"""
left_index = np.searchsorted(tree, bin_X & left_mask)
right_index = np.searchsorted(tree, bin_X | right_mask,
side='right')
return left_index, right_index
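# Editor's note on _find_matching_indices above (illustration, not part of
# the original module): for a prefix of h bits, ``bin_X & left_mask`` is the
# smallest 32-bit value sharing the query's h-bit prefix and
# ``bin_X | right_mask`` is the largest, so the two searchsorted calls
# bracket exactly the slice of the sorted tree whose entries share that
# prefix.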
def _find_longest_prefix_match(tree, bin_X, hash_size,
left_masks, right_masks):
"""Find the longest prefix match in tree for each query in bin_X
Most significant bits are considered as the prefix.
"""
hi = np.empty_like(bin_X, dtype=np.intp)
hi.fill(hash_size)
lo = np.zeros_like(bin_X, dtype=np.intp)
res = np.empty_like(bin_X, dtype=np.intp)
left_idx, right_idx = _find_matching_indices(tree, bin_X,
left_masks[hi],
right_masks[hi])
found = right_idx > left_idx
res[found] = lo[found] = hash_size
r = np.arange(bin_X.shape[0])
kept = r[lo < hi] # indices remaining in bin_X mask
while kept.shape[0]:
mid = (lo.take(kept) + hi.take(kept)) // 2
left_idx, right_idx = _find_matching_indices(tree,
bin_X.take(kept),
left_masks[mid],
right_masks[mid])
found = right_idx > left_idx
mid_found = mid[found]
lo[kept[found]] = mid_found + 1
res[kept[found]] = mid_found
hi[kept[~found]] = mid[~found]
kept = r[lo < hi]
return res
class ProjectionToHashMixin(object):
"""Turn a transformed real-valued array into a hash"""
@staticmethod
def _to_hash(projected):
if projected.shape[1] % 8 != 0:
raise ValueError('Require reduced dimensionality to be a multiple '
'of 8 for hashing')
# XXX: perhaps non-copying operation better
out = np.packbits((projected > 0).astype(int)).view(dtype=HASH_DTYPE)
return out.reshape(projected.shape[0], -1)
def fit_transform(self, X, y=None):
self.fit(X)
return self.transform(X)
def transform(self, X, y=None):
return self._to_hash(super(ProjectionToHashMixin, self).transform(X))
class GaussianRandomProjectionHash(ProjectionToHashMixin,
GaussianRandomProjection):
"""Use GaussianRandomProjection to produce a cosine LSH fingerprint"""
def __init__(self,
n_components=8,
random_state=None):
super(GaussianRandomProjectionHash, self).__init__(
n_components=n_components,
random_state=random_state)
def _array_of_arrays(list_of_arrays):
"""Creates an array of array from list of arrays."""
out = np.empty(len(list_of_arrays), dtype=object)
out[:] = list_of_arrays
return out
class LSHForest(BaseEstimator, KNeighborsMixin, RadiusNeighborsMixin):
"""Performs approximate nearest neighbor search using LSH forest.
    LSH Forest: Locality Sensitive Hashing forest [1] is an alternative to
    vanilla approximate nearest neighbor search methods.
LSH forest data structure has been implemented using sorted
arrays and binary search and 32 bit fixed-length hashes.
Random projection is used as the hash family which approximates
cosine distance.
The cosine distance is defined as ``1 - cosine_similarity``: the lowest
value is 0 (identical point) but it is bounded above by 2 for the farthest
points. Its value does not depend on the norm of the vector points but
only on their relative angles.
Read more in the :ref:`User Guide <approximate_nearest_neighbors>`.
Parameters
----------
n_estimators : int (default = 10)
Number of trees in the LSH Forest.
min_hash_match : int (default = 4)
lowest hash length to be searched when candidate selection is
performed for nearest neighbors.
    n_candidates : int (default = 50)
Minimum number of candidates evaluated per estimator, assuming enough
items meet the `min_hash_match` constraint.
n_neighbors : int (default = 5)
Number of neighbors to be returned from query function when
it is not provided to the :meth:`kneighbors` method.
    radius : float, optional (default = 1.0)
        Radius from the data point to its neighbors. This is the parameter
        space to use by default for the :meth:`radius_neighbors` queries.
radius_cutoff_ratio : float, optional (default = 0.9)
        A value ranging from 0 to 1. Radius neighbors will be searched until
the ratio between total neighbors within the radius and the total
candidates becomes less than this value unless it is terminated by
hash length reaching `min_hash_match`.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
hash_functions_ : list of GaussianRandomProjectionHash objects
Hash function g(p,x) for a tree is an array of 32 randomly generated
        float arrays with the same dimension as the data set. This array is
        stored in the GaussianRandomProjectionHash object and can be obtained
        from the ``components_`` attribute.
trees_ : array, shape (n_estimators, n_samples)
Each tree (corresponding to a hash function) contains an array of
sorted hashed values. The array representation may change in future
versions.
original_indices_ : array, shape (n_estimators, n_samples)
Original indices of sorted hashed values in the fitted index.
References
----------
.. [1] M. Bawa, T. Condie and P. Ganesan, "LSH Forest: Self-Tuning
Indexes for Similarity Search", WWW '05 Proceedings of the
14th international conference on World Wide Web, 651-660,
2005.
Examples
--------
>>> from sklearn.neighbors import LSHForest
>>> X_train = [[5, 5, 2], [21, 5, 5], [1, 1, 1], [8, 9, 1], [6, 10, 2]]
>>> X_test = [[9, 1, 6], [3, 1, 10], [7, 10, 3]]
>>> lshf = LSHForest()
>>> lshf.fit(X_train) # doctest: +NORMALIZE_WHITESPACE
LSHForest(min_hash_match=4, n_candidates=50, n_estimators=10,
n_neighbors=5, radius=1.0, radius_cutoff_ratio=0.9,
random_state=None)
>>> distances, indices = lshf.kneighbors(X_test, n_neighbors=2)
>>> distances # doctest: +ELLIPSIS
array([[ 0.069..., 0.149...],
[ 0.229..., 0.481...],
[ 0.004..., 0.014...]])
>>> indices
array([[1, 2],
[2, 0],
[4, 0]])
"""
def __init__(self, n_estimators=10, radius=1.0, n_candidates=50,
n_neighbors=5, min_hash_match=4, radius_cutoff_ratio=.9,
random_state=None):
self.n_estimators = n_estimators
self.radius = radius
self.random_state = random_state
self.n_candidates = n_candidates
self.n_neighbors = n_neighbors
self.min_hash_match = min_hash_match
self.radius_cutoff_ratio = radius_cutoff_ratio
def _compute_distances(self, query, candidates):
"""Computes the cosine distance.
Distance is from the query to points in the candidates array.
Returns argsort of distances in the candidates
array and sorted distances.
"""
if candidates.shape == (0,):
# needed since _fit_X[np.array([])] doesn't work if _fit_X sparse
return np.empty(0, dtype=np.int), np.empty(0, dtype=float)
if sparse.issparse(self._fit_X):
candidate_X = self._fit_X[candidates]
else:
candidate_X = self._fit_X.take(candidates, axis=0, mode='clip')
distances = pairwise_distances(query, candidate_X,
metric='cosine')[0]
distance_positions = np.argsort(distances)
distances = distances.take(distance_positions, mode='clip', axis=0)
return distance_positions, distances
def _generate_masks(self):
"""Creates left and right masks for all hash lengths."""
tri_size = MAX_HASH_SIZE + 1
# Called once on fitting, output is independent of hashes
left_mask = np.tril(np.ones((tri_size, tri_size), dtype=int))[:, 1:]
right_mask = left_mask[::-1, ::-1]
self._left_mask = np.packbits(left_mask).view(dtype=HASH_DTYPE)
self._right_mask = np.packbits(right_mask).view(dtype=HASH_DTYPE)
def _get_candidates(self, query, max_depth, bin_queries, n_neighbors):
"""Performs the Synchronous ascending phase.
Returns an array of candidates, their distance ranks and
distances.
"""
index_size = self._fit_X.shape[0]
# Number of candidates considered including duplicates
# XXX: not sure whether this is being calculated correctly wrt
# duplicates from different iterations through a single tree
n_candidates = 0
candidate_set = set()
min_candidates = self.n_candidates * self.n_estimators
while (max_depth > self.min_hash_match and
(n_candidates < min_candidates or
len(candidate_set) < n_neighbors)):
left_mask = self._left_mask[max_depth]
right_mask = self._right_mask[max_depth]
for i in range(self.n_estimators):
start, stop = _find_matching_indices(self.trees_[i],
bin_queries[i],
left_mask, right_mask)
n_candidates += stop - start
candidate_set.update(
self.original_indices_[i][start:stop].tolist())
max_depth -= 1
candidates = np.fromiter(candidate_set, count=len(candidate_set),
dtype=np.intp)
# For insufficient candidates, candidates are filled.
# Candidates are filled from unselected indices uniformly.
if candidates.shape[0] < n_neighbors:
warnings.warn(
"Number of candidates is not sufficient to retrieve"
" %i neighbors with"
" min_hash_match = %i. Candidates are filled up"
" uniformly from unselected"
" indices." % (n_neighbors, self.min_hash_match))
remaining = np.setdiff1d(np.arange(0, index_size), candidates)
to_fill = n_neighbors - candidates.shape[0]
candidates = np.concatenate((candidates, remaining[:to_fill]))
ranks, distances = self._compute_distances(query,
candidates.astype(int))
return (candidates[ranks[:n_neighbors]],
distances[:n_neighbors])
def _get_radius_neighbors(self, query, max_depth, bin_queries, radius):
"""Finds radius neighbors from the candidates obtained.
Their distances from query are smaller than radius.
Returns radius neighbors and distances.
"""
ratio_within_radius = 1
threshold = 1 - self.radius_cutoff_ratio
total_candidates = np.array([], dtype=int)
total_neighbors = np.array([], dtype=int)
total_distances = np.array([], dtype=float)
while (max_depth > self.min_hash_match and
ratio_within_radius > threshold):
left_mask = self._left_mask[max_depth]
right_mask = self._right_mask[max_depth]
candidates = []
for i in range(self.n_estimators):
start, stop = _find_matching_indices(self.trees_[i],
bin_queries[i],
left_mask, right_mask)
candidates.extend(
self.original_indices_[i][start:stop].tolist())
candidates = np.setdiff1d(candidates, total_candidates)
total_candidates = np.append(total_candidates, candidates)
ranks, distances = self._compute_distances(query, candidates)
m = np.searchsorted(distances, radius, side='right')
positions = np.searchsorted(total_distances, distances[:m])
total_neighbors = np.insert(total_neighbors, positions,
candidates[ranks[:m]])
total_distances = np.insert(total_distances, positions,
distances[:m])
ratio_within_radius = (total_neighbors.shape[0] /
float(total_candidates.shape[0]))
max_depth = max_depth - 1
return total_neighbors, total_distances
def fit(self, X, y=None):
"""Fit the LSH forest on the data.
This creates binary hashes of input data points by getting the
dot product of input points and hash_function then
transforming the projection into a binary string array based
on the sign (positive/negative) of the projection.
A sorted array of binary hashes is created.
Parameters
----------
X : array_like or sparse (CSR) matrix, shape (n_samples, n_features)
List of n_features-dimensional data points. Each row
corresponds to a single data point.
Returns
-------
self : object
Returns self.
"""
self._fit_X = check_array(X, accept_sparse='csr')
# Creates a g(p,x) for each tree
self.hash_functions_ = []
self.trees_ = []
self.original_indices_ = []
rng = check_random_state(self.random_state)
int_max = np.iinfo(np.int32).max
for i in range(self.n_estimators):
# This is g(p,x) for a particular tree.
# Builds a single tree. Hashing is done on an array of data points.
# `GaussianRandomProjection` is used for hashing.
            # `n_components` = hash size and `n_features` = n_dim.
hasher = GaussianRandomProjectionHash(MAX_HASH_SIZE,
rng.randint(0, int_max))
hashes = hasher.fit_transform(self._fit_X)[:, 0]
original_index = np.argsort(hashes)
bin_hashes = hashes[original_index]
self.original_indices_.append(original_index)
self.trees_.append(bin_hashes)
self.hash_functions_.append(hasher)
self._generate_masks()
return self
def _query(self, X):
"""Performs descending phase to find maximum depth."""
# Calculate hashes of shape (n_samples, n_estimators, [hash_size])
bin_queries = np.asarray([hasher.transform(X)[:, 0]
for hasher in self.hash_functions_])
bin_queries = np.rollaxis(bin_queries, 1)
# descend phase
depths = [_find_longest_prefix_match(tree, tree_queries, MAX_HASH_SIZE,
self._left_mask, self._right_mask)
for tree, tree_queries in zip(self.trees_,
np.rollaxis(bin_queries, 1))]
return bin_queries, np.max(depths, axis=0)
def kneighbors(self, X, n_neighbors=None, return_distance=True):
"""Returns n_neighbors of approximate nearest neighbors.
Parameters
----------
X : array_like or sparse (CSR) matrix, shape (n_samples, n_features)
List of n_features-dimensional data points. Each row
corresponds to a single query.
        n_neighbors : int, optional (default = None)
            Number of neighbors required. If not provided, the value set at
            initialization is used.
        return_distance : boolean, optional (default = True)
Returns the distances of neighbors if set to True.
Returns
-------
dist : array, shape (n_samples, n_neighbors)
Array representing the cosine distances to each point,
only present if return_distance=True.
ind : array, shape (n_samples, n_neighbors)
Indices of the approximate nearest points in the population
matrix.
"""
if not hasattr(self, 'hash_functions_'):
raise ValueError("estimator should be fitted.")
if n_neighbors is None:
n_neighbors = self.n_neighbors
X = check_array(X, accept_sparse='csr')
neighbors, distances = [], []
bin_queries, max_depth = self._query(X)
for i in range(X.shape[0]):
neighs, dists = self._get_candidates(X[i], max_depth[i],
bin_queries[i],
n_neighbors)
neighbors.append(neighs)
distances.append(dists)
if return_distance:
return np.array(distances), np.array(neighbors)
else:
return np.array(neighbors)
def radius_neighbors(self, X, radius=None, return_distance=True):
"""Finds the neighbors within a given radius of a point or points.
Return the indices and distances of some points from the dataset
lying in a ball with size ``radius`` around the points of the query
array. Points lying on the boundary are included in the results.
The result points are *not* necessarily sorted by distance to their
query point.
LSH Forest being an approximate method, some true neighbors from the
indexed dataset might be missing from the results.
Parameters
----------
X : array_like or sparse (CSR) matrix, shape (n_samples, n_features)
List of n_features-dimensional data points. Each row
corresponds to a single query.
radius : float
Limiting distance of neighbors to return.
(default is the value passed to the constructor).
        return_distance : boolean, optional (default = True)
Returns the distances of neighbors if set to True.
Returns
-------
dist : array, shape (n_samples,) of arrays
Each element is an array representing the cosine distances
to some points found within ``radius`` of the respective query.
Only present if ``return_distance=True``.
ind : array, shape (n_samples,) of arrays
Each element is an array of indices for neighbors within ``radius``
of the respective query.
"""
if not hasattr(self, 'hash_functions_'):
raise ValueError("estimator should be fitted.")
if radius is None:
radius = self.radius
X = check_array(X, accept_sparse='csr')
neighbors, distances = [], []
bin_queries, max_depth = self._query(X)
for i in range(X.shape[0]):
neighs, dists = self._get_radius_neighbors(X[i], max_depth[i],
bin_queries[i], radius)
neighbors.append(neighs)
distances.append(dists)
if return_distance:
return _array_of_arrays(distances), _array_of_arrays(neighbors)
else:
return _array_of_arrays(neighbors)
def partial_fit(self, X, y=None):
"""
Inserts new data into the already fitted LSH Forest.
Cost is proportional to new total size, so additions
should be batched.
Parameters
----------
X : array_like or sparse (CSR) matrix, shape (n_samples, n_features)
New data point to be inserted into the LSH Forest.
"""
X = check_array(X, accept_sparse='csr')
if not hasattr(self, 'hash_functions_'):
return self.fit(X)
if X.shape[1] != self._fit_X.shape[1]:
raise ValueError("Number of features in X and"
" fitted array does not match.")
n_samples = X.shape[0]
n_indexed = self._fit_X.shape[0]
for i in range(self.n_estimators):
bin_X = self.hash_functions_[i].transform(X)[:, 0]
# gets the position to be added in the tree.
positions = self.trees_[i].searchsorted(bin_X)
# adds the hashed value into the tree.
self.trees_[i] = np.insert(self.trees_[i],
positions, bin_X)
# add the entry into the original_indices_.
self.original_indices_[i] = np.insert(self.original_indices_[i],
positions,
np.arange(n_indexed,
n_indexed +
n_samples))
# adds the entry into the input_array.
if sparse.issparse(X) or sparse.issparse(self._fit_X):
self._fit_X = sparse.vstack((self._fit_X, X))
else:
self._fit_X = np.row_stack((self._fit_X, X))
return self
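# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original module): incremental indexing
# with partial_fit. Each call costs time proportional to the new total index
# size, so additions should be batched, as noted in the docstring above. The
# data below is random and purely illustrative.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X_initial = rng.rand(100, 16)
    X_new = rng.rand(20, 16)

    lshf = LSHForest(random_state=0)
    lshf.fit(X_initial)
    lshf.partial_fit(X_new)  # appends the new batch to every sorted tree
    distances, indices = lshf.kneighbors(X_new[:3], n_neighbors=3)
    print(distances.shape, indices.shape)  # both arrays have shape (3, 3)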
| bsd-3-clause |
singlebrook/AWS-ElasticBeanstalk-CLI | eb/macosx/python2.7/lib/aws/requests/packages/urllib3/exceptions.py | 245 | 2258 | # urllib3/exceptions.py
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
## Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, self.url)
class SSLError(HTTPError):
"Raised when SSL certificate fails in an HTTPS connection."
pass
class DecodeError(HTTPError):
"Raised when automatic decoding based on Content-Type fails."
pass
## Leaf Exceptions
class MaxRetryError(PoolError):
"Raised when the maximum number of retries is exceeded."
def __init__(self, pool, url, reason=None):
self.reason = reason
message = "Max retries exceeded with url: %s" % url
if reason:
message += " (Caused by %s: %s)" % (type(reason), reason)
else:
message += " (Caused by redirect)"
PoolError.__init__(self, pool, message)
self.url = url
class HostChangedError(PoolError):
"Raised when an existing pool gets a request for a foreign host."
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
PoolError.__init__(self, pool, message)
self.url = url
self.retries = retries
class TimeoutError(PoolError):
"Raised when a socket timeout occurs."
pass
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
class ClosedPoolError(PoolError):
"Raised when a request enters a pool after the pool has been closed."
pass
class LocationParseError(ValueError, HTTPError):
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
message = "Failed to parse: %s" % location
HTTPError.__init__(self, message)
self.location = location
| apache-2.0 |