repo_name (string, 5-100 chars) | path (string, 4-375 chars) | copies (991 classes) | size (string, 4-7 chars) | content (string, 666-1M chars) | license (15 classes)
---|---|---|---|---|---|
samfoo/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/stream.py | 673 | 2748 |
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file exports public symbols.
"""
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import InvalidUTF8Exception
from mod_pywebsocket._stream_base import UnsupportedFrameException
from mod_pywebsocket._stream_hixie75 import StreamHixie75
from mod_pywebsocket._stream_hybi import Frame
from mod_pywebsocket._stream_hybi import Stream
from mod_pywebsocket._stream_hybi import StreamOptions
# These methods are intended to be used by WebSocket client developers to have
# their implementations receive broken data in tests.
from mod_pywebsocket._stream_hybi import create_close_frame
from mod_pywebsocket._stream_hybi import create_header
from mod_pywebsocket._stream_hybi import create_length_header
from mod_pywebsocket._stream_hybi import create_ping_frame
from mod_pywebsocket._stream_hybi import create_pong_frame
from mod_pywebsocket._stream_hybi import create_binary_frame
from mod_pywebsocket._stream_hybi import create_text_frame
from mod_pywebsocket._stream_hybi import create_closing_handshake_body
# vi:sts=4 sw=4 et
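# --- Illustrative sketch, not part of the upstream module ---
# The comment above explains that the create_* helpers are re-exported so
# that WebSocket client developers can hand-craft frames (including broken
# ones) in tests. A minimal sketch of that idea follows; calling the helpers
# with a single positional argument is an assumption about the _stream_hybi
# signatures and may need adjusting for a given pywebsocket version, and the
# function name below is purely illustrative.
def _example_build_test_frames():
    # A plain text frame carrying a short payload.
    text_frame = create_text_frame('hello')
    # A close frame with an empty body (no status code or reason).
    close_frame = create_close_frame('')
    return text_frame, close_frame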
| mpl-2.0 |
KousikaGanesh/purchaseandInventory | openerp/addons/stock/wizard/stock_inventory_merge.py | 57 | 3815 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class stock_inventory_merge(osv.osv_memory):
_name = "stock.inventory.merge"
_description = "Merge Inventory"
def fields_view_get(self, cr, uid, view_id=None, view_type='form',
context=None, toolbar=False, submenu=False):
"""
Changes the view dynamically
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param context: A standard dictionary
@return: New arch of view.
"""
if context is None:
context={}
res = super(stock_inventory_merge, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False)
if context.get('active_model','') == 'stock.inventory' and len(context['active_ids']) < 2:
raise osv.except_osv(_('Warning!'),
_('Please select multiple physical inventories to merge in the list view.'))
return res
def do_merge(self, cr, uid, ids, context=None):
""" To merge selected Inventories.
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: List of IDs selected
@param context: A standard dictionary
@return:
"""
invent_obj = self.pool.get('stock.inventory')
invent_line_obj = self.pool.get('stock.inventory.line')
invent_lines = {}
if context is None:
context = {}
for inventory in invent_obj.browse(cr, uid, context['active_ids'], context=context):
if inventory.state == "done":
raise osv.except_osv(_('Warning!'),
_('Merging is only allowed on draft inventories.'))
for line in inventory.inventory_line_id:
key = (line.location_id.id, line.product_id.id, line.product_uom.id)
if key in invent_lines:
invent_lines[key] += line.product_qty
else:
invent_lines[key] = line.product_qty
new_invent = invent_obj.create(cr, uid, {
'name': 'Merged inventory'
}, context=context)
for key, quantity in invent_lines.items():
invent_line_obj.create(cr, uid, {
'inventory_id': new_invent,
'location_id': key[0],
'product_id': key[1],
'product_uom': key[2],
'product_qty': quantity,
})
return {'type': 'ir.actions.act_window_close'}
stock_inventory_merge()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
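# --- Illustrative sketch, not part of the upstream module ---
# do_merge() above folds inventory lines into a dict keyed by
# (location_id, product_id, product_uom) and sums product_qty per key.
# The same aggregation in self-contained form, with made-up sample data:
def _merge_lines_example(lines):
    """lines: iterable of (location_id, product_id, uom_id, qty) tuples."""
    merged = {}
    for location_id, product_id, uom_id, qty in lines:
        key = (location_id, product_id, uom_id)
        merged[key] = merged.get(key, 0.0) + qty
    return merged
# _merge_lines_example([(1, 7, 3, 5.0), (1, 7, 3, 2.5)]) == {(1, 7, 3): 7.5}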
| agpl-3.0 |
KaranToor/MA450 | google-cloud-sdk/.install/.backup/lib/third_party/httplib2/iri2uri.py | 706 | 3828 |
"""
iri2uri
Converts an IRI to a URI.
"""
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = []
__version__ = "1.0.0"
__license__ = "MIT"
__history__ = """
"""
import urlparse
# Convert an IRI to a URI following the rules in RFC 3987
#
# The characters we need to encode and escape are defined in the spec:
#
# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD
# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF
# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD
# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD
# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD
# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD
# / %xD0000-DFFFD / %xE1000-EFFFD
escape_range = [
(0xA0, 0xD7FF),
(0xE000, 0xF8FF),
(0xF900, 0xFDCF),
(0xFDF0, 0xFFEF),
(0x10000, 0x1FFFD),
(0x20000, 0x2FFFD),
(0x30000, 0x3FFFD),
(0x40000, 0x4FFFD),
(0x50000, 0x5FFFD),
(0x60000, 0x6FFFD),
(0x70000, 0x7FFFD),
(0x80000, 0x8FFFD),
(0x90000, 0x9FFFD),
(0xA0000, 0xAFFFD),
(0xB0000, 0xBFFFD),
(0xC0000, 0xCFFFD),
(0xD0000, 0xDFFFD),
(0xE1000, 0xEFFFD),
(0xF0000, 0xFFFFD),
(0x100000, 0x10FFFD),
]
def encode(c):
retval = c
i = ord(c)
for low, high in escape_range:
if i < low:
break
if i >= low and i <= high:
retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')])
break
return retval
def iri2uri(uri):
"""Convert an IRI to a URI. Note that IRIs must be
passed in a unicode strings. That is, do not utf-8 encode
the IRI before passing it into the function."""
if isinstance(uri ,unicode):
(scheme, authority, path, query, fragment) = urlparse.urlsplit(uri)
authority = authority.encode('idna')
# For each character in 'ucschar' or 'iprivate'
# 1. encode as utf-8
# 2. then %-encode each octet of that utf-8
uri = urlparse.urlunsplit((scheme, authority, path, query, fragment))
uri = "".join([encode(c) for c in uri])
return uri
if __name__ == "__main__":
import unittest
class Test(unittest.TestCase):
def test_uris(self):
"""Test that URIs are invariant under the transformation."""
invariant = [
u"ftp://ftp.is.co.za/rfc/rfc1808.txt",
u"http://www.ietf.org/rfc/rfc2396.txt",
u"ldap://[2001:db8::7]/c=GB?objectClass?one",
u"mailto:[email protected]",
u"news:comp.infosystems.www.servers.unix",
u"tel:+1-816-555-1212",
u"telnet://192.0.2.16:80/",
u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ]
for uri in invariant:
self.assertEqual(uri, iri2uri(uri))
def test_iri(self):
""" Test that the right type of escaping is done for each part of the URI."""
self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}"))
self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}"))
self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}"))
self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}"))
self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))
self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")))
self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8')))
unittest.main()
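# --- Illustrative usage, not part of the upstream module ---
# iri2uri() takes a unicode IRI, IDNA-encodes the authority and
# percent-encodes the remaining non-ASCII octets; the values below are
# taken from the tests above:
#   iri2uri(u"http://\N{COMET}.com/\N{COMET}") == "http://xn--o3h.com/%E2%98%84"
# A byte string (already UTF-8 encoded) is returned unchanged.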
| apache-2.0 |
h3biomed/ansible | lib/ansible/modules/network/fortimanager/fmgr_secprof_dns.py | 39 | 11232 |
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_secprof_dns
version_added: "2.8"
notes:
- Full Documentation at U(https://ftnt-ansible-docs.readthedocs.io/en/latest/).
author:
- Luke Weighall (@lweighall)
- Andrew Welsh (@Ghilli3)
- Jim Huber (@p4r4n0y1ng)
short_description: Manage DNS security profiles in FortiManager
description:
- Manage DNS security profiles in FortiManager
options:
adom:
description:
- The ADOM the configuration should belong to.
required: false
default: root
mode:
description:
- Sets one of three modes for managing the object.
- Allows use of soft-adds instead of overwriting existing values.
choices: ['add', 'set', 'delete', 'update']
required: false
default: add
youtube_restrict:
type: str
description:
- Set safe search for YouTube restriction level.
- choice | strict | Enable strict safe search for YouTube.
- choice | moderate | Enable moderate safe search for YouTube.
required: false
choices: ["strict", "moderate"]
sdns_ftgd_err_log:
type: str
description:
- Enable/disable FortiGuard SDNS rating error logging.
- choice | disable | Disable FortiGuard SDNS rating error logging.
- choice | enable | Enable FortiGuard SDNS rating error logging.
required: false
choices: ["disable", "enable"]
sdns_domain_log:
type: str
description:
- Enable/disable domain filtering and botnet domain logging.
- choice | disable | Disable domain filtering and botnet domain logging.
- choice | enable | Enable domain filtering and botnet domain logging.
required: false
choices: ["disable", "enable"]
safe_search:
type: str
description:
- Enable/disable Google, Bing, and YouTube safe search.
- choice | disable | Disable Google, Bing, and YouTube safe search.
- choice | enable | Enable Google, Bing, and YouTube safe search.
required: false
choices: ["disable", "enable"]
redirect_portal:
type: str
description:
- IP address of the SDNS redirect portal.
required: false
name:
type: str
description:
- Profile name.
required: false
log_all_domain:
type: str
description:
- Enable/disable logging of all domains visited (detailed DNS logging).
- choice | disable | Disable logging of all domains visited.
- choice | enable | Enable logging of all domains visited.
required: false
choices: ["disable", "enable"]
external_ip_blocklist:
type: str
description:
- One or more external IP block lists.
required: false
comment:
type: str
description:
- Comment for the security profile to show in the FortiManager GUI.
required: false
block_botnet:
type: str
description:
- Enable/disable blocking botnet C&C DNS lookups.
- choice | disable | Disable blocking botnet C&C DNS lookups.
- choice | enable | Enable blocking botnet C&C DNS lookups.
required: false
choices: ["disable", "enable"]
block_action:
type: str
description:
- Action to take for blocked domains.
- choice | block | Return NXDOMAIN for blocked domains.
- choice | redirect | Redirect blocked domains to SDNS portal.
required: false
choices: ["block", "redirect"]
domain_filter_domain_filter_table:
type: str
description:
- DNS domain filter table ID.
required: false
ftgd_dns_options:
type: str
description:
- FortiGuard DNS filter options.
- FLAG Based Options. Specify multiple in list form.
- flag | error-allow | Allow all domains when FortiGuard DNS servers fail.
- flag | ftgd-disable | Disable FortiGuard DNS domain rating.
required: false
choices: ["error-allow", "ftgd-disable"]
ftgd_dns_filters_action:
type: str
description:
- Action to take for DNS requests matching the category.
- choice | monitor | Allow DNS requests matching the category and log the result.
- choice | block | Block DNS requests matching the category.
required: false
choices: ["monitor", "block"]
ftgd_dns_filters_category:
type: str
description:
- Category number.
required: false
ftgd_dns_filters_log:
type: str
description:
- Enable/disable DNS filter logging for this DNS profile.
- choice | disable | Disable DNS filter logging.
- choice | enable | Enable DNS filter logging.
required: false
choices: ["disable", "enable"]
'''
EXAMPLES = '''
- name: DELETE Profile
fmgr_secprof_dns:
name: "Ansible_DNS_Profile"
comment: "Created by Ansible Module TEST"
mode: "delete"
- name: CREATE Profile
fmgr_secprof_dns:
name: "Ansible_DNS_Profile"
comment: "Created by Ansible Module TEST"
mode: "set"
block_action: "block"
'''
RETURN = """
api_result:
description: full API response, includes status code and message
returned: always
type: str
"""
from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortimanager.fortimanager import FortiManagerHandler
from ansible.module_utils.network.fortimanager.common import FMGBaseException
from ansible.module_utils.network.fortimanager.common import FMGRCommon
from ansible.module_utils.network.fortimanager.common import FMGRMethods
from ansible.module_utils.network.fortimanager.common import DEFAULT_RESULT_OBJ
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
from ansible.module_utils.network.fortimanager.common import prepare_dict
from ansible.module_utils.network.fortimanager.common import scrub_dict
###############
# START METHODS
###############
def fmgr_dnsfilter_profile_modify(fmgr, paramgram):
"""
:param fmgr: The fmgr object instance from fortimanager.py
:type fmgr: class object
:param paramgram: The formatted dictionary of options to process
:type paramgram: dict
:return: The response from the FortiManager
:rtype: dict
"""
mode = paramgram["mode"]
adom = paramgram["adom"]
url = ""
datagram = {}
response = DEFAULT_RESULT_OBJ
# EVAL THE MODE PARAMETER FOR SET OR ADD
if mode in ['set', 'add', 'update']:
url = '/pm/config/adom/{adom}/obj/dnsfilter/profile'.format(adom=adom)
datagram = scrub_dict(prepare_dict(paramgram))
# EVAL THE MODE PARAMETER FOR DELETE
elif mode == "delete":
# SET THE CORRECT URL FOR DELETE
url = '/pm/config/adom/{adom}/obj/dnsfilter/profile/{name}'.format(adom=adom, name=paramgram["name"])
datagram = {}
response = fmgr.process_request(url, datagram, paramgram["mode"])
return response
#############
# END METHODS
#############
def main():
argument_spec = dict(
adom=dict(type="str", default="root"),
mode=dict(choices=["add", "set", "delete", "update"], type="str", default="add"),
youtube_restrict=dict(required=False, type="str", choices=["strict", "moderate"]),
sdns_ftgd_err_log=dict(required=False, type="str", choices=["disable", "enable"]),
sdns_domain_log=dict(required=False, type="str", choices=["disable", "enable"]),
safe_search=dict(required=False, type="str", choices=["disable", "enable"]),
redirect_portal=dict(required=False, type="str"),
name=dict(required=False, type="str"),
log_all_domain=dict(required=False, type="str", choices=["disable", "enable"]),
external_ip_blocklist=dict(required=False, type="str"),
comment=dict(required=False, type="str"),
block_botnet=dict(required=False, type="str", choices=["disable", "enable"]),
block_action=dict(required=False, type="str", choices=["block", "redirect"]),
domain_filter_domain_filter_table=dict(required=False, type="str"),
ftgd_dns_options=dict(required=False, type="str", choices=["error-allow", "ftgd-disable"]),
ftgd_dns_filters_action=dict(required=False, type="str", choices=["monitor", "block"]),
ftgd_dns_filters_category=dict(required=False, type="str"),
ftgd_dns_filters_log=dict(required=False, type="str", choices=["disable", "enable"]),
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False, )
# MODULE PARAMGRAM
paramgram = {
"mode": module.params["mode"],
"adom": module.params["adom"],
"youtube-restrict": module.params["youtube_restrict"],
"sdns-ftgd-err-log": module.params["sdns_ftgd_err_log"],
"sdns-domain-log": module.params["sdns_domain_log"],
"safe-search": module.params["safe_search"],
"redirect-portal": module.params["redirect_portal"],
"name": module.params["name"],
"log-all-domain": module.params["log_all_domain"],
"external-ip-blocklist": module.params["external_ip_blocklist"],
"comment": module.params["comment"],
"block-botnet": module.params["block_botnet"],
"block-action": module.params["block_action"],
"domain-filter": {
"domain-filter-table": module.params["domain_filter_domain_filter_table"],
},
"ftgd-dns": {
"options": module.params["ftgd_dns_options"],
"filters": {
"action": module.params["ftgd_dns_filters_action"],
"category": module.params["ftgd_dns_filters_category"],
"log": module.params["ftgd_dns_filters_log"],
}
}
}
module.paramgram = paramgram
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
fmgr = FortiManagerHandler(connection, module)
fmgr.tools = FMGRCommon()
else:
module.fail_json(**FAIL_SOCKET_MSG)
results = DEFAULT_RESULT_OBJ
try:
results = fmgr_dnsfilter_profile_modify(fmgr, paramgram)
fmgr.govern_response(module=module, results=results,
ansible_facts=fmgr.construct_ansible_facts(results, module.params, paramgram))
except Exception as err:
raise FMGBaseException(err)
return module.exit_json(**results[1])
if __name__ == "__main__":
main()
| gpl-3.0 |
kalahbrown/HueBigSQL | desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/Protocol/__init__.py | 125 | 1573 |
# -*- coding: utf-8 -*-
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Cryptographic protocols
Implements various cryptographic protocols. (Don't expect to find
network protocols here.)
Crypto.Protocol.AllOrNothing
Transforms a message into a set of message blocks, such that the blocks
can be recombined to get the message back.
Crypto.Protocol.Chaffing
Takes a set of authenticated message blocks (the wheat) and adds a number
of randomly generated blocks (the chaff).
Crypto.Protocol.KDF
A collection of standard key derivation functions.
:undocumented: __revision__
"""
__all__ = ['AllOrNothing', 'Chaffing', 'KDF']
__revision__ = "$Id$"
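# --- Illustrative usage, not part of the upstream package ---
# Example of the KDF sub-module described in the docstring above (PyCrypto
# 2.6 API); the passphrase and salt are made-up values.
#
# from Crypto.Protocol.KDF import PBKDF2
# key = PBKDF2("passphrase", b"salty-salt", dkLen=32, count=10000)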
| apache-2.0 |
PaloAltoNetworks-BD/SplunkforPaloAltoNetworks | Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py2/http/client.py | 45 | 2786 |
from __future__ import absolute_import
import sys
assert sys.version_info[0] < 3
from httplib import *
from httplib import HTTPMessage
# These constants aren't included in __all__ in httplib.py:
from httplib import (HTTP_PORT,
HTTPS_PORT,
_CS_IDLE,
_CS_REQ_STARTED,
_CS_REQ_SENT,
CONTINUE,
SWITCHING_PROTOCOLS,
PROCESSING,
OK,
CREATED,
ACCEPTED,
NON_AUTHORITATIVE_INFORMATION,
NO_CONTENT,
RESET_CONTENT,
PARTIAL_CONTENT,
MULTI_STATUS,
IM_USED,
MULTIPLE_CHOICES,
MOVED_PERMANENTLY,
FOUND,
SEE_OTHER,
NOT_MODIFIED,
USE_PROXY,
TEMPORARY_REDIRECT,
BAD_REQUEST,
UNAUTHORIZED,
PAYMENT_REQUIRED,
FORBIDDEN,
NOT_FOUND,
METHOD_NOT_ALLOWED,
NOT_ACCEPTABLE,
PROXY_AUTHENTICATION_REQUIRED,
REQUEST_TIMEOUT,
CONFLICT,
GONE,
LENGTH_REQUIRED,
PRECONDITION_FAILED,
REQUEST_ENTITY_TOO_LARGE,
REQUEST_URI_TOO_LONG,
UNSUPPORTED_MEDIA_TYPE,
REQUESTED_RANGE_NOT_SATISFIABLE,
EXPECTATION_FAILED,
UNPROCESSABLE_ENTITY,
LOCKED,
FAILED_DEPENDENCY,
UPGRADE_REQUIRED,
INTERNAL_SERVER_ERROR,
NOT_IMPLEMENTED,
BAD_GATEWAY,
SERVICE_UNAVAILABLE,
GATEWAY_TIMEOUT,
HTTP_VERSION_NOT_SUPPORTED,
INSUFFICIENT_STORAGE,
NOT_EXTENDED,
MAXAMOUNT,
)
# These are not available on Python 2.6.x:
try:
from httplib import LineTooLong, LineAndFileWrapper
except ImportError:
pass
# These may not be available on all versions of Python 2.6.x or 2.7.x
try:
from httplib import (
_MAXLINE,
_MAXHEADERS,
_is_legal_header_name,
_is_illegal_header_value,
_METHODS_EXPECTING_BODY
)
except ImportError:
pass
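# --- Illustrative usage, not part of the upstream shim ---
# With this package on sys.path, Python 2 code can use Python-3 style
# imports; every name below is simply re-exported from httplib above.
# The helper function is hypothetical and shown only for illustration.
#
# from http.client import HTTPConnection, OK
#
# def fetch_ok(host, path="/"):
#     conn = HTTPConnection(host)
#     conn.request("GET", path)
#     return conn.getresponse().status == OK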
| isc |
mzadel/libmapper-sc | editors/sced/sced/ScLang.py | 46 | 2479 |
# -*- coding: utf-8 -*-
# sced (SuperCollider mode for gedit)
# Copyright 2009 Artem Popov and other contributors (see AUTHORS)
#
# sced is free software:
# you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
import time # FIXME: time is a workaround
class ScLang:
def __init__(self, plugin):
self.__sclang = None
self.__settings = plugin.settings
def start (self):
if self.running():
return
# FIXME: maybe we need a default value in Settings?
folder = self.__settings.props.runtime_folder
if folder is None:
folder = os.getcwd()
self.__sclang = subprocess.Popen(["sclang",
"-i", "sced", "-d", folder],
bufsize=0,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
close_fds=True)
self.stdout = self.__sclang.stdout
self.stdin = self.__sclang.stdin
def stop(self):
if self.running():
self.stdin.close()
self.__sclang.wait()
self.__sclang = None
def running(self):
return (self.__sclang is not None) and (self.__sclang.poll() is None)
# FIXME: use sclang.communicate()
def evaluate(self, code, silent=False):
self.stdin.write(code)
# sclang (started with "-i" above) interprets its buffered input when it
# sees one of these control characters; 0x1b appears to trigger silent
# interpretation, while 0x0c interprets and prints the result.
if silent:
self.stdin.write("\x1b")
else:
self.stdin.write("\x0c")
self.stdin.flush()
def toggle_recording(self, record):
if record:
self.evaluate("s.prepareForRecord;", silent=True)
time.sleep(0.1) # give server some time to prepare
self.evaluate("s.record;", silent=True)
else:
self.evaluate("s.stopRecording;", silent=True)
def stop_sound(self):
self.evaluate("thisProcess.stop;", silent=True)
| gpl-3.0 |
gspilio/nova | nova/api/openstack/compute/contrib/image_size.py | 38 | 3327 |
# Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
authorize = extensions.soft_extension_authorizer('compute', 'image_size')
def make_image(elem):
elem.set('{%s}size' % Image_size.namespace, '%s:size' % Image_size.alias)
class ImagesSizeTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('images')
elem = xmlutil.SubTemplateElement(root, 'image', selector='images')
make_image(elem)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Image_size.alias: Image_size.namespace})
class ImageSizeTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('image', selector='image')
make_image(root)
return xmlutil.SlaveTemplate(root, 1, nsmap={
Image_size.alias: Image_size.namespace})
class ImageSizeController(wsgi.Controller):
def _extend_image(self, image, image_cache):
key = "%s:size" % Image_size.alias
image[key] = image_cache['size']
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ["nova.context"]
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ImageSizeTemplate())
image_resp = resp_obj.obj['image']
# image guaranteed to be in the cache due to the core API adding
# it in its 'show' method
image_cached = req.get_db_item('images', image_resp['id'])
self._extend_image(image_resp, image_cached)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if authorize(context):
# Attach our slave template to the response object
resp_obj.attach(xml=ImagesSizeTemplate())
images_resp = list(resp_obj.obj['images'])
# images guaranteed to be in the cache due to the core API adding
# it in its 'detail' method
for image in images_resp:
image_cached = req.get_db_item('images', image['id'])
self._extend_image(image, image_cached)
class Image_size(extensions.ExtensionDescriptor):
"""Adds image size to image listings."""
name = "ImageSize"
alias = "OS-EXT-IMG-SIZE"
namespace = ("http://docs.openstack.org/compute/ext/"
"image_size/api/v1.1")
updated = "2013-02-19T00:00:00+00:00"
def get_controller_extensions(self):
controller = ImageSizeController()
extension = extensions.ControllerExtension(self, 'images', controller)
return [extension]
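# --- Illustrative note, not part of the upstream extension ---
# _extend_image() above stores the size under the aliased key, so an
# extended image entry looks roughly like this (values made up):
#   {'id': 'abc123', 'name': 'cirros', 'OS-EXT-IMG-SIZE:size': 13167616}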
| apache-2.0 |
metasmile/awesome-strings | raw/chromium/grit/grit/format/policy_templates/template_formatter.py | 61 | 2591 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
from functools import partial
from grit.format.policy_templates import policy_template_generator
from grit.format.policy_templates import writer_configuration
from grit.node import misc
from grit.node import structure
def GetFormatter(type):
return partial(_TemplateFormatter,
'grit.format.policy_templates.writers.%s_writer' % type)
def _TemplateFormatter(writer_module_name, root, lang, output_dir):
'''Creates a template file corresponding to an <output> node of the grit
tree.
More precisely, processes the whole grit tree for a given <output> node whose
type is one of adm, plist, plist_strings, admx, adml, doc, json, reg.
The result of processing is a policy template file with the given type and
language of the <output> node. This function does the interfacing with
grit, but the actual template-generating work is done in
policy_template_generator.PolicyTemplateGenerator.
Args:
writer_module_name: The dotted name of the writer module whose
TemplateWriter subclass generates the output.
root: the <grit> root node of the grit tree.
lang: the language of outputted text, e.g.: 'en'
output_dir: The output directory, currently unused here.
Yields the text of the template file.
'''
__import__(writer_module_name)
writer_module = sys.modules[writer_module_name]
config = writer_configuration.GetConfigurationForBuild(root.defines)
policy_data = _ParseGritNodes(root, lang)
policy_generator = \
policy_template_generator.PolicyTemplateGenerator(config, policy_data)
writer = writer_module.GetWriter(config)
yield policy_generator.GetTemplateText(writer)
def _ParseGritNodes(root, lang):
'''Collects the necessary information from the grit tree:
the message strings and the policy definitions.
Args:
root: The root of the grit tree.
lang: the language of outputted text, e.g.: 'en'
Returns:
Policy data.
'''
policy_data = None
for item in root.ActiveDescendants():
with item:
if (isinstance(item, structure.StructureNode) and
item.attrs['type'] == 'policy_template_metafile'):
assert policy_data is None
json_text = item.gatherer.Translate(
lang,
pseudo_if_not_available=item.PseudoIsAllowed(),
fallback_to_english=item.ShouldFallbackToEnglish())
policy_data = eval(json_text)
return policy_data
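# --- Illustrative note, not part of the upstream file ---
# GetFormatter('adm') above returns a functools.partial of _TemplateFormatter
# bound to 'grit.format.policy_templates.writers.adm_writer'; grit then calls
# it as formatter(root, lang, output_dir) and consumes the yielded template
# text.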
| mit |
JianyuWang/neutron | neutron/tests/api/admin/test_l3_agent_scheduler.py | 43 | 4395 |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from neutron.tests.api import base
from neutron.tests.tempest import exceptions
from neutron.tests.tempest import test
AGENT_TYPE = 'L3 agent'
AGENT_MODES = (
'legacy',
'dvr_snat'
)
class L3AgentSchedulerTestJSON(base.BaseAdminNetworkTest):
_agent_mode = 'legacy'
"""
Tests the following operations in the Neutron API using the REST client for
Neutron:
List routers that the given L3 agent is hosting.
List L3 agents hosting the given router.
Add and Remove Router to L3 agent
v2.0 of the Neutron API is assumed.
The l3_agent_scheduler extension is required for these tests.
"""
@classmethod
def skip_checks(cls):
super(L3AgentSchedulerTestJSON, cls).skip_checks()
if not test.is_extension_enabled('l3_agent_scheduler', 'network'):
msg = "L3 Agent Scheduler Extension not enabled."
raise cls.skipException(msg)
@classmethod
def resource_setup(cls):
super(L3AgentSchedulerTestJSON, cls).resource_setup()
body = cls.admin_client.list_agents()
agents = body['agents']
for agent in agents:
# TODO(armax): falling back on default _agent_mode can be
# dropped as soon as Icehouse is dropped.
agent_mode = (
agent['configurations'].get('agent_mode', cls._agent_mode))
if agent['agent_type'] == AGENT_TYPE and agent_mode in AGENT_MODES:
cls.agent = agent
break
else:
msg = "L3 Agent Scheduler enabled in conf, but L3 Agent not found"
raise exceptions.InvalidConfiguration(msg)
cls.router = cls.create_router(data_utils.rand_name('router'))
# NOTE(armax): If DVR is an available extension, and the created router
# is indeed a distributed one, more resources need to be provisioned
# in order to bind the router to the L3 agent.
# That said, let's preserve the existing test logic, where the extra
# query and setup steps are only required if the extension is available
# and only if the router's default type is distributed.
if test.is_extension_enabled('dvr', 'network'):
is_dvr_router = cls.admin_client.show_router(
cls.router['id'])['router'].get('distributed', False)
if is_dvr_router:
cls.network = cls.create_network()
cls.create_subnet(cls.network)
cls.port = cls.create_port(cls.network)
cls.client.add_router_interface_with_port_id(
cls.router['id'], cls.port['id'])
@test.attr(type='smoke')
@test.idempotent_id('b7ce6e89-e837-4ded-9b78-9ed3c9c6a45a')
def test_list_routers_on_l3_agent(self):
self.admin_client.list_routers_on_l3_agent(self.agent['id'])
@test.attr(type='smoke')
@test.idempotent_id('9464e5e7-8625-49c3-8fd1-89c52be59d66')
def test_add_list_remove_router_on_l3_agent(self):
l3_agent_ids = list()
self.admin_client.add_router_to_l3_agent(
self.agent['id'],
self.router['id'])
body = (
self.admin_client.list_l3_agents_hosting_router(self.router['id']))
for agent in body['agents']:
l3_agent_ids.append(agent['id'])
self.assertIn('agent_type', agent)
self.assertEqual('L3 agent', agent['agent_type'])
self.assertIn(self.agent['id'], l3_agent_ids)
body = self.admin_client.remove_router_from_l3_agent(
self.agent['id'],
self.router['id'])
# NOTE(afazekas): The deletion is not asserted because neutron
# is not forbidden from rescheduling the router to the same agent
| apache-2.0 |
joshuajan/odoo | openerp/addons/base/res/res_country.py | 33 | 4249 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
def location_name_search(self, cr, user, name='', args=None, operator='ilike',
context=None, limit=100):
if not args:
args = []
ids = []
if len(name) == 2:
ids = self.search(cr, user, [('code', 'ilike', name)] + args,
limit=limit, context=context)
search_domain = [('name', operator, name)]
if ids: search_domain.append(('id', 'not in', ids))
ids.extend(self.search(cr, user, search_domain + args,
limit=limit, context=context))
locations = self.name_get(cr, user, ids, context)
return sorted(locations, key=lambda (id, name): ids.index(id))
class Country(osv.osv):
_name = 'res.country'
_description = 'Country'
_columns = {
'name': fields.char('Country Name', size=64,
help='The full name of the country.', required=True, translate=True),
'code': fields.char('Country Code', size=2,
help='The ISO country code in two chars.\n'
'You can use this field for quick search.'),
'address_format': fields.text('Address Format', help="""You can state here the usual format to use for the \
addresses belonging to this country.\n\nYou can use the python-style string pattern with all the fields of the address \
(for example, use '%(street)s' to display the field 'street') plus
\n%(state_name)s: the name of the state
\n%(state_code)s: the code of the state
\n%(country_name)s: the name of the country
\n%(country_code)s: the code of the country"""),
'currency_id': fields.many2one('res.currency', 'Currency'),
'image': fields.binary("Image"),
}
_sql_constraints = [
('name_uniq', 'unique (name)',
'The name of the country must be unique !'),
('code_uniq', 'unique (code)',
'The code of the country must be unique !')
]
_defaults = {
'address_format': "%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s",
}
_order='name'
name_search = location_name_search
def create(self, cursor, user, vals, context=None):
if vals.get('code'):
vals['code'] = vals['code'].upper()
return super(Country, self).create(cursor, user, vals,
context=context)
def write(self, cursor, user, ids, vals, context=None):
if vals.get('code'):
vals['code'] = vals['code'].upper()
return super(Country, self).write(cursor, user, ids, vals,
context=context)
class CountryState(osv.osv):
_description="Country state"
_name = 'res.country.state'
_columns = {
'country_id': fields.many2one('res.country', 'Country',
required=True),
'name': fields.char('State Name', size=64, required=True,
help='Administrative divisions of a country. E.g. Fed. State, Departement, Canton'),
'code': fields.char('State Code', size=3,
help='The state code in max. three chars.', required=True),
}
_order = 'code'
name_search = location_name_search
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
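# --- Illustrative sketch, not part of the upstream module ---
# The address_format default above is a plain %-style template; rendering an
# address is just dictionary interpolation. The sample values are made up.
def _format_address_example():
    template = "%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s"
    values = {
        'street': '123 Example Road',
        'street2': '',
        'city': 'Springfield',
        'state_code': 'IL',
        'zip': '62701',
        'country_name': 'United States',
    }
    return template % values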
| agpl-3.0 |
timoc/autokey | src/lib/gtkapp.py | 48 | 10468 |
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import common
common.USING_QT = False
import sys, traceback, os.path, signal, logging, logging.handlers, subprocess, optparse, time, threading  # threading is used in shutdown()
import gettext, dbus, dbus.service, dbus.mainloop.glib
from gi.repository import Gtk, Gdk, GObject, GLib
gettext.install("autokey")
import service, monitor
from gtkui.notifier import get_notifier
from gtkui.popupmenu import PopupMenu
from gtkui.configwindow import ConfigWindow
from configmanager import *
from common import *
PROGRAM_NAME = _("AutoKey")
DESCRIPTION = _("Desktop automation utility")
COPYRIGHT = _("(c) 2008-2011 Chris Dekter")
class Application:
"""
Main application class; starting and stopping of the application is controlled
from here, together with some interactions from the tray icon.
"""
def __init__(self):
GLib.threads_init()
Gdk.threads_init()
p = optparse.OptionParser()
p.add_option("-l", "--verbose", help="Enable verbose logging", action="store_true", default=False)
p.add_option("-c", "--configure", help="Show the configuration window on startup", action="store_true", default=False)
options, args = p.parse_args()
try:
# Create configuration directory
if not os.path.exists(CONFIG_DIR):
os.makedirs(CONFIG_DIR)
# Initialise logger
rootLogger = logging.getLogger()
if options.verbose:
rootLogger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
else:
rootLogger.setLevel(logging.INFO)
handler = logging.handlers.RotatingFileHandler(LOG_FILE,
maxBytes=MAX_LOG_SIZE, backupCount=MAX_LOG_COUNT)
handler.setFormatter(logging.Formatter(LOG_FORMAT))
rootLogger.addHandler(handler)
if self.__verifyNotRunning():
self.__createLockFile()
self.initialise(options.configure)
except Exception, e:
self.show_error_dialog(_("Fatal error starting AutoKey.\n") + str(e))
logging.exception("Fatal error starting AutoKey: " + str(e))
sys.exit(1)
def __createLockFile(self):
f = open(LOCK_FILE, 'w')
f.write(str(os.getpid()))
f.close()
def __verifyNotRunning(self):
if os.path.exists(LOCK_FILE):
f = open(LOCK_FILE, 'r')
pid = f.read()
f.close()
# Check that the found PID is running and is autokey
p = subprocess.Popen(["ps", "-p", pid, "-o", "command"], stdout=subprocess.PIPE)
p.wait()
output = p.stdout.read()
if "autokey" in output:
logging.debug("AutoKey is already running as pid %s", pid)
bus = dbus.SessionBus()
try:
dbusService = bus.get_object("org.autokey.Service", "/AppService")
dbusService.show_configure(dbus_interface = "org.autokey.Service")
sys.exit(0)
except dbus.DBusException, e:
logging.exception("Error communicating with Dbus service")
self.show_error_dialog(_("AutoKey is already running as pid %s but is not responding") % pid, str(e))
sys.exit(1)
return True
def main(self):
Gtk.main()
def initialise(self, configure):
logging.info("Initialising application")
self.monitor = monitor.FileMonitor(self)
self.configManager = get_config_manager(self)
self.service = service.Service(self)
self.serviceDisabled = False
# Initialise user code dir
if self.configManager.userCodeDir is not None:
sys.path.append(self.configManager.userCodeDir)
try:
self.service.start()
except Exception, e:
logging.exception("Error starting interface: " + str(e))
self.serviceDisabled = True
self.show_error_dialog(_("Error starting interface. Keyboard monitoring will be disabled.\n" +
"Check your system/configuration."), str(e))
self.notifier = get_notifier(self)
self.configWindow = None
self.monitor.start()
dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
self.dbusService = common.AppService(self)
if configure: self.show_configure()
def init_global_hotkeys(self, configManager):
logging.info("Initialise global hotkeys")
configManager.toggleServiceHotkey.set_closure(self.toggle_service)
configManager.configHotkey.set_closure(self.show_configure_async)
def config_altered(self, persistGlobal):
self.configManager.config_altered(persistGlobal)
self.notifier.rebuild_menu()
def hotkey_created(self, item):
logging.debug("Created hotkey: %r %s", item.modifiers, item.hotKey)
self.service.mediator.interface.grab_hotkey(item)
def hotkey_removed(self, item):
logging.debug("Removed hotkey: %r %s", item.modifiers, item.hotKey)
self.service.mediator.interface.ungrab_hotkey(item)
def path_created_or_modified(self, path):
time.sleep(0.5)
changed = self.configManager.path_created_or_modified(path)
if changed and self.configWindow is not None:
self.configWindow.config_modified()
def path_removed(self, path):
time.sleep(0.5)
changed = self.configManager.path_removed(path)
if changed and self.configWindow is not None:
self.configWindow.config_modified()
def unpause_service(self):
"""
Unpause the expansion service (start responding to keyboard and mouse events).
"""
self.service.unpause()
self.notifier.update_tool_tip()
def pause_service(self):
"""
Pause the expansion service (stop responding to keyboard and mouse events).
"""
self.service.pause()
self.notifier.update_tool_tip()
def toggle_service(self):
"""
Convenience method for toggling the expansion service on or off.
"""
if self.service.is_running():
self.pause_service()
else:
self.unpause_service()
def shutdown(self):
"""
Shut down the entire application.
"""
if self.configWindow is not None:
if self.configWindow.promptToSave():
return
self.configWindow.hide()
self.notifier.hide_icon()
t = threading.Thread(target=self.__completeShutdown)
t.start()
def __completeShutdown(self):
logging.info("Shutting down")
self.service.shutdown()
self.monitor.stop()
Gdk.threads_enter()
Gtk.main_quit()
Gdk.threads_leave()
os.remove(LOCK_FILE)
logging.debug("All shutdown tasks complete... quitting")
def notify_error(self, message):
"""
Show an error notification popup.
@param message: Message to show in the popup
"""
self.notifier.notify_error(message)
def update_notifier_visibility(self):
self.notifier.update_visible_status()
def show_configure(self):
"""
Show the configuration window, or deiconify (un-minimise) it if it's already open.
"""
logging.info("Displaying configuration window")
if self.configWindow is None:
self.configWindow = ConfigWindow(self)
self.configWindow.show()
else:
self.configWindow.deiconify()
def show_configure_async(self):
Gdk.threads_enter()
self.show_configure()
Gdk.threads_leave()
def main(self):
logging.info("Entering main()")
Gdk.threads_enter()
Gtk.main()
Gdk.threads_leave()
def show_error_dialog(self, message, details=None):
"""
Convenience method for showing an error dialog.
"""
dlg = Gtk.MessageDialog(type=Gtk.MessageType.ERROR, buttons=Gtk.ButtonsType.OK,
message_format=message)
if details is not None:
dlg.format_secondary_text(details)
dlg.run()
dlg.destroy()
def show_script_error(self, parent):
"""
Show the last script error (if any)
"""
if self.service.scriptRunner.error != '':
dlg = Gtk.MessageDialog(type=Gtk.MessageType.INFO, buttons=Gtk.ButtonsType.OK,
message_format=self.service.scriptRunner.error)
self.service.scriptRunner.error = ''
else:
dlg = Gtk.MessageDialog(type=Gtk.MessageType.INFO, buttons=Gtk.ButtonsType.OK,
message_format=_("No error information available"))
dlg.set_title(_("View script error"))
dlg.set_transient_for(parent)
dlg.run()
dlg.destroy()
def show_popup_menu(self, folders=[], items=[], onDesktop=True, title=None):
self.menu = PopupMenu(self.service, folders, items, onDesktop, title)
self.menu.show_on_desktop()
def hide_menu(self):
self.menu.remove_from_desktop()
| gpl-3.0 |
LAMBDA-HYPERON/fluids-movie | make_movie.py | 1 | 1245 |
#!/usr/bin/env python
import sys
import argparse
import numpy as np
import netCDF4 as nc
import matplotlib.pyplot as plt
from matplotlib import animation
"""
This script makes a fluid animation using NetCDF data.
"""
def main ():
# Setup arguments.
parser = argparse.ArgumentParser()
parser.add_argument('input_file',help="Input file containing our data")
parser.add_argument('field_name',help="Data field to animate")
args = parser.parse_args()
# Open netCDF file (arg.input_file).
f = nc.Dataset(args.input_file)
vorticity = f.variables['vorticity_z']
# vorticity dimension: Time, st_ocean (pressure), yt_ocean (latitude), xt_ocean (longitude)
vorticity = vorticity[:]
fig = plt.figure()
images = []
# Generate the plot at pressure level = 0
for t in range(0,vorticity.shape[0]):
#plt.imshow(vorticity[t,0,:,:])
img = plt.imshow(vorticity[t,0,:,:])
images.append([img])
# To show plot immediately, use:
# plt.show()
# Save the plot as an image file
#plt.savefig('vorticity'+str(t).zfill(3)+'.png')
#plt.close()
ani = animation.ArtistAnimation(fig,images,interval=20)
plt.show()
# Close the netcdf file.
f.close()
print "Completed..."
return 0  # sys.exit(True) would report failure (exit status 1); return 0 on success
if __name__=="__main__":
sys.exit(main())
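# --- Illustrative addition, not part of the original script ---
# Instead of (or in addition to) plt.show(), the animation built in main()
# can be written to disk with matplotlib's save(); this assumes an ffmpeg
# binary is available on the system.
#
# ani.save('vorticity.mp4', writer='ffmpeg', fps=25)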
| apache-2.0 |
CumulusNetworks/ansible-modules-extras | packaging/os/zypper_repository.py | 73 | 9155 |
#!/usr/bin/python
# encoding: utf-8
# (c) 2013, Matthias Vogelgesang <[email protected]>
# (c) 2014, Justin Lecher <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: zypper_repository
author: "Matthias Vogelgesang (@matze)"
version_added: "1.4"
short_description: Add and remove Zypper repositories
description:
- Add or remove Zypper repositories on SUSE and openSUSE
options:
name:
required: false
default: none
description:
- A name for the repository. Not required when adding repofiles.
repo:
required: false
default: none
description:
- URI of the repository or .repo file. Required when state=present.
state:
required: false
choices: [ "absent", "present" ]
default: "present"
description:
- Whether the repository should be present or absent.
description:
required: false
default: none
description:
- A description of the repository
disable_gpg_check:
description:
- Whether to disable GPG signature checking of
all packages. Has an effect only if state is
I(present).
required: false
default: "no"
choices: [ "yes", "no" ]
aliases: []
refresh:
description:
- Enable autorefresh of the repository.
required: false
default: "yes"
choices: [ "yes", "no" ]
aliases: []
notes: []
requirements: [ zypper ]
'''
EXAMPLES = '''
# Add NVIDIA repository for graphics drivers
- zypper_repository: name=nvidia-repo repo='ftp://download.nvidia.com/opensuse/12.2' state=present
# Remove NVIDIA repository
- zypper_repository: name=nvidia-repo repo='ftp://download.nvidia.com/opensuse/12.2' state=absent
# Add python development repository
- zypper_repository: repo=http://download.opensuse.org/repositories/devel:/languages:/python/SLE_11_SP3/devel:languages:python.repo
'''
REPO_OPTS = ['alias', 'name', 'priority', 'enabled', 'autorefresh', 'gpgcheck']
def zypper_version(module):
"""Return (rc, message) tuple"""
cmd = ['/usr/bin/zypper', '-V']
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return rc, stdout
else:
return rc, stderr
def _parse_repos(module):
"""parses the output of zypper -x lr and returns a parse repo dictionary"""
cmd = ['/usr/bin/zypper', '-x', 'lr']
from xml.dom.minidom import parseString as parseXML
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
repos = []
dom = parseXML(stdout)
repo_list = dom.getElementsByTagName('repo')
for repo in repo_list:
opts = {}
for o in REPO_OPTS:
opts[o] = repo.getAttribute(o)
opts['url'] = repo.getElementsByTagName('url')[0].firstChild.data
# A repo can be uniquely identified by an alias + url
repos.append(opts)
return repos
# exit code 6 is ZYPPER_EXIT_NO_REPOS (no repositories defined)
elif rc == 6:
return []
else:
d = { 'zypper_exit_code': rc }
if stderr:
d['stderr'] = stderr
if stdout:
d['stdout'] = stdout
module.fail_json(msg='Failed to execute "%s"' % " ".join(cmd), **d)
def _parse_repos_old(module):
"""parses the output of zypper sl and returns a parse repo dictionary"""
cmd = ['/usr/bin/zypper', 'sl']
repos = []
rc, stdout, stderr = module.run_command(cmd, check_rc=True)
for line in stdout.split('\n'):
matched = re.search(r'\d+\s+\|\s+(?P<enabled>\w+)\s+\|\s+(?P<autorefresh>\w+)\s+\|\s+(?P<type>\w+)\s+\|\s+(?P<name>\w+)\s+\|\s+(?P<url>.*)', line)
if matched == None:
continue
m = matched.groupdict()
m['alias']= m['name']
m['priority'] = 100
m['gpgcheck'] = 1
repos.append(m)
return repos
def repo_exists(module, old_zypper, **kwargs):
def repo_subset(realrepo, repocmp):
for k in repocmp:
if k not in realrepo:
return False
for k, v in realrepo.items():
if k in repocmp:
if v.rstrip("/") != repocmp[k].rstrip("/"):
return False
return True
if old_zypper:
repos = _parse_repos_old(module)
else:
repos = _parse_repos(module)
for repo in repos:
if repo_subset(repo, kwargs):
return True
return False
def add_repo(module, repo, alias, description, disable_gpg_check, old_zypper, refresh):
if old_zypper:
cmd = ['/usr/bin/zypper', 'sa']
else:
cmd = ['/usr/bin/zypper', 'ar', '--check']
if repo.startswith("file:/") and old_zypper:
cmd.extend(['-t', 'Plaindir'])
else:
cmd.extend(['-t', 'plaindir'])
if description:
cmd.extend(['--name', description])
if disable_gpg_check and not old_zypper:
cmd.append('--no-gpgcheck')
if refresh:
cmd.append('--refresh')
cmd.append(repo)
if not repo.endswith('.repo'):
cmd.append(alias)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
changed = rc == 0
if rc == 0:
changed = True
elif 'already exists. Please use another alias' in stderr:
changed = False
else:
#module.fail_json(msg=stderr if stderr else stdout)
if stderr:
module.fail_json(msg=stderr)
else:
module.fail_json(msg=stdout)
return changed
def remove_repo(module, repo, alias, old_zypper):
if old_zypper:
cmd = ['/usr/bin/zypper', 'sd']
else:
cmd = ['/usr/bin/zypper', 'rr']
if alias:
cmd.append(alias)
else:
cmd.append(repo)
rc, stdout, stderr = module.run_command(cmd, check_rc=True)
changed = rc == 0
return changed
def fail_if_rc_is_null(module, rc, stdout, stderr):
if rc != 0:
#module.fail_json(msg=stderr if stderr else stdout)
if stderr:
module.fail_json(msg=stderr)
else:
module.fail_json(msg=stdout)
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=False),
repo=dict(required=False),
state=dict(choices=['present', 'absent'], default='present'),
description=dict(required=False),
disable_gpg_check = dict(required=False, default='no', type='bool'),
refresh = dict(required=False, default='yes', type='bool'),
),
supports_check_mode=False,
)
repo = module.params['repo']
state = module.params['state']
name = module.params['name']
description = module.params['description']
disable_gpg_check = module.params['disable_gpg_check']
refresh = module.params['refresh']
def exit_unchanged():
module.exit_json(changed=False, repo=repo, state=state, name=name)
rc, out = zypper_version(module)
match = re.match(r'zypper\s+(\d+)\.(\d+)\.(\d+)', out)
if not match or int(match.group(1)) > 0:
old_zypper = False
else:
old_zypper = True
# Check run-time module parameters
if state == 'present' and not repo:
module.fail_json(msg='Module option state=present requires repo')
if state == 'absent' and not repo and not name:
module.fail_json(msg='Alias or repo parameter required when state=absent')
if repo and repo.endswith('.repo'):
if name:
module.fail_json(msg='Incompatible option: \'name\'. Do not use name when adding repo files')
else:
if not name and state == "present":
module.fail_json(msg='Name required when adding non-repo files:')
if repo and repo.endswith('.repo'):
exists = repo_exists(module, old_zypper, url=repo, alias=name)
elif repo:
exists = repo_exists(module, old_zypper, url=repo)
else:
exists = repo_exists(module, old_zypper, alias=name)
if state == 'present':
if exists:
exit_unchanged()
changed = add_repo(module, repo, name, description, disable_gpg_check, old_zypper, refresh)
elif state == 'absent':
if not exists:
exit_unchanged()
changed = remove_repo(module, repo, name, old_zypper)
module.exit_json(changed=changed, repo=repo, state=state)
# import module snippets
from ansible.module_utils.basic import *
main()
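# --- Illustrative note, not part of the upstream module ---
# repo_exists() above treats a repository as already present when every key
# supplied by the caller matches the stored repo, ignoring trailing slashes
# on values. Standalone illustration with made-up data:
#   stored = {'alias': 'nvidia-repo', 'url': 'ftp://download.nvidia.com/opensuse/12.2/'}
#   query  = {'url': 'ftp://download.nvidia.com/opensuse/12.2'}
#   -> considered a match, so the module exits unchanged for state=present.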
| gpl-3.0 |
godLoveLucifer/viewfinder | backend/op/create_prospective_op.py | 13 | 14304 |
# -*- coding: utf-8 -*-
# Copyright 2013 Viewfinder Inc. All Rights Reserved.
"""Viewfinder CreateProspectiveOperation.
This operation creates a prospective (un-registered) user, along with the new user's identity,
default viewpoint, initial account settings, etc.
"""
__authors__ = ['[email protected] (Andy Kimball)']
import json
from tornado import gen, options
from viewfinder.backend.base import util
from viewfinder.backend.db.accounting import AccountingAccumulator
from viewfinder.backend.db.activity import Activity
from viewfinder.backend.db.comment import Comment
from viewfinder.backend.db.analytics import Analytics
from viewfinder.backend.db.device import Device
from viewfinder.backend.db.episode import Episode
from viewfinder.backend.db.lock import Lock
from viewfinder.backend.db.operation import Operation
from viewfinder.backend.db.post import Post
from viewfinder.backend.db.user import User
from viewfinder.backend.db.viewpoint import Viewpoint
from viewfinder.backend.op.viewfinder_op import ViewfinderOperation
from viewfinder.backend.www import system_users
class CreateProspectiveOperation(ViewfinderOperation):
"""The CreateProspective operation expects the caller to allocate the new user's id and
web device id. The caller is also responsible for ensuring that the user does not yet
exist.
"""
_ASSET_ID_COUNT = 24
"""Number of asset ids that will be allocated for the welcome conversation."""
_UPDATE_SEQ_COUNT = 13
"""Number of viewpoint updates that will be made for the welcome conversation."""
def __init__(self, client, new_user_id, webapp_dev_id, identity_key, reason=None):
super(CreateProspectiveOperation, self).__init__(client)
self._new_user_id = new_user_id
self._webapp_dev_id = webapp_dev_id
self._identity_key = identity_key
self._reason = reason
@classmethod
@gen.coroutine
def Execute(cls, client, user_id, webapp_dev_id, identity_key, reason=None):
"""Entry point called by the operation framework."""
yield CreateProspectiveOperation(client, user_id, webapp_dev_id, identity_key, reason=reason)._CreateProspective()
@gen.coroutine
def _CreateProspective(self):
"""Create the prospective user and identity."""
self._new_user, _ = yield User.CreateProspective(self._client,
self._new_user_id,
self._webapp_dev_id,
self._identity_key,
self._op.timestamp)
# If system user is defined, then create the welcome conversation.
# For now, add a check to ensure the welcome conversation is not created in production.
if system_users.NARRATOR_USER is not None:
# Checkpoint the allocated asset id range used to create the welcome conversation.
if self._op.checkpoint is None:
# NOTE: Asset ids are allocated from the new user's ids. This is different than the
# usual practice of allocating from the sharer's ids.
self._unique_id_start = yield gen.Task(User.AllocateAssetIds,
self._client,
self._new_user_id,
CreateProspectiveOperation._ASSET_ID_COUNT)
checkpoint = {'id': self._unique_id_start}
yield self._op.SetCheckpoint(self._client, checkpoint)
else:
self._unique_id_start = self._op.checkpoint['id']
yield self._CreateWelcomeConversation()
# Add an analytics entry for this user.
analytics = Analytics.Create(entity='us:%d' % self._new_user_id,
type=Analytics.USER_CREATE_PROSPECTIVE,
timestamp=self._op.timestamp,
payload=self._reason)
yield gen.Task(analytics.Update, self._client)
yield Operation.TriggerFailpoint(self._client)
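  # Illustrative addition (not part of the original file): the checkpoint logic above can be
  # read as a small reusable helper. The method name below is an assumption; nothing in the
  # original operation calls it.
  @gen.coroutine
  def _AllocateAssetIdsOnce(self, count):
    """Allocate `count` asset ids exactly once, even if the operation is retried."""
    if self._op.checkpoint is None:
      start = yield gen.Task(User.AllocateAssetIds, self._client, self._new_user_id, count)
      yield self._op.SetCheckpoint(self._client, {'id': start})
    else:
      start = self._op.checkpoint['id']
    raise gen.Return(start)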
@gen.coroutine
def _CreateWelcomeConversation(self):
"""Creates the welcome conversation at the db level. Operations are not used in order
to avoid creating notifications, sending alerts, taking locks, running nested operations,
etc.
"""
from viewfinder.backend.www.system_users import NARRATOR_USER
from viewfinder.backend.www.system_users import NARRATOR_UPLOAD_PHOTOS, NARRATOR_UPLOAD_PHOTOS_2, NARRATOR_UPLOAD_PHOTOS_3
# Accumulate accounting changes.
self._acc_accum = AccountingAccumulator()
self._unique_id = self._unique_id_start
self._update_seq = 1
# Create the viewpoint.
self._viewpoint_id = Viewpoint.ConstructViewpointId(self._new_user.webapp_dev_id, self._unique_id)
self._unique_id += 1
initial_follower_ids = [self._new_user.user_id]
viewpoint, followers = yield Viewpoint.CreateNewWithFollowers(self._client,
follower_ids=initial_follower_ids,
user_id=NARRATOR_USER.user_id,
viewpoint_id=self._viewpoint_id,
type=Viewpoint.SYSTEM,
title='Welcome...',
timestamp=self._op.timestamp)
# Narrator creates and introduces the conversation.
yield self._CreateActivity(NARRATOR_USER,
self._op.timestamp - 60,
Activity.CreateShareNew,
ep_dicts=[],
follower_ids=initial_follower_ids)
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 60,
'Welcome to Viewfinder, a new way to privately share photos with your friends.')
# Narrator shares photos.
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 59,
'Select as many photos as you want to share with exactly who you want.')
photo_ids = [ph_dict['photo_id'] for ph_dict in NARRATOR_UPLOAD_PHOTOS['photos']]
episode = yield self._CreateEpisodeWithPosts(NARRATOR_USER,
NARRATOR_UPLOAD_PHOTOS['episode']['episode_id'],
NARRATOR_UPLOAD_PHOTOS['photos'])
yield self._CreateActivity(NARRATOR_USER,
self._op.timestamp - 58,
Activity.CreateShareExisting,
ep_dicts=[{'new_episode_id': episode.episode_id, 'photo_ids': photo_ids}])
# Set cover photo on viewpoint now that episode id is known.
viewpoint.cover_photo = {'episode_id': episode.episode_id,
'photo_id': NARRATOR_UPLOAD_PHOTOS['photos'][0]['photo_id']}
yield gen.Task(viewpoint.Update, self._client)
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 56,
'Your friends can also add photos to the conversation, '
'creating unique collaborative albums.')
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 55,
'You can add as many photos, messages and friends as you want to the conversation, '
'leading to a memorable shared experience.')
# Narrator shares more photos.
photo_ids = [ph_dict['photo_id'] for ph_dict in NARRATOR_UPLOAD_PHOTOS_2['photos']]
episode = yield self._CreateEpisodeWithPosts(NARRATOR_USER,
NARRATOR_UPLOAD_PHOTOS_2['episode']['episode_id'],
NARRATOR_UPLOAD_PHOTOS_2['photos'])
yield self._CreateActivity(NARRATOR_USER,
self._op.timestamp - 54,
Activity.CreateShareExisting,
ep_dicts=[{'new_episode_id': episode.episode_id, 'photo_ids': photo_ids}])
# Single-photo comment.
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 53,
'Hold and press on photos to comment on specific pics.',
asset_id=NARRATOR_UPLOAD_PHOTOS_2['photos'][1]['photo_id'])
# Narrator rambles on for a while.
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 52,
'Use mobile #\'s or email addresses to add new people if they\'re not yet on Viewfinder.')
# Narrator shares more photos.
photo_ids = [ph_dict['photo_id'] for ph_dict in NARRATOR_UPLOAD_PHOTOS_3['photos']]
episode = yield self._CreateEpisodeWithPosts(NARRATOR_USER,
NARRATOR_UPLOAD_PHOTOS_3['episode']['episode_id'],
NARRATOR_UPLOAD_PHOTOS_3['photos'])
yield self._CreateActivity(NARRATOR_USER,
self._op.timestamp - 51,
Activity.CreateShareExisting,
ep_dicts=[{'new_episode_id': episode.episode_id, 'photo_ids': photo_ids}])
# Conclusion.
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 50,
'Viewfinder is perfect for vacations, weddings, or any shared experience where you want '
'to share photos without posting them for everyone to see.')
yield self._PostComment(NARRATOR_USER,
self._op.timestamp - 49,
'Start sharing now.')
# Validate that we allocated enough ids and counted update_seq properly.
assert self._unique_id == self._unique_id_start + CreateProspectiveOperation._ASSET_ID_COUNT, self._unique_id
assert self._update_seq == CreateProspectiveOperation._UPDATE_SEQ_COUNT, self._update_seq
# Set update_seq on the new viewpoint.
viewpoint.update_seq = self._update_seq
yield gen.Task(viewpoint.Update, self._client)
# Remove this viewpoint for all sample users so that accounting will be correct (also in case
# we want to sync a device to Nick's account and see if new users are trying to chat). Also
# update viewed_seq so that entire conversation is "read" for each sample user.
for follower in followers:
if follower.user_id != self._new_user.user_id:
follower.viewed_seq = viewpoint.update_seq
yield follower.RemoveViewpoint(self._client)
# Commit accounting changes.
yield self._acc_accum.Apply(self._client)
@gen.coroutine
def _CreateActivity(self, sharer_user, timestamp, activity_func, **kwargs):
"""Creates an activity by invoking "activity_func" with the given args."""
activity_id = Activity.ConstructActivityId(timestamp, self._new_user.webapp_dev_id, self._unique_id)
self._unique_id += 1
activity = yield activity_func(self._client,
sharer_user.user_id,
self._viewpoint_id,
activity_id,
timestamp,
update_seq=self._update_seq,
**kwargs)
self._update_seq += 1
raise gen.Return(activity)
@gen.coroutine
def _CreateEpisodeWithPosts(self, sharer_user, parent_ep_id, ph_dicts):
"""Creates a new episode containing the given photos."""
# Create the episode.
episode_id = Episode.ConstructEpisodeId(self._op.timestamp, self._new_user.webapp_dev_id, self._unique_id)
self._unique_id += 1
episode = yield gen.Task(Episode.CreateNew,
self._client,
episode_id=episode_id,
parent_ep_id=parent_ep_id,
user_id=sharer_user.user_id,
viewpoint_id=self._viewpoint_id,
publish_timestamp=util.GetCurrentTimestamp(),
timestamp=self._op.timestamp,
location=ph_dicts[0].get('location', None),
placemark=ph_dicts[0].get('placemark', None))
# Create the photos from photo dicts.
photo_ids = [ph_dict['photo_id'] for ph_dict in ph_dicts]
for photo_id in photo_ids:
yield gen.Task(Post.CreateNew, self._client, episode_id=episode_id, photo_id=photo_id)
# Update accounting, but only apply to the new user, since system users will remove
# themselves from the viewpoint.
yield self._acc_accum.SharePhotos(self._client,
sharer_user.user_id,
self._viewpoint_id,
photo_ids,
[self._new_user.user_id])
# Update viewpoint shared by total for the sharing user.
self._acc_accum.GetViewpointSharedBy(self._viewpoint_id, sharer_user.user_id).IncrementFromPhotoDicts(ph_dicts)
raise gen.Return(episode)
@gen.coroutine
def _PostComment(self, sharer_user, timestamp, message, asset_id=None):
"""Creates a new comment and a corresponding activity."""
comment_id = Comment.ConstructCommentId(timestamp, self._new_user.webapp_dev_id, self._unique_id)
self._unique_id += 1
comment = yield Comment.CreateNew(self._client,
viewpoint_id=self._viewpoint_id,
comment_id=comment_id,
user_id=sharer_user.user_id,
asset_id=asset_id,
timestamp=timestamp,
message=message)
# Create post_comment activity.
yield self._CreateActivity(sharer_user, timestamp, Activity.CreatePostComment, cm_dict={'comment_id': comment_id})
raise gen.Return(comment)
|
apache-2.0
|
bosstb/HaberPush
|
youtube_dl/extractor/rtl2.py
|
8
|
3463
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import int_or_none
class RTL2IE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?rtl2\.de/[^?#]*?/(?P<id>[^?#/]*?)(?:$|/(?:$|[?#]))'
_TESTS = [{
'url': 'http://www.rtl2.de/sendung/grip-das-motormagazin/folge/folge-203-0',
'info_dict': {
'id': 'folge-203-0',
'ext': 'f4v',
'title': 'GRIP sucht den Sommerkönig',
'description': 'md5:e3adbb940fd3c6e76fa341b8748b562f'
},
'params': {
# rtmp download
'skip_download': True,
},
}, {
'url': 'http://www.rtl2.de/sendung/koeln-50667/video/5512-anna/21040-anna-erwischt-alex/',
'info_dict': {
'id': '21040-anna-erwischt-alex',
'ext': 'mp4',
'title': 'Anna erwischt Alex!',
'description': 'Anna nimmt ihrem Vater nicht ab, dass er nicht spielt. Und tatsächlich erwischt sie ihn auf frischer Tat.'
},
'params': {
# rtmp download
'skip_download': True,
},
}]
def _real_extract(self, url):
# Some rtl2 urls have no slash at the end, so append it.
if not url.endswith('/'):
url += '/'
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
mobj = re.search(
r'<div[^>]+data-collection="(?P<vico_id>\d+)"[^>]+data-video="(?P<vivi_id>\d+)"',
webpage)
if mobj:
vico_id = mobj.group('vico_id')
vivi_id = mobj.group('vivi_id')
else:
vico_id = self._html_search_regex(
r'vico_id\s*:\s*([0-9]+)', webpage, 'vico_id')
vivi_id = self._html_search_regex(
r'vivi_id\s*:\s*([0-9]+)', webpage, 'vivi_id')
info = self._download_json(
'http://www.rtl2.de/sites/default/modules/rtl2/mediathek/php/get_video_jw.php',
video_id, query={
'vico_id': vico_id,
'vivi_id': vivi_id,
})
video_info = info['video']
title = video_info['titel']
formats = []
rtmp_url = video_info.get('streamurl')
if rtmp_url:
rtmp_url = rtmp_url.replace('\\', '')
stream_url = 'mp4:' + self._html_search_regex(r'/ondemand/(.+)', rtmp_url, 'stream URL')
rtmp_conn = ['S:connect', 'O:1', 'NS:pageUrl:' + url, 'NB:fpad:0', 'NN:videoFunction:1', 'O:0']
formats.append({
'format_id': 'rtmp',
'url': rtmp_url,
'play_path': stream_url,
'player_url': 'http://www.rtl2.de/flashplayer/vipo_player.swf',
'page_url': url,
'flash_version': 'LNX 11,2,202,429',
'rtmp_conn': rtmp_conn,
'no_resume': True,
'preference': 1,
})
m3u8_url = video_info.get('streamurl_hls')
if m3u8_url:
formats.extend(self._extract_akamai_formats(m3u8_url, video_id))
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'thumbnail': video_info.get('image'),
'description': video_info.get('beschreibung'),
'duration': int_or_none(video_info.get('duration')),
'formats': formats,
}
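# Illustrative only (not part of the extractor): extractors like this one are normally run
# through youtube-dl's top-level API rather than instantiated directly, e.g.:
#
#   import youtube_dl
#   with youtube_dl.YoutubeDL({'skip_download': True}) as ydl:
#       info = ydl.extract_info('http://www.rtl2.de/sendung/grip-das-motormagazin/folge/folge-203-0')
#       print(info.get('title'))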
|
mit
|
awagnon/maraschino
|
modules/couchpotato.py
|
3
|
14369
|
from flask import render_template, request, jsonify, json, send_file
from jinja2.filters import FILTERS
from maraschino.tools import get_setting_value, requires_auth
from maraschino import logger, app, WEBROOT
import urllib2
import StringIO
import base64
import re
def couchpotato_http():
if get_setting_value('couchpotato_https') == '1':
return 'https://'
else:
return 'http://'
def couchpotato_url():
port = get_setting_value('couchpotato_port')
url_base = get_setting_value('couchpotato_ip')
webroot = get_setting_value('couchpotato_webroot')
if port:
url_base = '%s:%s' % (url_base, port)
if webroot:
url_base = '%s/%s' % (url_base, webroot)
url = '%s/api/%s' % (url_base, get_setting_value('couchpotato_api'))
return couchpotato_http() + url
def couchpotato_url_no_api():
port = get_setting_value('couchpotato_port')
url_base = get_setting_value('couchpotato_ip')
webroot = get_setting_value('couchpotato_webroot')
if port:
url_base = '%s:%s' % (url_base, port)
if webroot:
url_base = '%s/%s' % (url_base, webroot)
return couchpotato_http() + url_base
def couchpotato_api(method, params=None, use_json=True, dev=False):
username = get_setting_value('couchpotato_user')
password = get_setting_value('couchpotato_password')
if params:
params = '/?%s' % params
else:
params = '/'
params = (params).replace(' ', '%20')
url = '%s/%s%s' % (couchpotato_url(), method, params)
req = urllib2.Request(url)
if username and password:
base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
req.add_header("Authorization", "Basic %s" % base64string)
data = urllib2.urlopen(req).read()
if dev:
print url
print data
if use_json:
data = json.JSONDecoder().decode(data)
return data
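# Illustrative example (not in the original module): calling the CouchPotato API helper above
# directly. The 'status=wanted' query string mirrors how xhr_couchpotato builds its request.
#
#   wanted = couchpotato_api('movie.list', params='status=wanted')
#   for movie in wanted.get('movies', []):
#       print movie.get('title')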
def log_exception(e):
logger.log('CouchPotato :: EXCEPTION -- %s' % e, 'DEBUG')
def couchpotato_image(path):
path_pieces = re.split('\\/', path)
return '%s/xhr/couchpotato/image/%s' % (WEBROOT, path_pieces[-1])
FILTERS['cp_img'] = couchpotato_image
@app.route('/xhr/couchpotato/image/<path:url>')
def couchpotato_proxy(url):
username = get_setting_value('couchpotato_user')
password = get_setting_value('couchpotato_password')
url = '%s/file.cache/%s' % (couchpotato_url(), url)
req = urllib2.Request(url)
if username and password:
base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '')
req.add_header("Authorization", "Basic %s" % base64string)
img = StringIO.StringIO(urllib2.urlopen(req).read())
logger.log('CouchPotato :: Fetching image from %s' % (url), 'DEBUG')
return send_file(img, mimetype='image/jpeg')
@app.route('/xhr/couchpotato/')
@app.route('/xhr/couchpotato/<status>/')
def xhr_couchpotato(status='active'):
profiles = {}
status_string = 'status=%s' % status
template = 'couchpotato.html'
if status != 'active':
template = 'couchpotato/all.html'
try:
logger.log('CouchPotato :: Fetching "%s movies" list' % status, 'INFO')
couchpotato = couchpotato_api('movie.list', params=status_string)
except Exception as e:
log_exception(e)
couchpotato = None
logger.log('CouchPotato :: Fetching "%s movies" list (DONE)' % status, 'INFO')
if status == 'wanted' and not type(couchpotato) is list:
logger.log('CouchPotato :: Wanted movies list is empty', 'INFO')
return cp_search('There are no movies in your wanted list.')
profiles = couchpotato_api('profile.list')
for movie in couchpotato['movies']:
for profile in profiles['list']:
if profile['_id'] == movie['profile_id']:
movie['profile_label'] = profile['label']
return render_template(template,
url=couchpotato_url(),
app_link=couchpotato_url_no_api(),
couchpotato=couchpotato,
profiles=profiles,
compact_view=get_setting_value('couchpotato_compact') == '1',
)
@app.route('/xhr/couchpotato/history/')
def xhr_couchpotato_history():
unread = 0
try:
couchpotato = couchpotato_api('notification.list')
couchpotato = couchpotato['notifications']
for notification in couchpotato:
if not notification['read']:
unread = unread + 1
except Exception as e:
logger.log('CouchPotato :: Could not retrieve Couchpotato - %s' % (e), 'WARNING')
couchpotato = "empty"
return render_template('couchpotato/history.html',
couchpotato=couchpotato,
unread=unread,
)
@app.route('/xhr/couchpotato/search/')
def cp_search(message=None):
couchpotato = {}
params = False
profiles = {}
try:
query = request.args['name']
params = 'q=' + query
except:
pass
if params:
try:
logger.log('CouchPotato :: Searching for movie: %s' % (query), 'INFO')
couchpotato = couchpotato_api('movie.search', params=params)
amount = len(couchpotato['movies'])
logger.log('CouchPotato :: found %i movies for %s' % (amount, query), 'INFO')
if couchpotato['success'] and amount != 0:
couchpotato = couchpotato['movies']
try:
# logger.log('CouchPotato :: Getting quality profiles', 'INFO')
profiles = couchpotato_api('profile.list')
except Exception as e:
log_exception(e)
else:
return render_template('couchpotato/search.html', error='No movies with "%s" were found' % (query), couchpotato='results')
except Exception as e:
log_exception(e)
couchpotato = None
else:
logger.log('CouchPotato :: Loading search template', 'DEBUG')
couchpotato = None
return render_template('couchpotato/search.html',
data=couchpotato,
couchpotato='results',
profiles=profiles,
error=message
)
@app.route('/xhr/couchpotato/add_movie/<imdbid>/<title>/')
@app.route('/xhr/couchpotato/add_movie/<imdbid>/<title>/<profile>/')
def add_movie(imdbid, title, profile=False):
if profile:
params = 'identifier=%s&title=%s&profile_id=%s' % (imdbid, title, profile)
else:
params = 'identifier=%s&title=%s' % (imdbid, title)
try:
logger.log('CouchPotato :: Adding %s (%s) to wanted list' % (title, imdbid), 'INFO')
result = couchpotato_api('movie.add', params)
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/restart/')
@requires_auth
def cp_restart():
try:
logger.log('CouchPotato :: Restarting', 'INFO')
result = couchpotato_api('app.restart', use_json=False)
if 'restarting' in result:
return jsonify({'success': True})
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/available/')
@requires_auth
def cp_available():
try:
logger.log('CouchPotato :: Checking if CouchPotato is available', 'INFO')
result = couchpotato_api('app.available')
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/shutdown/')
@requires_auth
def cp_shutdown():
try:
logger.log('CouchPotato :: Shutting down', 'INFO')
result = couchpotato_api('app.shutdown', use_json=False)
if 'shutdown' in result:
return jsonify({'success': True})
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/version/')
@requires_auth
def cp_version():
try:
result = couchpotato_api('app.version')
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/profiles/')
@requires_auth
def cp_profiles():
try:
logger.log('CouchPotato :: Getting profiles', 'INFO')
result = couchpotato_api('profile.list')
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/quality/')
@requires_auth
def cp_quality():
try:
logger.log('CouchPotato :: Getting quality', 'INFO')
result = couchpotato_api('quality.list')
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/update/check/')
@requires_auth
def cp_update_check():
try:
logger.log('CouchPotato :: Getting update', 'INFO')
result = couchpotato_api('updater.check')
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/delete_movie/<id>/')
@requires_auth
def movie_delete(id):
"""
Delete a movie from list
----- Params -----
id int (comma separated) Movie ID(s) you want to delete.
delete_from string: all (default), wanted, manage Delete movie from this page
"""
try:
logger.log('CouchPotato :: Deleting movie %s' % id, 'INFO')
result = couchpotato_api('movie.delete', 'id=%s' % id)
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/refresh_movie/<id>/')
def movie_refresh(id):
"""
Refresh a movie from list
----- Params -----
id int (comma separated) Movie ID(s) you want to refresh.
"""
try:
logger.log('CouchPotato :: Refreshing movie %s' % id, 'INFO')
result = couchpotato_api('movie.refresh', 'id=%s' % id)
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/settings/')
def cp_settings():
"""
Retrieve settings from CP
"""
try:
logger.log('CouchPotato :: Retrieving settings', 'INFO')
result = couchpotato_api('settings')
logger.log('CouchPotato :: Retrieving settings (DONE)', 'INFO')
return render_template('couchpotato/settings.html',
couchpotato=result,
)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/get_movie/<id>/')
def cp_get_movie(id):
"""
Retrieve movie from CP
---- Params -----
id int (comma separated) The id of the movie
"""
try:
logger.log('CouchPotato :: Retrieving movie info', 'INFO')
result = couchpotato_api('media.get', 'id=%s' % id)
profiles = {}
try:
logger.log('CouchPotato :: Getting quality profiles', 'INFO')
profiles = couchpotato_api('profile.list')
except Exception as e:
log_exception(e)
logger.log('CouchPotato :: Retrieving movie info (DONE)', 'INFO')
return render_template('couchpotato/info.html',
couchpotato=result,
profiles=profiles,
)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/edit_movie/<movieid>/<profileid>/')
def cp_edit_movie(movieid, profileid):
"""
Edit movie in CP
---- Params -----
movieid int (comma separated) The id of the movie
profileid int Id of the profile to go to
"""
try:
logger.log('CouchPotato :: Retrieving movie info', 'INFO')
result = couchpotato_api('movie.edit', 'id=%s&profile_id=%s' % (movieid, profileid))
if result['success']:
logger.log('CouchPotato :: Retrieving movie info (DONE)', 'INFO')
return jsonify({'success': True})
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/log/')
@app.route('/xhr/couchpotato/log/<type>/<lines>/')
def cp_log(type='all', lines=30):
"""
Edit movie in CP
---- Params -----
type <optional> all, error, info, debug Type of log
lines <optional> int Number of lines - last to first
"""
try:
logger.log('CouchPotato :: Retrieving "%s" log' % type, 'INFO')
result = couchpotato_api('logging.partial', 'type=%s&lines=%s' % (type, lines))
if result['success']:
logger.log('CouchPotato :: Retrieving "%s" log (DONE)' % type, 'INFO')
return render_template('couchpotato/log.html',
couchpotato=result,
level=type,
)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/notification/read/')
@app.route('/xhr/couchpotato/notification/read/<int:id>/')
def cp_notification_read(id=False):
"""
Mark notification as read in CP
---- Params -----
ids <optional> int Notification id - if empty will mark all notifications
"""
try:
logger.log('CouchPotato :: Marking notification "%i" as read' % id, 'INFO')
if id:
couchpotato_api('notification.markread', 'ids=%i' % id)
else:
couchpotato_api('notification.markread')
return jsonify({'success': True})
except Exception as e:
log_exception(e)
return jsonify({'success': False})
@app.route('/xhr/couchpotato/release/<action>/<id>/')
@requires_auth
def release_action(action, id):
if id.isdigit():
id = int(id)
try:
logger.log('CouchPotato :: %sing release %s' % (action.title()[:-1], id), 'INFO')
result = couchpotato_api('release.%s' % action, 'id=%s' % id)
return jsonify(result)
except Exception as e:
log_exception(e)
return jsonify({'success': False})
|
mit
|
T3CHNOLOG1C/Plaidohlect
|
MusicBot/musicbot/exceptions.py
|
14
|
2599
|
import shutil
import textwrap
# Base class for exceptions
class MusicbotException(Exception):
def __init__(self, message, *, expire_in=0):
self._message = message
self.expire_in = expire_in
@property
def message(self):
return self._message
@property
def message_no_format(self):
return self._message
# Something went wrong during the processing of a command
class CommandError(MusicbotException):
pass
# Something went wrong during the processing of a song/ytdl stuff
class ExtractionError(MusicbotException):
pass
# Entry-type detection failed: a non-playlist entry turned out to be a playlist, or vice versa
class WrongEntryTypeError(ExtractionError):
def __init__(self, message, is_playlist, use_url):
super().__init__(message)
self.is_playlist = is_playlist
self.use_url = use_url
# The user doesn't have permission to use a command
class PermissionsError(CommandError):
@property
def message(self):
return "You don't have permission to use that command.\nReason: " + self._message
# Error with pretty formatting for hand-holding users through various errors
class HelpfulError(MusicbotException):
def __init__(self, issue, solution, *, preface="An error has occurred:\n", expire_in=0):
self.issue = issue
self.solution = solution
self.preface = preface
self.expire_in = expire_in
@property
def message(self):
return ("\n{}\n{}\n{}\n").format(
self.preface,
self._pretty_wrap(self.issue, " Problem: "),
self._pretty_wrap(self.solution, " Solution: "))
@property
def message_no_format(self):
return "\n{}\n{}\n{}\n".format(
self.preface,
self._pretty_wrap(self.issue, " Problem: ", width=None),
self._pretty_wrap(self.solution, " Solution: ", width=None))
@staticmethod
def _pretty_wrap(text, pretext, *, width=-1):
if width is None:
return pretext + text
elif width == -1:
width = shutil.get_terminal_size().columns
l1, *lx = textwrap.wrap(text, width=width - 1 - len(pretext))
lx = [((' ' * len(pretext)) + l).rstrip().ljust(width) for l in lx]
l1 = (pretext + l1).ljust(width)
return ''.join([l1, *lx])
class HelpfulWarning(HelpfulError):
pass
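# Illustrative usage (not part of the original module): raising and formatting a HelpfulError.
# The issue/solution text here is made up.
#
#   try:
#       raise HelpfulError(
#           issue="Could not find the options file.",
#           solution="Copy config/example_options.ini to config/options.ini and edit it.")
#   except HelpfulError as e:
#       print(e.message)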
# Base class for control signals
class Signal(Exception):
pass
# signal to restart the bot
class RestartSignal(Signal):
pass
# signal to end the bot "gracefully"
class TerminateSignal(Signal):
pass
|
apache-2.0
|
dboonz/polymode
|
Polymode/difflounge/line_sparse.py
|
5
|
3516
|
#
# Lined Sparse Class
#
from numpy import *
class Line(object):
def __init__(self, value=None, offset=None, offsetr=None):
self.value = asarray(value)
self.shape = self.value.shape
self.ndim = self.value.ndim
if offset is None:
offset = self.shape[0]//2
if offsetr is not None:
offset = self.shape[0]-offsetr
self.offset = offset
def __str__(self):
return "<Line: %s @ %i>" % (self.value, self.offset)
def matvec(self, x):
return dot(x.T,self.value).T
def rmatvec(self, x):
s = (slice(None),)*self.value.ndim + (newaxis,)*x.ndim
return self.value[s]*x
class LineMulti(Line):
"Line object for multiple dimensions"
def matvec(self, x):
return sum(x*self.value, axis=0)
def rmatvec(self, x):
return self.value*x
class DefaultDict(dict):
"Dictionary with default (fallback) value"
def __init__(self):
self.default = None
dict.__init__(self)
def __getitem__(self, ii):
if self.has_key(ii):
return dict.__getitem__(self,ii)
else:
return self.default
class LineSparse(object):
def __init__(self, shape=(0,0), dtype=complex_):
self.lines = DefaultDict()
self.default = None
self.dtype = dtype
self.shape = shape
self.line_range = 0, shape[0]
def set_size(self,N):
self.shape = (N,N)
def mirror(self, x, ii=0):
l = self.lines[ii]
if l.ndim==1:
mv = (x.T*l.value).T
else:
mv = x*l.value
return mv
def matvec(self, x, axis=0):
"Multiply vector x by implicit matrix"
mv = zeros(x.shape, self.dtype)
for ii in range(*self.line_range):
l = self.lines[ii]
if l is not None:
mv[ii] = l.matvec(x[ii-l.offset:ii-l.offset+l.shape[0]])
return mv
def rmatvec(self, x):
"Multiply vector x by adjoint of implicit matrix"
mv = zeros(x.shape, self.dtype)
for ii in range(*self.line_range):
l = self.lines[ii]
if l is not None:
xslice = slice(ii-l.offset,ii-l.offset+l.shape[0])
mv[xslice] += l.rmatvec(x[ii])
return mv
def toarray(self):
A = zeros(self.shape, dtype=self.dtype)
for ii in range(self.shape[0]):
l = self.lines[ii]
if l is not None:
if l.value.ndim==1:
A[ii,ii-l.offset:ii-l.offset+l.shape[0]] = l.value
else:
A[ii,ii-l.offset:ii-l.offset+l.shape[0]] = l.value[:,0]
return A
if __name__=="__main__":
from mathlink import timer
L = DefaultDict()
L.default = Line([1,-2,1])
L[0] = Line([-2,1,3,4], 0)
L[9] = Line([1,-2], 1)
#Run
N = 10
AS = LineSparse((N,N))
AS.lines.default = Line([1,-2,1])
AS.lines[0] = Line([1,1,-2,1], 0)
AS.lines[N-1] = Line([1,-2], 1)
A = AS.toarray()
x = random.random(N)
y = AS.matvec(x)
print "matvec error:", abs(y-dot(A,x)).max()
yr = AS.rmatvec(x)
print "matvec error:", abs(yr-dot(A.T,x)).max()
import timer
#Time it
tick = timer.timer()
tick.start('matvec')
for ii in range(100):
y = AS.matvec(x)
tick.stop('matvec')
tick.start('rmatvec')
for ii in range(100):
y = AS.rmatvec(x)
tick.stop('rmatvec')
print tick.report()
#Try multidimensional arrays
x = random.random((N,2,5))
coeff = zeros((4,2,5))
coeff.T[:] = [1,1,-2,1]
coeff[:,0,0] = [4,4,-1,1]
N = 10
AS = LineSparse((N,N))
AS.lines.default = Line([1,-2,1])
AS.lines[0] = LineMulti(coeff, 0)
AS.lines[N-1] = Line([1,-2], 1)
y2 = AS.matvec(x)
y2r = AS.rmatvec(x)
for K in ndindex(x.shape[1:]):
yd = dot(A, x[(slice(None),)+K])
print K, abs(yd-y2[(slice(None),)+K]).max()
for K in ndindex(x.shape[1:]):
yrd = dot(A.T, x[(slice(None),)+K])
print K, abs(yrd-y2r[(slice(None),)+K]).max()
|
gpl-3.0
|
chinmaygarde/mojo
|
tools/idl_parser/idl_node.py
|
125
|
5444
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
#
# IDL Node
#
# IDL Node defines the IDLAttribute and IDLNode objects which are constructed
# by the parser as it processes the various 'productions'. The IDLAttribute
# objects are assigned to the IDLNode's property dictionary instead of being
# applied as children of the IDLNodes, so they do not exist in the final tree.
# The AST of IDLNodes is the output from the parsing stage and will be used
# as the source data by the various generators.
#
#
# CopyToList
#
# Takes an input item, list, or None, and returns a new list of that set.
def CopyToList(item):
# If the item is 'Empty' make it an empty list
if not item:
item = []
# If the item is not a list
if type(item) is not type([]):
item = [item]
# Make a copy we can modify
return list(item)
# IDLSearch
#
# A base class used to traverse the tree of IDLNodes.  Subclasses override
# Enter and Exit to visit each node; 'depth' tracks the current nesting level.
#
class IDLSearch(object):
def __init__(self):
self.depth = 0
def Enter(self, node):
pass
def Exit(self, node):
pass
# IDLAttribute
#
# A temporary object used by the parsing process to hold an Extended Attribute
# which will be passed as a child to a standard IDLNode.
#
class IDLAttribute(object):
def __init__(self, name, value):
self._cls = 'Property'
self.name = name
self.value = value
def __str__(self):
return '%s=%s' % (self.name, self.value)
def GetClass(self):
return self._cls
#
# IDLNode
#
# This class implements the AST tree, providing the associations between
# parents and children.  It also contains a property dictionary to allow
# for look-ups.
#
class IDLNode(object):
def __init__(self, cls, filename, lineno, pos, children=None):
self._cls = cls
self._properties = {
'ERRORS' : [],
'WARNINGS': [],
'FILENAME': filename,
'LINENO' : lineno,
'POSSITION' : pos,
}
self._children = []
self._parent = None
self.AddChildren(children)
#
#
#
# Return a string representation of this node
def __str__(self):
name = self.GetProperty('NAME','')
return '%s(%s)' % (self._cls, name)
def GetLogLine(self, msg):
filename, lineno = self.GetFileAndLine()
return '%s(%d) : %s\n' % (filename, lineno, msg)
# Log an error for this object
def Error(self, msg):
self.GetProperty('ERRORS').append(msg)
sys.stderr.write(self.GetLogLine('error: ' + msg))
# Log a warning for this object
def Warning(self, msg):
self.GetProperty('WARNINGS').append(msg)
sys.stdout.write(self.GetLogLine('warning:' + msg))
# Return file and line number for where node was defined
def GetFileAndLine(self):
return self.GetProperty('FILENAME'), self.GetProperty('LINENO')
def GetClass(self):
return self._cls
def GetName(self):
return self.GetProperty('NAME')
def GetParent(self):
return self._parent
def Traverse(self, search, filter_nodes):
if self._cls in filter_nodes:
return ''
search.Enter(self)
search.depth += 1
for child in self._children:
child.Traverse(search, filter_nodes)
search.depth -= 1
search.Exit(self)
def Tree(self, filter_nodes=None, accept_props=None):
class DumpTreeSearch(IDLSearch):
def __init__(self, props):
IDLSearch.__init__(self)
self.out = []
self.props = props
def Enter(self, node):
tab = ''.rjust(self.depth * 2)
self.out.append(tab + str(node))
if self.props:
proplist = []
for key, value in node.GetProperties().iteritems():
if key in self.props:
proplist.append(tab + ' %s: %s' % (key, str(value)))
if proplist:
self.out.append(tab + ' PROPERTIES')
self.out.extend(proplist)
if filter_nodes == None:
filter_nodes = ['Comment', 'Copyright']
search = DumpTreeSearch(accept_props)
self.Traverse(search, filter_nodes)
return search.out
#
# Search related functions
#
# Check if node is of a given type
def IsA(self, *typelist):
if self._cls in typelist:
return True
return False
# Get a list of all children
def GetChildren(self):
return self._children
def GetListOf(self, *keys):
out = []
for child in self.GetChildren():
if child.GetClass() in keys:
out.append(child)
return out
def GetOneOf(self, *keys):
out = self.GetListOf(*keys)
if out:
return out[0]
return None
def AddChildren(self, children):
children = CopyToList(children)
for child in children:
if not child:
continue
if type(child) == IDLAttribute:
self.SetProperty(child.name, child.value)
continue
if type(child) == IDLNode:
child._parent = self
self._children.append(child)
continue
raise RuntimeError('Adding child of type %s.\n' % type(child).__name__)
#
# Property Functions
#
def SetProperty(self, name, val):
self._properties[name] = val
def GetProperty(self, name, default=None):
return self._properties.get(name, default)
def GetProperties(self):
return self._properties
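# Illustrative example (not part of the original file): building a tiny AST by hand with the
# classes above and dumping it. The node class names and file name are made up.
#
#   root = IDLNode('File', 'example.idl', 1, 0, children=[
#       IDLAttribute('NAME', 'example.idl'),
#       IDLNode('Interface', 'example.idl', 2, 0,
#               children=[IDLAttribute('NAME', 'Foo')]),
#   ])
#   for line in root.Tree(filter_nodes=[]):
#       print line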
|
bsd-3-clause
|
sadaf2605/django
|
django/core/checks/security/sessions.py
|
57
|
2781
|
from django.conf import settings
from .. import Tags, Warning, register
from ..utils import patch_middleware_message
def add_session_cookie_message(message):
return message + (
" Using a secure-only session cookie makes it more difficult for "
"network traffic sniffers to hijack user sessions."
)
W010 = Warning(
add_session_cookie_message(
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
"but you have not set SESSION_COOKIE_SECURE to True."
),
id='security.W010',
)
W011 = Warning(
add_session_cookie_message(
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
"in your MIDDLEWARE, but you have not set "
"SESSION_COOKIE_SECURE to True."
),
id='security.W011',
)
W012 = Warning(
add_session_cookie_message("SESSION_COOKIE_SECURE is not set to True."),
id='security.W012',
)
def add_httponly_message(message):
return message + (
" Using an HttpOnly session cookie makes it more difficult for "
"cross-site scripting attacks to hijack user sessions."
)
W013 = Warning(
add_httponly_message(
"You have 'django.contrib.sessions' in your INSTALLED_APPS, "
"but you have not set SESSION_COOKIE_HTTPONLY to True.",
),
id='security.W013',
)
W014 = Warning(
add_httponly_message(
"You have 'django.contrib.sessions.middleware.SessionMiddleware' "
"in your MIDDLEWARE, but you have not set "
"SESSION_COOKIE_HTTPONLY to True."
),
id='security.W014',
)
W015 = Warning(
add_httponly_message("SESSION_COOKIE_HTTPONLY is not set to True."),
id='security.W015',
)
@register(Tags.security, deploy=True)
def check_session_cookie_secure(app_configs, **kwargs):
errors = []
if not settings.SESSION_COOKIE_SECURE:
if _session_app():
errors.append(W010)
if _session_middleware():
errors.append(patch_middleware_message(W011))
if len(errors) > 1:
errors = [W012]
return errors
@register(Tags.security, deploy=True)
def check_session_cookie_httponly(app_configs, **kwargs):
errors = []
if not settings.SESSION_COOKIE_HTTPONLY:
if _session_app():
errors.append(W013)
if _session_middleware():
errors.append(patch_middleware_message(W014))
if len(errors) > 1:
errors = [W015]
return errors
def _session_middleware():
return ("django.contrib.sessions.middleware.SessionMiddleware" in settings.MIDDLEWARE_CLASSES or
settings.MIDDLEWARE and "django.contrib.sessions.middleware.SessionMiddleware" in settings.MIDDLEWARE)
def _session_app():
return "django.contrib.sessions" in settings.INSTALLED_APPS
|
bsd-3-clause
|
adambrenecki/django
|
tests/test_utils/tests.py
|
4
|
23654
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
from django.db import connection
from django.forms import EmailField, IntegerField
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.html import HTMLParseError, parse_html
from django.test.utils import CaptureQueriesContext, IgnoreAllDeprecationWarningsMixin
from django.utils import six
from .models import Person
class SkippingTestCase(TestCase):
def test_skip_unless_db_feature(self):
"A test that might be skipped is actually called."
# Total hack, but it works, just want an attribute that's always true.
@skipUnlessDBFeature("__class__")
def test_func():
raise ValueError
self.assertRaises(ValueError, test_func)
class SkippingClassTestCase(TestCase):
def test_skip_class_unless_db_feature(self):
@skipUnlessDBFeature("__class__")
class NotSkippedTests(unittest.TestCase):
def test_dummy(self):
return
@skipIfDBFeature("__class__")
class SkippedTests(unittest.TestCase):
def test_will_be_skipped(self):
self.fail("We should never arrive here.")
test_suite = unittest.TestSuite()
test_suite.addTest(NotSkippedTests('test_dummy'))
try:
test_suite.addTest(SkippedTests('test_will_be_skipped'))
except unittest.SkipTest:
self.fail("SkipTest should not be raised at this stage")
result = unittest.TextTestRunner(stream=six.StringIO()).run(test_suite)
self.assertEqual(result.testsRun, 2)
self.assertEqual(len(result.skipped), 1)
class AssertNumQueriesTests(TestCase):
urls = 'test_utils.urls'
def test_assert_num_queries(self):
def test_func():
raise ValueError
self.assertRaises(ValueError,
self.assertNumQueries, 2, test_func
)
def test_assert_num_queries_with_client(self):
person = Person.objects.create(name='test')
self.assertNumQueries(
1,
self.client.get,
"/test_utils/get_person/%s/" % person.pk
)
self.assertNumQueries(
1,
self.client.get,
"/test_utils/get_person/%s/" % person.pk
)
def test_func():
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.assertNumQueries(2, test_func)
class AssertQuerysetEqualTests(TestCase):
def setUp(self):
self.p1 = Person.objects.create(name='p1')
self.p2 = Person.objects.create(name='p2')
def test_ordered(self):
self.assertQuerysetEqual(
Person.objects.all().order_by('name'),
[repr(self.p1), repr(self.p2)]
)
def test_unordered(self):
self.assertQuerysetEqual(
Person.objects.all().order_by('name'),
[repr(self.p2), repr(self.p1)],
ordered=False
)
def test_transform(self):
self.assertQuerysetEqual(
Person.objects.all().order_by('name'),
[self.p1.pk, self.p2.pk],
transform=lambda x: x.pk
)
def test_undefined_order(self):
# Using an unordered queryset with more than one ordered value
# is an error.
with self.assertRaises(ValueError):
self.assertQuerysetEqual(
Person.objects.all(),
[repr(self.p1), repr(self.p2)]
)
# No error for one value.
self.assertQuerysetEqual(
Person.objects.filter(name='p1'),
[repr(self.p1)]
)
class CaptureQueriesContextManagerTests(TestCase):
urls = 'test_utils.urls'
def setUp(self):
self.person_pk = six.text_type(Person.objects.create(name='test').pk)
def test_simple(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
with CaptureQueriesContext(connection) as captured_queries:
pass
self.assertEqual(0, len(captured_queries))
def test_within(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
def test_nested(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.count()
with CaptureQueriesContext(connection) as nested_captured_queries:
Person.objects.count()
self.assertEqual(1, len(nested_captured_queries))
self.assertEqual(2, len(captured_queries))
def test_failure(self):
with self.assertRaises(TypeError), CaptureQueriesContext(connection):
raise TypeError
def test_with_client(self):
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 2)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
self.assertIn(self.person_pk, captured_queries[1]['sql'])
class AssertNumQueriesContextManagerTests(TestCase):
urls = 'test_utils.urls'
def test_simple(self):
with self.assertNumQueries(0):
pass
with self.assertNumQueries(1):
Person.objects.count()
with self.assertNumQueries(2):
Person.objects.count()
Person.objects.count()
def test_failure(self):
with self.assertRaises(AssertionError) as exc_info, self.assertNumQueries(2):
Person.objects.count()
self.assertIn("1 queries executed, 2 expected", str(exc_info.exception))
with self.assertRaises(TypeError), self.assertNumQueries(4000):
raise TypeError
def test_with_client(self):
person = Person.objects.create(name="test")
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(2):
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
class AssertTemplateUsedContextManagerTests(TestCase):
def test_usage(self):
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/base.html')
with self.assertTemplateUsed(template_name='template_used/base.html'):
render_to_string('template_used/base.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/include.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/extends.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/base.html')
render_to_string('template_used/base.html')
def test_nested_usage(self):
with self.assertTemplateUsed('template_used/base.html'), \
self.assertTemplateUsed('template_used/include.html'):
render_to_string('template_used/include.html')
with self.assertTemplateUsed('template_used/extends.html'), \
self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/extends.html')
with self.assertTemplateUsed('template_used/base.html'):
with self.assertTemplateUsed('template_used/alternative.html'):
render_to_string('template_used/alternative.html')
render_to_string('template_used/base.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/extends.html')
with self.assertTemplateNotUsed('template_used/base.html'):
render_to_string('template_used/alternative.html')
render_to_string('template_used/base.html')
def test_not_used(self):
with self.assertTemplateNotUsed('template_used/base.html'):
pass
with self.assertTemplateNotUsed('template_used/alternative.html'):
pass
def test_error_message(self):
with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html'), \
self.assertTemplateUsed('template_used/base.html'):
pass
with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html'), \
self.assertTemplateUsed(template_name='template_used/base.html'):
pass
with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html.*template_used/alternative\.html$'), \
self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/alternative.html')
def test_failure(self):
with self.assertRaises(TypeError), self.assertTemplateUsed():
pass
with self.assertRaises(AssertionError), self.assertTemplateUsed(''):
pass
with self.assertRaises(AssertionError), self.assertTemplateUsed(''):
render_to_string('template_used/base.html')
with self.assertRaises(AssertionError), self.assertTemplateUsed(template_name=''):
pass
with self.assertRaises(AssertionError), \
self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/alternative.html')
class HTMLEqualTests(TestCase):
def test_html_parser(self):
element = parse_html('<div><p>Hello</p></div>')
self.assertEqual(len(element.children), 1)
self.assertEqual(element.children[0].name, 'p')
self.assertEqual(element.children[0].children[0], 'Hello')
parse_html('<p>')
parse_html('<p attr>')
dom = parse_html('<p>foo')
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.name, 'p')
self.assertEqual(dom[0], 'foo')
def test_parse_html_in_script(self):
parse_html('<script>var a = "<p" + ">";</script>')
parse_html('''
<script>
var js_sha_link='<p>***</p>';
</script>
''')
# script content will be parsed to text
dom = parse_html('''
<script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script>
''')
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>")
def test_self_closing_tags(self):
self_closing_tags = ('br' , 'hr', 'input', 'img', 'meta', 'spacer',
'link', 'frame', 'base', 'col')
for tag in self_closing_tags:
dom = parse_html('<p>Hello <%s> world</p>' % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], 'Hello')
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], 'world')
dom = parse_html('<p>Hello <%s /> world</p>' % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], 'Hello')
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], 'world')
def test_simple_equal_html(self):
self.assertHTMLEqual('', '')
self.assertHTMLEqual('<p></p>', '<p></p>')
self.assertHTMLEqual('<p></p>', ' <p> </p> ')
self.assertHTMLEqual(
'<div><p>Hello</p></div>',
'<div><p>Hello</p></div>')
self.assertHTMLEqual(
'<div><p>Hello</p></div>',
'<div> <p>Hello</p> </div>')
self.assertHTMLEqual(
'<div>\n<p>Hello</p></div>',
'<div><p>Hello</p></div>\n')
self.assertHTMLEqual(
'<div><p>Hello\nWorld !</p></div>',
'<div><p>Hello World\n!</p></div>')
self.assertHTMLEqual(
'<div><p>Hello\nWorld !</p></div>',
'<div><p>Hello World\n!</p></div>')
self.assertHTMLEqual(
'<p>Hello World !</p>',
'<p>Hello World\n\n!</p>')
self.assertHTMLEqual('<p> </p>', '<p></p>')
self.assertHTMLEqual('<p/>', '<p></p>')
self.assertHTMLEqual('<p />', '<p></p>')
self.assertHTMLEqual('<input checked>', '<input checked="checked">')
self.assertHTMLEqual('<p>Hello', '<p> Hello')
self.assertHTMLEqual('<p>Hello</p>World', '<p>Hello</p> World')
def test_ignore_comments(self):
self.assertHTMLEqual(
'<div>Hello<!-- this is a comment --> World!</div>',
'<div>Hello World!</div>')
def test_unequal_html(self):
self.assertHTMLNotEqual('<p>Hello</p>', '<p>Hello!</p>')
self.assertHTMLNotEqual('<p>foobar</p>', '<p>foo bar</p>')
self.assertHTMLNotEqual('<p>foo bar</p>', '<p>foo bar</p>')
self.assertHTMLNotEqual('<p>foo nbsp</p>', '<p>foo </p>')
self.assertHTMLNotEqual('<p>foo #20</p>', '<p>foo </p>')
self.assertHTMLNotEqual(
'<p><span>Hello</span><span>World</span></p>',
'<p><span>Hello</span>World</p>')
self.assertHTMLNotEqual(
'<p><span>Hello</span>World</p>',
'<p><span>Hello</span><span>World</span></p>')
def test_attributes(self):
self.assertHTMLEqual(
'<input type="text" id="id_name" />',
'<input id="id_name" type="text" />')
self.assertHTMLEqual(
'''<input type='text' id="id_name" />''',
'<input id="id_name" type="text" />')
self.assertHTMLNotEqual(
'<input type="text" id="id_name" />',
'<input type="password" id="id_name" />')
def test_complex_examples(self):
self.assertHTMLEqual(
"""<tr><th><label for="id_first_name">First name:</label></th>
<td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th>
<td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th>
<td><input type="text" value="1940-10-9" name="birthday" id="id_birthday" /></td></tr>""",
"""
<tr><th>
<label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" />
</td></tr>
<tr><th>
<label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="Lennon" id="id_last_name" />
</td></tr>
<tr><th>
<label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
</td></tr>
""")
self.assertHTMLEqual(
"""<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p>
This is a valid paragraph
<div> this is a div AFTER the p</div>
</body>
</html>""", """
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p> This is a valid paragraph
<!-- browsers would close the p tag here -->
<div> this is a div AFTER the p</div>
</p> <!-- this is invalid HTML parsing, but it should make no
difference in most cases -->
</body>
</html>""")
def test_html_contain(self):
# equal html contains each other
dom1 = parse_html('<p>foo')
dom2 = parse_html('<p>foo</p>')
self.assertTrue(dom1 in dom2)
self.assertTrue(dom2 in dom1)
dom2 = parse_html('<div><p>foo</p></div>')
self.assertTrue(dom1 in dom2)
self.assertTrue(dom2 not in dom1)
self.assertFalse('<p>foo</p>' in dom2)
self.assertTrue('foo' in dom2)
# when a root element is used ...
dom1 = parse_html('<p>foo</p><p>bar</p>')
dom2 = parse_html('<p>foo</p><p>bar</p>')
self.assertTrue(dom1 in dom2)
dom1 = parse_html('<p>foo</p>')
self.assertTrue(dom1 in dom2)
dom1 = parse_html('<p>bar</p>')
self.assertTrue(dom1 in dom2)
def test_count(self):
# equal html contains each other one time
dom1 = parse_html('<p>foo')
dom2 = parse_html('<p>foo</p>')
self.assertEqual(dom1.count(dom2), 1)
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo</p><p>bar</p>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo foo</p><p>foo</p>')
self.assertEqual(dom2.count('foo'), 3)
dom2 = parse_html('<p class="bar">foo</p>')
self.assertEqual(dom2.count('bar'), 0)
self.assertEqual(dom2.count('class'), 0)
self.assertEqual(dom2.count('p'), 0)
self.assertEqual(dom2.count('o'), 2)
dom2 = parse_html('<p>foo</p><p>foo</p>')
self.assertEqual(dom2.count(dom1), 2)
dom2 = parse_html('<div><p>foo<input type=""></p><p>foo</p></div>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<div><div><p>foo</p></div></div>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo<p>foo</p></p>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo<p>bar</p></p>')
self.assertEqual(dom2.count(dom1), 0)
def test_parsing_errors(self):
with self.assertRaises(AssertionError):
self.assertHTMLEqual('<p>', '')
with self.assertRaises(AssertionError):
self.assertHTMLEqual('', '<p>')
with self.assertRaises(HTMLParseError):
parse_html('</p>')
def test_contains_html(self):
response = HttpResponse('''<body>
This is a form: <form action="" method="get">
<input type="text" name="Hello" />
</form></body>''')
self.assertNotContains(response, "<input name='Hello' type='text'>")
self.assertContains(response, '<form action="" method="get">')
self.assertContains(response, "<input name='Hello' type='text'>", html=True)
self.assertNotContains(response, '<form action="" method="get">', html=True)
invalid_response = HttpResponse('''<body <bad>>''')
with self.assertRaises(AssertionError):
self.assertContains(invalid_response, '<p></p>')
with self.assertRaises(AssertionError):
self.assertContains(response, '<p "whats" that>')
def test_unicode_handling(self):
response = HttpResponse('<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>')
self.assertContains(response, '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>', html=True)
class XMLEqualTests(TestCase):
def test_simple_equal(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_unordered(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_raise(self):
xml1 = "<elem attr1='a' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLEqual(xml1, xml2)
def test_simple_not_equal(self):
xml1 = "<elem attr1='a' attr2='c' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLNotEqual(xml1, xml2)
def test_simple_not_equal_raise(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLNotEqual(xml1, xml2)
def test_parsing_errors(self):
xml_unvalid = "<elem attr1='a attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLNotEqual(xml_unvalid, xml2)
def test_comment_root(self):
xml1 = "<?xml version='1.0'?><!-- comment1 --><elem attr1='a' attr2='b' />"
xml2 = "<?xml version='1.0'?><!-- comment2 --><elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
class SkippingExtraTests(TestCase):
fixtures = ['should_not_be_loaded.json']
# HACK: This depends on internals of our TestCase subclasses
def __call__(self, result=None):
# Detect fixture loading by counting SQL queries, should be zero
with self.assertNumQueries(0):
super(SkippingExtraTests, self).__call__(result)
@unittest.skip("Fixture loading should not be performed for skipped tests.")
def test_fixtures_are_skipped(self):
pass
class AssertRaisesMsgTest(SimpleTestCase):
def test_special_re_chars(self):
"""assertRaisesMessage shouldn't interpret RE special chars."""
def func1():
raise ValueError("[.*x+]y?")
self.assertRaisesMessage(ValueError, "[.*x+]y?", func1)
class AssertFieldOutputTests(SimpleTestCase):
def test_assert_field_output(self):
error_invalid = ['Enter a valid email address.']
self.assertFieldOutput(EmailField, {'[email protected]': '[email protected]'}, {'aaa': error_invalid})
self.assertRaises(AssertionError, self.assertFieldOutput, EmailField, {'[email protected]': '[email protected]'}, {'aaa': error_invalid + ['Another error']})
self.assertRaises(AssertionError, self.assertFieldOutput, EmailField, {'[email protected]': 'Wrong output'}, {'aaa': error_invalid})
self.assertRaises(AssertionError, self.assertFieldOutput, EmailField, {'[email protected]': '[email protected]'}, {'aaa': ['Come on, gimme some well formatted data, dude.']})
def test_custom_required_message(self):
class MyCustomField(IntegerField):
default_error_messages = {
'required': 'This is really required.',
}
self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None)
class DoctestNormalizerTest(IgnoreAllDeprecationWarningsMixin, SimpleTestCase):
def test_normalizer(self):
from django.test.simple import make_doctest
suite = make_doctest("test_utils.doctest_output")
failures = unittest.TextTestRunner(stream=six.StringIO()).run(suite)
self.assertEqual(failures.failures, [])
|
bsd-3-clause
|
helogargon/TFG
|
CyberCube/FrontEnd/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
|
1509
|
17165
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Handle version information related to Visual Stuio."""
import errno
import os
import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
"""Information regarding a version of Visual Studio."""
def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj,
path, sdk_based, default_toolset=None):
self.short_name = short_name
self.description = description
self.solution_version = solution_version
self.project_version = project_version
self.flat_sln = flat_sln
self.uses_vcxproj = uses_vcxproj
self.path = path
self.sdk_based = sdk_based
self.default_toolset = default_toolset
def ShortName(self):
return self.short_name
def Description(self):
"""Get the full description of the version."""
return self.description
def SolutionVersion(self):
"""Get the version number of the sln files."""
return self.solution_version
def ProjectVersion(self):
"""Get the version number of the vcproj or vcxproj files."""
return self.project_version
def FlatSolution(self):
return self.flat_sln
def UsesVcxproj(self):
"""Returns true if this version uses a vcxproj file."""
return self.uses_vcxproj
def ProjectExtension(self):
"""Returns the file extension for the project."""
return self.uses_vcxproj and '.vcxproj' or '.vcproj'
def Path(self):
"""Returns the path to Visual Studio installation."""
return self.path
def ToolPath(self, tool):
"""Returns the path to a given compiler tool. """
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
def DefaultToolset(self):
"""Returns the msbuild toolset version that will be used in the absence
of a user override."""
return self.default_toolset
def SetupScript(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
# Check if we are running in the SDK command line environment and use
# the setup script from the SDK if so. |target_arch| should be either
# 'x86' or 'x64'.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
if self.sdk_based and sdk_dir:
return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
'/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
# VS2013 and later, non-Express have a x64-x86 cross that we want
# to prefer.
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
# Otherwise, the standard x86 compiler.
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
assert target_arch == 'x64'
arg = 'x86_amd64'
# Use the 64-on-64 compiler if we're not using an express
# edition and we're running on a 64bit OS.
if self.short_name[-1] != 'e' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
arg = 'amd64'
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
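# A minimal illustration (not part of the original module) of what SetupScript()
# above produces: for a non-Express VS 2013+ install on a 64-bit host,
# SetupScript('x86') prefers the amd64_x86 cross toolchain and returns roughly
# ['<install>\\VC\\vcvarsall.bat', 'amd64_x86'], while SetupScript('x64') returns
# ['<install>\\VC\\vcvarsall.bat', 'amd64'] ('x86_amd64' for Express editions).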
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
  While ideally we might use the win32 module, we would like gyp to stay usable
  from any Python build; cygwin Python, for instance, lacks this module.
Arguments:
sysdir: The system subdirectory to attempt to launch reg.exe from.
key: The registry key to read from.
value: The particular value to read.
Return:
stdout from reg.exe, or None for failure.
"""
# Skip if not on Windows or Python Win32 setup issue
if sys.platform not in ('win32', 'cygwin'):
return None
# Setup params to pass to and attempt to launch reg.exe
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
'query', key]
if value:
cmd.extend(['/v', value])
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
# Note that the error text may be in [1] in some cases
text = p.communicate()[0]
# Check return code from reg.exe; officially 0==success and 1==error
if p.returncode:
return None
return text
def _RegistryQuery(key, value=None):
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
that fails, it falls back to System32. Sysnative is available on Vista and
up and available on Windows Server 2003 and XP through KB patch 942589. Note
that Sysnative will always fail if using 64-bit python due to it being a
virtual directory and System32 will work correctly in the first place.
KB 942589 - http://support.microsoft.com/kb/942589/en-us.
Arguments:
key: The registry key.
value: The particular registry value to read (optional).
Return:
stdout from reg.exe, or None for failure.
"""
text = None
try:
text = _RegistryQueryBase('Sysnative', key, value)
except OSError, e:
if e.errno == errno.ENOENT:
text = _RegistryQueryBase('System32', key, value)
else:
raise
return text
def _RegistryGetValueUsingWinReg(key, value):
"""Use the _winreg module to obtain the value of a registry key.
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
"""
import _winreg
try:
root, subkey = key.split('\\', 1)
assert root == 'HKLM' # Only need HKLM for now.
with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
return _winreg.QueryValueEx(hkey, value)[0]
except WindowsError:
return None
def _RegistryGetValue(key, value):
"""Use _winreg or reg.exe to obtain the value of a registry key.
Using _winreg is preferable because it solves an issue on some corporate
environments where access to reg.exe is locked down. However, we still need
to fallback to reg.exe for the case where the _winreg module is not available
(for example in cygwin python).
Args:
key: The registry key.
value: The particular registry value to read.
Return:
contents of the registry key's value, or None on failure.
"""
try:
return _RegistryGetValueUsingWinReg(key, value)
except ImportError:
pass
# Fallback to reg.exe if we fail to import _winreg.
text = _RegistryQuery(key, value)
if not text:
return None
# Extract value.
match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
if not match:
return None
return match.group(1)
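# Usage sketch (illustrative, not part of the original module): the detection
# code below issues reads such as
#   _RegistryGetValue(r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7', '12.0')
# which returns the VC install directory as a string, or None when the key or
# value is absent (e.g. on a machine without that Visual Studio release).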
def _CreateVersion(name, path, sdk_based=False):
"""Sets up MSVS project generation.
Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
  passed in that doesn't match a value in versions, Python will raise an error.
"""
if path:
path = os.path.normpath(path)
versions = {
'2015': VisualStudioVersion('2015',
'Visual Studio 2015',
solution_version='12.00',
project_version='14.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v140'),
'2013': VisualStudioVersion('2013',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2013e': VisualStudioVersion('2013e',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2012': VisualStudioVersion('2012',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2012e': VisualStudioVersion('2012e',
'Visual Studio 2012',
solution_version='12.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v110'),
'2010': VisualStudioVersion('2010',
'Visual Studio 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2010e': VisualStudioVersion('2010e',
'Visual C++ Express 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2008': VisualStudioVersion('2008',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2008e': VisualStudioVersion('2008e',
'Visual Studio 2008',
solution_version='10.00',
project_version='9.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005': VisualStudioVersion('2005',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=False,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005e': VisualStudioVersion('2005e',
'Visual Studio 2005',
solution_version='9.00',
project_version='8.00',
flat_sln=True,
uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
}
return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express):
"""Collect the list of installed visual studio versions.
Returns:
A list of visual studio versions installed in descending order of
usage preference.
Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8 through 14 are considered.
Possibilities are:
2005(e) - Visual Studio 2005 (8)
2008(e) - Visual Studio 2008 (9)
2010(e) - Visual Studio 2010 (10)
2012(e) - Visual Studio 2012 (11)
2013(e) - Visual Studio 2013 (12)
2015 - Visual Studio 2015 (14)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
'8.0': '2005',
'9.0': '2008',
'10.0': '2010',
'11.0': '2012',
'12.0': '2013',
'14.0': '2015',
}
versions = []
for version in versions_to_check:
# Old method of searching for which VS version is installed
# We don't use the 2010-encouraged-way because we also want to get the
# path to the binaries, which it doesn't offer.
keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Microsoft\VCExpress\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], 'InstallDir')
if not path:
continue
path = _ConvertToCygpath(path)
# Check for full.
full_path = os.path.join(path, 'devenv.exe')
express_path = os.path.join(path, '*express.exe')
if not force_express and os.path.exists(full_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version],
os.path.join(path, '..', '..')))
# Check for express.
elif glob.glob(express_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..', '..')))
# The old method above does not work when only SDK is installed.
keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], version)
if not path:
continue
path = _ConvertToCygpath(path)
if version != '14.0': # There is no Express edition for 2015.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True))
return versions
def SelectVisualStudioVersion(version='auto', allow_fallback=True):
"""Select which version of Visual Studio projects to generate.
Arguments:
version: Hook to allow caller to force a particular version (vs auto).
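    allow_fallback: Whether to fall back to a default version (2005, or the
        requested version with no path) when no installation can be detected.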
Returns:
An object representing a visual studio project format version.
"""
# In auto mode, check environment variable for override.
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
'2008e': ('9.0',),
'2010': ('10.0',),
'2010e': ('10.0',),
'2012': ('11.0',),
'2012e': ('11.0',),
'2013': ('12.0',),
'2013e': ('12.0',),
'2015': ('14.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
msvs_version = os.environ.get('GYP_MSVS_VERSION')
if not msvs_version:
raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
'set to a particular version (e.g. 2010e).')
return _CreateVersion(msvs_version, override_path, sdk_based=True)
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
if not versions:
if not allow_fallback:
raise ValueError('Could not locate Visual Studio installation.')
if version == 'auto':
# Default to 2005 if we couldn't find anything
return _CreateVersion('2005', None)
else:
return _CreateVersion(version, None)
return versions[0]
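if __name__ == '__main__':
  # Minimal self-test sketch (not part of the original module; run with gyp/pylib
  # on PYTHONPATH). On a machine without Visual Studio the auto-detection falls
  # back to the 2005 defaults, as described in SelectVisualStudioVersion above.
  msvs = SelectVisualStudioVersion('auto')
  print 'Selected %s: %s (project extension %s)' % (
      msvs.ShortName(), msvs.Description(), msvs.ProjectExtension())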
|
mit
|
jabesq/home-assistant
|
tests/components/config/test_script.py
|
2
|
1047
|
"""Tests for config/script."""
from unittest.mock import patch
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
async def test_delete_script(hass, hass_client):
"""Test deleting a script."""
with patch.object(config, 'SECTIONS', ['script']):
await async_setup_component(hass, 'config', {})
client = await hass_client()
orig_data = {
'one': {},
'two': {},
}
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch('homeassistant.components.config._read', mock_read), \
patch('homeassistant.components.config._write', mock_write):
resp = await client.delete('/api/config/script/config/two')
assert resp.status == 200
result = await resp.json()
assert result == {'result': 'ok'}
assert len(written) == 1
assert written[0] == {
'one': {}
}
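# Sketch of the matching update flow (not part of the original tests; the
# endpoint shape mirrors the delete test above, the payload keys are
# illustrative only, and an `import json` at the top of the module is assumed):
#   resp = await client.post(
#       '/api/config/script/config/one',
#       data=json.dumps({'alias': 'Example', 'sequence': []}))
#   assert resp.status == 200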
|
apache-2.0
|
RamonGuiuGou/l10n-spain
|
l10n_es_payment_order/wizard/converter.py
|
13
|
5316
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2006 ACYSOS S.L. (http://acysos.com)
# Pedro Tarrafeta <[email protected]>
# Ignacio Ibeas <[email protected]>
# Copyright (c) 2008 Pablo Rocandio. All Rights Reserved.
# Copyright (c) 2009 Zikzakmedia S.L. (http://zikzakmedia.com)
# Jordi Esteve <[email protected]>
# Copyright (c) 2009 NaN (http://www.nan-tic.com)
# Albert Cervera i Areny <[email protected]>
# Refactorización. Acysos S.L. (http://www.acysos.com) 2012
# Ignacio Ibeas <[email protected]>
#
# Migración Odoo 8.0. Acysos S.L. (http://www.acysos.com) 2015
# Ignacio Ibeas <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import _
from .log import Log
class PaymentConverterSpain(object):
def digits_only(self, cc_in):
"""Discards non-numeric chars"""
cc = ""
for i in cc_in or '':
try:
int(i)
cc += i
except ValueError:
pass
return cc
def to_ascii(self, text):
"""Converts special characters such as those with accents to their
ASCII equivalents"""
old_chars = ['á', 'é', 'í', 'ó', 'ú', 'à', 'è', 'ì', 'ò', 'ù', 'ä',
'ë', 'ï', 'ö', 'ü', 'â', 'ê', 'î', 'ô', 'û', 'Á', 'É',
'Í', 'Ú', 'Ó', 'À', 'È', 'Ì', 'Ò', 'Ù', 'Ä', 'Ë', 'Ï',
'Ö', 'Ü', 'Â', 'Ê', 'Î', 'Ô', 'Û', 'ñ', 'Ñ', 'ç', 'Ç',
'ª', 'º', '·', '\n']
new_chars = ['a', 'e', 'i', 'o', 'u', 'a', 'e', 'i', 'o', 'u', 'a',
'e', 'i', 'o', 'u', 'a', 'e', 'i', 'o', 'u', 'A', 'E',
'I', 'U', 'O', 'A', 'E', 'I', 'O', 'U', 'A', 'E', 'I',
'O', 'U', 'A', 'E', 'I', 'O', 'U', 'n', 'N', 'c', 'C',
'a', 'o', '.', ' ']
for old, new in zip(old_chars, new_chars):
text = text.replace(unicode(old, 'UTF-8'), new)
return text
def convert_text(self, text, size, justified='left'):
if justified == 'left':
return self.to_ascii(text)[:size].ljust(size)
else:
return self.to_ascii(text)[:size].rjust(size)
def convert_float(self, number, size):
text = str(int(round(number * 100, 0)))
if len(text) > size:
raise Log(_('Error:\n\nCan not convert float number %(number).2f '
'to fit in %(size)d characters.') % {
'number': number,
'size': size
})
return text.zfill(size)
def convert_int(self, number, size):
text = str(number)
if len(text) > size:
raise Log(_('Error:\n\nCan not convert integer number %(number)d '
'to fit in %(size)d characters.') % {
'number': number,
'size': size
})
return text.zfill(size)
def convert(self, value, size, justified='left'):
if not value:
return self.convert_text('', size)
elif isinstance(value, float):
return self.convert_float(value, size)
elif isinstance(value, int):
return self.convert_int(value, size)
else:
return self.convert_text(value, size, justified)
def convert_bank_account(self, value, partner_name):
if not isinstance(value, basestring):
raise Log(_('User error:\n\nThe bank account number of %s is not '
'defined.') % partner_name)
ccc = self.digits_only(value)
if len(ccc) != 20:
raise Log(_('User error:\n\nThe bank account number of %s does not'
' have 20 digits.') % partner_name)
return ccc
def bank_account_parts(self, value, partner_name):
if not isinstance(value, basestring):
raise Log(_('User error:\n\nThe bank account number of %s is not '
'defined.') % partner_name)
ccc = self.digits_only(value)
if len(ccc) != 20:
raise Log(_('User error:\n\nThe bank account number of %s does '
'not have 20 digits.') % partner_name)
return {'bank': ccc[:4],
'office': ccc[4:8],
'dc': ccc[8:10],
'account': ccc[10:]}
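# Usage sketch (illustrative, not part of the original module) of the
# fixed-width rules implemented above, with hypothetical values:
#   conv = PaymentConverterSpain()
#   conv.convert(12.34, 10)           # -> '0000001234' (floats become cents, zero-padded)
#   conv.convert(42, 5)               # -> '00042'
#   conv.convert('Acme', 8)           # -> 'Acme    '   (ASCII-folded, left-justified)
#   conv.convert('Acme', 8, 'right')  # -> '    Acme'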
|
agpl-3.0
|
vjmac15/Lyilis
|
lib/pycparser/ast_transforms (VJ Washington's conflicted copy 2017-08-29).py
|
22
|
3574
|
#------------------------------------------------------------------------------
# pycparser: ast_transforms.py
#
# Some utilities used by the parser to create a friendlier AST.
#
# Eli Bendersky [http://eli.thegreenplace.net]
# License: BSD
#------------------------------------------------------------------------------
from . import c_ast
def fix_switch_cases(switch_node):
""" The 'case' statements in a 'switch' come out of parsing with one
child node, so subsequent statements are just tucked to the parent
Compound. Additionally, consecutive (fall-through) case statements
come out messy. This is a peculiarity of the C grammar. The following:
switch (myvar) {
case 10:
k = 10;
p = k + 1;
return 10;
case 20:
case 30:
return 20;
default:
break;
}
Creates this tree (pseudo-dump):
Switch
ID: myvar
Compound:
Case 10:
k = 10
p = k + 1
return 10
Case 20:
Case 30:
return 20
Default:
break
The goal of this transform is to fix this mess, turning it into the
following:
Switch
ID: myvar
Compound:
Case 10:
k = 10
p = k + 1
return 10
Case 20:
Case 30:
return 20
Default:
break
A fixed AST node is returned. The argument may be modified.
"""
assert isinstance(switch_node, c_ast.Switch)
if not isinstance(switch_node.stmt, c_ast.Compound):
return switch_node
# The new Compound child for the Switch, which will collect children in the
# correct order
new_compound = c_ast.Compound([], switch_node.stmt.coord)
# The last Case/Default node
last_case = None
# Goes over the children of the Compound below the Switch, adding them
# either directly below new_compound or below the last Case as appropriate
for child in switch_node.stmt.block_items:
if isinstance(child, (c_ast.Case, c_ast.Default)):
# If it's a Case/Default:
# 1. Add it to the Compound and mark as "last case"
# 2. If its immediate child is also a Case or Default, promote it
# to a sibling.
new_compound.block_items.append(child)
_extract_nested_case(child, new_compound.block_items)
last_case = new_compound.block_items[-1]
else:
# Other statements are added as children to the last case, if it
# exists.
if last_case is None:
new_compound.block_items.append(child)
else:
last_case.stmts.append(child)
switch_node.stmt = new_compound
return switch_node
def _extract_nested_case(case_node, stmts_list):
""" Recursively extract consecutive Case statements that are made nested
by the parser and add them to the stmts_list.
"""
if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)):
stmts_list.append(case_node.stmts.pop())
_extract_nested_case(stmts_list[-1], stmts_list)
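# Note (a sketch, not part of the original module): pycparser's parser applies
# this transform right after reducing a switch statement; the call site in
# c_parser.py is essentially
#   p[0] = fix_switch_cases(c_ast.Switch(cond_expr, body_compound, coord))
# so user code normally never needs to call fix_switch_cases() directly.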
|
gpl-3.0
|
geekboxzone/lollipop_external_chromium_org_third_party_WebKit
|
Tools/Scripts/webkitpy/layout_tests/port/builders_unittest.py
|
47
|
1897
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
import builders
class BuildersTest(unittest.TestCase):
def test_path_from_name(self):
tests = {
'test': 'test',
'Mac 10.6 (dbg)(1)': 'Mac_10_6__dbg__1_',
'(.) ': '____',
}
for name, expected in tests.items():
self.assertEqual(expected, builders.builder_path_from_name(name))
|
bsd-3-clause
|
mccheung/kbengine
|
kbe/res/scripts/common/Lib/site-packages/setuptools/depends.py
|
410
|
6221
|
from __future__ import generators
import sys, imp, marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion, LooseVersion
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
"""A prerequisite to building or installing a distribution"""
def __init__(self,name,requested_version,module,homepage='',
attribute=None,format=None
):
if format is None and requested_version is not None:
format = StrictVersion
if format is not None:
requested_version = format(requested_version)
if attribute is None:
attribute = '__version__'
self.__dict__.update(locals())
del self.self
def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name,self.requested_version)
return self.name
def version_ok(self,version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version) != "unknown" and version >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
"""
if self.attribute is None:
try:
f,p,i = find_module(self.module,paths)
if f: f.close()
return default
except ImportError:
return None
v = get_module_constant(self.module,self.attribute,default,paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v
def is_present(self,paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None
def is_current(self,paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version)
def _iter_code(code):
"""Yield '(op,arg)' pair for each operation in code object 'code'"""
from array import array
from dis import HAVE_ARGUMENT, EXTENDED_ARG
bytes = array('b',code.co_code)
eof = len(code.co_code)
ptr = 0
extended_arg = 0
while ptr<eof:
op = bytes[ptr]
if op>=HAVE_ARGUMENT:
arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
ptr += 3
if op==EXTENDED_ARG:
                extended_arg = arg * 65536  # plain ints auto-promote, no long() wrapper needed
continue
else:
arg = None
ptr += 1
yield op,arg
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
parts = module.split('.')
while parts:
part = parts.pop(0)
f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
if kind==PKG_DIRECTORY:
parts = parts or ['__init__']
paths = [path]
elif parts:
raise ImportError("Can't find %r in %s" % (parts,module))
return info
def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'."""
try:
f, path, (suffix,mode,kind) = find_module(module,paths)
except ImportError:
# Module doesn't exist
return None
try:
if kind==PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind==PY_FROZEN:
code = imp.get_frozen_object(module)
elif kind==PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
if module not in sys.modules:
imp.load_module(module,f,path,(suffix,mode,kind))
return getattr(sys.modules[module],symbol,None)
finally:
if f:
f.close()
return extract_constant(code,symbol,default)
def extract_constant(code,symbol,default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for op, arg in _iter_code(code):
if op==LOAD_CONST:
const = code.co_consts[arg]
elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
return const
else:
const = default
if sys.platform.startswith('java') or sys.platform == 'cli':
# XXX it'd be better to test assertions about bytecode instead...
del extract_constant, get_module_constant
__all__.remove('extract_constant')
__all__.remove('get_module_constant')
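if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): declare a dependency
    # on the standard-library 'json' module and report whether it can be found.
    # Version extraction relies on the Python 2-era bytecode scanning above, so
    # treat the output as illustrative.
    json_req = Require('JSON', None, 'json')
    sys.stdout.write('full name: %s\n' % json_req.full_name())
    sys.stdout.write('present:   %s\n' % json_req.is_present())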
|
lgpl-3.0
|
Yahvuh/Eliza
|
Flask/Lib/site-packages/pip/_vendor/colorama/winterm.py
|
442
|
5732
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32
# from wincon.h
class WinColor(object):
BLACK = 0
BLUE = 1
GREEN = 2
CYAN = 3
RED = 4
MAGENTA = 5
YELLOW = 6
GREY = 7
# from wincon.h
class WinStyle(object):
NORMAL = 0x00 # dim text, dim background
BRIGHT = 0x08 # bright text, dim background
BRIGHT_BACKGROUND = 0x80 # dim text, bright background
class WinTerm(object):
def __init__(self):
self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
self.set_attrs(self._default)
self._default_fore = self._fore
self._default_back = self._back
self._default_style = self._style
def get_attrs(self):
return self._fore + self._back * 16 + self._style
def set_attrs(self, value):
self._fore = value & 7
self._back = (value >> 4) & 7
self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
def reset_all(self, on_stderr=None):
self.set_attrs(self._default)
self.set_console(attrs=self._default)
def fore(self, fore=None, light=False, on_stderr=False):
if fore is None:
fore = self._default_fore
self._fore = fore
if light:
self._style |= WinStyle.BRIGHT
self.set_console(on_stderr=on_stderr)
def back(self, back=None, light=False, on_stderr=False):
if back is None:
back = self._default_back
self._back = back
if light:
self._style |= WinStyle.BRIGHT_BACKGROUND
self.set_console(on_stderr=on_stderr)
def style(self, style=None, on_stderr=False):
if style is None:
style = self._default_style
self._style = style
self.set_console(on_stderr=on_stderr)
def set_console(self, attrs=None, on_stderr=False):
if attrs is None:
attrs = self.get_attrs()
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleTextAttribute(handle, attrs)
def get_position(self, handle):
position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
# Because Windows coordinates are 0-based,
# and win32.SetConsoleCursorPosition expects 1-based.
position.X += 1
position.Y += 1
return position
def set_cursor_position(self, position=None, on_stderr=False):
if position is None:
#I'm not currently tracking the position, so there is no default.
#position = self.get_position()
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleCursorPosition(handle, position)
def cursor_adjust(self, x, y, on_stderr=False):
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
position = self.get_position(handle)
adjusted_position = (position.Y + y, position.X + x)
win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
def erase_screen(self, mode=0, on_stderr=False):
# 0 should clear from the cursor to the end of the screen.
# 1 should clear from the cursor to the beginning of the screen.
# 2 should clear the entire screen, and move cursor to (1,1)
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
csbi = win32.GetConsoleScreenBufferInfo(handle)
# get the number of character cells in the current buffer
cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
# get number of character cells before current cursor position
cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
if mode == 0:
from_coord = csbi.dwCursorPosition
cells_to_erase = cells_in_screen - cells_before_cursor
        elif mode == 1:
from_coord = win32.COORD(0, 0)
cells_to_erase = cells_before_cursor
elif mode == 2:
from_coord = win32.COORD(0, 0)
cells_to_erase = cells_in_screen
        # blank out the cells in the region being erased
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
if mode == 2:
# put the cursor where needed
win32.SetConsoleCursorPosition(handle, (1, 1))
def erase_line(self, mode=0, on_stderr=False):
# 0 should clear from the cursor to the end of the line.
# 1 should clear from the cursor to the beginning of the line.
# 2 should clear the entire line.
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
csbi = win32.GetConsoleScreenBufferInfo(handle)
if mode == 0:
from_coord = csbi.dwCursorPosition
cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
        elif mode == 1:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwCursorPosition.X
elif mode == 2:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwSize.X
        # blank out the cells in the selected part of the line
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
def set_title(self, title):
win32.SetConsoleTitle(title)
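# Usage sketch (illustrative, Windows-only; not part of the original module):
# colorama's AnsiToWin32 wrapper drives this class roughly as follows, assuming
# a real Win32 console handle is available:
#   term = WinTerm()
#   term.fore(WinColor.RED, light=True)   # bright red foreground
#   term.erase_line(2)                    # clear the whole current line
#   term.reset_all()                      # restore the attributes captured at startup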
|
gpl-2.0
|
grueni75/GeoDiscoverer
|
Source/Platform/Target/Android/core/src/main/jni/libxml2-2.9.2/check-xinclude-test-suite.py
|
347
|
5333
|
#!/usr/bin/python
import sys
import time
import os
import string
sys.path.insert(0, "python")
import libxml2
#
# the testsuite description
#
DIR="xinclude-test-suite"
CONF="testdescr.xml"
LOG="check-xinclude-test-suite.log"
log = open(LOG, "w")
os.chdir(DIR)
test_nr = 0
test_succeed = 0
test_failed = 0
test_error = 0
#
# Error and warning handlers
#
error_nr = 0
error_msg = ''
def errorHandler(ctx, str):
global error_nr
global error_msg
if string.find(str, "error:") >= 0:
error_nr = error_nr + 1
if len(error_msg) < 300:
if len(error_msg) == 0 or error_msg[-1] == '\n':
error_msg = error_msg + " >>" + str
else:
error_msg = error_msg + str
libxml2.registerErrorHandler(errorHandler, None)
def testXInclude(filename, id):
global error_nr
global error_msg
global log
error_nr = 0
error_msg = ''
print "testXInclude(%s, %s)" % (filename, id)
return 1
def runTest(test, basedir):
global test_nr
global test_failed
global test_error
global test_succeed
global error_msg
global log
fatal_error = 0
uri = test.prop('href')
id = test.prop('id')
type = test.prop('type')
    if uri == None:
        print "Test without URI:", id
        return -1
    if id == None:
        print "Test without ID:", uri
        return -1
    if type == None:
        print "Test without type:", id
        return -1
if basedir != None:
URI = basedir + "/" + uri
else:
URI = uri
if os.access(URI, os.R_OK) == 0:
print "Test %s missing: base %s uri %s" % (URI, basedir, uri)
return -1
expected = None
outputfile = None
diff = None
if type != 'error':
output = test.xpathEval('string(output)')
if output == 'No output file.':
output = None
if output == '':
output = None
if output != None:
if basedir != None:
output = basedir + "/" + output
if os.access(output, os.R_OK) == 0:
print "Result for %s missing: %s" % (id, output)
output = None
else:
try:
f = open(output)
expected = f.read()
outputfile = output
except:
print "Result for %s unreadable: %s" % (id, output)
try:
# print "testing %s" % (URI)
doc = libxml2.parseFile(URI)
except:
doc = None
if doc != None:
res = doc.xincludeProcess()
if res >= 0 and expected != None:
result = doc.serialize()
if result != expected:
print "Result for %s differs" % (id)
open("xinclude.res", "w").write(result)
diff = os.popen("diff %s xinclude.res" % outputfile).read()
doc.freeDoc()
else:
print "Failed to parse %s" % (URI)
res = -1
test_nr = test_nr + 1
if type == 'success':
if res > 0:
test_succeed = test_succeed + 1
elif res == 0:
test_failed = test_failed + 1
print "Test %s: no substitution done ???" % (id)
elif res < 0:
test_error = test_error + 1
print "Test %s: failed valid XInclude processing" % (id)
elif type == 'error':
if res > 0:
test_error = test_error + 1
print "Test %s: failed to detect invalid XInclude processing" % (id)
elif res == 0:
test_failed = test_failed + 1
print "Test %s: Invalid but no substitution done" % (id)
elif res < 0:
test_succeed = test_succeed + 1
elif type == 'optional':
if res > 0:
test_succeed = test_succeed + 1
else:
print "Test %s: failed optional test" % (id)
    # Log the context
if res != 1:
log.write("Test ID %s\n" % (id))
log.write(" File: %s\n" % (URI))
content = string.strip(test.content)
while content[-1] == '\n':
content = content[0:-1]
log.write(" %s:%s\n\n" % (type, content))
if error_msg != '':
log.write(" ----\n%s ----\n" % (error_msg))
error_msg = ''
log.write("\n")
if diff != None:
log.write("diff from test %s:\n" %(id))
log.write(" -----------\n%s\n -----------\n" % (diff));
return 0
def runTestCases(case):
creator = case.prop('creator')
if creator != None:
print "=>", creator
base = case.getBase(None)
basedir = case.prop('basedir')
if basedir != None:
base = libxml2.buildURI(basedir, base)
test = case.children
while test != None:
if test.name == 'testcase':
runTest(test, base)
if test.name == 'testcases':
runTestCases(test)
test = test.next
conf = libxml2.parseFile(CONF)
if conf == None:
print "Unable to load %s" % CONF
sys.exit(1)
testsuite = conf.getRootElement()
if testsuite.name != 'testsuite':
print "Expecting TESTSUITE root element: aborting"
sys.exit(1)
profile = testsuite.prop('PROFILE')
if profile != None:
print profile
start = time.time()
case = testsuite.children
while case != None:
if case.name == 'testcases':
old_test_nr = test_nr
old_test_succeed = test_succeed
old_test_failed = test_failed
old_test_error = test_error
runTestCases(case)
print " Ran %d tests: %d suceeded, %d failed and %d generated an error" % (
test_nr - old_test_nr, test_succeed - old_test_succeed,
test_failed - old_test_failed, test_error - old_test_error)
case = case.next
conf.freeDoc()
log.close()
print "Ran %d tests: %d suceeded, %d failed and %d generated an error in %.2f s." % (
test_nr, test_succeed, test_failed, test_error, time.time() - start)
|
gpl-3.0
|
zofuthan/edx-platform
|
lms/djangoapps/discussion_api/render.py
|
168
|
3644
|
"""
Content rendering functionality
Note that this module is designed to imitate the front end behavior as
implemented in Markdown.Sanitizer.js.
"""
import re
import markdown
# These patterns could be more flexible about things like attributes and
# whitespace, but this is imitating Markdown.Sanitizer.js, so it uses the
# patterns defined therein.
TAG_PATTERN = re.compile(r"<[^>]*>?")
SANITIZED_TAG_PATTERN = re.compile(r"<(/?)(\w+)[^>]*>")
ALLOWED_BASIC_TAG_PATTERN = re.compile(
r"^(</?(b|blockquote|code|del|dd|dl|dt|em|h1|h2|h3|i|kbd|li|ol|p|pre|s|sup|sub|strong|strike|ul)>|<(br|hr)\s?/?>)$"
)
ALLOWED_A_PATTERN = re.compile(
r'^(<a\shref="((https?|ftp)://|/)[-A-Za-z0-9+&@#/%?=~_|!:,.;\(\)]+"(\stitle="[^"<>]+")?\s?>|</a>)$'
)
ALLOWED_IMG_PATTERN = re.compile(
r'^(<img\ssrc="(https?://|/)[-A-Za-z0-9+&@#/%?=~_|!:,.;\(\)]+"(\swidth="\d{1,3}")?'
r'(\sheight="\d{1,3}")?(\salt="[^"<>]*")?(\stitle="[^"<>]*")?\s?/?>)$'
)
def _sanitize_tag(match):
"""Return the tag if it is allowed or the empty string otherwise"""
tag = match.group(0)
if (
ALLOWED_BASIC_TAG_PATTERN.match(tag) or
ALLOWED_A_PATTERN.match(tag) or
ALLOWED_IMG_PATTERN.match(tag)
):
return tag
else:
return ""
def _sanitize_html(source):
"""
Return source with all non-allowed tags removed, preserving the text content
"""
return TAG_PATTERN.sub(_sanitize_tag, source)
def _remove_unpaired_tags(source):
"""
Return source with all unpaired tags removed, preserving the text content
source should have already been sanitized
"""
tag_matches = list(SANITIZED_TAG_PATTERN.finditer(source))
if not tag_matches:
return source
tag_stack = []
tag_name_stack = []
text_stack = [source[:tag_matches[0].start()]]
for i, match in enumerate(tag_matches):
tag_name = match.group(2)
following_text = (
source[match.end():tag_matches[i + 1].start()] if i + 1 < len(tag_matches) else
source[match.end():]
)
if tag_name in ["p", "img", "br", "li", "hr"]: # tags that don't require closing
text_stack[-1] += match.group(0) + following_text
elif match.group(1): # end tag
if tag_name in tag_name_stack: # paired with a start tag somewhere
# pop tags until we find the matching one, keeping the non-tag text
while True:
popped_tag_name = tag_name_stack.pop()
popped_tag = tag_stack.pop()
popped_text = text_stack.pop()
if popped_tag_name == tag_name:
text_stack[-1] += popped_tag + popped_text + match.group(0)
break
else:
text_stack[-1] += popped_text
# else unpaired; drop the tag
text_stack[-1] += following_text
else: # start tag
tag_stack.append(match.group(0))
tag_name_stack.append(tag_name)
text_stack.append(following_text)
return "".join(text_stack)
def render_body(raw_body):
"""
Render raw_body to HTML.
This includes the following steps:
* Convert Markdown to HTML
* Strip non-whitelisted HTML
* Remove unbalanced HTML tags
Note that this does not prevent Markdown syntax inside a MathJax block from
being processed, which the forums JavaScript code does.
"""
rendered = markdown.markdown(raw_body)
rendered = _sanitize_html(rendered)
rendered = _remove_unpaired_tags(rendered)
return rendered
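if __name__ == "__main__":
    # Quick manual check (a sketch, not part of the original module): Markdown is
    # rendered, the non-whitelisted <script> tag is stripped to its text content,
    # and the unpaired <b> tag is dropped. Exact output depends on the installed
    # markdown library version.
    print(render_body("*hello* <script>alert('xss')</script> <b>world"))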
|
agpl-3.0
|
nikoonia/gem5v
|
ext/ply/test/yacc_notfunc.py
|
174
|
1476
|
# -----------------------------------------------------------------------------
# yacc_notfunc.py
#
# p_rule not defined as a function
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
precedence = (
('left','PLUS','MINUS'),
('left','TIMES','DIVIDE'),
('right','UMINUS'),
)
# dictionary of names
names = { }
p_statement_assign = "Blah"
def p_statement_expr(t):
'statement : expression'
print(t[1])
def p_expression_binop(t):
'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
t[0] = -t[2]
def p_expression_group(t):
'expression : LPAREN expression RPAREN'
t[0] = t[2]
def p_expression_number(t):
'expression : NUMBER'
t[0] = t[1]
def p_expression_name(t):
'expression : NAME'
try:
t[0] = names[t[1]]
except LookupError:
print("Undefined name '%s'" % t[1])
t[0] = 0
def p_error(t):
print("Syntax error at '%s'" % t.value)
yacc.yacc()
|
bsd-3-clause
|
cosurgi/trunk
|
examples/concrete/triax.py
|
2
|
7273
|
from __future__ import print_function
################################################################################
#
# Triaxial test. Axial strain rate is prescribed and a transverse prestress is applied.
# The test can be run on a prism or a cylinder.
# An independent c++ engine may be created from this script in the future.
#
################################################################################
from builtins import range
from yade import pack, plot
import os
# default parameters or from table
readParamsFromTable(noTableOk=True,
# type of test ['cyl','cube']
testType = 'cyl',
# material parameters
young = 20e9,
poisson = .2,
frictionAngle = 1.2,
sigmaT = 1.5e6,
epsCrackOnset = 1e-4,
relDuctility = 30,
# prestress
preStress = -3e6,
# axial strain rate
strainRate = -100,
	# assembly parameters
rParticle = .075e-3, #
width = 2e-3,
height = 5e-3,
bcCoeff = 5,
# facets division
nw = 24,
nh = 15,
# output specifications
fileName = 'test',
exportDir = '/tmp',
runGnuplot = False,
runInGui = True,
)
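# Note (a sketch of intended usage, not part of the original script): with
# noTableOk=True the defaults above apply when the script is run directly
# (e.g. `yade triax.py`); alternative parameter sets can be listed one per line
# in a table file and executed in batch mode with yade-batch.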
from yade.params.table import *
assert testType in ['cyl','cube']
# materials
concMat = O.materials.append(CpmMat(
young=young,frictionAngle=frictionAngle,poisson=poisson,sigmaT=sigmaT,
epsCrackOnset=epsCrackOnset,relDuctility=relDuctility
))
frictMat = O.materials.append(FrictMat(
young=young,poisson=poisson,frictionAngle=frictionAngle
))
# spheres
pred = pack.inCylinder((0,0,0),(0,0,height),.5*width) if testType=='cyl' else pack.inAlignedBox((-.5*width,-.5*width,0),(.5*width,.5*width,height)) if testType=='cube' else None
sp=SpherePack()
sp = pack.randomDensePack(pred,spheresInCell=2000,radius=rParticle,memoizeDb='/tmp/triaxTestOnCylinder.sqlite',returnSpherePack=True)
spheres=sp.toSimulation(color=(0,1,1),material=concMat)
# bottom and top of specimen. Will have prescribed velocity
bot = [O.bodies[s] for s in spheres if O.bodies[s].state.pos[2]<rParticle*bcCoeff]
top = [O.bodies[s] for s in spheres if O.bodies[s].state.pos[2]>height-rParticle*bcCoeff]
vel = strainRate*(height-rParticle*2*bcCoeff)
for s in bot:
s.shape.color = (1,0,0)
s.state.blockedDOFs = 'xyzXYZ'
s.state.vel = (0,0,-vel)
for s in top:
s.shape.color = Vector3(0,1,0)
s.state.blockedDOFs = 'xyzXYZ'
s.state.vel = (0,0,vel)
# facets
facets = []
if testType == 'cyl':
rCyl2 = .5*width / cos(pi/float(nw))
for r in range(nw):
for h in range(nh):
v1 = Vector3( rCyl2*cos(2*pi*(r+0)/float(nw)), rCyl2*sin(2*pi*(r+0)/float(nw)), height*(h+0)/float(nh) )
v2 = Vector3( rCyl2*cos(2*pi*(r+1)/float(nw)), rCyl2*sin(2*pi*(r+1)/float(nw)), height*(h+0)/float(nh) )
v3 = Vector3( rCyl2*cos(2*pi*(r+1)/float(nw)), rCyl2*sin(2*pi*(r+1)/float(nw)), height*(h+1)/float(nh) )
v4 = Vector3( rCyl2*cos(2*pi*(r+0)/float(nw)), rCyl2*sin(2*pi*(r+0)/float(nw)), height*(h+1)/float(nh) )
f1 = facet((v1,v2,v3),color=(0,0,1),material=frictMat)
f2 = facet((v1,v3,v4),color=(0,0,1),material=frictMat)
facets.extend((f1,f2))
elif testType == 'cube':
nw2 = nw/4
for r in range(nw2):
for h in range(nh):
v11 = Vector3( -.5*width + (r+0)*width/nw2, -.5*width, height*(h+0)/float(nh) )
v12 = Vector3( -.5*width + (r+1)*width/nw2, -.5*width, height*(h+0)/float(nh) )
v13 = Vector3( -.5*width + (r+1)*width/nw2, -.5*width, height*(h+1)/float(nh) )
v14 = Vector3( -.5*width + (r+0)*width/nw2, -.5*width, height*(h+1)/float(nh) )
f11 = facet((v11,v12,v13),color=(0,0,1),material=frictMat)
f12 = facet((v11,v13,v14),color=(0,0,1),material=frictMat)
v21 = Vector3( +.5*width, -.5*width + (r+0)*width/nw2, height*(h+0)/float(nh) )
v22 = Vector3( +.5*width, -.5*width + (r+1)*width/nw2, height*(h+0)/float(nh) )
v23 = Vector3( +.5*width, -.5*width + (r+1)*width/nw2, height*(h+1)/float(nh) )
v24 = Vector3( +.5*width, -.5*width + (r+0)*width/nw2, height*(h+1)/float(nh) )
f21 = facet((v21,v22,v23),color=(0,0,1),material=frictMat)
f22 = facet((v21,v23,v24),color=(0,0,1),material=frictMat)
v31 = Vector3( +.5*width - (r+0)*width/nw2, +.5*width, height*(h+0)/float(nh) )
v32 = Vector3( +.5*width - (r+1)*width/nw2, +.5*width, height*(h+0)/float(nh) )
v33 = Vector3( +.5*width - (r+1)*width/nw2, +.5*width, height*(h+1)/float(nh) )
v34 = Vector3( +.5*width - (r+0)*width/nw2, +.5*width, height*(h+1)/float(nh) )
f31 = facet((v31,v32,v33),color=(0,0,1),material=frictMat)
f32 = facet((v31,v33,v34),color=(0,0,1),material=frictMat)
v41 = Vector3( -.5*width, +.5*width - (r+0)*width/nw2, height*(h+0)/float(nh) )
v42 = Vector3( -.5*width, +.5*width - (r+1)*width/nw2, height*(h+0)/float(nh) )
v43 = Vector3( -.5*width, +.5*width - (r+1)*width/nw2, height*(h+1)/float(nh) )
v44 = Vector3( -.5*width, +.5*width - (r+0)*width/nw2, height*(h+1)/float(nh) )
f41 = facet((v41,v42,v43),color=(0,0,1),material=frictMat)
f42 = facet((v41,v43,v44),color=(0,0,1),material=frictMat)
facets.extend((f11,f12,f21,f22,f31,f32,f41,f42))
O.bodies.append(facets)
mass = O.bodies[0].state.mass
for f in facets:
f.state.mass = mass
f.state.blockedDOFs = 'XYZz'
# plots
plot.plots = { 'e':('s',), }
def plotAddData():
f1 = sum(O.forces.f(b.id)[2] for b in top)
f2 = sum(O.forces.f(b.id)[2] for b in bot)
f = .5*(f2-f1)
s = f/(pi*.25*width*width) if testType=='cyl' else f/(width*width) if testType=='cube' else None
e = (top[0].state.displ()[2] - bot[0].state.displ()[2]) / (height-rParticle*2*bcCoeff)
plot.addData(
i = O.iter,
s = s,
e = e,
)
# apply prestress to facets
def addForces():
for f in facets:
n = f.shape.normal
a = f.shape.area
O.forces.addF(f.id,preStress*a*n)
# stop condition and exit of the simulation
def stopIfDamaged(maxEps=5e-3):
extremum = max(abs(s) for s in plot.data['s'])
s = abs(plot.data['s'][-1])
e = abs(plot.data['e'][-1])
if O.iter < 1000 or s > .5*extremum and e < maxEps:
return
f = os.path.join(exportDir,fileName)
print('gnuplot',plot.saveGnuplot(f,term='png'))
if runGnuplot:
import subprocess
os.chdir(exportDir)
subprocess.Popen(['gnuplot',f+'.gnuplot']).wait()
print('Simulation finished')
O.pause()
#sys.exit(0) # results in some threading exception
O.dt=.5*utils.PWaveTimeStep()
enlargeFactor=1.5
O.engines=[
ForceResetter(),
InsertionSortCollider([
Bo1_Sphere_Aabb(aabbEnlargeFactor=enlargeFactor,label='bo1s'),
Bo1_Facet_Aabb()
]),
InteractionLoop(
[
Ig2_Sphere_Sphere_ScGeom(interactionDetectionFactor=enlargeFactor,label='ss2d3dg'),
Ig2_Facet_Sphere_ScGeom(),
],
[
Ip2_CpmMat_CpmMat_CpmPhys(cohesiveThresholdIter=O.iter+5),
Ip2_FrictMat_CpmMat_FrictPhys(),
Ip2_FrictMat_FrictMat_FrictPhys(),
],
[
Law2_ScGeom_CpmPhys_Cpm(),
Law2_ScGeom_FrictPhys_CundallStrack(),
],
),
PyRunner(iterPeriod=1,command="addForces()"),
NewtonIntegrator(damping=.3),
CpmStateUpdater(iterPeriod=50,label='cpmStateUpdater'),
PyRunner(command='plotAddData()',iterPeriod=10),
PyRunner(iterPeriod=50,command='stopIfDamaged()'),
]
# run one step
O.step()
# reset interaction detection enlargement
bo1s.aabbEnlargeFactor=ss2d3dg.interactionDetectionFactor=1.0
# initialize auto-updated plot
if runInGui:
plot.plot()
try:
from yade import qt
renderer=qt.Renderer()
		# uncomment the following line to exaggerate displacements
#renderer.dispScale=(100,100,100)
except:
pass
# run
O.run()
|
gpl-2.0
|
ahmadiga/min_edx
|
lms/djangoapps/verify_student/migrations/0012_populate_verification_deadlines.py
|
61
|
14155
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"""
This migration populates the "verification deadline" model with
the "expiration datetime" from the course modes table for verified
courses.
In the past, the course modes expiration (really an upgrade deadline)
and the verification deadline were always set to the same value.
With this change, the verification deadline will now be tracked in a separate
model owned by the verify_student app.
"""
# Retrieve all verified course modes (whether they have expired or not)
# Unfortunately, we don't have access to constants from the application here,
# so we hard-code the names of the course modes that require verification.
verified_modes = orm['course_modes.CourseMode'].objects.filter(
mode_slug__in=["verified", "professional"],
expiration_datetime__isnull=False,
)
for mode in verified_modes:
orm.VerificationDeadline.objects.create(
course_key=mode.course_id,
deadline=mode.expiration_datetime,
)
def backwards(self, orm):
"""
Backwards migration deletes all verification deadlines.
"""
orm.VerificationDeadline.objects.all().delete()
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'course_modes.coursemode': {
'Meta': {'unique_together': "(('course_id', 'mode_slug', 'currency'),)", 'object_name': 'CourseMode'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'expiration_date': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'expiration_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mode_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sku': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'suggested_prices': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
'course_modes.coursemodesarchive': {
'Meta': {'object_name': 'CourseModesArchive'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'expiration_date': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'expiration_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mode_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'suggested_prices': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
},
'verify_student.historicalverificationdeadline': {
'Meta': {'ordering': "(u'-history_date', u'-history_id')", 'object_name': 'HistoricalVerificationDeadline'},
'course_key': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'deadline': ('django.db.models.fields.DateTimeField', [], {}),
u'history_date': ('django.db.models.fields.DateTimeField', [], {}),
u'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'history_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'history_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'})
},
'verify_student.incoursereverificationconfiguration': {
'Meta': {'ordering': "('-change_date',)", 'object_name': 'InCourseReverificationConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'verify_student.skippedreverification': {
'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'SkippedReverification'},
'checkpoint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'skipped_checkpoint'", 'to': "orm['verify_student.VerificationCheckpoint']"}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'verify_student.softwaresecurephotoverification': {
'Meta': {'ordering': "['-created_at']", 'object_name': 'SoftwareSecurePhotoVerification'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'display': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'error_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'error_msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'face_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_key': ('django.db.models.fields.TextField', [], {'max_length': '1024'}),
'receipt_id': ('django.db.models.fields.CharField', [], {'default': "'6644e0c2-da9b-49a4-9d0c-c19c596c911e'", 'max_length': '255', 'db_index': 'True'}),
'reviewing_service': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'reviewing_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'photo_verifications_reviewed'", 'null': 'True', 'to': "orm['auth.User']"}),
'status': ('model_utils.fields.StatusField', [], {'default': "'created'", 'max_length': '100', u'no_check_for_status': 'True'}),
'status_changed': ('model_utils.fields.MonitorField', [], {'default': 'datetime.datetime.now', u'monitor': "u'status'"}),
'submitted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'verify_student.verificationcheckpoint': {
'Meta': {'unique_together': "(('course_id', 'checkpoint_location'),)", 'object_name': 'VerificationCheckpoint'},
'checkpoint_location': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'photo_verification': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['verify_student.SoftwareSecurePhotoVerification']", 'symmetrical': 'False'})
},
'verify_student.verificationdeadline': {
'Meta': {'object_name': 'VerificationDeadline'},
'course_key': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'deadline': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'})
},
'verify_student.verificationstatus': {
'Meta': {'object_name': 'VerificationStatus'},
'checkpoint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'checkpoint_status'", 'to': "orm['verify_student.VerificationCheckpoint']"}),
'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'response': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['course_modes', 'verify_student']
symmetrical = True
|
agpl-3.0
|
ma314smith/home-assistant
|
homeassistant/components/splunk.py
|
28
|
2421
|
"""
Support to send data to a Splunk instance.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/splunk/
"""
import json
import logging
import requests
import voluptuous as vol
from homeassistant.const import (
CONF_HOST, CONF_PORT, CONF_SSL, CONF_TOKEN, EVENT_STATE_CHANGED)
from homeassistant.helpers import state as state_helper
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'splunk'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 8088
DEFAULT_SSL = False
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_SSL, default=False): cv.boolean,
}),
}, extra=vol.ALLOW_EXTRA)
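# A minimal sketch of a matching configuration.yaml entry (illustrative only;
# the host and token values below are hypothetical placeholders):
#
#   splunk:
#     host: 192.168.1.10
#     port: 8088
#     token: A1B2C3D4-E5F6-0000-0000-000000000000
#     ssl: false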
def setup(hass, config):
"""Setup the Splunk component."""
conf = config[DOMAIN]
host = conf.get(CONF_HOST)
port = conf.get(CONF_PORT)
token = conf.get(CONF_TOKEN)
use_ssl = conf.get(CONF_SSL)
if use_ssl:
uri_scheme = 'https://'
else:
uri_scheme = 'http://'
event_collector = '{}{}:{}/services/collector/event'.format(
uri_scheme, host, port)
headers = {'Authorization': 'Splunk {}'.format(token)}
def splunk_event_listener(event):
"""Listen for new messages on the bus and sends them to Splunk."""
state = event.data.get('new_state')
if state is None:
return
try:
_state = state_helper.state_as_number(state)
except ValueError:
_state = state.state
json_body = [
{
'domain': state.domain,
'entity_id': state.object_id,
'attributes': dict(state.attributes),
'time': str(event.time_fired),
'value': _state,
}
]
try:
payload = {"host": event_collector,
"event": json_body}
requests.post(event_collector, data=json.dumps(payload),
headers=headers)
except requests.exceptions.RequestException as error:
_LOGGER.exception('Error saving event to Splunk: %s', error)
hass.bus.listen(EVENT_STATE_CHANGED, splunk_event_listener)
return True
|
mit
|
Warboss-rus/wargameengine
|
WargameEngine/bullet/data/xacro_standalone.py
|
12
|
23887
|
#! /usr/bin/env python
# Copyright (c) 2013, Willow Garage, Inc.
# Copyright (c) 2014, Open Source Robotics Foundation, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Open Source Robotics Foundation, Inc.
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Author: Stuart Glaser
# Maintainer: William Woodall <[email protected]>
from __future__ import print_function
import getopt
import glob
import os
import re
import string
import sys
import xml
from xml.dom.minidom import parse
try:
_basestr = basestring
except NameError:
_basestr = str
# Dictionary of substitution args
substitution_args_context = {}
class XacroException(Exception):
pass
def isnumber(x):
return hasattr(x, '__int__')
# Better pretty printing of xml
# Taken from http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-and-silly-whitespace/
def fixed_writexml(self, writer, indent="", addindent="", newl=""):
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
writer.write(indent + "<" + self.tagName)
attrs = self._get_attributes()
a_names = list(attrs.keys())
a_names.sort()
for a_name in a_names:
writer.write(" %s=\"" % a_name)
xml.dom.minidom._write_data(writer, attrs[a_name].value)
writer.write("\"")
if self.childNodes:
if len(self.childNodes) == 1 \
and self.childNodes[0].nodeType == xml.dom.minidom.Node.TEXT_NODE:
writer.write(">")
self.childNodes[0].writexml(writer, "", "", "")
writer.write("</%s>%s" % (self.tagName, newl))
return
writer.write(">%s" % (newl))
for node in self.childNodes:
# skip whitespace-only text nodes
if node.nodeType == xml.dom.minidom.Node.TEXT_NODE and \
not node.data.strip():
continue
node.writexml(writer, indent + addindent, addindent, newl)
writer.write("%s</%s>%s" % (indent, self.tagName, newl))
else:
writer.write("/>%s" % (newl))
# replace minidom's function with ours
xml.dom.minidom.Element.writexml = fixed_writexml
class Table:
def __init__(self, parent=None):
self.parent = parent
self.table = {}
def __getitem__(self, key):
if key in self.table:
return self.table[key]
elif self.parent:
return self.parent[key]
else:
raise KeyError(key)
def __setitem__(self, key, value):
self.table[key] = value
def __contains__(self, key):
return \
key in self.table or \
(self.parent and key in self.parent)
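# Illustrative-only sketch of Table's scoping behaviour (not part of the
# original file): lookups fall back to the parent table, while writes stay
# in the child table.
#
#   parent_table = Table()
#   parent_table['radius'] = '0.5'
#   scoped = Table(parent_table)
#   scoped['length'] = '2.0'
#   assert scoped['radius'] == '0.5'     # falls back to the parent
#   assert 'length' not in parent_table  # local write does not leak upward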
class QuickLexer(object):
def __init__(self, **res):
self.str = ""
self.top = None
self.res = []
for k, v in res.items():
self.__setattr__(k, len(self.res))
self.res.append(v)
def lex(self, str):
self.str = str
self.top = None
self.next()
def peek(self):
return self.top
def next(self):
result = self.top
self.top = None
for i in range(len(self.res)):
m = re.match(self.res[i], self.str)
if m:
self.top = (i, m.group(0))
self.str = self.str[m.end():]
break
return result
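# Illustrative-only usage sketch for QuickLexer (not part of the original
# file): token kinds are declared as keyword arguments holding regexes, and
# lex()/peek()/next() walk the input left to right, yielding (kind, text)
# tuples; IGNORE tokens for the whitespace appear in the stream as well.
#
#   lexer = QuickLexer(IGNORE=r"\s+", NUMBER=r"\d+", OP=r"[\+\-]")
#   lexer.lex("1 + 2")
#   tokens = []
#   while lexer.peek():
#       tokens.append(lexer.next())  # e.g. (lexer.NUMBER, '1'), (lexer.IGNORE, ' '), ...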
def first_child_element(elt):
c = elt.firstChild
while c:
if c.nodeType == xml.dom.Node.ELEMENT_NODE:
return c
c = c.nextSibling
return None
def next_sibling_element(elt):
c = elt.nextSibling
while c:
if c.nodeType == xml.dom.Node.ELEMENT_NODE:
return c
c = c.nextSibling
return None
# Pre-order traversal of the elements
def next_element(elt):
child = first_child_element(elt)
if child:
return child
while elt and elt.nodeType == xml.dom.Node.ELEMENT_NODE:
next = next_sibling_element(elt)
if next:
return next
elt = elt.parentNode
return None
# Pre-order traversal of all the nodes
def next_node(node):
if node.firstChild:
return node.firstChild
while node:
if node.nextSibling:
return node.nextSibling
node = node.parentNode
return None
def child_nodes(elt):
c = elt.firstChild
while c:
yield c
c = c.nextSibling
all_includes = []
# Deprecated message for <include> tags that don't have <xacro:include> prepended:
deprecated_include_msg = """DEPRECATED IN HYDRO:
The <include> tag should be prepended with 'xacro' if that is the intended use
of it, such as <xacro:include ...>. Use the following script to fix incorrect
xacro includes:
sed -i 's/<include/<xacro:include/g' `find . -iname *.xacro`"""
include_no_matches_msg = """Include tag filename spec \"{}\" matched no files."""
## @throws XacroException if a parsing error occurs with an included document
def process_includes(doc, base_dir):
namespaces = {}
previous = doc.documentElement
elt = next_element(previous)
while elt:
        # Xacro should not use plain 'include' tags but only namespaced ones, since plain
        # tags cause conflicts with other XML elements, including Gazebo's <gazebo> extensions
is_include = False
if elt.tagName == 'xacro:include' or elt.tagName == 'include':
is_include = True
# Temporary fix for ROS Hydro and the xacro include scope problem
if elt.tagName == 'include':
# check if there is any element within the <include> tag. mostly we are concerned
# with Gazebo's <uri> element, but it could be anything. also, make sure the child
# nodes aren't just a single Text node, which is still considered a deprecated
# instance
if elt.childNodes and not (len(elt.childNodes) == 1 and
elt.childNodes[0].nodeType == elt.TEXT_NODE):
# this is not intended to be a xacro element, so we can ignore it
is_include = False
else:
# throw a deprecated warning
print(deprecated_include_msg, file=sys.stderr)
# Process current element depending on previous conditions
if is_include:
filename_spec = eval_text(elt.getAttribute('filename'), {})
if not os.path.isabs(filename_spec):
filename_spec = os.path.join(base_dir, filename_spec)
if re.search('[*[?]+', filename_spec):
# Globbing behaviour
filenames = sorted(glob.glob(filename_spec))
if len(filenames) == 0:
print(include_no_matches_msg.format(filename_spec), file=sys.stderr)
else:
# Default behaviour
filenames = [filename_spec]
for filename in filenames:
global all_includes
all_includes.append(filename)
try:
with open(filename) as f:
try:
included = parse(f)
except Exception as e:
raise XacroException(
"included file \"%s\" generated an error during XML parsing: %s"
% (filename, str(e)))
except IOError as e:
raise XacroException("included file \"%s\" could not be opened: %s" % (filename, str(e)))
# Replaces the include tag with the elements of the included file
for c in child_nodes(included.documentElement):
elt.parentNode.insertBefore(c.cloneNode(deep=True), elt)
# Grabs all the declared namespaces of the included document
for name, value in included.documentElement.attributes.items():
if name.startswith('xmlns:'):
namespaces[name] = value
elt.parentNode.removeChild(elt)
elt = None
else:
previous = elt
elt = next_element(previous)
# Makes sure the final document declares all the namespaces of the included documents.
for k, v in namespaces.items():
doc.documentElement.setAttribute(k, v)
# Returns a dictionary: { macro_name => macro_xml_block }
def grab_macros(doc):
macros = {}
previous = doc.documentElement
elt = next_element(previous)
while elt:
if elt.tagName == 'macro' or elt.tagName == 'xacro:macro':
name = elt.getAttribute('name')
macros[name] = elt
macros['xacro:' + name] = elt
elt.parentNode.removeChild(elt)
elt = None
else:
previous = elt
elt = next_element(previous)
return macros
# Returns a Table of the properties
def grab_properties(doc):
table = Table()
previous = doc.documentElement
elt = next_element(previous)
while elt:
if elt.tagName == 'property' or elt.tagName == 'xacro:property':
name = elt.getAttribute('name')
value = None
if elt.hasAttribute('value'):
value = elt.getAttribute('value')
else:
name = '**' + name
value = elt # debug
bad = string.whitespace + "${}"
has_bad = False
for b in bad:
if b in name:
has_bad = True
break
if has_bad:
sys.stderr.write('Property names may not have whitespace, ' +
'"{", "}", or "$" : "' + name + '"')
else:
table[name] = value
elt.parentNode.removeChild(elt)
elt = None
else:
previous = elt
elt = next_element(previous)
return table
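# For reference (illustrative only, not part of the original file),
# grab_properties() turns an element such as
#   <xacro:property name="radius" value="0.5"/>
# into the table entry {'radius': '0.5'}, while a block property
#   <xacro:property name="wheel"> ... </xacro:property>
# is stored under the key '**wheel' with the element itself as the value.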
def eat_ignore(lex):
while lex.peek() and lex.peek()[0] == lex.IGNORE:
lex.next()
def eval_lit(lex, symbols):
eat_ignore(lex)
if lex.peek()[0] == lex.NUMBER:
return float(lex.next()[1])
if lex.peek()[0] == lex.SYMBOL:
try:
key = lex.next()[1]
value = symbols[key]
except KeyError as ex:
raise XacroException("Property wasn't defined: %s" % str(ex))
if not (isnumber(value) or isinstance(value, _basestr)):
if value is None:
raise XacroException("Property %s recursively used" % key)
raise XacroException("WTF2")
try:
return int(value)
except:
try:
return float(value)
except:
# prevent infinite recursion
symbols[key] = None
result = eval_text(value, symbols)
# restore old entry
symbols[key] = value
return result
raise XacroException("Bad literal")
def eval_factor(lex, symbols):
eat_ignore(lex)
neg = 1
if lex.peek()[1] == '-':
lex.next()
neg = -1
if lex.peek()[0] in [lex.NUMBER, lex.SYMBOL]:
return neg * eval_lit(lex, symbols)
if lex.peek()[0] == lex.LPAREN:
lex.next()
eat_ignore(lex)
result = eval_expr(lex, symbols)
eat_ignore(lex)
if lex.next()[0] != lex.RPAREN:
raise XacroException("Unmatched left paren")
eat_ignore(lex)
return neg * result
raise XacroException("Misplaced operator")
def eval_term(lex, symbols):
eat_ignore(lex)
result = 0
if lex.peek()[0] in [lex.NUMBER, lex.SYMBOL, lex.LPAREN] \
or lex.peek()[1] == '-':
result = eval_factor(lex, symbols)
eat_ignore(lex)
while lex.peek() and lex.peek()[1] in ['*', '/']:
op = lex.next()[1]
n = eval_factor(lex, symbols)
if op == '*':
result = float(result) * float(n)
elif op == '/':
result = float(result) / float(n)
else:
raise XacroException("WTF")
eat_ignore(lex)
return result
def eval_expr(lex, symbols):
eat_ignore(lex)
op = None
if lex.peek()[0] == lex.OP:
op = lex.next()[1]
if not op in ['+', '-']:
raise XacroException("Invalid operation. Must be '+' or '-'")
result = eval_term(lex, symbols)
if op == '-':
result = -float(result)
eat_ignore(lex)
while lex.peek() and lex.peek()[1] in ['+', '-']:
op = lex.next()[1]
n = eval_term(lex, symbols)
if op == '+':
result = float(result) + float(n)
if op == '-':
result = float(result) - float(n)
eat_ignore(lex)
return result
def eval_text(text, symbols):
def handle_expr(s):
lex = QuickLexer(IGNORE=r"\s+",
NUMBER=r"(\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?",
SYMBOL=r"[a-zA-Z_]\w*",
OP=r"[\+\-\*/^]",
LPAREN=r"\(",
RPAREN=r"\)")
lex.lex(s)
return eval_expr(lex, symbols)
def handle_extension(s):
return ("$(%s)" % s)
results = []
lex = QuickLexer(DOLLAR_DOLLAR_BRACE=r"\$\$+\{",
EXPR=r"\$\{[^\}]*\}",
EXTENSION=r"\$\([^\)]*\)",
TEXT=r"([^\$]|\$[^{(]|\$$)+")
lex.lex(text)
while lex.peek():
if lex.peek()[0] == lex.EXPR:
results.append(handle_expr(lex.next()[1][2:-1]))
elif lex.peek()[0] == lex.EXTENSION:
results.append(handle_extension(lex.next()[1][2:-1]))
elif lex.peek()[0] == lex.TEXT:
results.append(lex.next()[1])
elif lex.peek()[0] == lex.DOLLAR_DOLLAR_BRACE:
results.append(lex.next()[1][1:])
return ''.join(map(str, results))
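# Illustrative-only sketch of eval_text (not part of the original file):
# ${...} spans are evaluated against the symbol table, $(...) spans are kept
# as substitution-arg references, and plain text passes through unchanged.
# Property values are assumed to be strings, as they are when read from
# <xacro:property> attributes.
#
#   symbols = {'radius': '0.5'}
#   eval_text("length ${2 * radius} $(arg robot)", symbols)
#   # -> "length 1.0 $(arg robot)"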
# Expands macros, replaces properties, and evaluates expressions
def eval_all(root, macros, symbols):
# Evaluates the attributes for the root node
for at in root.attributes.items():
result = eval_text(at[1], symbols)
root.setAttribute(at[0], result)
previous = root
node = next_node(previous)
while node:
if node.nodeType == xml.dom.Node.ELEMENT_NODE:
if node.tagName in macros:
body = macros[node.tagName].cloneNode(deep=True)
params = body.getAttribute('params').split()
# Parse default values for any parameters
defaultmap = {}
for param in params[:]:
splitParam = param.split(':=')
if len(splitParam) == 2:
defaultmap[splitParam[0]] = splitParam[1]
params.remove(param)
params.append(splitParam[0])
elif len(splitParam) != 1:
raise XacroException("Invalid parameter definition")
# Expands the macro
scoped = Table(symbols)
for name, value in node.attributes.items():
if not name in params:
raise XacroException("Invalid parameter \"%s\" while expanding macro \"%s\"" %
(str(name), str(node.tagName)))
params.remove(name)
scoped[name] = eval_text(value, symbols)
# Pulls out the block arguments, in order
cloned = node.cloneNode(deep=True)
eval_all(cloned, macros, symbols)
block = cloned.firstChild
for param in params[:]:
if param[0] == '*':
while block and block.nodeType != xml.dom.Node.ELEMENT_NODE:
block = block.nextSibling
if not block:
raise XacroException("Not enough blocks while evaluating macro %s" % str(node.tagName))
params.remove(param)
scoped[param] = block
block = block.nextSibling
# Try to load defaults for any remaining non-block parameters
for param in params[:]:
if param[0] != '*' and param in defaultmap:
scoped[param] = defaultmap[param]
params.remove(param)
if params:
raise XacroException("Parameters [%s] were not set for macro %s" %
(",".join(params), str(node.tagName)))
eval_all(body, macros, scoped)
# Replaces the macro node with the expansion
for e in list(child_nodes(body)): # Ew
node.parentNode.insertBefore(e, node)
node.parentNode.removeChild(node)
node = None
elif node.tagName == 'arg' or node.tagName == 'xacro:arg':
name = node.getAttribute('name')
if not name:
raise XacroException("Argument name missing")
default = node.getAttribute('default')
if default and name not in substitution_args_context['arg']:
substitution_args_context['arg'][name] = default
node.parentNode.removeChild(node)
node = None
elif node.tagName == 'insert_block' or node.tagName == 'xacro:insert_block':
name = node.getAttribute('name')
if ("**" + name) in symbols:
# Multi-block
block = symbols['**' + name]
for e in list(child_nodes(block)):
node.parentNode.insertBefore(e.cloneNode(deep=True), node)
node.parentNode.removeChild(node)
elif ("*" + name) in symbols:
# Single block
block = symbols['*' + name]
node.parentNode.insertBefore(block.cloneNode(deep=True), node)
node.parentNode.removeChild(node)
else:
raise XacroException("Block \"%s\" was never declared" % name)
node = None
elif node.tagName in ['if', 'xacro:if', 'unless', 'xacro:unless']:
value = eval_text(node.getAttribute('value'), symbols)
try:
if value == 'true': keep = True
elif value == 'false': keep = False
else: keep = float(value)
except ValueError:
raise XacroException("Xacro conditional evaluated to \"%s\". Acceptable evaluations are one of [\"1\",\"true\",\"0\",\"false\"]" % value)
if node.tagName in ['unless', 'xacro:unless']: keep = not keep
if keep:
for e in list(child_nodes(node)):
node.parentNode.insertBefore(e.cloneNode(deep=True), node)
node.parentNode.removeChild(node)
else:
# Evals the attributes
for at in node.attributes.items():
result = eval_text(at[1], symbols)
node.setAttribute(at[0], result)
previous = node
elif node.nodeType == xml.dom.Node.TEXT_NODE:
node.data = eval_text(node.data, symbols)
previous = node
else:
previous = node
node = next_node(previous)
return macros
# Expands everything except includes
def eval_self_contained(doc):
macros = grab_macros(doc)
symbols = grab_properties(doc)
eval_all(doc.documentElement, macros, symbols)
def print_usage(exit_code=0):
print("Usage: %s [-o <output>] <input>" % 'xacro.py')
print(" %s --deps Prints dependencies" % 'xacro.py')
print(" %s --includes Only evalutes includes" % 'xacro.py')
sys.exit(exit_code)
def set_substitution_args_context(context={}):
substitution_args_context['arg'] = context
def open_output(output_filename):
if output_filename is None:
return sys.stdout
else:
return open(output_filename, 'w')
def main():
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "ho:", ['deps', 'includes'])
except getopt.GetoptError as err:
print(str(err))
print_usage(2)
just_deps = False
just_includes = False
output_filename = None
for o, a in opts:
if o == '-h':
print_usage(0)
elif o == '-o':
output_filename = a
elif o == '--deps':
just_deps = True
elif o == '--includes':
just_includes = True
if len(args) < 1:
print("No input given")
print_usage(2)
# Process substitution args
# set_substitution_args_context(load_mappings(sys.argv))
set_substitution_args_context((sys.argv))
f = open(args[0])
doc = None
try:
doc = parse(f)
except xml.parsers.expat.ExpatError:
sys.stderr.write("Expat parsing error. Check that:\n")
sys.stderr.write(" - Your XML is correctly formed\n")
sys.stderr.write(" - You have the xacro xmlns declaration: " +
"xmlns:xacro=\"http://www.ros.org/wiki/xacro\"\n")
sys.stderr.write("\n")
raise
finally:
f.close()
process_includes(doc, os.path.dirname(args[0]))
if just_deps:
for inc in all_includes:
sys.stdout.write(inc + " ")
sys.stdout.write("\n")
elif just_includes:
doc.writexml(open_output(output_filename))
print()
else:
eval_self_contained(doc)
banner = [xml.dom.minidom.Comment(c) for c in
[" %s " % ('=' * 83),
" | This document was autogenerated by xacro from %-30s | " % args[0],
" | EDITING THIS FILE BY HAND IS NOT RECOMMENDED %-30s | " % "",
" %s " % ('=' * 83)]]
first = doc.firstChild
for comment in banner:
doc.insertBefore(comment, first)
open_output(output_filename).write(doc.toprettyxml(indent=' '))
print()
if __name__ == '__main__':
main()
|
gpl-3.0
|
theheros/kbengine
|
kbe/src/lib/python/Lib/plat-freebsd8/IN.py
|
344
|
12956
|
# Generated by h2py from /usr/include/netinet/in.h
# Included from sys/cdefs.h
__GNUCLIKE_ASM = 3
__GNUCLIKE_ASM = 2
__GNUCLIKE___TYPEOF = 1
__GNUCLIKE___OFFSETOF = 1
__GNUCLIKE___SECTION = 1
__GNUCLIKE_ATTRIBUTE_MODE_DI = 1
__GNUCLIKE_CTOR_SECTION_HANDLING = 1
__GNUCLIKE_BUILTIN_CONSTANT_P = 1
__GNUCLIKE_BUILTIN_VARARGS = 1
__GNUCLIKE_BUILTIN_STDARG = 1
__GNUCLIKE_BUILTIN_VAALIST = 1
__GNUC_VA_LIST_COMPATIBILITY = 1
__GNUCLIKE_BUILTIN_NEXT_ARG = 1
__GNUCLIKE_BUILTIN_MEMCPY = 1
__CC_SUPPORTS_INLINE = 1
__CC_SUPPORTS___INLINE = 1
__CC_SUPPORTS___INLINE__ = 1
__CC_SUPPORTS___FUNC__ = 1
__CC_SUPPORTS_WARNING = 1
__CC_SUPPORTS_VARADIC_XXX = 1
__CC_SUPPORTS_DYNAMIC_ARRAY_INIT = 1
__CC_INT_IS_32BIT = 1
def __P(protos): return protos
def __STRING(x): return #x
def __XSTRING(x): return __STRING(x)
def __P(protos): return ()
def __STRING(x): return "x"
def __aligned(x): return __attribute__((__aligned__(x)))
def __section(x): return __attribute__((__section__(x)))
def __aligned(x): return __attribute__((__aligned__(x)))
def __section(x): return __attribute__((__section__(x)))
def __nonnull(x): return __attribute__((__nonnull__(x)))
def __predict_true(exp): return __builtin_expect((exp), 1)
def __predict_false(exp): return __builtin_expect((exp), 0)
def __predict_true(exp): return (exp)
def __predict_false(exp): return (exp)
def __format_arg(fmtarg): return __attribute__((__format_arg__ (fmtarg)))
def __FBSDID(s): return __IDSTRING(__CONCAT(__rcsid_,__LINE__),s)
def __RCSID(s): return __IDSTRING(__CONCAT(__rcsid_,__LINE__),s)
def __RCSID_SOURCE(s): return __IDSTRING(__CONCAT(__rcsid_source_,__LINE__),s)
def __SCCSID(s): return __IDSTRING(__CONCAT(__sccsid_,__LINE__),s)
def __COPYRIGHT(s): return __IDSTRING(__CONCAT(__copyright_,__LINE__),s)
_POSIX_C_SOURCE = 199009
_POSIX_C_SOURCE = 199209
__XSI_VISIBLE = 600
_POSIX_C_SOURCE = 200112
__XSI_VISIBLE = 500
_POSIX_C_SOURCE = 199506
_POSIX_C_SOURCE = 198808
__POSIX_VISIBLE = 200112
__ISO_C_VISIBLE = 1999
__POSIX_VISIBLE = 199506
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 199309
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 199209
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 199009
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 198808
__ISO_C_VISIBLE = 0
__POSIX_VISIBLE = 0
__XSI_VISIBLE = 0
__BSD_VISIBLE = 0
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 0
__XSI_VISIBLE = 0
__BSD_VISIBLE = 0
__ISO_C_VISIBLE = 1999
__POSIX_VISIBLE = 200112
__XSI_VISIBLE = 600
__BSD_VISIBLE = 1
__ISO_C_VISIBLE = 1999
# Included from sys/_types.h
# Included from machine/_types.h
# Included from machine/endian.h
_QUAD_HIGHWORD = 1
_QUAD_LOWWORD = 0
_LITTLE_ENDIAN = 1234
_BIG_ENDIAN = 4321
_PDP_ENDIAN = 3412
_BYTE_ORDER = _LITTLE_ENDIAN
LITTLE_ENDIAN = _LITTLE_ENDIAN
BIG_ENDIAN = _BIG_ENDIAN
PDP_ENDIAN = _PDP_ENDIAN
BYTE_ORDER = _BYTE_ORDER
def __word_swap_int_var(x): return \
def __word_swap_int_const(x): return \
def __word_swap_int(x): return __word_swap_int_var(x)
def __byte_swap_int_var(x): return \
def __byte_swap_int_const(x): return \
def __byte_swap_int(x): return __byte_swap_int_var(x)
def __byte_swap_word_var(x): return \
def __byte_swap_word_const(x): return \
def __byte_swap_word(x): return __byte_swap_word_var(x)
def __htonl(x): return __bswap32(x)
def __htons(x): return __bswap16(x)
def __ntohl(x): return __bswap32(x)
def __ntohs(x): return __bswap16(x)
IPPROTO_IP = 0
IPPROTO_ICMP = 1
IPPROTO_TCP = 6
IPPROTO_UDP = 17
def htonl(x): return __htonl(x)
def htons(x): return __htons(x)
def ntohl(x): return __ntohl(x)
def ntohs(x): return __ntohs(x)
IPPROTO_RAW = 255
INET_ADDRSTRLEN = 16
IPPROTO_HOPOPTS = 0
IPPROTO_IGMP = 2
IPPROTO_GGP = 3
IPPROTO_IPV4 = 4
IPPROTO_IPIP = IPPROTO_IPV4
IPPROTO_ST = 7
IPPROTO_EGP = 8
IPPROTO_PIGP = 9
IPPROTO_RCCMON = 10
IPPROTO_NVPII = 11
IPPROTO_PUP = 12
IPPROTO_ARGUS = 13
IPPROTO_EMCON = 14
IPPROTO_XNET = 15
IPPROTO_CHAOS = 16
IPPROTO_MUX = 18
IPPROTO_MEAS = 19
IPPROTO_HMP = 20
IPPROTO_PRM = 21
IPPROTO_IDP = 22
IPPROTO_TRUNK1 = 23
IPPROTO_TRUNK2 = 24
IPPROTO_LEAF1 = 25
IPPROTO_LEAF2 = 26
IPPROTO_RDP = 27
IPPROTO_IRTP = 28
IPPROTO_TP = 29
IPPROTO_BLT = 30
IPPROTO_NSP = 31
IPPROTO_INP = 32
IPPROTO_SEP = 33
IPPROTO_3PC = 34
IPPROTO_IDPR = 35
IPPROTO_XTP = 36
IPPROTO_DDP = 37
IPPROTO_CMTP = 38
IPPROTO_TPXX = 39
IPPROTO_IL = 40
IPPROTO_IPV6 = 41
IPPROTO_SDRP = 42
IPPROTO_ROUTING = 43
IPPROTO_FRAGMENT = 44
IPPROTO_IDRP = 45
IPPROTO_RSVP = 46
IPPROTO_GRE = 47
IPPROTO_MHRP = 48
IPPROTO_BHA = 49
IPPROTO_ESP = 50
IPPROTO_AH = 51
IPPROTO_INLSP = 52
IPPROTO_SWIPE = 53
IPPROTO_NHRP = 54
IPPROTO_MOBILE = 55
IPPROTO_TLSP = 56
IPPROTO_SKIP = 57
IPPROTO_ICMPV6 = 58
IPPROTO_NONE = 59
IPPROTO_DSTOPTS = 60
IPPROTO_AHIP = 61
IPPROTO_CFTP = 62
IPPROTO_HELLO = 63
IPPROTO_SATEXPAK = 64
IPPROTO_KRYPTOLAN = 65
IPPROTO_RVD = 66
IPPROTO_IPPC = 67
IPPROTO_ADFS = 68
IPPROTO_SATMON = 69
IPPROTO_VISA = 70
IPPROTO_IPCV = 71
IPPROTO_CPNX = 72
IPPROTO_CPHB = 73
IPPROTO_WSN = 74
IPPROTO_PVP = 75
IPPROTO_BRSATMON = 76
IPPROTO_ND = 77
IPPROTO_WBMON = 78
IPPROTO_WBEXPAK = 79
IPPROTO_EON = 80
IPPROTO_VMTP = 81
IPPROTO_SVMTP = 82
IPPROTO_VINES = 83
IPPROTO_TTP = 84
IPPROTO_IGP = 85
IPPROTO_DGP = 86
IPPROTO_TCF = 87
IPPROTO_IGRP = 88
IPPROTO_OSPFIGP = 89
IPPROTO_SRPC = 90
IPPROTO_LARP = 91
IPPROTO_MTP = 92
IPPROTO_AX25 = 93
IPPROTO_IPEIP = 94
IPPROTO_MICP = 95
IPPROTO_SCCSP = 96
IPPROTO_ETHERIP = 97
IPPROTO_ENCAP = 98
IPPROTO_APES = 99
IPPROTO_GMTP = 100
IPPROTO_IPCOMP = 108
IPPROTO_SCTP = 132
IPPROTO_PIM = 103
IPPROTO_CARP = 112
IPPROTO_PGM = 113
IPPROTO_PFSYNC = 240
IPPROTO_OLD_DIVERT = 254
IPPROTO_MAX = 256
IPPROTO_DONE = 257
IPPROTO_DIVERT = 258
IPPROTO_SPACER = 32767
IPPORT_RESERVED = 1024
IPPORT_HIFIRSTAUTO = 49152
IPPORT_HILASTAUTO = 65535
IPPORT_RESERVEDSTART = 600
IPPORT_MAX = 65535
def IN_CLASSA(i): return (((u_int32_t)(i) & (-2147483648)) == 0)
IN_CLASSA_NET = (-16777216)
IN_CLASSA_NSHIFT = 24
IN_CLASSA_HOST = 0x00ffffff
IN_CLASSA_MAX = 128
def IN_CLASSB(i): return (((u_int32_t)(i) & (-1073741824)) == (-2147483648))
IN_CLASSB_NET = (-65536)
IN_CLASSB_NSHIFT = 16
IN_CLASSB_HOST = 0x0000ffff
IN_CLASSB_MAX = 65536
def IN_CLASSC(i): return (((u_int32_t)(i) & (-536870912)) == (-1073741824))
IN_CLASSC_NET = (-256)
IN_CLASSC_NSHIFT = 8
IN_CLASSC_HOST = 0x000000ff
def IN_CLASSD(i): return (((u_int32_t)(i) & (-268435456)) == (-536870912))
IN_CLASSD_NET = (-268435456)
IN_CLASSD_NSHIFT = 28
IN_CLASSD_HOST = 0x0fffffff
def IN_MULTICAST(i): return IN_CLASSD(i)
def IN_EXPERIMENTAL(i): return (((u_int32_t)(i) & (-268435456)) == (-268435456))
def IN_BADCLASS(i): return (((u_int32_t)(i) & (-268435456)) == (-268435456))
def IN_LINKLOCAL(i): return (((u_int32_t)(i) & (-65536)) == (-1442971648))
def IN_LOCAL_GROUP(i): return (((u_int32_t)(i) & (-256)) == (-536870912))
INADDR_NONE = (-1)
IN_LOOPBACKNET = 127
IP_OPTIONS = 1
IP_HDRINCL = 2
IP_TOS = 3
IP_TTL = 4
IP_RECVOPTS = 5
IP_RECVRETOPTS = 6
IP_RECVDSTADDR = 7
IP_SENDSRCADDR = IP_RECVDSTADDR
IP_RETOPTS = 8
IP_MULTICAST_IF = 9
IP_MULTICAST_TTL = 10
IP_MULTICAST_LOOP = 11
IP_ADD_MEMBERSHIP = 12
IP_DROP_MEMBERSHIP = 13
IP_MULTICAST_VIF = 14
IP_RSVP_ON = 15
IP_RSVP_OFF = 16
IP_RSVP_VIF_ON = 17
IP_RSVP_VIF_OFF = 18
IP_PORTRANGE = 19
IP_RECVIF = 20
IP_IPSEC_POLICY = 21
IP_FAITH = 22
IP_ONESBCAST = 23
IP_FW_TABLE_ADD = 40
IP_FW_TABLE_DEL = 41
IP_FW_TABLE_FLUSH = 42
IP_FW_TABLE_GETSIZE = 43
IP_FW_TABLE_LIST = 44
IP_FW_ADD = 50
IP_FW_DEL = 51
IP_FW_FLUSH = 52
IP_FW_ZERO = 53
IP_FW_GET = 54
IP_FW_RESETLOG = 55
IP_FW_NAT_CFG = 56
IP_FW_NAT_DEL = 57
IP_FW_NAT_GET_CONFIG = 58
IP_FW_NAT_GET_LOG = 59
IP_DUMMYNET_CONFIGURE = 60
IP_DUMMYNET_DEL = 61
IP_DUMMYNET_FLUSH = 62
IP_DUMMYNET_GET = 64
IP_RECVTTL = 65
IP_MINTTL = 66
IP_DONTFRAG = 67
IP_ADD_SOURCE_MEMBERSHIP = 70
IP_DROP_SOURCE_MEMBERSHIP = 71
IP_BLOCK_SOURCE = 72
IP_UNBLOCK_SOURCE = 73
IP_MSFILTER = 74
MCAST_JOIN_GROUP = 80
MCAST_LEAVE_GROUP = 81
MCAST_JOIN_SOURCE_GROUP = 82
MCAST_LEAVE_SOURCE_GROUP = 83
MCAST_BLOCK_SOURCE = 84
MCAST_UNBLOCK_SOURCE = 85
IP_DEFAULT_MULTICAST_TTL = 1
IP_DEFAULT_MULTICAST_LOOP = 1
IP_MIN_MEMBERSHIPS = 31
IP_MAX_MEMBERSHIPS = 4095
IP_MAX_SOURCE_FILTER = 1024
MCAST_INCLUDE = 1
MCAST_EXCLUDE = 2
IP_PORTRANGE_DEFAULT = 0
IP_PORTRANGE_HIGH = 1
IP_PORTRANGE_LOW = 2
IPPROTO_MAXID = (IPPROTO_AH + 1)
IPCTL_FORWARDING = 1
IPCTL_SENDREDIRECTS = 2
IPCTL_DEFTTL = 3
IPCTL_DEFMTU = 4
IPCTL_RTEXPIRE = 5
IPCTL_RTMINEXPIRE = 6
IPCTL_RTMAXCACHE = 7
IPCTL_SOURCEROUTE = 8
IPCTL_DIRECTEDBROADCAST = 9
IPCTL_INTRQMAXLEN = 10
IPCTL_INTRQDROPS = 11
IPCTL_STATS = 12
IPCTL_ACCEPTSOURCEROUTE = 13
IPCTL_FASTFORWARDING = 14
IPCTL_KEEPFAITH = 15
IPCTL_GIF_TTL = 16
IPCTL_MAXID = 17
def in_nullhost(x): return ((x).s_addr == INADDR_ANY)
# Included from netinet6/in6.h
__KAME_VERSION = "FreeBSD"
IPV6PORT_RESERVED = 1024
IPV6PORT_ANONMIN = 49152
IPV6PORT_ANONMAX = 65535
IPV6PORT_RESERVEDMIN = 600
IPV6PORT_RESERVEDMAX = (IPV6PORT_RESERVED-1)
INET6_ADDRSTRLEN = 46
IPV6_ADDR_INT32_ONE = 1
IPV6_ADDR_INT32_TWO = 2
IPV6_ADDR_INT32_MNL = (-16711680)
IPV6_ADDR_INT32_MLL = (-16646144)
IPV6_ADDR_INT32_SMP = 0x0000ffff
IPV6_ADDR_INT16_ULL = 0xfe80
IPV6_ADDR_INT16_USL = 0xfec0
IPV6_ADDR_INT16_MLL = 0xff02
IPV6_ADDR_INT32_ONE = 0x01000000
IPV6_ADDR_INT32_TWO = 0x02000000
IPV6_ADDR_INT32_MNL = 0x000001ff
IPV6_ADDR_INT32_MLL = 0x000002ff
IPV6_ADDR_INT32_SMP = (-65536)
IPV6_ADDR_INT16_ULL = 0x80fe
IPV6_ADDR_INT16_USL = 0xc0fe
IPV6_ADDR_INT16_MLL = 0x02ff
def IN6_IS_ADDR_UNSPECIFIED(a): return \
def IN6_IS_ADDR_LOOPBACK(a): return \
def IN6_IS_ADDR_V4COMPAT(a): return \
def IN6_IS_ADDR_V4MAPPED(a): return \
IPV6_ADDR_SCOPE_NODELOCAL = 0x01
IPV6_ADDR_SCOPE_INTFACELOCAL = 0x01
IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
IPV6_ADDR_SCOPE_SITELOCAL = 0x05
IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
IPV6_ADDR_SCOPE_GLOBAL = 0x0e
__IPV6_ADDR_SCOPE_NODELOCAL = 0x01
__IPV6_ADDR_SCOPE_INTFACELOCAL = 0x01
__IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
__IPV6_ADDR_SCOPE_SITELOCAL = 0x05
__IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
__IPV6_ADDR_SCOPE_GLOBAL = 0x0e
def IN6_IS_ADDR_LINKLOCAL(a): return \
def IN6_IS_ADDR_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_NODELOCAL(a): return \
def IN6_IS_ADDR_MC_INTFACELOCAL(a): return \
def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
def IN6_IS_ADDR_MC_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
def IN6_IS_ADDR_MC_GLOBAL(a): return \
def IN6_IS_ADDR_MC_NODELOCAL(a): return \
def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
def IN6_IS_ADDR_MC_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
def IN6_IS_ADDR_MC_GLOBAL(a): return \
def IN6_IS_SCOPE_LINKLOCAL(a): return \
def IN6_IS_SCOPE_EMBED(a): return \
def IFA6_IS_DEPRECATED(a): return \
def IFA6_IS_INVALID(a): return \
IPV6_OPTIONS = 1
IPV6_RECVOPTS = 5
IPV6_RECVRETOPTS = 6
IPV6_RECVDSTADDR = 7
IPV6_RETOPTS = 8
IPV6_SOCKOPT_RESERVED1 = 3
IPV6_UNICAST_HOPS = 4
IPV6_MULTICAST_IF = 9
IPV6_MULTICAST_HOPS = 10
IPV6_MULTICAST_LOOP = 11
IPV6_JOIN_GROUP = 12
IPV6_LEAVE_GROUP = 13
IPV6_PORTRANGE = 14
ICMP6_FILTER = 18
IPV6_2292PKTINFO = 19
IPV6_2292HOPLIMIT = 20
IPV6_2292NEXTHOP = 21
IPV6_2292HOPOPTS = 22
IPV6_2292DSTOPTS = 23
IPV6_2292RTHDR = 24
IPV6_2292PKTOPTIONS = 25
IPV6_CHECKSUM = 26
IPV6_V6ONLY = 27
IPV6_BINDV6ONLY = IPV6_V6ONLY
IPV6_IPSEC_POLICY = 28
IPV6_FAITH = 29
IPV6_FW_ADD = 30
IPV6_FW_DEL = 31
IPV6_FW_FLUSH = 32
IPV6_FW_ZERO = 33
IPV6_FW_GET = 34
IPV6_RTHDRDSTOPTS = 35
IPV6_RECVPKTINFO = 36
IPV6_RECVHOPLIMIT = 37
IPV6_RECVRTHDR = 38
IPV6_RECVHOPOPTS = 39
IPV6_RECVDSTOPTS = 40
IPV6_RECVRTHDRDSTOPTS = 41
IPV6_USE_MIN_MTU = 42
IPV6_RECVPATHMTU = 43
IPV6_PATHMTU = 44
IPV6_REACHCONF = 45
IPV6_PKTINFO = 46
IPV6_HOPLIMIT = 47
IPV6_NEXTHOP = 48
IPV6_HOPOPTS = 49
IPV6_DSTOPTS = 50
IPV6_RTHDR = 51
IPV6_PKTOPTIONS = 52
IPV6_RECVTCLASS = 57
IPV6_AUTOFLOWLABEL = 59
IPV6_TCLASS = 61
IPV6_DONTFRAG = 62
IPV6_PREFER_TEMPADDR = 63
IPV6_MSFILTER = 74
IPV6_RTHDR_LOOSE = 0
IPV6_RTHDR_STRICT = 1
IPV6_RTHDR_TYPE_0 = 0
IPV6_DEFAULT_MULTICAST_HOPS = 1
IPV6_DEFAULT_MULTICAST_LOOP = 1
IPV6_PORTRANGE_DEFAULT = 0
IPV6_PORTRANGE_HIGH = 1
IPV6_PORTRANGE_LOW = 2
IPV6PROTO_MAXID = (IPPROTO_PIM + 1)
IPV6CTL_FORWARDING = 1
IPV6CTL_SENDREDIRECTS = 2
IPV6CTL_DEFHLIM = 3
IPV6CTL_DEFMTU = 4
IPV6CTL_FORWSRCRT = 5
IPV6CTL_STATS = 6
IPV6CTL_MRTSTATS = 7
IPV6CTL_MRTPROTO = 8
IPV6CTL_MAXFRAGPACKETS = 9
IPV6CTL_SOURCECHECK = 10
IPV6CTL_SOURCECHECK_LOGINT = 11
IPV6CTL_ACCEPT_RTADV = 12
IPV6CTL_KEEPFAITH = 13
IPV6CTL_LOG_INTERVAL = 14
IPV6CTL_HDRNESTLIMIT = 15
IPV6CTL_DAD_COUNT = 16
IPV6CTL_AUTO_FLOWLABEL = 17
IPV6CTL_DEFMCASTHLIM = 18
IPV6CTL_GIF_HLIM = 19
IPV6CTL_KAME_VERSION = 20
IPV6CTL_USE_DEPRECATED = 21
IPV6CTL_RR_PRUNE = 22
IPV6CTL_MAPPED_ADDR = 23
IPV6CTL_V6ONLY = 24
IPV6CTL_RTEXPIRE = 25
IPV6CTL_RTMINEXPIRE = 26
IPV6CTL_RTMAXCACHE = 27
IPV6CTL_USETEMPADDR = 32
IPV6CTL_TEMPPLTIME = 33
IPV6CTL_TEMPVLTIME = 34
IPV6CTL_AUTO_LINKLOCAL = 35
IPV6CTL_RIP6STATS = 36
IPV6CTL_PREFER_TEMPADDR = 37
IPV6CTL_ADDRCTLPOLICY = 38
IPV6CTL_USE_DEFAULTZONE = 39
IPV6CTL_MAXFRAGS = 41
IPV6CTL_IFQ = 42
IPV6CTL_ISATAPRTR = 43
IPV6CTL_MCAST_PMTU = 44
IPV6CTL_STEALTH = 45
IPV6CTL_MAXID = 46
|
lgpl-3.0
|
cstan11/Sick-Beard
|
lib/requests/packages/chardet/mbcssm.py
|
215
|
18214
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from constants import eStart, eError, eItsMe
# BIG5
BIG5_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
4,4,4,4,4,4,4,4, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
4,3,3,3,3,3,3,3, # a0 - a7
3,3,3,3,3,3,3,3, # a8 - af
3,3,3,3,3,3,3,3, # b0 - b7
3,3,3,3,3,3,3,3, # b8 - bf
3,3,3,3,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0) # f8 - ff
BIG5_st = ( \
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart)#10-17
Big5CharLenTable = (0, 1, 1, 2, 0)
Big5SMModel = {'classTable': BIG5_cls,
'classFactor': 5,
'stateTable': BIG5_st,
'charLenTable': Big5CharLenTable,
'name': 'Big5'}
# EUC-JP
EUCJP_cls = ( \
4,4,4,4,4,4,4,4, # 00 - 07
4,4,4,4,4,4,5,5, # 08 - 0f
4,4,4,4,4,4,4,4, # 10 - 17
4,4,4,5,4,4,4,4, # 18 - 1f
4,4,4,4,4,4,4,4, # 20 - 27
4,4,4,4,4,4,4,4, # 28 - 2f
4,4,4,4,4,4,4,4, # 30 - 37
4,4,4,4,4,4,4,4, # 38 - 3f
4,4,4,4,4,4,4,4, # 40 - 47
4,4,4,4,4,4,4,4, # 48 - 4f
4,4,4,4,4,4,4,4, # 50 - 57
4,4,4,4,4,4,4,4, # 58 - 5f
4,4,4,4,4,4,4,4, # 60 - 67
4,4,4,4,4,4,4,4, # 68 - 6f
4,4,4,4,4,4,4,4, # 70 - 77
4,4,4,4,4,4,4,4, # 78 - 7f
5,5,5,5,5,5,5,5, # 80 - 87
5,5,5,5,5,5,1,3, # 88 - 8f
5,5,5,5,5,5,5,5, # 90 - 97
5,5,5,5,5,5,5,5, # 98 - 9f
5,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,0,5) # f8 - ff
EUCJP_st = ( \
3, 4, 3, 5,eStart,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
3,eError,eError,eError,eStart,eStart,eStart,eStart)#20-27
EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
EUCJPSMModel = {'classTable': EUCJP_cls,
'classFactor': 6,
'stateTable': EUCJP_st,
'charLenTable': EUCJPCharLenTable,
'name': 'EUC-JP'}
# EUC-KR
EUCKR_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,3,3,3, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,3,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,0) # f8 - ff
EUCKR_st = (
eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart)#08-0f
EUCKRCharLenTable = (0, 1, 2, 0)
EUCKRSMModel = {'classTable': EUCKR_cls,
'classFactor': 4,
'stateTable': EUCKR_st,
'charLenTable': EUCKRCharLenTable,
'name': 'EUC-KR'}
# EUC-TW
EUCTW_cls = ( \
2,2,2,2,2,2,2,2, # 00 - 07
2,2,2,2,2,2,0,0, # 08 - 0f
2,2,2,2,2,2,2,2, # 10 - 17
2,2,2,0,2,2,2,2, # 18 - 1f
2,2,2,2,2,2,2,2, # 20 - 27
2,2,2,2,2,2,2,2, # 28 - 2f
2,2,2,2,2,2,2,2, # 30 - 37
2,2,2,2,2,2,2,2, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,2, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,6,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,3,4,4,4,4,4,4, # a0 - a7
5,5,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,3,1,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0) # f8 - ff
EUCTW_st = ( \
eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart)#28-2f
EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
EUCTWSMModel = {'classTable': EUCTW_cls,
'classFactor': 7,
'stateTable': EUCTW_st,
'charLenTable': EUCTWCharLenTable,
'name': 'x-euc-tw'}
# GB2312
GB2312_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
3,3,3,3,3,3,3,3, # 30 - 37
3,3,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,4, # 78 - 7f
5,6,6,6,6,6,6,6, # 80 - 87
6,6,6,6,6,6,6,6, # 88 - 8f
6,6,6,6,6,6,6,6, # 90 - 97
6,6,6,6,6,6,6,6, # 98 - 9f
6,6,6,6,6,6,6,6, # a0 - a7
6,6,6,6,6,6,6,6, # a8 - af
6,6,6,6,6,6,6,6, # b0 - b7
6,6,6,6,6,6,6,6, # b8 - bf
6,6,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
6,6,6,6,6,6,6,6, # e0 - e7
6,6,6,6,6,6,6,6, # e8 - ef
6,6,6,6,6,6,6,6, # f0 - f7
6,6,6,6,6,6,6,0) # f8 - ff
GB2312_st = ( \
eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart)#28-2f
# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
GB2312SMModel = {'classTable': GB2312_cls,
'classFactor': 7,
'stateTable': GB2312_st,
'charLenTable': GB2312CharLenTable,
'name': 'GB2312'}
# Shift_JIS
SJIS_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
3,3,3,3,3,3,3,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
#0xa0 is illegal in sjis encoding, but some pages do
#contain such bytes. We need to be more forgiving of such errors.
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
4,4,4,4,4,4,4,4, # f0 - f7
4,4,4,4,4,0,0,0) # f8 - ff
SJIS_st = ( \
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart)#10-17
SJISCharLenTable = (0, 1, 1, 2, 0, 0)
SJISSMModel = {'classTable': SJIS_cls,
'classFactor': 6,
'stateTable': SJIS_st,
'charLenTable': SJISCharLenTable,
'name': 'Shift_JIS'}
# UCS2-BE
UCS2BE_cls = ( \
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5) # f8 - ff
UCS2BE_st = ( \
5, 7, 7,eError, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
6, 6, 6, 6, 5, 7, 7,eError,#20-27
5, 8, 6, 6,eError, 6, 6, 6,#28-2f
6, 6, 6, 6,eError,eError,eStart,eStart)#30-37
UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
UCS2BESMModel = {'classTable': UCS2BE_cls,
'classFactor': 6,
'stateTable': UCS2BE_st,
'charLenTable': UCS2BECharLenTable,
'name': 'UTF-16BE'}
# UCS2-LE
UCS2LE_cls = ( \
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5) # f8 - ff
UCS2LE_st = ( \
6, 6, 7, 6, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
5, 5, 5,eError, 5,eError, 6, 6,#18-1f
7, 6, 8, 8, 5, 5, 5,eError,#20-27
5, 5, 5,eError,eError,eError, 5, 5,#28-2f
5, 5, 5,eError, 5,eError,eStart,eStart)#30-37
UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
UCS2LESMModel = {'classTable': UCS2LE_cls,
'classFactor': 6,
'stateTable': UCS2LE_st,
'charLenTable': UCS2LECharLenTable,
'name': 'UTF-16LE'}
# UTF-8
UTF8_cls = ( \
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
2,2,2,2,3,3,3,3, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
5,5,5,5,5,5,5,5, # a0 - a7
5,5,5,5,5,5,5,5, # a8 - af
5,5,5,5,5,5,5,5, # b0 - b7
5,5,5,5,5,5,5,5, # b8 - bf
0,0,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
7,8,8,8,8,8,8,8, # e0 - e7
8,8,8,8,8,9,8,8, # e8 - ef
10,11,11,11,11,11,11,11, # f0 - f7
12,13,13,13,14,15,0,0) # f8 - ff
UTF8_st = ( \
eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
9, 11, 8, 7, 6, 5, 4, 3,#08-0f
eError,eError,eError,eError,eError,eError,eError,eError,#10-17
eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
eError,eError, 5, 5, 5, 5,eError,eError,#30-37
eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
eError,eError,eError, 5, 5, 5,eError,eError,#40-47
eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
eError,eError, 7, 7, 7, 7,eError,eError,#50-57
eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
eError,eError,eError,eError, 7, 7,eError,eError,#60-67
eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
eError,eError, 9, 9, 9, 9,eError,eError,#70-77
eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
eError,eError,eError,eError,eError, 9,eError,eError,#80-87
eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
eError,eError, 12, 12, 12, 12,eError,eError,#90-97
eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
eError,eError,eError,eError,eError,eError,eError,eError)#c8-cf
UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
UTF8SMModel = {'classTable': UTF8_cls,
'classFactor': 16,
'stateTable': UTF8_st,
'charLenTable': UTF8CharLenTable,
'name': 'UTF-8'}
|
gpl-3.0
|
yceruto/django
|
tests/utils_tests/test_module_loading.py
|
5
|
8752
|
import imp
from importlib import import_module
import os
import sys
import unittest
from zipimport import zipimporter
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.utils import six
from django.utils.module_loading import autodiscover_modules, import_by_path, module_has_submodule
from django.utils._os import upath
class DefaultLoader(unittest.TestCase):
def setUp(self):
sys.meta_path.insert(0, ProxyFinder())
def tearDown(self):
sys.meta_path.pop(0)
def test_loader(self):
"Normal module existence can be tested"
test_module = import_module('utils_tests.test_module')
test_no_submodule = import_module(
'utils_tests.test_no_submodule')
# An importable child
self.assertTrue(module_has_submodule(test_module, 'good_module'))
mod = import_module('utils_tests.test_module.good_module')
self.assertEqual(mod.content, 'Good Module')
# A child that exists, but will generate an import error if loaded
self.assertTrue(module_has_submodule(test_module, 'bad_module'))
self.assertRaises(ImportError, import_module, 'utils_tests.test_module.bad_module')
# A child that doesn't exist
self.assertFalse(module_has_submodule(test_module, 'no_such_module'))
self.assertRaises(ImportError, import_module, 'utils_tests.test_module.no_such_module')
# A child that doesn't exist, but is the name of a package on the path
self.assertFalse(module_has_submodule(test_module, 'django'))
self.assertRaises(ImportError, import_module, 'utils_tests.test_module.django')
# Don't be confused by caching of import misses
import types # NOQA: causes attempted import of utils_tests.types
self.assertFalse(module_has_submodule(sys.modules['utils_tests'], 'types'))
# A module which doesn't have a __path__ (so no submodules)
self.assertFalse(module_has_submodule(test_no_submodule, 'anything'))
self.assertRaises(ImportError, import_module,
'utils_tests.test_no_submodule.anything')
class EggLoader(unittest.TestCase):
def setUp(self):
self.old_path = sys.path[:]
self.egg_dir = '%s/eggs' % os.path.dirname(upath(__file__))
def tearDown(self):
sys.path = self.old_path
sys.path_importer_cache.clear()
sys.modules.pop('egg_module.sub1.sub2.bad_module', None)
sys.modules.pop('egg_module.sub1.sub2.good_module', None)
sys.modules.pop('egg_module.sub1.sub2', None)
sys.modules.pop('egg_module.sub1', None)
sys.modules.pop('egg_module.bad_module', None)
sys.modules.pop('egg_module.good_module', None)
sys.modules.pop('egg_module', None)
def test_shallow_loader(self):
"Module existence can be tested inside eggs"
egg_name = '%s/test_egg.egg' % self.egg_dir
sys.path.append(egg_name)
egg_module = import_module('egg_module')
# An importable child
self.assertTrue(module_has_submodule(egg_module, 'good_module'))
mod = import_module('egg_module.good_module')
self.assertEqual(mod.content, 'Good Module')
# A child that exists, but will generate an import error if loaded
self.assertTrue(module_has_submodule(egg_module, 'bad_module'))
self.assertRaises(ImportError, import_module, 'egg_module.bad_module')
# A child that doesn't exist
self.assertFalse(module_has_submodule(egg_module, 'no_such_module'))
self.assertRaises(ImportError, import_module, 'egg_module.no_such_module')
def test_deep_loader(self):
"Modules deep inside an egg can still be tested for existence"
egg_name = '%s/test_egg.egg' % self.egg_dir
sys.path.append(egg_name)
egg_module = import_module('egg_module.sub1.sub2')
# An importable child
self.assertTrue(module_has_submodule(egg_module, 'good_module'))
mod = import_module('egg_module.sub1.sub2.good_module')
self.assertEqual(mod.content, 'Deep Good Module')
# A child that exists, but will generate an import error if loaded
self.assertTrue(module_has_submodule(egg_module, 'bad_module'))
self.assertRaises(ImportError, import_module, 'egg_module.sub1.sub2.bad_module')
# A child that doesn't exist
self.assertFalse(module_has_submodule(egg_module, 'no_such_module'))
self.assertRaises(ImportError, import_module, 'egg_module.sub1.sub2.no_such_module')
class ModuleImportTestCase(unittest.TestCase):
def test_import_by_path(self):
cls = import_by_path(
'django.utils.module_loading.import_by_path')
self.assertEqual(cls, import_by_path)
# Test exceptions raised
for path in ('no_dots_in_path', 'unexistent.path',
'utils_tests.unexistent'):
self.assertRaises(ImproperlyConfigured, import_by_path, path)
with self.assertRaises(ImproperlyConfigured) as cm:
import_by_path('unexistent.module.path', error_prefix="Foo")
self.assertTrue(str(cm.exception).startswith('Foo'))
def test_import_error_traceback(self):
"""Test preserving the original traceback on an ImportError."""
try:
import_by_path('test_module.bad_module.content')
except ImproperlyConfigured:
traceback = sys.exc_info()[2]
self.assertIsNotNone(traceback.tb_next.tb_next,
'Should have more than the calling frame in the traceback.')
@override_settings(INSTALLED_APPS=('utils_tests.test_module',))
class AutodiscoverModulesTestCase(SimpleTestCase):
def test_autodiscover_modules_found(self):
autodiscover_modules('good_module')
def test_autodiscover_modules_not_found(self):
autodiscover_modules('missing_module')
def test_autodiscover_modules_found_but_bad_module(self):
with six.assertRaisesRegex(self, ImportError, "No module named '?a_package_name_that_does_not_exist'?"):
autodiscover_modules('bad_module')
def test_autodiscover_modules_several_one_bad_module(self):
with six.assertRaisesRegex(self, ImportError, "No module named '?a_package_name_that_does_not_exist'?"):
autodiscover_modules('good_module', 'bad_module')
def test_autodiscover_modules_several_found(self):
autodiscover_modules('good_module', 'another_good_module')
def test_validate_registry_keeps_intact(self):
from .test_module import site
with six.assertRaisesRegex(self, Exception, "Some random exception."):
autodiscover_modules('another_bad_module', register_to=site)
self.assertEqual(site._registry, {})
class ProxyFinder(object):
def __init__(self):
self._cache = {}
def find_module(self, fullname, path=None):
tail = fullname.rsplit('.', 1)[-1]
try:
fd, fn, info = imp.find_module(tail, path)
if fullname in self._cache:
old_fd = self._cache[fullname][0]
if old_fd:
old_fd.close()
self._cache[fullname] = (fd, fn, info)
except ImportError:
return None
else:
return self # this is a loader as well
def load_module(self, fullname):
if fullname in sys.modules:
return sys.modules[fullname]
fd, fn, info = self._cache[fullname]
try:
return imp.load_module(fullname, fd, fn, info)
finally:
if fd:
fd.close()
class TestFinder(object):
def __init__(self, *args, **kwargs):
self.importer = zipimporter(*args, **kwargs)
def find_module(self, path):
importer = self.importer.find_module(path)
if importer is None:
return
return TestLoader(importer)
class TestLoader(object):
def __init__(self, importer):
self.importer = importer
def load_module(self, name):
mod = self.importer.load_module(name)
mod.__loader__ = self
return mod
class CustomLoader(EggLoader):
"""The Custom Loader test is exactly the same as the EggLoader, but
it uses a custom defined Loader and Finder that is intentionally
split into two classes. Although the EggLoader combines both functions
into one class, this isn't required.
"""
def setUp(self):
super(CustomLoader, self).setUp()
sys.path_hooks.insert(0, TestFinder)
sys.path_importer_cache.clear()
def tearDown(self):
super(CustomLoader, self).tearDown()
sys.path_hooks.pop(0)
|
bsd-3-clause
|
OpenUpgrade/OpenUpgrade
|
addons/hr_attendance/wizard/hr_attendance_error.py
|
377
|
2896
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_attendance_error(osv.osv_memory):
_name = 'hr.attendance.error'
_description = 'Print Error Attendance Report'
_columns = {
'init_date': fields.date('Starting Date', required=True),
'end_date': fields.date('Ending Date', required=True),
'max_delay': fields.integer('Max. Delay (Min)', required=True)
}
_defaults = {
'init_date': lambda *a: time.strftime('%Y-%m-%d'),
'end_date': lambda *a: time.strftime('%Y-%m-%d'),
'max_delay': 120,
}
def print_report(self, cr, uid, ids, context=None):
emp_ids = []
data_error = self.read(cr, uid, ids, context=context)[0]
date_from = data_error['init_date']
date_to = data_error['end_date']
cr.execute("SELECT id FROM hr_attendance WHERE employee_id IN %s AND to_char(name,'YYYY-mm-dd')<=%s AND to_char(name,'YYYY-mm-dd')>=%s AND action IN %s ORDER BY name" ,(tuple(context['active_ids']), date_to, date_from, tuple(['sign_in','sign_out'])))
attendance_ids = [x[0] for x in cr.fetchall()]
if not attendance_ids:
raise osv.except_osv(_('No Data Available!'), _('No records are found for your selection!'))
attendance_records = self.pool.get('hr.attendance').browse(cr, uid, attendance_ids, context=context)
for rec in attendance_records:
if rec.employee_id.id not in emp_ids:
emp_ids.append(rec.employee_id.id)
data_error['emp_ids'] = emp_ids
datas = {
'ids': [],
'model': 'hr.employee',
'form': data_error
}
return self.pool['report'].get_action(
cr, uid, [], 'hr_attendance.report_attendanceerrors', data=datas, context=context
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
0xFelix/python-viessmann-tools
|
viessmanntools/vitoreset.py
|
1
|
7193
|
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
from json import dumps
from locale import LC_TIME, setlocale
from signal import SIGINT, SIGTERM, signal
from subprocess import TimeoutExpired
from time import sleep
from threading import Event
import paho.mqtt.client as mqtt
import RPi.GPIO as GPIO
from .config import ViessmannToolsConfig
from .vclient import CommunicationGarbledError, Vclient
class VitoResetState(Enum):
OK = "OK"
RESET_MAX_REACHED = "RESET_MAX_REACHED"
NOT_ALLOWED_CODE = "NOT_ALLOWED_CODE_"
RESET = "RESET"
class HeaterState:
def __init__(self, vclient_result, query_date_locale, query_date_format):
self._query_date_locale = query_date_locale
self._query_date_format = query_date_format
result_str = vclient_result.rstrip()
second_space_idx = self._find_nth(result_str, " ", 2)
self.datetime = self._parse_datetime(result_str[:second_space_idx])
self.msg = result_str[second_space_idx + 1 : -5]
self.code = result_str[-3:-1]
def __eq__(self, other):
return (
self.datetime == other.datetime
and self.msg == other.msg
and self.code == other.code
)
def _parse_datetime(self, date_str):
with self._setlocale(self._query_date_locale):
return datetime.strptime(date_str, self._query_date_format)
@staticmethod
@contextmanager
def _setlocale(locale_str):
saved = setlocale(LC_TIME)
try:
yield setlocale(LC_TIME, locale_str)
finally:
setlocale(LC_TIME, saved)
@staticmethod
def _find_nth(string, substr, nth):
idx = string.find(substr)
while idx >= 0 and nth > 1:
idx = string.find(substr, idx + len(substr))
nth -= 1
if idx == -1:
raise ValueError("Could not find nth occurence")
return idx
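# Illustrative sketch (not part of the original module): how HeaterState slices a
# vclient getError0 line such as the sample used in VitoReset.loop() below:
#   "Do,01.01.1970 00:00:00 Regelbetrieb (kein Fehler) (00)"
#   - the text up to the second space is parsed into .datetime using the configured
#     locale and date format
#   - the text between that space and the trailing " (00)" becomes .msg
#     ("Regelbetrieb (kein Fehler)")
#   - the two digits inside the final parentheses become .code ("00")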
class VitoReset:
def __init__(self, config=None):
if config is None:
config = ViessmannToolsConfig.get_default_config()
self._config = config
config = config["VitoReset"]
self._gpio_pin = config.getint("gpio_pin")
self._query_period = config.getint("query_period")
self._query_date_locale = config.get("query_date_locale")
self._query_date_format = config.get("query_date_format")
self._allowed_codes = config.get("allowed_codes").split(",")
self._reset_max = config.getint("reset_max")
self._host = config.get("mqtt_broker")
self._port = config.getint("mqtt_port")
self._tls = config.getboolean("mqtt_tls")
self._username = config.get("mqtt_username")
self._password = config.get("mqtt_password")
self._topic = config.get("mqtt_topic")
self._exit = Event()
self._mqtt_client = mqtt.Client()
signal(SIGINT, self._signal_handler)
signal(SIGTERM, self._signal_handler)
def _signal_handler(self, *_):
self._exit.set()
def _on_connect(self, client, userdata, flags, rc):
self._mqtt_client.publish(f"{self._topic}/LWT", "Online", retain=True)
def _connect_mqtt_client(self):
if self._username != "":
if self._password != "":
self._mqtt_client.username_pw_set(self._username, self._password)
else:
self._mqtt_client.username_pw_set(self._username)
if self._tls:
self._mqtt_client.tls_set()
self._mqtt_client.will_set(f"{self._topic}/LWT", "Offline", retain=True)
self._mqtt_client.on_connect = self._on_connect
self._mqtt_client.connect(self._host, self._port)
self._mqtt_client.loop_start()
def _disconnect_mqtt_client(self):
self._mqtt_client.publish(f"{self._topic}/LWT", "Offline", retain=True)
self._mqtt_client.loop_stop()
self._mqtt_client.disconnect()
def _setup_gpio(self):
GPIO.setmode(GPIO.BOARD)
GPIO.setup(self._gpio_pin, GPIO.OUT)
self._set_gpio_output(False)
def _set_gpio_output(self, output):
print(f"Setting GPIO {self._gpio_pin} output to {output}", flush=True)
GPIO.output(self._gpio_pin, output)
def _publish_reset(self, code):
self._mqtt_client.publish(
f"{self._topic}/RESET",
dumps(
{"Time": datetime.now().isoformat(timespec="seconds"), "Code": code,}
),
retain=True,
)
def _publish_state(self, state):
self._mqtt_client.publish(
f"{self._topic}/STATE",
dumps(
{"Time": datetime.now().isoformat(timespec="seconds"), "State": state,}
),
retain=True,
)
def _reset_heater(self, code):
print("Resetting the heater", flush=True)
self._publish_reset(code)
self._set_gpio_output(True)
sleep(1)
self._set_gpio_output(False)
def _publish_state_and_exit(self, state, msg):
print(msg, flush=True)
self._publish_state(state)
self._exit.set()
def loop(self):
self._connect_mqtt_client()
self._setup_gpio()
last_state = HeaterState(
"Do,01.01.1970 00:00:00 Regelbetrieb (kein Fehler) (00)",
self._query_date_locale,
self._query_date_format,
)
reset_count = 0
vclient = Vclient(["getError0"], self._config)
first_run = True
while first_run or not self._exit.wait(self._query_period):
try:
heater_state = HeaterState(
vclient.run(), self._query_date_locale, self._query_date_format,
)
if heater_state != last_state:
if heater_state.code in self._allowed_codes:
if reset_count < self._reset_max:
self._reset_heater(heater_state.code)
last_state = heater_state
reset_count += 1
self._publish_state(VitoResetState.RESET.value)
else:
self._publish_state_and_exit(
VitoResetState.RESET_MAX_REACHED.value,
"reset max reached, exiting",
)
else:
self._publish_state_and_exit(
f"{VitoResetState.NOT_ALLOWED_CODE.value}{heater_state.code}",
f"Code {heater_state.code} not allowed, exiting",
)
else:
reset_count = 0
self._publish_state(VitoResetState.OK.value)
except CommunicationGarbledError as exc:
print(exc, flush=True)
except TimeoutExpired:
print(
"vclient query took too long, will try again next period",
flush=True,
)
finally:
first_run = False
self._disconnect_mqtt_client()
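# Minimal usage sketch (assumption: illustrative only; the installed package may expose
# its own entry point). With no explicit config, VitoReset falls back to
# ViessmannToolsConfig.get_default_config() and then polls the heater until it is told
# to stop or exits on its own.
if __name__ == "__main__":
    VitoReset().loop()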
|
gpl-3.0
|
dharmabumstead/ansible
|
lib/ansible/modules/cloud/azure/azure_rm_virtualmachine_scaleset_facts.py
|
13
|
7164
|
#!/usr/bin/python
#
# Copyright (c) 2017 Sertac Ozercan, <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_virtualmachine_scaleset_facts
version_added: "2.4"
short_description: Get Virtual Machine Scale Set facts
description:
- Get facts for a virtual machine scale set
options:
name:
description:
- Limit results to a specific virtual machine scale set
resource_group:
description:
- The resource group to search for the desired virtual machine scale set
extends_documentation_fragment:
- azure
author:
- "Sertac Ozercan (@sozercan)"
'''
EXAMPLES = '''
- name: Get facts for a virtual machine scale set
azure_rm_virtualmachine_scaleset_facts:
resource_group: Testing
name: testvmss001
- name: Get facts for all virtual machine scale sets
azure_rm_virtualmachine_scaleset_facts:
resource_group: Testing
- name: Get facts by tags
azure_rm_virtualmachine_scaleset_facts:
resource_group: Testing
tags:
- testing
'''
RETURN = '''
azure_vmss:
description: List of virtual machine scale sets
returned: always
type: list
example: [{
"location": "eastus",
"properties": {
"overprovision": true,
"singlePlacementGroup": true,
"upgradePolicy": {
"mode": "Manual"
},
"virtualMachineProfile": {
"networkProfile": {
"networkInterfaceConfigurations": [
{
"name": "testvmss",
"properties": {
"dnsSettings": {
"dnsServers": []
},
"enableAcceleratedNetworking": false,
"ipConfigurations": [
{
"name": "default",
"properties": {
"privateIPAddressVersion": "IPv4",
"subnet": {
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/virtualNetworks/testvnet/subnets/testsubnet"
}
}
}
],
"primary": true
}
}
]
},
"osProfile": {
"adminUsername": "testuser",
"computerNamePrefix": "testvmss",
"linuxConfiguration": {
"disablePasswordAuthentication": true,
"ssh": {
"publicKeys": [
{
"keyData": "",
"path": "/home/testuser/.ssh/authorized_keys"
}
]
}
},
"secrets": []
},
"storageProfile": {
"imageReference": {
"offer": "CoreOS",
"publisher": "CoreOS",
"sku": "Stable",
"version": "899.17.0"
},
"osDisk": {
"caching": "ReadWrite",
"createOption": "fromImage",
"managedDisk": {
"storageAccountType": "Standard_LRS"
}
}
}
}
},
"sku": {
"capacity": 1,
"name": "Standard_DS1_v2",
"tier": "Standard"
}
}]
''' # NOQA
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# handled in azure_rm_common
pass
AZURE_OBJECT_CLASS = 'VirtualMachineScaleSet'
AZURE_ENUM_MODULES = ['azure.mgmt.compute.models']
class AzureRMVirtualMachineScaleSetFacts(AzureRMModuleBase):
"""Utility class to get virtual machine scale set facts"""
def __init__(self):
self.module_args = dict(
name=dict(type='str'),
resource_group=dict(type='str'),
tags=dict(type='list')
)
self.results = dict(
changed=False,
ansible_facts=dict(
azure_vmss=[]
)
)
self.name = None
self.resource_group = None
self.tags = None
super(AzureRMVirtualMachineScaleSetFacts, self).__init__(
derived_arg_spec=self.module_args,
supports_tags=False,
facts_module=True
)
def exec_module(self, **kwargs):
for key in self.module_args:
setattr(self, key, kwargs[key])
if self.name and not self.resource_group:
self.fail("Parameter error: resource group required when filtering by name.")
if self.name:
self.results['ansible_facts']['azure_vmss'] = self.get_item()
else:
self.results['ansible_facts']['azure_vmss'] = self.list_items()
return self.results
def get_item(self):
"""Get a single virtual machine scale set"""
self.log('Get properties for {}'.format(self.name))
item = None
results = []
try:
item = self.compute_client.virtual_machine_scale_sets.get(self.resource_group, self.name)
except CloudError:
pass
if item and self.has_tags(item.tags, self.tags):
results = [self.serialize_obj(item, AZURE_OBJECT_CLASS, enum_modules=AZURE_ENUM_MODULES)]
return results
def list_items(self):
"""Get all virtual machine scale sets"""
self.log('List all virtual machine scale sets')
try:
response = self.compute_client.virtual_machine_scale_sets.list(self.resource_group)
except CloudError as exc:
self.fail('Failed to list all items - {}'.format(str(exc)))
results = []
for item in response:
if self.has_tags(item.tags, self.tags):
results.append(self.serialize_obj(item, AZURE_OBJECT_CLASS, enum_modules=AZURE_ENUM_MODULES))
return results
def main():
"""Main module execution code path"""
AzureRMVirtualMachineScaleSetFacts()
if __name__ == '__main__':
main()
|
gpl-3.0
|
hujiajie/chromium-crosswalk
|
tools/win/sizeviewer/sizeviewer.py
|
46
|
7024
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import codecs
import json
import os
import string
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def Run(*args):
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, err = p.communicate()
if p.returncode != 0:
raise SystemExit(out)
def FindNode(node, component):
for child in node['children']:
if child['name'] == component:
return child
return None
def InsertIntoTree(tree, source_name, size):
components = source_name[3:].split('\\')
node = tree
for index, component in enumerate(components):
data = FindNode(node, component)
if not data:
      data = {'name': component}
if index == len(components) - 1:
data['size'] = size
else:
data['children'] = []
node['children'].append(data)
node = data
def FlattenTree(tree):
result = [['Path', 'Parent', 'Size', 'Value']]
def Flatten(node, parent):
name = node['name']
if parent and parent != '/':
name = parent + '/' + name
if 'children' in node:
result.append([name, parent, -1, -1])
for c in node['children']:
Flatten(c, name)
else:
result.append([name, parent, node['size'], node['size']])
Flatten(tree, '')
return result
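# Illustrative sketch (not part of the original script) of the two helpers above:
# InsertIntoTree drops the drive prefix (the first three characters) of each source path
# and builds a nested {'name': ..., 'children': [...] / 'size': ...} tree, which
# FlattenTree then turns into [Path, Parent, Size, Value] rows for the treemap, e.g.
#   tree = {'name': '/', 'children': []}
#   InsertIntoTree(tree, 'c:\\foo\\bar.cc', 10)
#   FlattenTree(tree) == [['Path', 'Parent', 'Size', 'Value'],
#                         ['/', '', -1, -1],
#                         ['foo', '/', -1, -1],
#                         ['foo/bar.cc', 'foo', 10, 10]]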
def GetAsset(filename):
with open(os.path.join(BASE_DIR, filename), 'rb') as f:
return f.read()
def AppendAsScriptBlock(f, value, var=None):
f.write('<script type="text/javascript">\n')
if var:
f.write('var ' + var + ' = ')
f.write(value)
if var:
f.write(';\n')
f.write('</script>\n')
def main():
jsons = []
if len(sys.argv) > 1:
dlls = sys.argv[1:]
else:
out_dir = os.path.join(BASE_DIR, '..', '..', '..', 'out', 'Release')
dlls = [os.path.normpath(os.path.join(out_dir, dll))
for dll in ('chrome.dll', 'chrome_child.dll')]
for dll_path in dlls:
if os.path.exists(dll_path):
print 'Tallying %s...' % dll_path
json_path = dll_path + '.json'
Run(os.path.join(BASE_DIR, '..', '..', '..', 'third_party', 'syzygy',
'binaries', 'exe', 'experimental', 'code_tally.exe'),
'--input-image=' + dll_path,
'--input-pdb=' + dll_path + '.pdb',
'--output-file=' + json_path)
jsons.append(json_path)
if not jsons:
print 'Couldn\'t find dlls.'
print 'Pass fully qualified dll name(s) if you want to use something other '
print 'than out\\Release\\chrome.dll and chrome_child.dll.'
return 1
# Munge the code_tally json format into an easier-to-view format.
for json_name in jsons:
with open(json_name, 'r') as jsonf:
all_data = json.load(jsonf)
html_path = os.path.splitext(json_name)[0] + '.html'
    print 'Generating %s... (standalone)' % html_path
by_source = {}
symbols_index = {}
symbols = []
for obj_name, obj_data in all_data['objects'].iteritems():
for symbol, symbol_data in obj_data.iteritems():
size = int(symbol_data['size'])
        # Sometimes there are symbols with no source file; we just ignore those.
if 'contribs' in symbol_data:
i = 0
while i < len(symbol_data['contribs']):
src_index = symbol_data['contribs'][i]
i += 1
per_line = symbol_data['contribs'][i]
i += 1
source = all_data['sources'][int(src_index)]
if source not in by_source:
by_source[source] = {'lines': {}, 'total_size': 0}
size = 0
# per_line is [line, size, line, size, line, size, ...]
for j in range(0, len(per_line), 2):
line_number = per_line[j]
size += per_line[j + 1]
# Save some time/space in JS by using an array here. 0 == size,
# 1 == symbol list.
by_source[source]['lines'].setdefault(line_number, [0, []])
by_source[source]['lines'][line_number][0] += per_line[j + 1]
if symbol in symbols_index:
symindex = symbols_index[symbol]
else:
symbols.append(symbol)
symbols_index[symbol] = symindex = len(symbols) - 1
by_source[source]['lines'][line_number][1].append(
symindex)
by_source[source]['total_size'] += size
binary_name = all_data['executable']['name']
data = {}
data['name'] = '/'
data['children'] = []
file_contents = {}
line_data = {}
for source, file_data in by_source.iteritems():
InsertIntoTree(data, source, file_data['total_size'])
store_as = source[3:].replace('\\', '/')
try:
with codecs.open(source, 'rb', encoding='latin1') as f:
file_contents[store_as] = f.read()
except IOError:
file_contents[store_as] = '// Unable to load source.'
line_data[store_as] = file_data['lines']
# code_tally attempts to assign fractional bytes when code is shared
# across multiple symbols. Round off here for display after summing above.
for per_line in line_data[store_as].values():
per_line[0] = round(per_line[0])
flattened = FlattenTree(data)
maxval = 0
for i in flattened[1:]:
maxval = max(i[2], maxval)
flattened_str = json.dumps(flattened)
to_write = GetAsset('template.html')
# Save all data and what would normally be external resources into the
# one html so that it's a standalone report.
with open(html_path, 'w') as f:
f.write(to_write)
# These aren't subbed in as a silly workaround for 32-bit python.
# The end result is only ~100M, but while substituting these into a
# template, it otherwise raises a MemoryError, I guess due to
# fragmentation. So instead, we just append them as variables to the file
# and then refer to the variables in the main script.
filedata_str = json.dumps(file_contents).replace(
'</script>', '</scr"+"ipt>')
AppendAsScriptBlock(f, filedata_str, var='g_file_contents')
AppendAsScriptBlock(f, json.dumps(line_data), var='g_line_data')
AppendAsScriptBlock(f, json.dumps(symbols), var='g_symbol_list')
favicon_str = json.dumps(base64.b64encode(GetAsset('favicon.png')))
AppendAsScriptBlock(f, favicon_str, var='g_favicon')
AppendAsScriptBlock(f, flattened_str, var='g_raw_data')
AppendAsScriptBlock(f, str(maxval), var='g_maxval')
dllname_str = binary_name + ' ' + all_data['executable']['version']
AppendAsScriptBlock(f, json.dumps(dllname_str), var='g_dllname')
AppendAsScriptBlock(f, GetAsset('codemirror.js'))
AppendAsScriptBlock(f, GetAsset('clike.js'))
AppendAsScriptBlock(f, GetAsset('main.js'))
f.write('</html>')
return 0
if __name__ == '__main__':
sys.exit(main())
|
bsd-3-clause
|
kanagasabapathi/python-for-android
|
python-build/python-libs/gdata/src/atom/http_interface.py
|
133
|
5183
|
#!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides a common interface for all HTTP requests.
HttpResponse: Represents the server's response to an HTTP request. Provides
an interface identical to httplib.HTTPResponse which is the response
expected from higher level classes which use HttpClient.request.
GenericHttpClient: Provides an interface (superclass) for an object
responsible for making HTTP requests. Subclasses of this object are
used in AtomService and GDataService to make requests to the server. By
changing the http_client member object, the AtomService is able to make
HTTP requests using different logic (for example, when running on
Google App Engine, the http_client makes requests using the App Engine
urlfetch API).
"""
__author__ = 'api.jscudder (Jeff Scudder)'
import StringIO
USER_AGENT = '%s GData-Python/1.3.3'
class Error(Exception):
pass
class UnparsableUrlObject(Error):
pass
class ContentLengthRequired(Error):
pass
class HttpResponse(object):
def __init__(self, body=None, status=None, reason=None, headers=None):
"""Constructor for an HttpResponse object.
HttpResponse represents the server's response to an HTTP request from
the client. The HttpClient.request method returns a httplib.HTTPResponse
object and this HttpResponse class is designed to mirror the interface
exposed by httplib.HTTPResponse.
Args:
body: A file like object, with a read() method. The body could also
be a string, and the constructor will wrap it so that
HttpResponse.read(self) will return the full string.
status: The HTTP status code as an int. Example: 200, 201, 404.
reason: The HTTP status message which follows the code. Example:
OK, Created, Not Found
headers: A dictionary containing the HTTP headers in the server's
response. A common header in the response is Content-Length.
"""
if body:
if hasattr(body, 'read'):
self._body = body
else:
self._body = StringIO.StringIO(body)
else:
self._body = None
if status is not None:
self.status = int(status)
else:
self.status = None
self.reason = reason
self._headers = headers or {}
def getheader(self, name, default=None):
if name in self._headers:
return self._headers[name]
else:
return default
def read(self, amt=None):
if not amt:
return self._body.read()
else:
return self._body.read(amt)
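# Illustrative sketch (not part of the original module): a string body is wrapped in a
# StringIO object so that read() behaves like httplib.HTTPResponse.read().
#   response = HttpResponse(body='hello', status='200', reason='OK',
#                           headers={'Content-Length': '5'})
#   response.read()                               # -> 'hello'
#   response.status                               # -> 200 (coerced to int)
#   response.getheader('Content-Length')          # -> '5'
#   response.getheader('Missing', default='n/a')  # -> 'n/a'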
class GenericHttpClient(object):
debug = False
def __init__(self, http_client, headers=None):
"""
Args:
http_client: An object which provides a request method to make an HTTP
request. The request method in GenericHttpClient performs a
call-through to the contained HTTP client object.
headers: A dictionary containing HTTP headers which should be included
in every HTTP request. Common persistent headers include
'User-Agent'.
"""
self.http_client = http_client
self.headers = headers or {}
def request(self, operation, url, data=None, headers=None):
all_headers = self.headers.copy()
if headers:
all_headers.update(headers)
return self.http_client.request(operation, url, data=data,
headers=all_headers)
def get(self, url, headers=None):
return self.request('GET', url, headers=headers)
def post(self, url, data, headers=None):
return self.request('POST', url, data=data, headers=headers)
def put(self, url, data, headers=None):
return self.request('PUT', url, data=data, headers=headers)
def delete(self, url, headers=None):
return self.request('DELETE', url, headers=headers)
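# Illustrative sketch (not part of the original module; my_low_level_client is a
# hypothetical placeholder): persistent headers set on the client are merged with the
# per-request headers before the call-through to the contained HTTP client.
#   client = GenericHttpClient(http_client=my_low_level_client,
#                              headers={'User-Agent': USER_AGENT % 'example-app'})
#   client.get('http://example.com/feed', headers={'Accept': 'application/atom+xml'})
#   # the request is issued with both the User-Agent and Accept headers set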
class GenericToken(object):
"""Represents an Authorization token to be added to HTTP requests.
  Some Authorization headers include calculated fields (digital
signatures for example) which are based on the parameters of the HTTP
request. Therefore the token is responsible for signing the request
and adding the Authorization header.
"""
def perform_request(self, http_client, operation, url, data=None,
headers=None):
"""For the GenericToken, no Authorization token is set."""
return http_client.request(operation, url, data=data, headers=headers)
def valid_for_scope(self, url):
"""Tells the caller if the token authorizes access to the desired URL.
Since the generic token doesn't add an auth header, it is not valid for
any scope.
"""
return False
|
apache-2.0
|
oouyang/fxos-certsuite
|
mcts/web-platform-tests/tests/tools/pywebsocket/src/mod_pywebsocket/util.py
|
18
|
13818
|
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebSocket utilities.
"""
import array
import errno
# Import hash classes from a module available and recommended for each Python
# version and re-export those symbols. Use the sha and md5 modules in Python 2.4, and
# hashlib module in Python 2.6.
try:
import hashlib
md5_hash = hashlib.md5
sha1_hash = hashlib.sha1
except ImportError:
import md5
import sha
md5_hash = md5.md5
sha1_hash = sha.sha
import StringIO
import logging
import os
import re
import socket
import traceback
import zlib
try:
from mod_pywebsocket import fast_masking
except ImportError:
pass
def get_stack_trace():
"""Get the current stack trace as string.
This is needed to support Python 2.3.
TODO: Remove this when we only support Python 2.4 and above.
Use traceback.format_exc instead.
"""
out = StringIO.StringIO()
traceback.print_exc(file=out)
return out.getvalue()
def prepend_message_to_exception(message, exc):
"""Prepend message to the exception."""
exc.args = (message + str(exc),)
return
def __translate_interp(interp, cygwin_path):
"""Translate interp program path for Win32 python to run cygwin program
    (e.g. perl). Note that it doesn't support a path that contains spaces,
which is typically true for Unix, where #!-script is written.
For Win32 python, cygwin_path is a directory of cygwin binaries.
Args:
interp: interp command line
cygwin_path: directory name of cygwin binary, or None
Returns:
translated interp command line.
"""
if not cygwin_path:
return interp
m = re.match('^[^ ]*/([^ ]+)( .*)?', interp)
if m:
cmd = os.path.join(cygwin_path, m.group(1))
return cmd + m.group(2)
return interp
def get_script_interp(script_path, cygwin_path=None):
"""Gets #!-interpreter command line from the script.
It also fixes command path. When Cygwin Python is used, e.g. in WebKit,
it could run "/usr/bin/perl -wT hello.pl".
When Win32 Python is used, e.g. in Chromium, it couldn't. So, fix
"/usr/bin/perl" to "<cygwin_path>\perl.exe".
Args:
script_path: pathname of the script
cygwin_path: directory name of cygwin binary, or None
Returns:
#!-interpreter command line, or None if it is not #!-script.
"""
fp = open(script_path)
line = fp.readline()
fp.close()
m = re.match('^#!(.*)', line)
if m:
return __translate_interp(m.group(1), cygwin_path)
return None
def wrap_popen3_for_win(cygwin_path):
"""Wrap popen3 to support #!-script on Windows.
Args:
cygwin_path: path for cygwin binary if command path is needed to be
translated. None if no translation required.
"""
__orig_popen3 = os.popen3
def __wrap_popen3(cmd, mode='t', bufsize=-1):
cmdline = cmd.split(' ')
interp = get_script_interp(cmdline[0], cygwin_path)
if interp:
cmd = interp + ' ' + cmd
return __orig_popen3(cmd, mode, bufsize)
os.popen3 = __wrap_popen3
def hexify(s):
return ' '.join(map(lambda x: '%02x' % ord(x), s))
def get_class_logger(o):
return logging.getLogger(
'%s.%s' % (o.__class__.__module__, o.__class__.__name__))
class NoopMasker(object):
"""A masking object that has the same interface as RepeatedXorMasker but
just returns the string passed in without making any change.
"""
def __init__(self):
pass
def mask(self, s):
return s
class RepeatedXorMasker(object):
"""A masking object that applies XOR on the string given to mask method
with the masking bytes given to the constructor repeatedly. This object
remembers the position in the masking bytes the last mask method call
ended and resumes from that point on the next mask method call.
"""
def __init__(self, masking_key):
self._masking_key = masking_key
self._masking_key_index = 0
def _mask_using_swig(self, s):
masked_data = fast_masking.mask(
s, self._masking_key, self._masking_key_index)
self._masking_key_index = (
(self._masking_key_index + len(s)) % len(self._masking_key))
return masked_data
def _mask_using_array(self, s):
result = array.array('B')
result.fromstring(s)
# Use temporary local variables to eliminate the cost to access
# attributes
masking_key = map(ord, self._masking_key)
masking_key_size = len(masking_key)
masking_key_index = self._masking_key_index
for i in xrange(len(result)):
result[i] ^= masking_key[masking_key_index]
masking_key_index = (masking_key_index + 1) % masking_key_size
self._masking_key_index = masking_key_index
return result.tostring()
if 'fast_masking' in globals():
mask = _mask_using_swig
else:
mask = _mask_using_array
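# Illustrative sketch (not part of the original module): because the masker remembers its
# position in the masking key, masking a payload in pieces gives the same bytes as
# masking it in a single call.
#   masker = RepeatedXorMasker('\x12\x34\x56\x78')
#   masker.mask('ab') + masker.mask('cdef') == \
#       RepeatedXorMasker('\x12\x34\x56\x78').mask('abcdef')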
# By making wbits option negative, we can suppress CMF/FLG (2 octet) and
# ADLER32 (4 octet) fields of zlib so that we can use zlib module just as
# deflate library. DICTID won't be added as far as we don't set dictionary.
# LZ77 window of 32K will be used for both compression and decompression.
# For decompression, we can just use 32K to cover any window size. For
# compression, we use 32K so receivers must use 32K.
#
# Compression level is Z_DEFAULT_COMPRESSION. We don't have to match level
# to decode.
#
# See zconf.h, deflate.cc, inflate.cc of zlib library, and zlibmodule.c of
# Python. See also RFC1950 (ZLIB 3.3).
class _Deflater(object):
def __init__(self, window_bits):
self._logger = get_class_logger(self)
self._compress = zlib.compressobj(
zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -window_bits)
def compress(self, bytes):
compressed_bytes = self._compress.compress(bytes)
self._logger.debug('Compress input %r', bytes)
self._logger.debug('Compress result %r', compressed_bytes)
return compressed_bytes
def compress_and_flush(self, bytes):
compressed_bytes = self._compress.compress(bytes)
compressed_bytes += self._compress.flush(zlib.Z_SYNC_FLUSH)
self._logger.debug('Compress input %r', bytes)
self._logger.debug('Compress result %r', compressed_bytes)
return compressed_bytes
def compress_and_finish(self, bytes):
compressed_bytes = self._compress.compress(bytes)
compressed_bytes += self._compress.flush(zlib.Z_FINISH)
self._logger.debug('Compress input %r', bytes)
self._logger.debug('Compress result %r', compressed_bytes)
return compressed_bytes
class _Inflater(object):
def __init__(self, window_bits):
self._logger = get_class_logger(self)
self._window_bits = window_bits
self._unconsumed = ''
self.reset()
def decompress(self, size):
if not (size == -1 or size > 0):
raise Exception('size must be -1 or positive')
data = ''
while True:
if size == -1:
data += self._decompress.decompress(self._unconsumed)
# See Python bug http://bugs.python.org/issue12050 to
# understand why the same code cannot be used for updating
# self._unconsumed for here and else block.
self._unconsumed = ''
else:
data += self._decompress.decompress(
self._unconsumed, size - len(data))
self._unconsumed = self._decompress.unconsumed_tail
if self._decompress.unused_data:
# Encountered a last block (i.e. a block with BFINAL = 1) and
# found a new stream (unused_data). We cannot use the same
# zlib.Decompress object for the new stream. Create a new
# Decompress object to decompress the new one.
#
# It's fine to ignore unconsumed_tail if unused_data is not
# empty.
self._unconsumed = self._decompress.unused_data
self.reset()
if size >= 0 and len(data) == size:
# data is filled. Don't call decompress again.
break
else:
# Re-invoke Decompress.decompress to try to decompress all
# available bytes before invoking read which blocks until
# any new byte is available.
continue
else:
# Here, since unused_data is empty, even if unconsumed_tail is
# not empty, bytes of requested length are already in data. We
# don't have to "continue" here.
break
if data:
self._logger.debug('Decompressed %r', data)
return data
def append(self, data):
self._logger.debug('Appended %r', data)
self._unconsumed += data
def reset(self):
self._logger.debug('Reset')
self._decompress = zlib.decompressobj(-self._window_bits)
# Compresses/decompresses given octets using the method introduced in RFC1979.
class _RFC1979Deflater(object):
"""A compressor class that applies DEFLATE to given byte sequence and
flushes using the algorithm described in the RFC1979 section 2.1.
"""
def __init__(self, window_bits, no_context_takeover):
self._deflater = None
if window_bits is None:
window_bits = zlib.MAX_WBITS
self._window_bits = window_bits
self._no_context_takeover = no_context_takeover
def filter(self, bytes, flush=True, bfinal=False):
if self._deflater is None or (self._no_context_takeover and flush):
self._deflater = _Deflater(self._window_bits)
if bfinal:
result = self._deflater.compress_and_finish(bytes)
# Add a padding block with BFINAL = 0 and BTYPE = 0.
result = result + chr(0)
self._deflater = None
return result
if flush:
# Strip last 4 octets which is LEN and NLEN field of a
# non-compressed block added for Z_SYNC_FLUSH.
return self._deflater.compress_and_flush(bytes)[:-4]
return self._deflater.compress(bytes)
class _RFC1979Inflater(object):
"""A decompressor class for byte sequence compressed and flushed following
the algorithm described in the RFC1979 section 2.1.
"""
def __init__(self, window_bits=zlib.MAX_WBITS):
self._inflater = _Inflater(window_bits)
def filter(self, bytes):
# Restore stripped LEN and NLEN field of a non-compressed block added
# for Z_SYNC_FLUSH.
self._inflater.append(bytes + '\x00\x00\xff\xff')
return self._inflater.decompress(-1)
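# Illustrative round-trip sketch (not part of the original module): the deflater strips
# the 00 00 FF FF tail that Z_SYNC_FLUSH appends, and the inflater restores it before
# decompressing, so the two filter() methods are symmetric.
#   deflater = _RFC1979Deflater(window_bits=None, no_context_takeover=False)
#   inflater = _RFC1979Inflater()
#   inflater.filter(deflater.filter('Hello WebSocket')) == 'Hello WebSocket'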
class DeflateSocket(object):
"""A wrapper class for socket object to intercept send and recv to perform
deflate compression and decompression transparently.
"""
# Size of the buffer passed to recv to receive compressed data.
_RECV_SIZE = 4096
def __init__(self, socket):
self._socket = socket
self._logger = get_class_logger(self)
self._deflater = _Deflater(zlib.MAX_WBITS)
self._inflater = _Inflater(zlib.MAX_WBITS)
def recv(self, size):
"""Receives data from the socket specified on the construction up
to the specified size. Once any data is available, returns it even
if it's smaller than the specified size.
"""
# TODO(tyoshino): Allow call with size=0. It should block until any
# decompressed data is available.
if size <= 0:
raise Exception('Non-positive size passed')
while True:
data = self._inflater.decompress(size)
if len(data) != 0:
return data
read_data = self._socket.recv(DeflateSocket._RECV_SIZE)
if not read_data:
return ''
self._inflater.append(read_data)
def sendall(self, bytes):
self.send(bytes)
def send(self, bytes):
self._socket.sendall(self._deflater.compress_and_flush(bytes))
return len(bytes)
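# Illustrative sketch (not part of the original module; connected_socket is a
# hypothetical placeholder): DeflateSocket wraps an already connected socket so that
# send() deflates outgoing data and recv() inflates incoming data.
#   wrapped = DeflateSocket(connected_socket)
#   wrapped.send('hello')       # the peer receives deflate-compressed bytes
#   data = wrapped.recv(4096)   # compressed bytes from the peer are inflated here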
# vi:sts=4 sw=4 et
|
mpl-2.0
|
mkeyran/EmotionRecognizer
|
training.py
|
1
|
1567
|
import nn_learn
import pickle
import base_model
dat = pickle.load(open("data/TrainingData/pickled_generated_sets", 'rb'))
pca_dat = pickle.load(open("data/TrainingData/pickled_generated_sets_pca", 'rb'))
model = nn_learn.NeuralNetwork(nn_learn.neural_net9_pca_lr03)
d = pca_dat
model.train(d["train_data"], d["train_labels"], d["valid_data"], d["valid_labels"], 600)
model.save()
print("Training Accuracy: {}".format(base_model.accuracy(model, pca_dat["train_data"], pca_dat["train_labels"])))
print("Accuracy:", base_model.accuracy(model, pca_dat["valid_data"], pca_dat["valid_labels"]))
print("Precision:", base_model.precision(model, pca_dat["valid_data"], pca_dat["valid_labels"]))
print("Recall:", base_model.recall(model, pca_dat["valid_data"], pca_dat["valid_labels"]))
print("Confusion Matrix:", base_model.confusion_matrix(model, pca_dat["valid_data"], pca_dat["valid_labels"]))
with open ("data/Models/"+model.params["model_name"]+"_props.txt","w") as f:
f.write("Training Accuracy: {}\n".format(base_model.accuracy(model, pca_dat["train_data"], pca_dat["train_labels"])))
f.write("Accuracy: {}\n".format(base_model.accuracy(model, pca_dat["valid_data"], pca_dat["valid_labels"])))
f.write("Precision: {}\n".format( base_model.precision(model, pca_dat["valid_data"], pca_dat["valid_labels"])))
f.write("Recall: {}\n".format( base_model.recall(model, pca_dat["valid_data"], pca_dat["valid_labels"])))
f.write("Confusion Matrix:\n {}\n".format(base_model.confusion_matrix(model, pca_dat["valid_data"], pca_dat["valid_labels"])))
|
mit
|
lmjohns3/downhill
|
downhill/__init__.py
|
1
|
4167
|
from .adaptive import *
from .base import build, Optimizer
from .dataset import Dataset
from .first_order import *
__version__ = '0.5.0pre'
def minimize(loss, train, valid=None, params=None, inputs=None, algo='rmsprop',
updates=(), monitors=(), monitor_gradients=False, batch_size=32,
train_batches=None, valid_batches=None, **kwargs):
'''Minimize a loss function with respect to some symbolic parameters.
Additional keyword arguments are passed to the underlying :class:`Optimizer
<downhill.base.Optimizer>` instance.
Parameters
----------
loss : Theano expression
Loss function to minimize. This must be a scalar-valued expression.
train : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable
Dataset to use for computing gradient updates.
valid : :class:`Dataset <downhill.dataset.Dataset>`, ndarray, or callable, optional
Dataset to use for validating the minimization process. The training
dataset is used if this is not provided.
params : list of Theano variables, optional
Symbolic variables to adjust to minimize the loss. If not given, these
will be computed automatically by walking the computation graph.
inputs : list of Theano variables, optional
Symbolic variables required to compute the loss. If not given, these
will be computed automatically by walking the computation graph.
algo : str, optional
Name of the minimization algorithm to use. Must be one of the strings
that can be passed to :func:`build`. Defaults to ``'rmsprop'``.
updates : list of update pairs, optional
        A list of pairs providing updates for the internals of the loss
computation. Normally this is empty, but it can be provided if the loss,
for example, requires an update to an internal random number generator.
monitors : dict or sequence of (str, Theano expression) tuples, optional
Additional values to monitor during optimization. These must be provided
as either a sequence of (name, expression) tuples, or as a dictionary
mapping string names to Theano expressions.
monitor_gradients : bool, optional
If True, add monitors to log the norms of the parameter gradients during
optimization. Defaults to False.
batch_size : int, optional
Size of batches provided by datasets. Defaults to 32.
train_batches : int, optional
Number of batches of training data to iterate over during one pass of
optimization. Defaults to None, which uses the entire training dataset.
valid_batches : int, optional
Number of batches of validation data to iterate over during one pass of
validation. Defaults to None, which uses the entire validation dataset.
Returns
-------
train_monitors : dict
A dictionary mapping monitor names to monitor values. This dictionary
will always contain the ``'loss'`` key, giving the value of the loss
evaluated on the training dataset.
valid_monitors : dict
A dictionary mapping monitor names to monitor values, evaluated on the
validation dataset. This dictionary will always contain the ``'loss'``
key, giving the value of the loss function. Because validation is not
always computed after every optimization update, these monitor values
may be "stale"; however, they will always contain the most recently
computed values.
'''
if not isinstance(train, Dataset):
train = Dataset(
train,
name='train',
batch_size=batch_size,
iteration_size=train_batches,
)
if valid is not None and not isinstance(valid, Dataset):
valid = Dataset(
valid,
name='valid',
batch_size=batch_size,
iteration_size=valid_batches,
)
return build(
algo,
loss=loss,
params=params,
inputs=inputs,
updates=updates,
monitors=monitors,
monitor_gradients=monitor_gradients,
).minimize(train, valid, **kwargs)
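# Minimal usage sketch (assumption: illustrative only, not part of the original module).
# A scalar Theano loss over a shared parameter is minimized against a numpy array; the
# keyword arguments mirror the defaults documented above.
#   import numpy as np
#   import theano
#   import theano.tensor as TT
#   import downhill
#   x = TT.matrix('x')
#   w = theano.shared(np.zeros((10, 1)), name='w')
#   loss = TT.sqr(TT.dot(x, w) - 1).mean()
#   train_monitors, valid_monitors = downhill.minimize(
#       loss, train=np.random.randn(100, 10).astype('f'), batch_size=16, algo='rmsprop')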
|
mit
|
ReachingOut/unisubs
|
apps/messages/tests.py
|
2
|
21327
|
# -*- coding: utf-8 -*-
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import datetime
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import TestCase
from auth.models import CustomUser as User, EmailConfirmation
from messages.models import Message
from subtitles import models as sub_models
from subtitles.pipeline import add_subtitles
from teams import tasks as team_tasks
from teams.forms import InviteForm
from teams.models import (
    Team, TeamMember, Application, Workflow, TeamVideo, Task, Setting, Invite
)
from teams.moderation_const import WAITING_MODERATION
from utils import send_templated_email
from utils.factories import *
from videos.models import Action, Video
from videos.tasks import video_changed_tasks
import messages.tasks
class MessageTest(TestCase):
def setUp(self):
self.author = UserFactory()
self.subject = "Let's talk"
self.body = "Will you please help me out with Portuguese trans?"
self.user = UserFactory()
mail.outbox = []
def _create_message(self, to_user, message_type='M', reply_to=None):
self.message = Message(user=to_user,
author=self.author,
subject=self.subject,
message_type=message_type,
content=self.body)
if reply_to is not None:
if reply_to.thread:
self.message.thread = reply_to.thread
else:
self.message.thread = reply_to.pk
self.message.save()
return self.message
def _send_email(self, to_user):
send_templated_email(to_user, "test email", "messages/email/email-confirmed.html", {})
def test_message_cleanup(self):
self._create_message(self.user)
self.assertEquals(Message.objects.filter(user=self.user).count(), 1)
Message.objects.cleanup(0)
self.assertEquals(Message.objects.filter(user=self.user).count(), 0)
self._create_message(self.user)
self.assertEquals(Message.objects.filter(user=self.user).count(), 1)
Message.objects.filter(user=self.user).update(created=datetime.datetime.now() - datetime.timedelta(days=5))
Message.objects.cleanup(6)
self.assertEquals(Message.objects.filter(user=self.user).count(), 1)
Message.objects.cleanup(4, message_type='S')
self.assertEquals(Message.objects.filter(user=self.user).count(), 1)
Message.objects.cleanup(4, message_type='M')
self.assertEquals(Message.objects.filter(user=self.user).count(), 0)
def test_message_threads(self):
m = self._create_message(self.user)
self._create_message(self.user, reply_to=m)
self._create_message(self.user, reply_to=m)
n = self._create_message(self.user, reply_to=m)
n = self._create_message(self.user, reply_to=n)
n = self._create_message(self.user, reply_to=n)
self._create_message(self.user)
self._create_message(self.user)
self.assertEquals(Message.objects.thread(n, self.user).count(), 6)
self.assertEquals(Message.objects.thread(m, self.user).count(), 6)
def test_send_email_to_allowed_user(self):
self.user.notify_by_email = True
self.user.save()
assert self.user.is_active and self.user.email
self._send_email(self.user)
self.assertEqual(len(mail.outbox), 1)
def test_send_email_to_optout_user(self):
self.user.notify_by_email = False
self.user.save()
assert self.user.is_active and self.user.email
self._send_email(self.user)
self.assertEquals(len(mail.outbox), 0)
def test_message_to_optout_user(self):
self.user.notify_by_message = False
self.user.notify_by_email = False
self.user.save()
self._send_email(self.user)
self._create_message(self.user)
self.assertEquals(len(mail.outbox), 0)
self.assertEquals(Message.objects.unread().filter(user=self.user).count(), 0)
self.assertEquals(Message.objects.filter(user=self.user).count(), 1)
def test_member_join(self):
def _get_counts(member):
email_to = "%s" %( member.user.email)
return Message.objects.filter(user=member.user).count() , \
len([x for x in mail.outbox if email_to in x.recipients()])
team , created= Team.objects.get_or_create(name='test', slug='test')
# creates dummy users:
for x in xrange(0,5):
user = UserFactory(
username="test%s" % x,
email = "test%[email protected]" % x,
)
tm = TeamMember(team=team, user=user)
if x == 0:
tm.role = TeamMember.ROLE_OWNER
owner = tm
elif x == 1:
tm.role = TeamMember.ROLE_ADMIN
admin = tm
elif x == 2:
tm.role = TeamMember.ROLE_MANAGER
manager = tm
elif x == 3:
tm.role = TeamMember.ROLE_CONTRIBUTOR
contributor = tm
if x < 4:
# don't save the last role until we have counts
tm.save()
else:
tm.role= TeamMember.ROLE_CONTRIBUTOR
        # now make sure we count previous messages
owner_messge_count_1, owner_email_count_1 = _get_counts(owner)
admin_messge_count_1, admin_email_count_1 = _get_counts(admin)
manager_messge_count_1, manager_email_count_1 = _get_counts(manager)
contributor_messge_count_1, contributor_email_count_1 = _get_counts(contributor)
# save the last team member and check that each group has appropriate counts
tm.save()
messages.tasks.team_member_new(tm.pk)
# owner and admins should receive email + message
owner_messge_count_2, owner_email_count_2 = _get_counts(owner)
self.assertEqual(owner_messge_count_1 + 1, owner_messge_count_2)
self.assertEqual(owner_email_count_1 + 1, owner_email_count_2)
admin_messge_count_2, admin_email_count_2 = _get_counts(admin)
self.assertEqual(admin_messge_count_1 + 1, admin_messge_count_2)
self.assertEqual(admin_email_count_1 + 1, admin_email_count_2)
        # manager should not
manager_messge_count_2, manager_email_count_2 = _get_counts(manager)
self.assertEqual(manager_messge_count_1 , manager_messge_count_2)
self.assertEqual(manager_email_count_1 , manager_email_count_2)
        # contributor should not
contributor_messge_count_2, contributor_email_count_2 = _get_counts(contributor)
self.assertEqual(contributor_messge_count_1 , contributor_messge_count_2)
self.assertEqual(contributor_email_count_1 , contributor_email_count_2)
        # now, this has to show up on everybody's activities feed
action = Action.objects.get(team=team, user=tm.user, action_type=Action.MEMBER_JOINED)
self.assertTrue(Action.objects.for_user(tm.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(owner.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(manager.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(contributor.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(admin.user).filter(pk=action.pk).exists())
def test_member_leave(self):
return # fix me now
def _get_counts(member):
email_to = "%s" %( member.user.email)
return Message.objects.filter(user=member.user).count() , \
len([x for x in mail.outbox if email_to in x.recipients()])
team , created= Team.objects.get_or_create(name='test', slug='test')
# creates dummy users:
for x in xrange(0,5):
user = UserFactory(
username="test%s" % x,
email = "test%[email protected]" % x,
notify_by_email = True,
)
tm = TeamMember(team=team, user=user)
if x == 0:
tm.role = TeamMember.ROLE_OWNER
owner = tm
elif x == 1:
tm.role = TeamMember.ROLE_ADMIN
admin = tm
elif x == 2:
tm.role = TeamMember.ROLE_MANAGER
manager = tm
elif x == 3:
tm.role = TeamMember.ROLE_CONTRIBUTOR
contributor = tm
if x < 4:
# don't save the last role until we have counts
tm.save()
else:
tm.role= TeamMember.ROLE_CONTRIBUTOR
tm.save()
        # now make sure we count previous messages
owner_messge_count_1, owner_email_count_1 = _get_counts(owner)
admin_messge_count_1, admin_email_count_1 = _get_counts(admin)
manager_messge_count_1, manager_email_count_1 = _get_counts(manager)
contributor_messge_count_1, contributor_email_count_1 = _get_counts(contributor)
        # now delete and check numbers
tm_user = tm.user
tm_user_pk = tm.user.pk
team_pk = tm.team.pk
tm.delete()
messages.tasks.team_member_leave(team_pk, tm_user_pk)
# save the last team member and check that each group has appropriate counts
# owner and admins should receive email + message
owner_messge_count_2, owner_email_count_2 = _get_counts(owner)
self.assertEqual(owner_messge_count_1 + 1, owner_messge_count_2)
self.assertEqual(owner_email_count_1 + 1, owner_email_count_2)
admin_messge_count_2, admin_email_count_2 = _get_counts(admin)
self.assertEqual(admin_messge_count_1 + 1, admin_messge_count_2)
self.assertEqual(admin_email_count_1 + 1, admin_email_count_2)
        # manager should not
manager_messge_count_2, manager_email_count_2 = _get_counts(manager)
self.assertEqual(manager_messge_count_1 , manager_messge_count_2)
self.assertEqual(manager_email_count_1 , manager_email_count_2)
        # contributor should not
contributor_messge_count_2, contributor_email_count_2 = _get_counts(contributor)
self.assertEqual(contributor_messge_count_1 , contributor_messge_count_2)
self.assertEqual(contributor_email_count_1 , contributor_email_count_2)
        # now, this has to show up on everybody's activities feed
action = Action.objects.get(team=team, user=tm_user, action_type=Action.MEMBER_LEFT)
self.assertTrue(Action.objects.for_user(tm.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(owner.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(manager.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(contributor.user).filter(pk=action.pk).exists())
self.assertTrue(Action.objects.for_user(admin.user).filter(pk=action.pk).exists())
def test_application_new(self):
def _get_counts(member):
email_to = "%s" %(member.user.email)
return Message.objects.filter(user=member.user).count() , \
len([x for x in mail.outbox if email_to in x.recipients()])
team , created= Team.objects.get_or_create(name='test', slug='test')
applying_user = UserFactory()
# creates dummy users:
for x in xrange(0,4):
user = UserFactory(
username="test%s" % x,
email = "test%[email protected]" % x,
notify_by_email = True,
notify_by_message = True,
)
tm = TeamMember(team=team, user=user)
if x == 0:
tm.role = TeamMember.ROLE_OWNER
owner = tm
elif x == 1:
tm.role = TeamMember.ROLE_ADMIN
admin = tm
elif x == 2:
tm.role = TeamMember.ROLE_MANAGER
manager = tm
elif x == 3:
tm.role = TeamMember.ROLE_CONTRIBUTOR
contributor = tm
tm.save()
        # now make sure we count previous messages
owner_messge_count_1, owner_email_count_1 = _get_counts(owner)
admin_messge_count_1, admin_email_count_1 = _get_counts(admin)
manager_messge_count_1, manager_email_count_1 = _get_counts(manager)
contributor_messge_count_1, contributor_email_count_1 = _get_counts(contributor)
        # now delete and check numbers
app = Application.objects.create(team=team,user=applying_user)
app.save()
messages.tasks.application_sent.run(app.pk)
# owner and admins should receive email + message
owner_messge_count_2, owner_email_count_2 = _get_counts(owner)
self.assertEqual(owner_messge_count_1 + 1, owner_messge_count_2)
self.assertEqual(owner_email_count_1 + 1, owner_email_count_2)
admin_messge_count_2, admin_email_count_2 = _get_counts(admin)
self.assertEqual(admin_messge_count_1 + 1, admin_messge_count_2)
self.assertEqual(admin_email_count_1 + 1, admin_email_count_2)
        # manager should not
manager_messge_count_2, manager_email_count_2 = _get_counts(manager)
self.assertEqual(manager_messge_count_1 , manager_messge_count_2)
self.assertEqual(manager_email_count_1 , manager_email_count_2)
        # contributor should not
contributor_messge_count_2, contributor_email_count_2 = _get_counts(contributor)
self.assertEqual(contributor_messge_count_1 , contributor_messge_count_2)
self.assertEqual(contributor_email_count_1 , contributor_email_count_2)
def test_account_verified(self):
user = UserFactory(notify_by_email=True)
c = EmailConfirmation.objects.send_confirmation(user)
num_emails = len(mail.outbox)
num_messages = Message.objects.filter(user=user).count()
EmailConfirmation.objects.confirm_email(c.confirmation_key)
self.assertEqual(num_emails +1, len(mail.outbox))
self.assertEqual(num_messages +1,
Message.objects.filter(user=user).count())
    def test_team_invitation_sent(self):
team = TeamFactory(name='test', slug='test')
owner = TeamMemberFactory(team=team, role=TeamMember.ROLE_OWNER)
applying_user = UserFactory()
applying_user.notify_by_email = True
applying_user.save()
mail.outbox = []
message = "Will you be my valentine?"
f = InviteForm(user=owner.user, team=team,data={
"user_id":applying_user.id,
"role":"admin",
"message": message,
})
f.is_valid()
f.save()
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertIn(applying_user.email, msg.to[0] )
self.assertIn(message, msg.body, )
def test_send_message_view(self):
to_user = UserFactory()
user = UserFactory(username='username')
user.notify_by_email = True
user.set_password('username')
user.save()
mail.outbox = []
self.client.login(username='username', password='username')
self.client.post(reverse('messages:new'), {"user":to_user.pk, "subject": "hey", 'content':'test'})
self.assertEqual(len(mail.outbox), 1)
m = mail.outbox[0]
self.assertTrue(to_user.email in m.to)
def test_messages_remain_after_team_membership(self):
# Here's the scenario:
# User is invited to a team
# - User accepts invitation
# - Message for the invitation gets deleted -> wrong!
user = UserFactory(notify_by_message=True)
owner = UserFactory(notify_by_message=True)
team = Team.objects.create(name='test-team', slug='test-team', membership_policy=Team.APPLICATION)
invite_form = InviteForm(team, owner, {
'user_id': user.pk,
'message': 'Subtitle ALL the things!',
'role':'contributor',
})
invite_form.is_valid()
self.assertFalse(invite_form.errors)
self.assertEquals(Message.objects.for_user(user).count(), 0)
invite = invite_form.save()
# user has the invitation message on their inbox now
self.assertEquals(Message.objects.for_user(user).count(), 1)
invite_message = Message.objects.for_user(user)[0]
# now user accepts invite
invite.accept()
        # they should now be a team member
self.assertTrue(team.members.filter(user=user).exists())
        # the message should still be in their inbox
self.assertIn(invite_message, Message.objects.for_user(user))
class TeamBlockSettingsTest(TestCase):
def test_block_settings_for_team(self):
team = TeamFactory()
owner = UserFactory(
notify_by_email=True,
notify_by_message=True)
TeamMemberFactory(team=team, user=owner,
role=TeamMember.ROLE_OWNER)
user = UserFactory(notify_by_email=True)
member = TeamMemberFactory(team=team, user=user)
team_video = TeamVideoFactory(team=team)
video = team_video.video
invite = Invite.objects.create(team=team, user=user, author=owner)
task_assigned = Task.objects.create(team=team, team_video=team_video,
type=10, assignee=member.user)
subs = [
(0, 1000, 'Hello', {}),
(2000, 5000, 'world.', {})
]
sv = add_subtitles(video, 'en', subs)
task_with_version = Task.objects.create(team=team,
team_video=team_video,
type=10,
assignee=member.user,
new_subtitle_version=sv,
language='en')
to_test = (
("block_invitation_sent_message",
messages.tasks.team_invitation_sent,
(invite.pk,)),
("block_application_sent_message",
messages.tasks.application_sent,
(Application.objects.get_or_create(team=team, note='', user=user)[0].pk,)),
("block_application_denided_message",
messages.tasks.team_application_denied,
(Application.objects.get_or_create(team=team, note='', user=user)[0].pk,)),
("block_team_member_new_message",
messages.tasks.team_member_new,
(member.pk, )),
("block_team_member_leave_message",
messages.tasks.team_member_leave,
(team.pk,member.user.pk )),
("block_task_assigned_message",
messages.tasks.team_task_assigned,
(task_assigned.pk,)),
("block_reviewed_and_published_message",
messages.tasks.reviewed_and_published,
(task_with_version.pk,)),
("block_reviewed_and_pending_approval_message",
messages.tasks.reviewed_and_pending_approval,
(task_with_version.pk,)),
("block_reviewed_and_sent_back_message",
messages.tasks.reviewed_and_sent_back,
(task_with_version.pk,)),
("block_approved_message",
messages.tasks.approved_notification,
(task_with_version.pk,)),
)
for setting_name, function, args in to_test:
team.settings.all().delete()
Message.objects.all().delete()
if setting_name == 'block_application_sent_message':
pass
function.run(*args)
self.assertTrue(Message.objects.count() > 0,
"%s is off, so this message should be sent" % setting_name)
Setting.objects.create(team=team, key=Setting.KEY_IDS[setting_name])
Message.objects.all().delete()
function.run(*args)
            self.assertEquals(Message.objects.all().count(), 0,
                "%s is on, so this message should *not* be sent" % setting_name)
# add videos notification is a bit different
setting_name = "block_new_video_message"
Setting.objects.create(team=team, key=Setting.KEY_IDS[setting_name])
team_tasks.add_videos_notification_daily()
        self.assertEquals(Message.objects.all().count(), 0,
            "%s is on, so this message should *not* be sent" % setting_name)
|
agpl-3.0
|
adhoc-dev/oca-account-financial-tools
|
account_fiscal_position_vat_check/account_invoice.py
|
26
|
2832
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Fiscal Position VAT Check module for OpenERP
# Copyright (C) 2013-2014 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
from openerp.exceptions import except_orm
class account_fiscal_position(models.Model):
_inherit = 'account.fiscal.position'
customer_must_have_vat = fields.Boolean(
string='Customer Must Have VAT number',
help="If enabled, Odoo will check that the customer has a VAT "
"number when the user validates a customer invoice/refund.")
class account_invoice(models.Model):
_inherit = 'account.invoice'
@api.multi
def action_move_create(self):
'''Check that the customer has VAT set
if required by the fiscal position'''
for invoice in self:
if (
invoice.type in ('out_invoice', 'out_refund') and
invoice.fiscal_position and
invoice.fiscal_position.customer_must_have_vat and
not invoice.partner_id.vat):
if invoice.type == 'out_invoice':
type_label = _('a Customer Invoice')
else:
type_label = _('a Customer Refund')
raise except_orm(
_('Missing VAT number:'),
                    _("You are trying to validate %s "
                      "with the fiscal position '%s' "
                      "that requires the customer to have a VAT number. "
                      "But the Customer '%s' doesn't "
                      "have a VAT number in OpenERP. "
                      "Please add the VAT number of this Customer in Odoo "
                      "and try to validate again.")
% (type_label, invoice.fiscal_position.name,
invoice.partner_id.name))
return super(account_invoice, self).action_move_create()
|
agpl-3.0
|
akosyakov/intellij-community
|
python/lib/Lib/site-packages/django/test/simple.py
|
73
|
15048
|
import sys
import signal
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models import get_app, get_apps
from django.test import _doctest as doctest
from django.test.utils import setup_test_environment, teardown_test_environment
from django.test.testcases import OutputChecker, DocTestRunner, TestCase
from django.utils import unittest
try:
all
except NameError:
from django.utils.itercompat import all
__all__ = ('DjangoTestRunner', 'DjangoTestSuiteRunner', 'run_tests')
# The module name for tests outside models.py
TEST_MODULE = 'tests'
doctestOutputChecker = OutputChecker()
class DjangoTestRunner(unittest.TextTestRunner):
def __init__(self, *args, **kwargs):
import warnings
        warnings.warn(
            "DjangoTestRunner is deprecated; its functionality is indistinguishable from TextTestRunner",
PendingDeprecationWarning
)
super(DjangoTestRunner, self).__init__(*args, **kwargs)
def get_tests(app_module):
try:
app_path = app_module.__name__.split('.')[:-1]
test_module = __import__('.'.join(app_path + [TEST_MODULE]), {}, {}, TEST_MODULE)
except ImportError, e:
# Couldn't import tests.py. Was it due to a missing file, or
# due to an import error in a tests.py that actually exists?
import os.path
from imp import find_module
try:
mod = find_module(TEST_MODULE, [os.path.dirname(app_module.__file__)])
except ImportError:
# 'tests' module doesn't exist. Move on.
test_module = None
else:
# The module exists, so there must be an import error in the
# test module itself. We don't need the module; so if the
# module was a single file module (i.e., tests.py), close the file
# handle returned by find_module. Otherwise, the test module
# is a directory, and there is nothing to close.
if mod[0]:
mod[0].close()
raise
return test_module
def build_suite(app_module):
"Create a complete Django test suite for the provided application module"
suite = unittest.TestSuite()
# Load unit and doctests in the models.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(app_module, 'suite'):
suite.addTest(app_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(app_module))
try:
suite.addTest(doctest.DocTestSuite(app_module,
checker=doctestOutputChecker,
runner=DocTestRunner))
except ValueError:
# No doc tests in models.py
pass
# Check to see if a separate 'tests' module exists parallel to the
# models module
test_module = get_tests(app_module)
if test_module:
# Load unit and doctests in the tests.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(test_module, 'suite'):
suite.addTest(test_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_module))
try:
suite.addTest(doctest.DocTestSuite(test_module,
checker=doctestOutputChecker,
runner=DocTestRunner))
except ValueError:
# No doc tests in tests.py
pass
return suite
def build_test(label):
"""Construct a test case with the specified label. Label should be of the
form model.TestClass or model.TestClass.test_method. Returns an
instantiated test or test suite corresponding to the label provided.
"""
parts = label.split('.')
if len(parts) < 2 or len(parts) > 3:
raise ValueError("Test label '%s' should be of the form app.TestCase or app.TestCase.test_method" % label)
#
# First, look for TestCase instances with a name that matches
#
app_module = get_app(parts[0])
test_module = get_tests(app_module)
TestClass = getattr(app_module, parts[1], None)
# Couldn't find the test class in models.py; look in tests.py
if TestClass is None:
if test_module:
TestClass = getattr(test_module, parts[1], None)
try:
if issubclass(TestClass, unittest.TestCase):
if len(parts) == 2: # label is app.TestClass
try:
return unittest.TestLoader().loadTestsFromTestCase(TestClass)
except TypeError:
raise ValueError("Test label '%s' does not refer to a test class" % label)
else: # label is app.TestClass.test_method
return TestClass(parts[2])
except TypeError:
# TestClass isn't a TestClass - it must be a method or normal class
pass
#
# If there isn't a TestCase, look for a doctest that matches
#
tests = []
for module in app_module, test_module:
try:
doctests = doctest.DocTestSuite(module,
checker=doctestOutputChecker,
runner=DocTestRunner)
# Now iterate over the suite, looking for doctests whose name
# matches the pattern that was given
for test in doctests:
if test._dt_test.name in (
'%s.%s' % (module.__name__, '.'.join(parts[1:])),
'%s.__test__.%s' % (module.__name__, '.'.join(parts[1:]))):
tests.append(test)
except ValueError:
# No doctests found.
pass
# If no tests were found, then we were given a bad test label.
if not tests:
raise ValueError("Test label '%s' does not refer to a test" % label)
# Construct a suite out of the tests that matched.
return unittest.TestSuite(tests)
def partition_suite(suite, classes, bins):
"""
Partitions a test suite by test type.
classes is a sequence of types
bins is a sequence of TestSuites, one more than classes
Tests of type classes[i] are added to bins[i],
    tests with no match found in classes are placed in bins[-1]
"""
for test in suite:
if isinstance(test, unittest.TestSuite):
partition_suite(test, classes, bins)
else:
for i in range(len(classes)):
if isinstance(test, classes[i]):
bins[i].addTest(test)
break
else:
bins[-1].addTest(test)
def reorder_suite(suite, classes):
"""
Reorders a test suite by test type.
classes is a sequence of types
    All tests of type classes[0] are placed first, then tests of type classes[1], etc.
Tests with no match in classes are placed last.
"""
class_count = len(classes)
bins = [unittest.TestSuite() for i in range(class_count+1)]
partition_suite(suite, classes, bins)
for i in range(class_count):
bins[0].addTests(bins[i+1])
return bins[0]
def dependency_ordered(test_databases, dependencies):
"""Reorder test_databases into an order that honors the dependencies
described in TEST_DEPENDENCIES.
"""
ordered_test_databases = []
resolved_databases = set()
while test_databases:
changed = False
deferred = []
while test_databases:
signature, aliases = test_databases.pop()
dependencies_satisfied = True
for alias in aliases:
if alias in dependencies:
if all(a in resolved_databases for a in dependencies[alias]):
# all dependencies for this alias are satisfied
dependencies.pop(alias)
resolved_databases.add(alias)
else:
dependencies_satisfied = False
else:
resolved_databases.add(alias)
if dependencies_satisfied:
ordered_test_databases.append((signature, aliases))
changed = True
else:
deferred.append((signature, aliases))
if not changed:
raise ImproperlyConfigured("Circular dependency in TEST_DEPENDENCIES")
test_databases = deferred
return ordered_test_databases
class DjangoTestSuiteRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def setup_test_environment(self, **kwargs):
setup_test_environment()
settings.DEBUG = False
unittest.installHandler()
def build_suite(self, test_labels, extra_tests=None, **kwargs):
suite = unittest.TestSuite()
if test_labels:
for label in test_labels:
if '.' in label:
suite.addTest(build_test(label))
else:
app = get_app(label)
suite.addTest(build_suite(app))
else:
for app in get_apps():
suite.addTest(build_suite(app))
if extra_tests:
for test in extra_tests:
suite.addTest(test)
return reorder_suite(suite, (TestCase,))
def setup_databases(self, **kwargs):
from django.db import connections, DEFAULT_DB_ALIAS
# First pass -- work out which databases actually need to be created,
# and which ones are test mirrors or duplicate entries in DATABASES
mirrored_aliases = {}
test_databases = {}
dependencies = {}
for alias in connections:
connection = connections[alias]
if connection.settings_dict['TEST_MIRROR']:
# If the database is marked as a test mirror, save
# the alias.
mirrored_aliases[alias] = connection.settings_dict['TEST_MIRROR']
else:
# Store a tuple with DB parameters that uniquely identify it.
# If we have two aliases with the same values for that tuple,
# we only need to create the test database once.
test_databases.setdefault((
connection.settings_dict['HOST'],
connection.settings_dict['PORT'],
connection.settings_dict['ENGINE'],
connection.settings_dict['NAME'],
), []).append(alias)
if 'TEST_DEPENDENCIES' in connection.settings_dict:
dependencies[alias] = connection.settings_dict['TEST_DEPENDENCIES']
else:
if alias != DEFAULT_DB_ALIAS:
dependencies[alias] = connection.settings_dict.get('TEST_DEPENDENCIES', [DEFAULT_DB_ALIAS])
# Second pass -- actually create the databases.
old_names = []
mirrors = []
for (host, port, engine, db_name), aliases in dependency_ordered(test_databases.items(), dependencies):
# Actually create the database for the first connection
connection = connections[aliases[0]]
old_names.append((connection, db_name, True))
test_db_name = connection.creation.create_test_db(self.verbosity, autoclobber=not self.interactive)
for alias in aliases[1:]:
connection = connections[alias]
if db_name:
old_names.append((connection, db_name, False))
connection.settings_dict['NAME'] = test_db_name
else:
# If settings_dict['NAME'] isn't defined, we have a backend where
# the name isn't important -- e.g., SQLite, which uses :memory:.
# Force create the database instead of assuming it's a duplicate.
old_names.append((connection, db_name, True))
connection.creation.create_test_db(self.verbosity, autoclobber=not self.interactive)
for alias, mirror_alias in mirrored_aliases.items():
mirrors.append((alias, connections[alias].settings_dict['NAME']))
connections[alias].settings_dict['NAME'] = connections[mirror_alias].settings_dict['NAME']
return old_names, mirrors
def run_suite(self, suite, **kwargs):
return unittest.TextTestRunner(verbosity=self.verbosity, failfast=self.failfast).run(suite)
def teardown_databases(self, old_config, **kwargs):
from django.db import connections
old_names, mirrors = old_config
# Point all the mirrors back to the originals
for alias, old_name in mirrors:
connections[alias].settings_dict['NAME'] = old_name
# Destroy all the non-mirror databases
for connection, old_name, destroy in old_names:
if destroy:
connection.creation.destroy_test_db(old_name, self.verbosity)
else:
connection.settings_dict['NAME'] = old_name
def teardown_test_environment(self, **kwargs):
unittest.removeHandler()
teardown_test_environment()
def suite_result(self, suite, result, **kwargs):
return len(result.failures) + len(result.errors)
def run_tests(self, test_labels, extra_tests=None, **kwargs):
"""
Run the unit tests for all the test labels in the provided list.
Labels must be of the form:
- app.TestClass.test_method
Run a single specific test method
- app.TestClass
Run all the test methods in a given class
- app
Search for doctests and unittests in the named application.
When looking for tests, the test runner will look in the models and
tests modules for the application.
A list of 'extra' tests may also be provided; these tests
will be added to the test suite.
Returns the number of tests that failed.
"""
self.setup_test_environment()
suite = self.build_suite(test_labels, extra_tests)
old_config = self.setup_databases()
result = self.run_suite(suite)
self.teardown_databases(old_config)
self.teardown_test_environment()
return self.suite_result(suite, result)
def run_tests(test_labels, verbosity=1, interactive=True, failfast=False, extra_tests=None):
import warnings
warnings.warn(
'The run_tests() test runner has been deprecated in favor of DjangoTestSuiteRunner.',
DeprecationWarning
)
test_runner = DjangoTestSuiteRunner(verbosity=verbosity, interactive=interactive, failfast=failfast)
return test_runner.run_tests(test_labels, extra_tests=extra_tests)
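# Illustrative usage sketch (assumed app/test names, not from the original module):
# programmatic use of the suite runner, with label formats as described in the
# run_tests() docstring above.
#
#   runner = DjangoTestSuiteRunner(verbosity=2, interactive=False)
#   failures = runner.run_tests(['myapp', 'otherapp.SomeTestCase.test_method'])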
|
apache-2.0
|
cchurch/ansible
|
test/units/modules/storage/netapp/test_netapp_e_asup.py
|
56
|
7858
|
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import json
from ansible.modules.storage.netapp.netapp_e_asup import Asup
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from units.compat import mock
class AsupTest(ModuleTestCase):
REQUIRED_PARAMS = {
'api_username': 'rw',
'api_password': 'password',
'api_url': 'http://localhost',
'ssid': '1',
}
REQ_FUNC = 'ansible.modules.storage.netapp.netapp_e_asup.request'
def _set_args(self, args=None):
module_args = self.REQUIRED_PARAMS.copy()
if args is not None:
module_args.update(args)
set_module_args(module_args)
def test_get_config_asup_capable_false(self):
"""Ensure we fail correctly if ASUP is not available on this platform"""
self._set_args()
expected = dict(asupCapable=False, onDemandCapable=True)
asup = Asup()
        # Expecting a failure because ASUP is not supported on this platform
with self.assertRaisesRegexp(AnsibleFailJson, r"not supported"):
with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
asup.get_configuration()
def test_get_config_on_demand_capable_false(self):
"""Ensure we fail correctly if ASUP is not available on this platform"""
self._set_args()
expected = dict(asupCapable=True, onDemandCapable=False)
asup = Asup()
        # Expecting a failure because on-demand ASUP is not supported on this platform
with self.assertRaisesRegexp(AnsibleFailJson, r"not supported"):
with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
asup.get_configuration()
def test_get_config(self):
"""Validate retrieving the ASUP configuration"""
self._set_args()
expected = dict(asupCapable=True, onDemandCapable=True)
asup = Asup()
with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
config = asup.get_configuration()
self.assertEquals(config, expected)
    def test_update_configuration(self):
        """Validate updating the ASUP configuration"""
self._set_args(dict(asup='enabled'))
expected = dict()
initial = dict(asupCapable=True,
asupEnabled=True,
onDemandEnabled=False,
remoteDiagsEnabled=False,
schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
asup = Asup()
with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
with mock.patch.object(asup, 'get_configuration', return_value=initial):
updated = asup.update_configuration()
self.assertTrue(req.called)
self.assertTrue(updated)
    def test_update_configuration_asup_disable(self):
        """Validate disabling ASUP via update_configuration"""
self._set_args(dict(asup='disabled'))
expected = dict()
initial = dict(asupCapable=True,
asupEnabled=True,
onDemandEnabled=False,
remoteDiagsEnabled=False,
schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
asup = Asup()
with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
with mock.patch.object(asup, 'get_configuration', return_value=initial):
updated = asup.update_configuration()
self.assertTrue(updated)
self.assertTrue(req.called)
# Ensure it was called with the right arguments
called_with = req.call_args
body = json.loads(called_with[1]['data'])
self.assertFalse(body['asupEnabled'])
    def test_update_configuration_enable(self):
        """Validate enabling ASUP via update_configuration"""
self._set_args(dict(asup='enabled'))
expected = dict()
initial = dict(asupCapable=False,
asupEnabled=False,
onDemandEnabled=False,
remoteDiagsEnabled=False,
schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
asup = Asup()
with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
with mock.patch.object(asup, 'get_configuration', return_value=initial):
updated = asup.update_configuration()
self.assertTrue(updated)
self.assertTrue(req.called)
# Ensure it was called with the right arguments
called_with = req.call_args
body = json.loads(called_with[1]['data'])
self.assertTrue(body['asupEnabled'])
self.assertTrue(body['onDemandEnabled'])
self.assertTrue(body['remoteDiagsEnabled'])
def test_update_configuration_request_exception(self):
"""Validate exception handling when request throws an exception."""
config_response = dict(asupEnabled=True,
onDemandEnabled=True,
remoteDiagsEnabled=True,
schedule=dict(daysOfWeek=[],
dailyMinTime=0,
weeklyMinTime=0,
dailyMaxTime=24,
weeklyMaxTime=24))
self._set_args(dict(state="enabled"))
asup = Asup()
with self.assertRaises(Exception):
with mock.patch.object(asup, 'get_configuration', return_value=config_response):
with mock.patch(self.REQ_FUNC, side_effect=Exception):
asup.update_configuration()
    def test_init_schedule(self):
        """Validate correct schedule initialization"""
self._set_args(dict(state="enabled", active=True, days=["sunday", "monday", "tuesday"], start=20, end=24))
asup = Asup()
self.assertTrue(asup.asup)
        self.assertEquals(asup.days, ["sunday", "monday", "tuesday"])
self.assertEquals(asup.start, 1200)
self.assertEquals(asup.end, 1439)
    def test_init_schedule_invalid(self):
        """Validate that updating ASUP with an invalid schedule fails."""
self._set_args(dict(state="enabled", active=True, start=22, end=20))
with self.assertRaisesRegexp(AnsibleFailJson, r"start time is invalid"):
Asup()
    def test_init_schedule_days_invalid(self):
        """Validate that updating ASUP with invalid schedule days fails."""
self._set_args(dict(state="enabled", active=True, days=["someday", "thataday", "nonday"]))
with self.assertRaises(AnsibleFailJson):
Asup()
def test_update(self):
"""Validate updating ASUP with valid schedule passes"""
initial = dict(asupCapable=True,
onDemandCapable=True,
asupEnabled=True,
onDemandEnabled=False,
remoteDiagsEnabled=False,
schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
self._set_args(dict(state="enabled", active=True, days=["sunday", "monday", "tuesday"], start=10, end=20))
asup = Asup()
with self.assertRaisesRegexp(AnsibleExitJson, r"ASUP settings have been updated"):
with mock.patch(self.REQ_FUNC, return_value=(200, dict(asupCapable=True))):
with mock.patch.object(asup, "get_configuration", return_value=initial):
asup.update()
|
gpl-3.0
|
Kongsea/tensorflow
|
tensorflow/python/tools/optimize_for_inference.py
|
106
|
4714
|
# pylint: disable=g-bad-file-header
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Removes parts of a graph that are only needed for training.
There are several common transformations that can be applied to GraphDefs
created to train a model that help reduce the amount of computation needed when
the network is used only for inference. These include:
- Removing training-only operations like checkpoint saving.
- Stripping out parts of the graph that are never reached.
- Removing debug operations like CheckNumerics.
- Folding batch normalization ops into the pre-calculated weights.
- Fusing common operations into unified versions.
This script takes either a frozen binary GraphDef file (where the weight
variables have been converted into constants by the freeze_graph script), or a
text GraphDef proto file (the weight variables are stored in a separate
checkpoint file), and outputs a new GraphDef with the optimizations applied.
If the input graph is a text graph file, make sure to include the node that
restores the variable weights in output_names. That node is usually named
"restore_all".
An example of command-line usage is:
bazel build tensorflow/python/tools:optimize_for_inference && \
bazel-bin/tensorflow/python/tools/optimize_for_inference \
--input=frozen_inception_graph.pb \
--output=optimized_inception_graph.pb \
--frozen_graph=True \
--input_names=Mul \
--output_names=softmax
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import sys
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_io
from tensorflow.python.platform import app
from tensorflow.python.platform import gfile
from tensorflow.python.tools import optimize_for_inference_lib
FLAGS = None
def main(unused_args):
if not gfile.Exists(FLAGS.input):
print("Input graph file '" + FLAGS.input + "' does not exist!")
return -1
input_graph_def = graph_pb2.GraphDef()
with gfile.Open(FLAGS.input, "rb") as f:
data = f.read()
if FLAGS.frozen_graph:
input_graph_def.ParseFromString(data)
else:
text_format.Merge(data.decode("utf-8"), input_graph_def)
output_graph_def = optimize_for_inference_lib.optimize_for_inference(
input_graph_def,
FLAGS.input_names.split(","),
FLAGS.output_names.split(","), FLAGS.placeholder_type_enum)
if FLAGS.frozen_graph:
f = gfile.FastGFile(FLAGS.output, "w")
f.write(output_graph_def.SerializeToString())
else:
graph_io.write_graph(output_graph_def,
os.path.dirname(FLAGS.output),
os.path.basename(FLAGS.output))
return 0
def parse_args():
"""Parses command line arguments."""
parser = argparse.ArgumentParser()
parser.register("type", "bool", lambda v: v.lower() == "true")
parser.add_argument(
"--input",
type=str,
default="",
help="TensorFlow \'GraphDef\' file to load.")
parser.add_argument(
"--output",
type=str,
default="",
help="File to save the output graph to.")
parser.add_argument(
"--input_names",
type=str,
default="",
help="Input node names, comma separated.")
parser.add_argument(
"--output_names",
type=str,
default="",
help="Output node names, comma separated.")
parser.add_argument(
"--frozen_graph",
nargs="?",
const=True,
type="bool",
default=True,
help="""\
If true, the input graph is a binary frozen GraphDef
file; if false, it is a text GraphDef proto file.\
""")
parser.add_argument(
"--placeholder_type_enum",
type=int,
default=dtypes.float32.as_datatype_enum,
help="The AttrValue enum to use for placeholders.")
return parser.parse_known_args()
if __name__ == "__main__":
FLAGS, unparsed = parse_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
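# Illustrative sketch: the same optimization can be driven programmatically.
# This mirrors what main() does above; the file name comes from the module
# docstring example and is only a placeholder.
#
#   graph_def = graph_pb2.GraphDef()
#   with gfile.Open("frozen_inception_graph.pb", "rb") as f:
#       graph_def.ParseFromString(f.read())
#   optimized = optimize_for_inference_lib.optimize_for_inference(
#       graph_def, ["Mul"], ["softmax"], dtypes.float32.as_datatype_enum)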
|
apache-2.0
|
xzYue/odoo
|
addons/l10n_us/__openerp__.py
|
341
|
1763
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'United States - Chart of accounts',
'version': '1.1',
'author': 'OpenERP SA',
'category': 'Localization/Account Charts',
'description': """
United States - Chart of accounts.
==================================
""",
'website': 'http://www.openerp.com',
'depends': ['account_chart', 'account_anglo_saxon'],
'data': [
'l10n_us_account_type.xml',
'account_chart_template.xml',
'account.account.template.csv',
'account_tax_code_template.xml',
'account_tax_template.xml',
'account_chart_template_after.xml',
'l10n_us_wizard.xml'
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
jart/tensorflow
|
tensorflow/contrib/solvers/python/ops/linear_equations.py
|
36
|
5609
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Solvers for linear equations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.contrib.solvers.python.ops import util
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
def conjugate_gradient(operator,
rhs,
preconditioner=None,
x=None,
tol=1e-4,
max_iter=20,
name="conjugate_gradient"):
r"""Conjugate gradient solver.
Solves a linear system of equations `A*x = rhs` for selfadjoint, positive
definite matrix `A` and right-hand side vector `rhs`, using an iterative,
matrix-free algorithm where the action of the matrix A is represented by
`operator`. The iteration terminates when either the number of iterations
exceeds `max_iter` or when the residual norm has been reduced to `tol`
times its initial value, i.e. \\(||rhs - A x_k|| <= tol ||rhs||\\).
Args:
operator: An object representing a linear operator with attributes:
- shape: Either a list of integers or a 1-D `Tensor` of type `int32` of
          length 2. `shape[0]` is the dimension of the domain of the operator,
          `shape[1]` is the dimension of the co-domain of the operator. In other
          words, if operator represents an N x N matrix A, `shape` must contain
`[N, N]`.
- dtype: The datatype of input to and output from `apply`.
- apply: Callable object taking a vector `x` as input and returning a
vector with the result of applying the operator to `x`, i.e. if
`operator` represents matrix `A`, `apply` should return `A * x`.
    rhs: A rank-1 `Tensor` of shape `[N]` containing the right-hand side vector.
preconditioner: An object representing a linear operator, see `operator`
for detail. The preconditioner should approximate the inverse of `A`.
An efficient preconditioner could dramatically improve the rate of
      convergence. If `preconditioner` represents matrix `M` (`M` approximates
`A^{-1}`), the algorithm uses `preconditioner.apply(x)` to estimate
`A^{-1}x`. For this to be useful, the cost of applying `M` should be
much lower than computing `A^{-1}` directly.
x: A rank-1 `Tensor` of shape `[N]` containing the initial guess for the
solution.
tol: A float scalar convergence tolerance.
max_iter: An integer giving the maximum number of iterations.
name: A name scope for the operation.
Returns:
output: A namedtuple representing the final state with fields:
- i: A scalar `int32` `Tensor`. Number of iterations executed.
- x: A rank-1 `Tensor` of shape `[N]` containing the computed solution.
      - r: A rank-1 `Tensor` of shape `[N]` containing the residual vector.
- p: A rank-1 `Tensor` of shape `[N]`. `A`-conjugate basis vector.
      - gamma: \\(r \cdot M \cdot r\\), equivalent to \\(||r||_2^2\\) when
`preconditioner=None`.
"""
# ephemeral class holding CG state.
cg_state = collections.namedtuple("CGState", ["i", "x", "r", "p", "gamma"])
def stopping_criterion(i, state):
return math_ops.logical_and(i < max_iter, linalg_ops.norm(state.r) > tol)
def cg_step(i, state): # pylint: disable=missing-docstring
z = operator.apply(state.p)
alpha = state.gamma / util.dot(state.p, z)
x = state.x + alpha * state.p
r = state.r - alpha * z
if preconditioner is None:
gamma = util.dot(r, r)
beta = gamma / state.gamma
p = r + beta * state.p
else:
q = preconditioner.apply(r)
gamma = util.dot(r, q)
beta = gamma / state.gamma
p = q + beta * state.p
return i + 1, cg_state(i + 1, x, r, p, gamma)
with ops.name_scope(name):
n = operator.shape[1:]
rhs = array_ops.expand_dims(rhs, -1)
if x is None:
x = array_ops.expand_dims(
array_ops.zeros(n, dtype=rhs.dtype.base_dtype), -1)
r0 = rhs
else:
x = array_ops.expand_dims(x, -1)
r0 = rhs - operator.apply(x)
if preconditioner is None:
p0 = r0
else:
p0 = preconditioner.apply(r0)
gamma0 = util.dot(r0, p0)
tol *= linalg_ops.norm(r0)
i = constant_op.constant(0, dtype=dtypes.int32)
state = cg_state(i=i, x=x, r=r0, p=p0, gamma=gamma0)
_, state = control_flow_ops.while_loop(stopping_criterion, cg_step,
[i, state])
return cg_state(
state.i,
x=array_ops.squeeze(state.x),
r=array_ops.squeeze(state.r),
p=array_ops.squeeze(state.p),
gamma=state.gamma)
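# Illustrative usage sketch (assumed names, not part of the upstream module):
# a dense matrix wrapped as the minimal operator interface documented above
# (shape / dtype / apply). The helper is defined only for illustration and is
# never called at import time.
def _example_dense_solve():
  """Solve a small 2x2 system A*x = rhs with conjugate_gradient()."""
  operator_cls = collections.namedtuple("Operator", ["shape", "dtype", "apply"])
  a = constant_op.constant([[4., 1.], [1., 3.]])
  operator = operator_cls(shape=[2, 2], dtype=a.dtype,
                          apply=lambda v: math_ops.matmul(a, v))
  rhs = constant_op.constant([1., 2.])
  # The returned namedtuple's `x` field holds the (squeezed) solution tensor.
  return conjugate_gradient(operator, rhs, tol=1e-6, max_iter=25)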
|
apache-2.0
|
gbaty/pyside2
|
tests/signals/signal_emission_gui_test.py
|
3
|
4080
|
#!/usr/bin/env python
"""Tests covering signal emission and receiving to python slots"""
import unittest
from PySide2.QtCore import QObject, SIGNAL, SLOT
try:
from PySide2.QtWidgets import QSpinBox, QPushButton
hasQtGui = True
except ImportError:
hasQtGui = False
from helper import BasicPySlotCase, UsesQApplication
if hasQtGui:
class ButtonPySlot(UsesQApplication, BasicPySlotCase):
"""Tests the connection of python slots to QPushButton signals"""
def testButtonClicked(self):
"""Connection of a python slot to QPushButton.clicked()"""
button = QPushButton('Mylabel')
QObject.connect(button, SIGNAL('clicked()'), self.cb)
self.args = tuple()
button.emit(SIGNAL('clicked(bool)'), False)
self.assert_(self.called)
def testButtonClick(self):
"""Indirect qt signal emission using the QPushButton.click() method """
button = QPushButton('label')
QObject.connect(button, SIGNAL('clicked()'), self.cb)
self.args = tuple()
button.click()
self.assert_(self.called)
if hasQtGui:
class SpinBoxPySlot(UsesQApplication, BasicPySlotCase):
"""Tests the connection of python slots to QSpinBox signals"""
def setUp(self):
super(SpinBoxPySlot, self).setUp()
self.spin = QSpinBox()
def tearDown(self):
del self.spin
super(SpinBoxPySlot, self).tearDown()
def testSpinBoxValueChanged(self):
"""Connection of a python slot to QSpinBox.valueChanged(int)"""
QObject.connect(self.spin, SIGNAL('valueChanged(int)'), self.cb)
self.args = [3]
self.spin.emit(SIGNAL('valueChanged(int)'), *self.args)
self.assert_(self.called)
def testSpinBoxValueChangedImplicit(self):
"""Indirect qt signal emission using QSpinBox.setValue(int)"""
QObject.connect(self.spin, SIGNAL('valueChanged(int)'), self.cb)
self.args = [42]
self.spin.setValue(self.args[0])
self.assert_(self.called)
def atestSpinBoxValueChangedFewArgs(self):
"""Emission of signals with fewer arguments than needed"""
# XXX: PyQt4 crashes on the assertRaises
QObject.connect(self.spin, SIGNAL('valueChanged(int)'), self.cb)
self.args = (554,)
self.assertRaises(TypeError, self.spin.emit, SIGNAL('valueChanged(int)'))
if hasQtGui:
class QSpinBoxQtSlots(UsesQApplication):
"""Tests the connection to QSpinBox qt slots"""
qapplication = True
def testSetValueIndirect(self):
"""Indirect signal emission: QSpinBox using valueChanged(int)/setValue(int)"""
spinSend = QSpinBox()
spinRec = QSpinBox()
spinRec.setValue(5)
QObject.connect(spinSend, SIGNAL('valueChanged(int)'), spinRec, SLOT('setValue(int)'))
self.assertEqual(spinRec.value(), 5)
spinSend.setValue(3)
self.assertEqual(spinRec.value(), 3)
self.assertEqual(spinSend.value(), 3)
def testSetValue(self):
"""Direct signal emission: QSpinBox using valueChanged(int)/setValue(int)"""
spinSend = QSpinBox()
spinRec = QSpinBox()
spinRec.setValue(5)
spinSend.setValue(42)
QObject.connect(spinSend, SIGNAL('valueChanged(int)'), spinRec, SLOT('setValue(int)'))
self.assertEqual(spinRec.value(), 5)
self.assertEqual(spinSend.value(), 42)
spinSend.emit(SIGNAL('valueChanged(int)'), 3)
self.assertEqual(spinRec.value(), 3)
#Direct emission shouldn't change the value of the emitter
self.assertEqual(spinSend.value(), 42)
spinSend.emit(SIGNAL('valueChanged(int)'), 66)
self.assertEqual(spinRec.value(), 66)
self.assertEqual(spinSend.value(), 42)
if __name__ == '__main__':
unittest.main()
|
lgpl-2.1
|
vmarkovtsev/django
|
django/contrib/gis/db/backends/mysql/operations.py
|
328
|
2746
|
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import \
BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
select = 'AsText(%s)'
from_wkb = 'GeomFromWKB'
from_text = 'GeomFromText'
Adapter = WKTAdapter
Adaptor = Adapter # Backwards-compatibility alias.
gis_operators = {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # .. ..
'contained': SpatialOperator(func='MBRWithin'), # .. ..
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func='MBREqual'),
'exact': SpatialOperator(func='MBREqual'),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func='MBREqual'),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
function_names = {
'Distance': 'ST_Distance',
'Length': 'GLength',
'Union': 'ST_Union',
}
disallowed_aggregates = (
aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
aggregates.MakeLine, aggregates.Union,
)
@cached_property
def unsupported_functions(self):
unsupported = {
'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle',
'Difference', 'ForceRHR', 'GeoHash', 'Intersection', 'MemSize',
'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid',
'SymDifference', 'Transform', 'Translate',
}
if self.connection.mysql_version < (5, 6, 1):
unsupported.update({'Distance', 'Union'})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, f, value, compiler):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'as_sql'):
placeholder, _ = compiler.compile(value)
else:
placeholder = '%s(%%s)' % self.from_text
return placeholder
|
bsd-3-clause
|
mlperf/training_results_v0.6
|
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/nnvm/python/nnvm/testing/mobilenet_v2.py
|
1
|
1220
|
"""
MobileNetV2, load model from gluon model zoo
Reference:
Inverted Residuals and Linear Bottlenecks:
Mobile Networks for Classification, Detection and Segmentation
https://arxiv.org/abs/1801.04381
"""
from .utils import create_workload
from ..frontend.mxnet import _from_mxnet_impl
def get_workload(batch_size, num_classes=1000, multiplier=1.0, dtype="float32"):
"""Get benchmark workload for mobilenet
Parameters
----------
batch_size : int
The batch size used in the model
num_classes : int, optional
Number of classes
    multiplier : float, optional
        The width multiplier controlling the model size
dtype : str, optional
The data type
Returns
-------
net : nnvm.Symbol
The computational graph
params : dict of str to NDArray
The parameters.
"""
import mxnet as mx
from mxnet.gluon.model_zoo.vision.mobilenet import MobileNetV2
image_shape = (1, 3, 224, 224)
block = MobileNetV2(multiplier=multiplier, classes=num_classes)
data = mx.sym.Variable('data')
sym = block(data)
sym = mx.sym.SoftmaxOutput(sym)
net = _from_mxnet_impl(sym, {})
return create_workload(net, batch_size, image_shape[1:], dtype)
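# Illustrative usage (assumed variable names, not part of the upstream module):
#   net, params = get_workload(batch_size=1, num_classes=1000, multiplier=1.0)
# `net` is the nnvm symbol and `params` maps parameter names to NDArrays, as
# described in the docstring above.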
|
apache-2.0
|
Communities-Communications/cc-odoo
|
addons/print_receipt/reports/account_voucher.py
|
1
|
1173
|
# -*- coding: utf-8 -*-
import time
from openerp.report import report_sxw
from openerp import pooler
from openerp.tools.amount_to_text_pt import amount_to_text
class account_voucher(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(account_voucher, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'getLines': self._lines_get,
'amount_to_text': amount_to_text,
})
self.context = context
def _lines_get(self, voucher):
voucherline_obj = pooler.get_pool(self.cr.dbname).get('account.voucher.line')
voucherlines = voucherline_obj.search(self.cr, self.uid,[('voucher_id','=',voucher.id)])
voucherlines = voucherline_obj.browse(self.cr, self.uid, voucherlines)
return voucherlines
report_sxw.report_sxw('report.account_voucher', 'account.voucher',
'addons/print_receipt/reports/account_voucher.rml',
parser=account_voucher)
|
agpl-3.0
|
johnkit/vtk-dev
|
IO/EnSight/Testing/Python/EnSightSelectArrays.py
|
20
|
1547
|
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# create a rendering window and renderer
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
renWin.StereoCapableWindowOn()
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
reader = vtk.vtkGenericEnSightReader()
# Make sure all algorithms use the composite data pipeline
cdp = vtk.vtkCompositeDataPipeline()
reader.SetDefaultExecutivePrototype(cdp)
reader.SetCaseFileName("" + str(VTK_DATA_ROOT) + "/Data/EnSight/blow1_ascii.case")
reader.SetTimeValue(1)
reader.ReadAllVariablesOff()
reader.SetPointArrayStatus("displacement",1)
reader.SetCellArrayStatus("thickness",1)
reader.SetCellArrayStatus("displacement",1)
geom = vtk.vtkGeometryFilter()
geom.SetInputConnection(reader.GetOutputPort())
mapper = vtk.vtkHierarchicalPolyDataMapper()
mapper.SetInputConnection(geom.GetOutputPort())
mapper.SetScalarRange(0.5,1.0)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# assign our actor to the renderer
ren1.AddActor(actor)
# enable user interface interactor
iren.Initialize()
ren1.GetActiveCamera().SetPosition(99.3932,17.6571,-22.6071)
ren1.GetActiveCamera().SetFocalPoint(3.5,12,1.5)
ren1.GetActiveCamera().SetViewAngle(30)
ren1.GetActiveCamera().SetViewUp(0.239617,-0.01054,0.97081)
ren1.ResetCameraClippingRange()
renWin.Render()
# prevent the tk window from showing up then start the event loop
reader.SetDefaultExecutivePrototype(None)
# --- end of script --
|
bsd-3-clause
|
cjhak/b2share
|
invenio/legacy/websearch/scripts/webcoll.py
|
6
|
3085
|
# This file is part of Invenio.
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.base.factory import with_app_context
@with_app_context()
def main():
    """Main function that constructs the bibtask."""
from invenio.legacy.bibsched.bibtask import task_init
from invenio.legacy.websearch.webcoll import (
task_submit_elaborate_specific_parameter, task_submit_check_options,
task_run_core, __revision__)
task_init(authorization_action="runwebcoll",
authorization_msg="WebColl Task Submission",
description="""Description:
webcoll updates the collection cache (record universe for a
given collection plus web page elements) based on invenio.conf and DB
configuration parameters. If the collection name is passed as an argument,
only this collection's cache will be updated. If the recursive option is
set as well, the collection's descendants will also be updated.\n""",
help_specific_usage=" -c, --collection\t Update cache for the given "
"collection only. [all]\n"
" -r, --recursive\t Update cache for the given collection and all its\n"
"\t\t\t descendants (to be used in combination with -c). [no]\n"
" -q, --quick\t\t Skip webpage cache update for those collections whose\n"
"\t\t\t reclist was not changed. Note: if you use this option, it is advised\n"
"\t\t\t to schedule, e.g. a nightly 'webcoll --force'. [no]\n"
" -f, --force\t\t Force update even if cache is up to date. [no]\n"
" -p, --part\t\t Update only certain cache parts (1=reclist,"
" 2=webpage). [both]\n"
" -l, --language\t Update pages in only certain language"
" (e.g. fr,it,...). [all]\n",
version=__revision__,
specific_params=("c:rqfp:l:", [
"collection=",
"recursive",
"quick",
"force",
"part=",
"language="
]),
task_submit_elaborate_specific_parameter_fnc=task_submit_elaborate_specific_parameter,
task_submit_check_options_fnc=task_submit_check_options,
task_run_fnc=task_run_core)
|
gpl-2.0
|
jdmmiranda307/dataviva-api
|
tests/api/test_hedu_api.py
|
3
|
1241
|
from test_base import BaseTestCase
class TestHeduApiTests(BaseTestCase):
def test_should_respond_ok_to_hedu_path(self):
response = self.client.get('/hedu/year/')
self.assert_200(response)
def test_should_check_if_all_years_are_loaded(self):
response = self.client.get('/hedu/year/?order=year')
first_year = 2010
last_year = 2013
data = response.json['data']
year_index = response.json['headers'].index('year')
self.assertEqual(data[0][year_index], first_year)
self.assertEqual(data[-1][year_index], last_year)
year = first_year
for item in data:
self.assertEqual(item[year_index], year)
year += 1
def test_should_check_default_headers(self):
response = self.client.get('/hedu/year/')
headers = response.json['headers']
for header in ['year', 'average_age']:
self.assertIn(header, headers)
def test_should_check_average_age_in_2008(self):
response = self.client.get('/hedu/year/?year=2010')
data = response.json['data']
value_index = response.json['headers'].index('average_age')
self.assertEqual(data[0][value_index], 26)
|
mit
|
baliga-lab/pf1010-web
|
aqxWeb/api.py
|
1
|
4401
|
from aqxWeb.dao.users import UserDAO
from aqxWeb.dao.systems import SystemDAO
from aqxWeb.dao.metadata import MetadataDAO
from aqxWeb.dao.subscriptions import SubscriptionDAO
from aqxWeb.dao.measurements import MeasurementDAO
from collections import defaultdict
import json
class API:
def __init__(self, app):
self.systemDAO = SystemDAO(app)
self.userDAO = UserDAO(app)
self.metadataDAO = MetadataDAO(app)
self.subscriptionDAO = SubscriptionDAO(app)
self.measurementDAO = MeasurementDAO(app)
###########################################################################
# SystemAPI
###########################################################################
def get_system(self, systemUID):
result = self.systemDAO.get_system(systemUID)
systemID = result[0]
# Get the crops
results = self.systemDAO.crops_for_system(systemID)
crops = []
for crop in results:
crops.append({
'id': crop[0],
'name': crop[1],
'count': crop[2]
})
# Get the grow bed media
results = self.systemDAO.getGrowBedMediaForSystem(systemID)
media = []
for medium in results:
media.append({
'name': medium[0],
'count': medium[1]
})
# Get the organisms
results = self.systemDAO.organisms_for_system(systemID)
organisms = []
for organism in results:
organisms.append({
'id': organism[0],
'name': organism[1],
'count': organism[2]
})
# Get the status
status = self.systemDAO.getStatusForSystem(systemUID)[0]
# Recompile the system
return {
'ID': result[0],
'UID': result[1],
'user': result[2],
'name': result[3],
'creationTime': str(result[4]),
'startDate': str(result[5]),
'location': {'lat': str(result[6]), 'lng': str(result[7])},
'technique': result[8],
'status': status,
'gbMedia': media,
'crops': crops,
'organisms': organisms,
}
def getSystemsForUser(self, userID):
systems = []
results = self.systemDAO.getSystemsForUser(userID)
for result in results:
systems.append({
'ID': result[0],
'UID': result[1],
'name': result[2]
})
return json.dumps(systems)
def create_system(self, system):
"""this is just a delegation to the DAO, no JSON serialization
because it reduces reusability"""
return self.systemDAO.create_system(system)
def update_system(self, system):
"""this is just a delegation to the DAO, no JSON serialization
because it reduces reusability"""
return self.systemDAO.update_system(system)
###########################################################################
# UserAPI
###########################################################################
def getUserID(self, googleID):
userID = self.userDAO.getUserID(googleID)
return json.dumps({'userID': userID})
def hasUser(self, googleID):
count = self.userDAO.hasUser(googleID)
return json.dumps({'hasUser': count == 1})
def createUser(self, googleProfile):
userID = self.userDAO.createUser(googleProfile)
return json.dumps({'userID': userID})
###########################################################################
# MetadataAPI
###########################################################################
def catalogs(self):
results = self.metadataDAO.catalogs()
enums = defaultdict(list)
for result in results:
table = result[0]
if not enums[table]:
enums[table] = []
enums[table].append({
'ID': result[1],
'name': result[2]
})
return enums
def subscribe(self, email):
subscriptionID = self.subscriptionDAO.subscribe(email)
return {'status': 'ok', 'subscriptionID': subscriptionID}
def measurement_types(self):
return json.dumps(self.measurementDAO.measurement_types())
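# Illustrative usage sketch (assumed names, not part of the upstream module):
# the class is a thin facade over the DAOs.
#
#   api = API(flask_app)                   # `flask_app` is a placeholder
#   system = api.get_system(system_uid)    # dict describing one system
#   catalogs = api.catalogs()              # dict of metadata enum lists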
|
lgpl-3.0
|
zcbenz/cefode-chromium
|
ppapi/native_client/src/untrusted/pnacl_support_extension/pnacl_component_crx_gen.py
|
5
|
23962
|
#!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script packages the PNaCl translator files as:
(1) a Chrome Extension (crx), which can be used as a straight-forward CRX,
or used with the Chrome incremental installer (component updater)
(2) a Chrome Extension as a zip for uploading to the CWS.
(3) layout files for a normal Chrome installer.
This script depends on and pulls in the translator nexes and libraries
from the toolchain directory (so that must be downloaded first) and
it depends on the pnacl_irt_shim.
"""
import glob
import logging
import optparse
import os
import platform
import re
import shutil
import subprocess
import sys
import tempfile
import zipfile
# shutil.copytree does not allow the target directory to exist.
# Borrow this copy_tree, which does allow it (overwrites conflicts?).
from distutils.dir_util import copy_tree as copytree_existing
J = os.path.join
######################################################################
# Target arch and build arch junk to convert between all the
# silly conventions between SCons, Chrome and PNaCl.
# The version of the arch used by NaCl manifest files.
# This is based on the machine "building" this extension.
# We also used this to identify the arch-specific different versions of
# this extension.
def CanonicalArch(arch):
if arch in ('x86_64', 'x86-64', 'x64', 'amd64'):
return 'x86-64'
# TODO(jvoung): be more specific about the arm architecture version?
if arch in ('arm', 'armv7'):
return 'arm'
if re.match('^i.86$', arch) or arch in ('x86_32', 'x86-32', 'ia32', 'x86'):
return 'x86-32'
return None
def GetBuildArch():
arch = platform.machine()
return CanonicalArch(arch)
BUILD_ARCH = GetBuildArch()
ARCHES = ['x86-32', 'x86-64', 'arm']
def IsValidArch(arch):
return arch in ARCHES
# The version of the arch used by configure and pnacl's build.sh.
def StandardArch(arch):
return {'x86-32': 'i686',
'x86-64': 'x86_64',
'arm' : 'armv7'}[arch]
######################################################################
def GetNaClRoot():
""" Find the native_client path, relative to this script.
This script is in ppapi/... and native_client is a sibling of ppapi.
"""
script_file = os.path.abspath(__file__)
def SearchForNaCl(cur_dir):
if cur_dir.endswith('ppapi'):
parent = os.path.dirname(cur_dir)
sibling = os.path.join(parent, 'native_client')
if not os.path.isdir(sibling):
raise Exception('Could not find native_client relative to %s' %
script_file)
return sibling
    # Detect when we've hit the root (linux is /, but windows is not...)
next_dir = os.path.dirname(cur_dir)
if cur_dir == next_dir:
raise Exception('Could not find native_client relative to %s' %
script_file)
return SearchForNaCl(next_dir)
return SearchForNaCl(script_file)
NACL_ROOT = GetNaClRoot()
######################################################################
# Normalize the platform name to be the way SCons finds chrome binaries.
# This is based on the platform "building" the extension.
def GetBuildPlatform():
if sys.platform == 'darwin':
platform = 'mac'
elif sys.platform.startswith('linux'):
platform = 'linux'
elif sys.platform in ('cygwin', 'win32'):
platform = 'windows'
else:
raise Exception('Unknown platform: %s' % sys.platform)
return platform
BUILD_PLATFORM = GetBuildPlatform()
def DetermineInstallerArches(target_arch):
arch = CanonicalArch(target_arch)
if not IsValidArch(arch):
raise Exception('Unknown target_arch %s' % target_arch)
# On windows, we need x86-32 and x86-64 (assuming non-windows RT).
if BUILD_PLATFORM == 'windows':
if arch.startswith('x86'):
return ['x86-32', 'x86-64']
else:
raise Exception('Unknown target_arch on windows w/ target_arch == %s' %
target_arch)
else:
return [arch]
class CRXGen(object):
""" Generate a CRX file. Can generate a fresh CRX and private key, or
create a version of new CRX with the same AppID, using an existing
private key.
NOTE: We use the chrome binary to do CRX packing. There is also a bash
script available at: http://code.google.com/chrome/extensions/crx.html
but it is less featureful (it does not know how to generate private keys).
We should probably make a version of this that doesn't require chrome.
"""
@staticmethod
def RunCRXGen(chrome_path, manifest_dir, private_key=None):
if chrome_path is None:
raise Exception('Chrome binary not specified!')
if not os.path.isfile(chrome_path):
raise Exception('Chrome binary not found: %s' % chrome_path)
cmdline = []
if BUILD_PLATFORM == 'linux':
# On Linux, run chrome under xvfb (even though CRX packing should be
# headless, it is not quite headless because of the zygote). This allows
# the tool to run under ssh, screen, etc.
cmdline.append('xvfb-run')
cmdline += [chrome_path, '--pack-extension=%s' % manifest_dir]
if private_key is not None:
cmdline.append('--pack-extension-key=%s' % private_key)
StepBanner('GEN CRX', str(cmdline))
if subprocess.call(cmdline) != 0:
raise Exception('Failed to RunCRXGen: %s' % (cmdline))
######################################################################
def IsValidVersion(version):
""" Return true if the version is a valid ID (a quad like 0.0.0.0).
"""
pat = re.compile(r'^\d+\.\d+\.\d+\.\d+$')
return pat.search(version)
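# Illustrative results (not executed): IsValidVersion('0.1.0.12345') matches,
# while IsValidVersion('1.2.3') and IsValidVersion('1.2.3.4b') do not.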
######################################################################
class PnaclPackaging(object):
# For dogfooding, we also create a webstore extension.
# See: https://chrome.google.com/webstore/a/google.com/detail/gcodniebolpnpaiggndmcmmfpldlknih
# To test offline, we need to be able to load via the command line on chrome,
# but we also need the AppID to remain the same. Thus we supply the
# public key in the unpacked/offline extension manifest. See:
# http://code.google.com/chrome/extensions/manifest.html#key
# Summary:
# 1) install the extension, then
# 2) look for "key" in
#    <profile>/Default/Extensions/<extensionId>/<versionString>/manifest.json
# (Fret not -- this is not the private key, it's just a key stored in the
# user's profile directory).
WEBSTORE_PUBLIC_KEY = ("MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC7zhW8iyt"
"dYid7SXLokWfxNoz2Co9x2ItkVUS53Iq12xDLfcKkUZ2RNX"
"Qtua+yKgRTRMP0HigPtn2KZeeJYzvBYLP/kz62B3nM5nS8M"
"o0qQKEsJiNgTf1uOgYGPyrE6GrFBFolLGstnZ1msVgNHEv2"
"dZruC2XewOJihvmeQsOjjwIDAQAB")
package_base = os.path.dirname(__file__)
# The extension system's manifest.json.
manifest_template = J(package_base, 'pnacl_manifest_template.json')
# Pnacl-specific info
pnacl_template = J(package_base, 'pnacl_info_template.json')
# Agreed-upon name for pnacl-specific info.
pnacl_json = 'pnacl.json'
@staticmethod
def GenerateManifests(target_dir, version, arch, web_accessible,
all_host_permissions,
manifest_key=None):
PnaclPackaging.GenerateExtensionManifest(target_dir, version,
web_accessible,
all_host_permissions,
manifest_key)
# For now, make the ABI version the same as pnacl-version...
# It should probably be separate though.
PnaclPackaging.GeneratePnaclInfo(target_dir, version, arch)
@staticmethod
def GenerateExtensionManifest(target_dir, version,
web_accessible, all_host_permissions,
manifest_key):
manifest_template_fd = open(PnaclPackaging.manifest_template, 'r')
manifest_template = manifest_template_fd.read()
manifest_template_fd.close()
output_fd = open(J(target_dir, 'manifest.json'), 'w')
extra = ''
if web_accessible != []:
extra += '"web_accessible_resources": [\n%s],\n' % ',\n'.join(
[ ' "%s"' % to_quote for to_quote in web_accessible ])
if manifest_key is not None:
extra += ' "key": "%s",\n' % manifest_key
if all_host_permissions:
extra += ' "permissions": ["http://*/"],\n'
output_fd.write(manifest_template % { "version" : version,
"extra" : extra, })
output_fd.close()
@staticmethod
def GeneratePnaclInfo(target_dir, version, arch, is_installer=False):
pnacl_template_fd = open(PnaclPackaging.pnacl_template, 'r')
pnacl_template = pnacl_template_fd.read()
pnacl_template_fd.close()
if is_installer:
out_name = J(target_dir, UseWhitelistedChars(PnaclPackaging.pnacl_json,
None))
else:
out_name = J(target_dir, PnaclPackaging.pnacl_json)
output_fd = open(out_name, 'w')
if isinstance(arch, list):
# FIXME: Handle a list of arches, not just a wildcard "all".
# Alternatively, perhaps we shouldn't bother checking what arch is
# installed and assume the installer does the right thing.
arch = 'all'
output_fd.write(pnacl_template % { "abi-version" : version,
"arch" : arch, })
output_fd.close()
######################################################################
class PnaclDirs(object):
toolchain_dir = J(NACL_ROOT, 'toolchain')
output_dir = J(toolchain_dir, 'pnacl-package')
@staticmethod
def TranslatorRoot():
return J(PnaclDirs.toolchain_dir, 'pnacl_translator')
@staticmethod
def LibDir(target_arch):
return J(PnaclDirs.TranslatorRoot(), 'lib-%s' % target_arch)
@staticmethod
def SandboxedCompilerDir(target_arch):
return J(PnaclDirs.toolchain_dir,
'pnacl_translator', StandardArch(target_arch), 'bin')
@staticmethod
def SetOutputDir(d):
PnaclDirs.output_dir = d
@staticmethod
def OutputDir():
return PnaclDirs.output_dir
@staticmethod
def OutputAllDir(version_quad):
return J(PnaclDirs.OutputDir(), version_quad)
@staticmethod
def OutputArchBase(arch):
return '%s' % arch
@staticmethod
def OutputArchDir(arch):
# Nest this in another directory so that the layout will be the same
# as the "all"/universal version.
parent_dir = J(PnaclDirs.OutputDir(), PnaclDirs.OutputArchBase(arch))
return (parent_dir, J(parent_dir, PnaclDirs.OutputArchBase(arch)))
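# Illustrative layout (not executed): OutputArchDir('x86-32') returns the pair
# (<output_dir>/x86-32, <output_dir>/x86-32/x86-32); the manifest is generated
# in the parent and the arch-specific payload is copied into the nested dir.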
######################################################################
def StepBanner(short_desc, long_desc):
logging.info("**** %s\t%s", short_desc, long_desc)
def Clean():
out_dir = PnaclDirs.OutputDir()
StepBanner('CLEAN', 'Cleaning out old packaging: %s' % out_dir)
if os.path.isdir(out_dir):
shutil.rmtree(out_dir)
else:
logging.info('Clean skipped -- no previous output directory!')
######################################################################
def ZipDirectory(base_dir, zipfile):
""" Zip all the files in base_dir into the given opened zipfile object.
"""
for (root, dirs, files) in os.walk(base_dir, followlinks=True):
for f in files:
full_name = J(root, f)
zipfile.write(full_name, os.path.relpath(full_name, base_dir))
def ListDirectoryRecursivelyAsURLs(base_dir):
""" List all files that can be found from base_dir. Return names as
URLs relative to the base_dir.
"""
file_list = []
for (root, dirs, files) in os.walk(base_dir, followlinks=True):
for f in files:
full_name = J(root, f)
if os.path.isfile(full_name):
rel_name = os.path.relpath(full_name, base_dir)
url = '/'.join(rel_name.split(os.path.sep))
file_list.append(url)
return file_list
def GetWebAccessibleResources(base_dir):
''' Return the default list of web_accessible_resources to allow us
to do a CORS request to get extension files. '''
resources = ListDirectoryRecursivelyAsURLs(base_dir)
# Make sure that the pnacl.json file is accessible.
resources.append(os.path.basename(PnaclPackaging.pnacl_json))
return resources
def GeneratePrivateKey(options):
""" Generate a dummy extension to generate a fresh private key. This will
be left in the build dir, and the dummy extension will be cleaned up.
"""
StepBanner('GEN PRIVATE KEY', 'Generating fresh private key')
tempdir = tempfile.mkdtemp(dir=PnaclDirs.OutputDir())
ext_dir = J(tempdir, 'dummy_extension')
os.mkdir(ext_dir)
PnaclPackaging.GenerateManifests(ext_dir,
'0.0.0.0',
'dummy_arch',
[],
False)
CRXGen.RunCRXGen(options.chrome_path, ext_dir)
shutil.copy2(J(tempdir, 'dummy_extension.pem'),
PnaclDirs.OutputDir())
shutil.rmtree(tempdir)
logging.info('\n<<< Fresh key is now in %s/dummy_extension.pem >>>\n' %
PnaclDirs.OutputDir())
def BuildArchCRXForComponentUpdater(version_quad, arch, lib_overrides,
options):
""" Build an architecture specific version for the chrome component
install (an actual CRX, vs a zip file). Though this is a CRX,
it is not used as a chrome extension as the CWS and unpacked version.
"""
parent_dir, target_dir = PnaclDirs.OutputArchDir(arch)
StepBanner('BUILD ARCH CRX %s' % arch,
'Packaging for arch %s in %s' % (arch, target_dir))
# Copy llc and ld.
copytree_existing(PnaclDirs.SandboxedCompilerDir(arch), target_dir)
# Rename llc.nexe to llc, ld.nexe to ld
for tool in ('llc', 'ld'):
shutil.move(J(target_dir, '%s.nexe' % tool), J(target_dir, tool))
# Copy native libraries.
copytree_existing(PnaclDirs.LibDir(arch), target_dir)
# Also copy files from the list of overrides.
if arch in lib_overrides:
for override in lib_overrides[arch]:
logging.info('Copying override %s to %s' % (override, target_dir))
shutil.copy2(override, target_dir)
# Skip the CRX generation if we are only building the unpacked version
# for commandline testing.
if options.unpacked_only:
return
# Generate the manifest one level up (so the layout looks like the "all" package).
# NOTE: this does not have 'web_accessible_resources' and does not have
# the all_host_permissions, since it isn't used via chrome-extension://
# URL requests.
PnaclPackaging.GenerateManifests(parent_dir,
version_quad,
arch,
[],
False)
CRXGen.RunCRXGen(options.chrome_path, parent_dir, options.prev_priv_key)
def LayoutAllDir(version_quad):
StepBanner("Layout All Dir", "Copying Arch specific to Arch-independent.")
target_dir = PnaclDirs.OutputAllDir(version_quad)
for arch in ARCHES:
arch_parent, arch_dir = PnaclDirs.OutputArchDir(arch)
# NOTE: The arch_parent contains the arch-specific manifest.json files.
# We carefully avoid copying those to the "all dir" since having
# more than one manifest.json will confuse the CRX tools (e.g., you will
# get a mysterious failure when uploading to the webstore).
copytree_existing(arch_dir,
J(target_dir, PnaclDirs.OutputArchBase(arch)))
def BuildCWSZip(version_quad):
""" Build a 'universal' chrome extension zipfile for webstore use (where the
installer doesn't know the target arch). Assumes the individual arch
versions were built.
"""
StepBanner("CWS ZIP", "Making a zip with all architectures.")
target_dir = PnaclDirs.OutputAllDir(version_quad)
web_accessible = GetWebAccessibleResources(target_dir)
# Overwrite the arch-specific 'manifest.json' that was there.
PnaclPackaging.GenerateManifests(target_dir,
version_quad,
'all',
web_accessible,
True)
target_zip = J(PnaclDirs.OutputDir(), 'pnacl_all.zip')
zipf = zipfile.ZipFile(target_zip, 'w', compression=zipfile.ZIP_DEFLATED)
ZipDirectory(target_dir, zipf)
zipf.close()
def BuildUnpacked(version_quad):
""" Build an unpacked chrome extension with all files for commandline
testing (load on chrome commandline).
"""
StepBanner("UNPACKED CRX", "Making an unpacked CRX of all architectures.")
target_dir = PnaclDirs.OutputAllDir(version_quad)
web_accessible = GetWebAccessibleResources(target_dir)
# Overwrite the manifest file (if there was one already).
PnaclPackaging.GenerateManifests(target_dir,
version_quad,
'all',
web_accessible,
True,
PnaclPackaging.WEBSTORE_PUBLIC_KEY)
def BuildExtensionStyle(version_quad, lib_overrides, options):
""" Package the pnacl components 3 ways, all of which are
chrome-extension-like.
1) Arch-specific CRXes that can be queried by Omaha.
2) A zip containing all arch files for the Chrome Webstore.
3) An unpacked extension with all arch files for offline testing.
"""
StepBanner("BUILD_ALL", "Packaging extension for version: %s" % version_quad)
for arch in ARCHES:
BuildArchCRXForComponentUpdater(version_quad, arch, lib_overrides, options)
LayoutAllDir(version_quad)
if not options.unpacked_only:
BuildCWSZip(version_quad)
BuildUnpacked(version_quad)
######################################################################
def UseWhitelistedChars(orig_basename, arch):
""" Make the filename match the pattern expected by pnacl_file_host.
Currently, this assumes there is a prefix "pnacl_public_" and
that the allowed chars are in the set [a-zA-Z0-9_].
"""
if arch:
target_basename = 'pnacl_public_%s_%s' % (arch, orig_basename)
else:
target_basename = 'pnacl_public_%s' % orig_basename
result = re.sub(r'[^a-zA-Z0-9_]', '_', target_basename)
logging.info('UseWhitelistedChars using: %s' % result)
return result
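# Illustrative results (not executed; file names are placeholders):
#   UseWhitelistedChars('pnacl.json', None)     -> 'pnacl_public_pnacl_json'
#   UseWhitelistedChars('crtbegin.o', 'x86-64') -> 'pnacl_public_x86_64_crtbegin_o'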
def CopyFlattenDirsAndPrefix(src_dir, arch, dest_dir):
""" Copy files from src_dir to dest_dir.
When copying, also rename the files such that they match the white-listing
pattern in chrome/browser/nacl_host/pnacl_file_host.cc.
"""
for (root, dirs, files) in os.walk(src_dir, followlinks=True):
for f in files:
# Assume a flat directory.
assert (f == os.path.basename(f))
full_name = J(root, f)
target_name = UseWhitelistedChars(f, arch)
shutil.copy2(full_name, J(dest_dir, target_name))
def BuildArchForInstaller(version_quad, arch, lib_overrides, options):
""" Build an architecture specific version for the chrome installer.
"""
target_dir = PnaclDirs.OutputDir()
StepBanner('BUILD INSTALLER',
'Packaging for arch %s in %s' % (arch, target_dir))
# Copy llc.nexe and ld.nexe, but with some renaming and directory flattening.
CopyFlattenDirsAndPrefix(PnaclDirs.SandboxedCompilerDir(arch),
arch,
target_dir)
# Copy native libraries, also with renaming and directory flattening.
CopyFlattenDirsAndPrefix(PnaclDirs.LibDir(arch), arch, target_dir)
# Also copy files from the list of overrides.
# This needs the arch tagged onto the name too, like the other files.
if arch in lib_overrides:
for override in lib_overrides[arch]:
override_base = os.path.basename(override)
target_name = UseWhitelistedChars(override_base, arch)
shutil.copy2(override, J(target_dir, target_name))
def BuildInstallerStyle(version_quad, lib_overrides, options):
""" Package the pnacl component for use within the chrome installer
infrastructure. These files need to be named in a special way
so that white-listing of files is easy.
"""
StepBanner("BUILD_ALL", "Packaging installer for version: %s" % version_quad)
arches = DetermineInstallerArches(options.installer_only)
for arch in arches:
BuildArchForInstaller(version_quad, arch, lib_overrides, options)
# Generate pnacl info manifest.
# Hack around the fact that there may be more than one arch, on Windows.
if len(arches) == 1:
arches = arches[0]
PnaclPackaging.GeneratePnaclInfo(PnaclDirs.OutputDir(), version_quad,
arches, is_installer=True)
######################################################################
def Main():
usage = 'usage: %prog [options] version_arg'
parser = optparse.OptionParser(usage)
# We may want to accept a target directory in which to dump the output
# (e.g., scons-out).
parser.add_option('-c', '--clean', dest='clean',
action='store_true', default=False,
help='Clean out destination directory first.')
parser.add_option('-u', '--unpacked_only', action='store_true',
dest='unpacked_only', default=False,
help='Only generate the unpacked version')
parser.add_option('-i', '--installer_only',
dest='installer_only', default=None,
help='Only generate the chrome installer version for arch')
parser.add_option('-d', '--dest', dest='dest',
help='The destination root for laying out the extension')
parser.add_option('-p', '--priv_key',
dest='prev_priv_key', default=None,
help='Specify the old private key')
parser.add_option('-L', '--lib_override',
dest='lib_overrides', action='append', default=[],
help='Specify a fresher native library that ' +
'overrides the tarball library, given as an ' +
'(arch,libfile) pair.')
parser.add_option('-g', '--generate_key',
action='store_true', dest='gen_key',
help='Generate a fresh private key, and exit.')
parser.add_option('-C', '--chrome_path', dest='chrome_path',
help='Location of chrome.')
parser.add_option('-v', '--verbose', dest='verbose', default=False,
action='store_true',
help='Print verbose debug messages.')
(options, args) = parser.parse_args()
if options.verbose:
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.getLogger().setLevel(logging.ERROR)
logging.info('pnacl_component_crx_gen w/ options %s and args %s\n'
% (options, args))
# Set destination directory before doing any cleaning, etc.
if options.dest:
PnaclDirs.SetOutputDir(options.dest)
if options.clean:
Clean()
if options.gen_key:
GeneratePrivateKey(options)
return 0
lib_overrides = {}
for o in options.lib_overrides:
arch, override_lib = o.split(',')
arch = CanonicalArch(arch)
if not IsValidArch(arch):
raise Exception('Unknown arch for -L: %s (from %s)' % (arch, o))
if not os.path.isfile(override_lib):
raise Exception('Override native lib not a file for -L: %s (from %s)' %
(override_lib, o))
override_list = lib_overrides.get(arch, [])
override_list.append(override_lib)
lib_overrides[arch] = override_list
if len(args) != 1:
parser.print_help()
parser.error('Incorrect number of arguments')
version_quad = args[0]
if not IsValidVersion(version_quad):
print 'Invalid version format: %s\n' % version_quad
return 1
if options.installer_only:
BuildInstallerStyle(version_quad, lib_overrides, options)
else:
BuildExtensionStyle(version_quad, lib_overrides, options)
return 0
if __name__ == '__main__':
sys.exit(Main())
|
bsd-3-clause
|
willzhang05/postgrestesting1
|
postgrestesting1/lib/python3.5/site-packages/gunicorn/app/django_wsgi.py
|
87
|
4363
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
""" module used to build the django wsgi application """
from __future__ import print_function
import os
import re
import sys
import time
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from imp import reload
from django.conf import settings
from django.core.management.validation import get_validation_errors
from django.utils import translation
try:
from django.core.servers.basehttp import get_internal_wsgi_application
django14 = True
except ImportError:
from django.core.handlers.wsgi import WSGIHandler
django14 = False
from gunicorn import util
def make_wsgi_application():
# validate models
s = StringIO()
if get_validation_errors(s):
s.seek(0)
error = s.read()
msg = "One or more models did not validate:\n%s" % error
print(msg, file=sys.stderr)
sys.stderr.flush()
sys.exit(1)
translation.activate(settings.LANGUAGE_CODE)
if django14:
return get_internal_wsgi_application()
return WSGIHandler()
def reload_django_settings():
mod = util.import_module(os.environ['DJANGO_SETTINGS_MODULE'])
# Reload module.
reload(mod)
# Reload settings.
# Use code from django.settings.Settings module.
# Settings that should be converted into tuples if they're mistakenly entered
# as strings.
tuple_settings = ("INSTALLED_APPS", "TEMPLATE_DIRS")
for setting in dir(mod):
if setting == setting.upper():
setting_value = getattr(mod, setting)
if setting in tuple_settings and type(setting_value) == str:
setting_value = (setting_value,) # In case the user forgot the comma.
setattr(settings, setting, setting_value)
# Expand entries in INSTALLED_APPS like "django.contrib.*" to a list
# of all those apps.
new_installed_apps = []
for app in settings.INSTALLED_APPS:
if app.endswith('.*'):
app_mod = util.import_module(app[:-2])
appdir = os.path.dirname(app_mod.__file__)
app_subdirs = os.listdir(appdir)
name_pattern = re.compile(r'[a-zA-Z]\w*')
for d in sorted(app_subdirs):
if (name_pattern.match(d) and
os.path.isdir(os.path.join(appdir, d))):
new_installed_apps.append('%s.%s' % (app[:-2], d))
else:
new_installed_apps.append(app)
setattr(settings, "INSTALLED_APPS", new_installed_apps)
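# Illustrative expansion of the wildcard handling above (not executed; the
# package and subdirectory names are hypothetical):
#   "myproject.apps.*" with subdirectories "blog" and "shop" becomes
#   ["myproject.apps.blog", "myproject.apps.shop"] in INSTALLED_APPS.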
if hasattr(time, 'tzset') and settings.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = '/usr/share/zoneinfo'
if (os.path.exists(zoneinfo_root) and not
os.path.exists(os.path.join(zoneinfo_root,
*(settings.TIME_ZONE.split('/'))))):
raise ValueError("Incorrect timezone setting: %s" %
settings.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = settings.TIME_ZONE
time.tzset()
# Settings are configured, so we can set up the logger if required
if getattr(settings, 'LOGGING_CONFIG', False):
# First find the logging configuration function ...
logging_config_path, logging_config_func_name = settings.LOGGING_CONFIG.rsplit('.', 1)
logging_config_module = util.import_module(logging_config_path)
logging_config_func = getattr(logging_config_module, logging_config_func_name)
# ... then invoke it with the logging settings
logging_config_func(settings.LOGGING)
def make_command_wsgi_application(admin_mediapath):
reload_django_settings()
try:
from django.core.servers.basehttp import AdminMediaHandler
return AdminMediaHandler(make_wsgi_application(), admin_mediapath)
except ImportError:
return make_wsgi_application()
|
mit
|
yade/trunk
|
examples/simple-scene/simple-scene-energy-tracking.py
|
2
|
4828
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
############################################
##### interesting parameters #####
############################################
# Cundall non-viscous damping
# try zero damping and watch total energy...
from __future__ import print_function
damping = 0.2
# initial angular velocity
angVel = 3.0
# use two spheres?
two_spheres =True
# sphere rotating more?
rotate_in_two_directions = True
############################################
##### material #####
############################################
import matplotlib
matplotlib.use('TkAgg')
O.materials.append(CohFrictMat(
young=3e8,
poisson=0.3,
frictionAngle=radians(30),
density=2600,
isCohesive=False,
alphaKr=0.031,
alphaKtw=0.031,
momentRotationLaw=False,
etaRoll=5.0,
label='granular_material'))
############################################
##### calculation loop #####
############################################
law=Law2_ScGeom6D_CohFrictPhys_CohesionMoment(always_use_moment_law=False)
g=9.81
O.trackEnergy=True
O.engines=[
ForceResetter(),
InsertionSortCollider([Bo1_Sphere_Aabb(),Bo1_Box_Aabb()]),
InteractionLoop(
[Ig2_Sphere_Sphere_ScGeom6D(),Ig2_Box_Sphere_ScGeom6D()],
[Ip2_CohFrictMat_CohFrictMat_CohFrictPhys()],
[law]
),
GlobalStiffnessTimeStepper(active=1,timeStepUpdateInterval=50,timestepSafetyCoefficient=.0001),
NewtonIntegrator(damping=damping,kinSplit=True,gravity=(0,0,-g)),
PyRunner(iterPeriod=20,command='myAddPlotData()')
]
O.bodies.append(box(center=[0,0,0],extents=[.5,.5,.5],fixed=True,color=[1,0,0],material='granular_material'))
O.bodies.append(sphere([0,0,2],1,color=[0,1,0],material='granular_material'))
if(two_spheres):
O.bodies.append(sphere([0,0,4],1,color=[0,1,0],material='granular_material'))
O.dt=.002*PWaveTimeStep()
O.bodies[1].state.angVel[1]=angVel
if(rotate_in_two_directions):
O.bodies[1].state.angVel[2]=angVel
############################################
##### now the part pertaining to plots #####
############################################
from yade import plot
## we will have one plot: the tracked energy contributions (normal and shear
## elastic work, translational and rotational kinetic energy, gravitational
## potential, plastic dissipation) and their totals, all as functions of time t
plot.labels={'t':'time [s]',
'normal_Work':'Normal work: W=kx^2/2',
'shear_Work':'Shear work: W=kx^2/2',
'E_kin_translation':'Translation energy: E_kin=m*V^2/2',
'E_kin_rotation':'Rotation energy: E_kin=I*$\omega$^2/2',
'E_pot':'Gravitational potential: E_pot=m*g*h',
'E_plastic':'Plastic dissipation on shearing: E_pl=F*$\Delta$F/k',
'total':'total',
'total_plus_damp':'total + damping'}
plot.plots={'t':(
('normal_Work','b-'),
('shear_Work','r-'),
('E_kin_translation','b-.'),
('E_kin_rotation','r-.'),
('E_plastic','c-'),
('E_pot','y-'),
('total','k:'),
('total_plus_damp','k-')
)}
## this function is called periodically by the PyRunner engine
## it should add data with the labels that we will plot
## if a datum is not specified (but exists), it will be NaN and will not be plotted
def myAddPlotData():
normal_Work = law.normElastEnergy()
shear_Work = law.shearElastEnergy()
E_kin_translation = 0
E_kin_rotation = 0
E_pot = 0
E_plastic = 0
E_tracker = dict(list(O.energy.items()))
if(two_spheres):## for more bodies we better use the energy tracker, because it's tracking all bodies
E_kin_translation = E_tracker['kinTrans']
E_kin_rotation = E_tracker['kinRot']
E_pot = E_tracker['gravWork']
else: ## for one sphere we can just calculate, and it will be correct
sph=O.bodies[1]
h=sph.state.pos[2]
V=sph.state.vel.norm()
w=sph.state.angVel.norm()
m=sph.state.mass
I=sph.state.inertia[0]
E_kin_translation = m*V**2.0/2.0
E_kin_rotation = I*w**2.0/2.0
E_pot = m*g*h
if('plastDissip' in E_tracker):
E_plastic = E_tracker['plastDissip']
total = normal_Work + shear_Work + E_plastic + E_kin_translation + E_kin_rotation + E_pot
total_plus_damp = 0
if(damping!=0):
total_plus_damp = total + E_tracker['nonviscDamp']
else:
total_plus_damp = total
plot.addData(
t=O.time,
normal_Work = normal_Work ,
shear_Work = shear_Work ,
E_kin_translation = E_kin_translation,
E_kin_rotation = E_kin_rotation ,
E_pot = E_pot ,
E_plastic = E_plastic ,
total = total ,
total_plus_damp = total_plus_damp ,
)
print("Now calling plot.plot() to show the figures. The timestep is artificially low so that you can watch graphs being updated live.")
plot.liveInterval=2
plot.plot(subPlots=False)
#from yade import qt
#qt.View()
O.run(int(20./O.dt));
#plot.saveGnuplot('/tmp/a')
## you can also access the data in plot.data['t'], etc, under the labels they were saved.
|
gpl-2.0
|
wangcy6/storm_app
|
frame/c++/webrtc-master/tools_webrtc/sslroots/generate_sslroots.py
|
4
|
6554
|
# -*- coding:utf-8 -*-
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""This is a tool to transform a crt file into a C/C++ header.
Usage:
generate_sslroots.py cert_file.crt [--verbose | -v] [--full_cert | -f]
Arguments:
-v Print output while running.
-f Add public key and certificate name. Default is to skip and reduce
generated file size.
"""
import commands
from optparse import OptionParser
import os
import re
import string
_GENERATED_FILE = 'sslroots.h'
_PREFIX = '__generated__'
_EXTENSION = '.crt'
_SUBJECT_NAME_ARRAY = 'subject_name'
_SUBJECT_NAME_VARIABLE = 'SubjectName'
_PUBLIC_KEY_ARRAY = 'public_key'
_PUBLIC_KEY_VARIABLE = 'PublicKey'
_CERTIFICATE_ARRAY = 'certificate'
_CERTIFICATE_VARIABLE = 'Certificate'
_CERTIFICATE_SIZE_VARIABLE = 'CertificateSize'
_INT_TYPE = 'size_t'
_CHAR_TYPE = 'const unsigned char*'
_VERBOSE = 'verbose'
def main():
"""The main entrypoint."""
parser = OptionParser('usage %prog FILE')
parser.add_option('-v', '--verbose', dest='verbose', action='store_true')
parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true')
options, args = parser.parse_args()
if len(args) < 1:
parser.error('No crt file specified.')
return
root_dir = _SplitCrt(args[0], options)
_GenCFiles(root_dir, options)
_Cleanup(root_dir)
def _SplitCrt(source_file, options):
sub_file_blocks = []
label_name = ''
root_dir = os.path.dirname(os.path.abspath(source_file)) + '/'
_PrintOutput(root_dir, options)
f = open(source_file)
for line in f:
if line.startswith('# Label: '):
sub_file_blocks.append(line)
label = re.search(r'\".*\"', line)
temp_label = label.group(0)
end = len(temp_label)-1
label_name = _SafeName(temp_label[1:end])
elif line.startswith('-----END CERTIFICATE-----'):
sub_file_blocks.append(line)
new_file_name = root_dir + _PREFIX + label_name + _EXTENSION
_PrintOutput('Generating: ' + new_file_name, options)
new_file = open(new_file_name, 'w')
for out_line in sub_file_blocks:
new_file.write(out_line)
new_file.close()
sub_file_blocks = []
else:
sub_file_blocks.append(line)
f.close()
return root_dir
def _GenCFiles(root_dir, options):
output_header_file = open(root_dir + _GENERATED_FILE, 'w')
output_header_file.write(_CreateOutputHeader())
if options.full_cert:
subject_name_list = _CreateArraySectionHeader(_SUBJECT_NAME_VARIABLE,
_CHAR_TYPE, options)
public_key_list = _CreateArraySectionHeader(_PUBLIC_KEY_VARIABLE,
_CHAR_TYPE, options)
certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE,
_CHAR_TYPE, options)
certificate_size_list = _CreateArraySectionHeader(_CERTIFICATE_SIZE_VARIABLE,
_INT_TYPE, options)
for _, _, files in os.walk(root_dir):
for current_file in files:
if current_file.startswith(_PREFIX):
prefix_length = len(_PREFIX)
length = len(current_file) - len(_EXTENSION)
label = current_file[prefix_length:length]
filtered_output, cert_size = _CreateCertSection(root_dir, current_file,
label, options)
output_header_file.write(filtered_output + '\n\n\n')
if options.full_cert:
subject_name_list += _AddLabelToArray(label, _SUBJECT_NAME_ARRAY)
public_key_list += _AddLabelToArray(label, _PUBLIC_KEY_ARRAY)
certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY)
certificate_size_list += (' %s,\n') %(cert_size)
if options.full_cert:
subject_name_list += _CreateArraySectionFooter()
output_header_file.write(subject_name_list)
public_key_list += _CreateArraySectionFooter()
output_header_file.write(public_key_list)
certificate_list += _CreateArraySectionFooter()
output_header_file.write(certificate_list)
certificate_size_list += _CreateArraySectionFooter()
output_header_file.write(certificate_size_list)
output_header_file.close()
def _Cleanup(root_dir):
for f in os.listdir(root_dir):
if f.startswith(_PREFIX):
os.remove(root_dir + f)
def _CreateCertSection(root_dir, source_file, label, options):
command = 'openssl x509 -in %s%s -noout -C' %(root_dir, source_file)
_PrintOutput(command, options)
output = commands.getstatusoutput(command)[1]
renamed_output = output.replace('unsigned char XXX_',
'const unsigned char ' + label + '_')
filtered_output = ''
cert_block = '^const unsigned char.*?};$'
prog = re.compile(cert_block, re.IGNORECASE | re.MULTILINE | re.DOTALL)
if not options.full_cert:
filtered_output = prog.sub('', renamed_output, count=2)
else:
filtered_output = renamed_output
cert_size_block = r'\d\d\d+'
prog2 = re.compile(cert_size_block, re.MULTILINE | re.VERBOSE)
result = prog2.findall(renamed_output)
cert_size = result[len(result) - 1]
return filtered_output, cert_size
def _CreateOutputHeader():
  output = ('// This file contains the root certificates, in C form, that are'
            ' needed to connect to\n// Google.\n\n'
'// It was generated with the following command line:\n'
'// > python tools/certs/generate_sslroots.py'
'\n// https://pki.google.com/roots.pem\n\n')
return output
def _CreateArraySectionHeader(type_name, type_type, options):
output = ('const %s kSSLCert%sList[] = {\n') %(type_type, type_name)
_PrintOutput(output, options)
return output
def _AddLabelToArray(label, type_name):
return ' %s_%s,\n' %(label, type_name)
def _CreateArraySectionFooter():
return '};\n\n'
def _SafeName(original_file_name):
bad_chars = ' -./\\()áéíőú'
replacement_chars = ''
for _ in bad_chars:
replacement_chars += '_'
translation_table = string.maketrans(bad_chars, replacement_chars)
return original_file_name.translate(translation_table)
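# Illustrative result of the translation above (not executed):
#   _SafeName('GeoTrust Global CA') -> 'GeoTrust_Global_CA'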
def _PrintOutput(output, options):
if options.verbose:
print output
if __name__ == '__main__':
main()
|
apache-2.0
|
esno/polartray
|
src/protobuf/act_samples_pb2.py
|
1
|
15529
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: protobuf/act_samples.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from protobuf import types_pb2 as protobuf_dot_types__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='protobuf/act_samples.proto',
package='data',
syntax='proto2',
serialized_pb=_b('\n\x1aprotobuf/act_samples.proto\x12\x04\x64\x61ta\x1a\x14protobuf/types.proto\"]\n\x0bPbSportInfo\x12\x0e\n\x06\x66\x61\x63tor\x18\x01 \x02(\x02\x12$\n\ntime_stamp\x18\x02 \x02(\x0b\x32\x10.PbLocalDateTime\x12\x18\n\x10sport_profile_id\x18\x03 \x01(\x04\"\xa6\x02\n\x0ePbActivityInfo\x12\x31\n\x05value\x18\x01 \x02(\x0e\x32\".data.PbActivityInfo.ActivityClass\x12$\n\ntime_stamp\x18\x02 \x02(\x0b\x32\x10.PbLocalDateTime\x12\x0e\n\x06\x66\x61\x63tor\x18\x03 \x01(\x02\"\xaa\x01\n\rActivityClass\x12\t\n\x05SLEEP\x10\x01\x12\r\n\tSEDENTARY\x10\x02\x12\t\n\x05LIGHT\x10\x03\x12\x17\n\x13\x43ONTINUOUS_MODERATE\x10\x04\x12\x19\n\x15INTERMITTENT_MODERATE\x10\x05\x12\x17\n\x13\x43ONTINUOUS_VIGOROUS\x10\x06\x12\x19\n\x15INTERMITTENT_VIGOROUS\x10\x07\x12\x0c\n\x08NON_WEAR\x10\x08\"?\n\x17PbInActivityTriggerInfo\x12$\n\ntime_stamp\x18\x01 \x02(\x0b\x32\x10.PbLocalDateTime\"v\n\x1ePbInActivityNonWearTriggerInfo\x12*\n\x10start_time_stamp\x18\x01 \x02(\x0b\x32\x10.PbLocalDateTime\x12(\n\x0e\x65nd_time_stamp\x18\x02 \x02(\x0b\x32\x10.PbLocalDateTime\"\x9b\x03\n\x11PbActivitySamples\x12$\n\nstart_time\x18\x01 \x02(\x0b\x32\x10.PbLocalDateTime\x12+\n\x16met_recording_interval\x18\x02 \x02(\x0b\x32\x0b.PbDuration\x12-\n\x18steps_recording_interval\x18\x03 \x02(\x0b\x32\x0b.PbDuration\x12\x13\n\x0bmet_samples\x18\x04 \x03(\x02\x12\x15\n\rsteps_samples\x18\x05 \x03(\r\x12%\n\nsport_info\x18\x06 \x03(\x0b\x32\x11.data.PbSportInfo\x12+\n\ractivity_info\x18\x07 \x03(\x0b\x32\x14.data.PbActivityInfo\x12\x39\n\x12inactivity_trigger\x18\x08 \x03(\x0b\x32\x1d.data.PbInActivityTriggerInfo\x12I\n\x1binactivity_non_wear_trigger\x18\t \x03(\x0b\x32$.data.PbInActivityNonWearTriggerInfo')
,
dependencies=[protobuf_dot_types__pb2.DESCRIPTOR,])
_PBACTIVITYINFO_ACTIVITYCLASS = _descriptor.EnumDescriptor(
name='ActivityClass',
full_name='data.PbActivityInfo.ActivityClass',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SLEEP', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SEDENTARY', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LIGHT', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONTINUOUS_MODERATE', index=3, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INTERMITTENT_MODERATE', index=4, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONTINUOUS_VIGOROUS', index=5, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INTERMITTENT_VIGOROUS', index=6, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NON_WEAR', index=7, number=8,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=278,
serialized_end=448,
)
_sym_db.RegisterEnumDescriptor(_PBACTIVITYINFO_ACTIVITYCLASS)
_PBSPORTINFO = _descriptor.Descriptor(
name='PbSportInfo',
full_name='data.PbSportInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='factor', full_name='data.PbSportInfo.factor', index=0,
number=1, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='time_stamp', full_name='data.PbSportInfo.time_stamp', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sport_profile_id', full_name='data.PbSportInfo.sport_profile_id', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=58,
serialized_end=151,
)
_PBACTIVITYINFO = _descriptor.Descriptor(
name='PbActivityInfo',
full_name='data.PbActivityInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='data.PbActivityInfo.value', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='time_stamp', full_name='data.PbActivityInfo.time_stamp', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='factor', full_name='data.PbActivityInfo.factor', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PBACTIVITYINFO_ACTIVITYCLASS,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=154,
serialized_end=448,
)
_PBINACTIVITYTRIGGERINFO = _descriptor.Descriptor(
name='PbInActivityTriggerInfo',
full_name='data.PbInActivityTriggerInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='time_stamp', full_name='data.PbInActivityTriggerInfo.time_stamp', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=450,
serialized_end=513,
)
_PBINACTIVITYNONWEARTRIGGERINFO = _descriptor.Descriptor(
name='PbInActivityNonWearTriggerInfo',
full_name='data.PbInActivityNonWearTriggerInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='start_time_stamp', full_name='data.PbInActivityNonWearTriggerInfo.start_time_stamp', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='end_time_stamp', full_name='data.PbInActivityNonWearTriggerInfo.end_time_stamp', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=515,
serialized_end=633,
)
_PBACTIVITYSAMPLES = _descriptor.Descriptor(
name='PbActivitySamples',
full_name='data.PbActivitySamples',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='start_time', full_name='data.PbActivitySamples.start_time', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='met_recording_interval', full_name='data.PbActivitySamples.met_recording_interval', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='steps_recording_interval', full_name='data.PbActivitySamples.steps_recording_interval', index=2,
number=3, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='met_samples', full_name='data.PbActivitySamples.met_samples', index=3,
number=4, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='steps_samples', full_name='data.PbActivitySamples.steps_samples', index=4,
number=5, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sport_info', full_name='data.PbActivitySamples.sport_info', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='activity_info', full_name='data.PbActivitySamples.activity_info', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inactivity_trigger', full_name='data.PbActivitySamples.inactivity_trigger', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inactivity_non_wear_trigger', full_name='data.PbActivitySamples.inactivity_non_wear_trigger', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=636,
serialized_end=1047,
)
_PBSPORTINFO.fields_by_name['time_stamp'].message_type = protobuf_dot_types__pb2._PBLOCALDATETIME
_PBACTIVITYINFO.fields_by_name['value'].enum_type = _PBACTIVITYINFO_ACTIVITYCLASS
_PBACTIVITYINFO.fields_by_name['time_stamp'].message_type = protobuf_dot_types__pb2._PBLOCALDATETIME
_PBACTIVITYINFO_ACTIVITYCLASS.containing_type = _PBACTIVITYINFO
_PBINACTIVITYTRIGGERINFO.fields_by_name['time_stamp'].message_type = protobuf_dot_types__pb2._PBLOCALDATETIME
_PBINACTIVITYNONWEARTRIGGERINFO.fields_by_name['start_time_stamp'].message_type = protobuf_dot_types__pb2._PBLOCALDATETIME
_PBINACTIVITYNONWEARTRIGGERINFO.fields_by_name['end_time_stamp'].message_type = protobuf_dot_types__pb2._PBLOCALDATETIME
_PBACTIVITYSAMPLES.fields_by_name['start_time'].message_type = protobuf_dot_types__pb2._PBLOCALDATETIME
_PBACTIVITYSAMPLES.fields_by_name['met_recording_interval'].message_type = protobuf_dot_types__pb2._PBDURATION
_PBACTIVITYSAMPLES.fields_by_name['steps_recording_interval'].message_type = protobuf_dot_types__pb2._PBDURATION
_PBACTIVITYSAMPLES.fields_by_name['sport_info'].message_type = _PBSPORTINFO
_PBACTIVITYSAMPLES.fields_by_name['activity_info'].message_type = _PBACTIVITYINFO
_PBACTIVITYSAMPLES.fields_by_name['inactivity_trigger'].message_type = _PBINACTIVITYTRIGGERINFO
_PBACTIVITYSAMPLES.fields_by_name['inactivity_non_wear_trigger'].message_type = _PBINACTIVITYNONWEARTRIGGERINFO
DESCRIPTOR.message_types_by_name['PbSportInfo'] = _PBSPORTINFO
DESCRIPTOR.message_types_by_name['PbActivityInfo'] = _PBACTIVITYINFO
DESCRIPTOR.message_types_by_name['PbInActivityTriggerInfo'] = _PBINACTIVITYTRIGGERINFO
DESCRIPTOR.message_types_by_name['PbInActivityNonWearTriggerInfo'] = _PBINACTIVITYNONWEARTRIGGERINFO
DESCRIPTOR.message_types_by_name['PbActivitySamples'] = _PBACTIVITYSAMPLES
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
PbSportInfo = _reflection.GeneratedProtocolMessageType('PbSportInfo', (_message.Message,), dict(
DESCRIPTOR = _PBSPORTINFO,
__module__ = 'protobuf.act_samples_pb2'
# @@protoc_insertion_point(class_scope:data.PbSportInfo)
))
_sym_db.RegisterMessage(PbSportInfo)
PbActivityInfo = _reflection.GeneratedProtocolMessageType('PbActivityInfo', (_message.Message,), dict(
DESCRIPTOR = _PBACTIVITYINFO,
__module__ = 'protobuf.act_samples_pb2'
# @@protoc_insertion_point(class_scope:data.PbActivityInfo)
))
_sym_db.RegisterMessage(PbActivityInfo)
PbInActivityTriggerInfo = _reflection.GeneratedProtocolMessageType('PbInActivityTriggerInfo', (_message.Message,), dict(
DESCRIPTOR = _PBINACTIVITYTRIGGERINFO,
__module__ = 'protobuf.act_samples_pb2'
# @@protoc_insertion_point(class_scope:data.PbInActivityTriggerInfo)
))
_sym_db.RegisterMessage(PbInActivityTriggerInfo)
PbInActivityNonWearTriggerInfo = _reflection.GeneratedProtocolMessageType('PbInActivityNonWearTriggerInfo', (_message.Message,), dict(
DESCRIPTOR = _PBINACTIVITYNONWEARTRIGGERINFO,
__module__ = 'protobuf.act_samples_pb2'
# @@protoc_insertion_point(class_scope:data.PbInActivityNonWearTriggerInfo)
))
_sym_db.RegisterMessage(PbInActivityNonWearTriggerInfo)
PbActivitySamples = _reflection.GeneratedProtocolMessageType('PbActivitySamples', (_message.Message,), dict(
DESCRIPTOR = _PBACTIVITYSAMPLES,
__module__ = 'protobuf.act_samples_pb2'
# @@protoc_insertion_point(class_scope:data.PbActivitySamples)
))
_sym_db.RegisterMessage(PbActivitySamples)
# @@protoc_insertion_point(module_scope)
|
mit
|
nviennot/rethinkdb
|
test/interface/dead_machine_issues.py
|
7
|
2742
|
#!/usr/bin/env python
# Copyright 2010-2012 RethinkDB, all rights reserved.
import sys, os, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, http_admin, scenario_common
from vcoptparse import *
op = OptParser()
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
with driver.Metacluster() as metacluster:
cluster = driver.Cluster(metacluster)
executable_path, command_prefix, serve_options = scenario_common.parse_mode_flags(opts)
print "Spinning up two processes..."
prince_hamlet_files = driver.Files(metacluster, machine_name = "PrinceHamlet", db_path = "prince-hamlet-db",
log_path = "prince-hamlet-create-output",
executable_path = executable_path, command_prefix = command_prefix)
prince_hamlet = driver.Process(cluster, prince_hamlet_files, log_path = "prince-hamlet-log",
executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
king_hamlet_files = driver.Files(metacluster, machine_name = "KingHamlet", db_path = "king-hamlet-db",
log_path = "king-hamlet-create-output",
executable_path = executable_path, command_prefix = command_prefix)
king_hamlet = driver.Process(cluster, king_hamlet_files, log_path = "king-hamlet-log",
executable_path = executable_path, command_prefix = command_prefix, extra_options = serve_options)
prince_hamlet.wait_until_started_up()
king_hamlet.wait_until_started_up()
cluster.check()
access = http_admin.ClusterAccess([("localhost", prince_hamlet.http_port)])
assert access.get_issues() == []
print "Killing one of them..."
king_hamlet.close()
time.sleep(1)
cluster.check()
print "Checking that the other has an issue..."
issues = access.get_issues()
assert len(issues) == 1
assert issues[0]["type"] == "MACHINE_DOWN"
print "Declaring it dead..."
access.declare_machine_dead(issues[0]["victim"])
time.sleep(1)
cluster.check()
print "Checking that the issue is gone..."
assert access.get_issues() == []
print "Bringing it back as a ghost..."
ghost_of_king_hamlet = driver.Process(cluster, king_hamlet_files, log_path = "king-hamlet-ghost-log", executable_path = executable_path, command_prefix = command_prefix)
ghost_of_king_hamlet.wait_until_started_up()
cluster.check()
print "Checking that there is an issue..."
issues = access.get_issues()
assert len(issues) == 1
assert issues[0]["type"] == "MACHINE_GHOST"
cluster.check_and_stop()
print "Done."
|
agpl-3.0
|
talha81/TACTIC-DEV
|
src/tactic/command/watch_drop_folder.py
|
1
|
17599
|
############################################################
#
# Copyright (c) 2012, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
__all__ = ['WatchDropFolderTask']
import tacticenv
import time, os, shutil, sys
import os.path
import sys
from dateutil import parser
from pyasm.common import Environment, Config, Common
from pyasm.security import Batch
from pyasm.biz import Project
from pyasm.search import DbContainer
from pyasm.search import Search, Transaction, SearchType
from pyasm.command import Command
from tactic.command import SchedulerTask, Scheduler
from time import gmtime, strftime
from optparse import OptionParser
from tactic.command import PythonCmd
import threading
import logging
logging.basicConfig(filename='/tmp/myapp.log', level=logging.INFO)
try:
from watchdog.observers import Observer
from watchdog.events import LoggingEventHandler
except:
Observer = None
LoggingEventHandler = object
class TestLoggingEventHandler(LoggingEventHandler):
"""Logs all the events captured."""
def on_moved(self, event):
super(LoggingEventHandler, self).on_moved(event)
what = 'directory' if event.is_directory else 'file'
print "Moved %s: from %s to %s" % (what, event.src_path, event.dest_path)
def on_created(self, event):
super(LoggingEventHandler, self).on_created(event)
what = 'directory' if event.is_directory else 'file'
print "Created %s: %s" % (what, event.src_path)
def on_deleted(self, event):
super(LoggingEventHandler, self).on_deleted(event)
what = 'directory' if event.is_directory else 'file'
print "Deleted %s: %s" % (what, event.src_path)
def on_modified(self, event):
super(LoggingEventHandler, self).on_modified(event)
what = 'directory' if event.is_directory else 'file'
print "Modified %s: %s" % (what, event.src_path)
class WatchFolderFileActionThread(threading.Thread):
def __init__(my, **kwargs):
my.kwargs = kwargs
super(WatchFolderFileActionThread, my).__init__()
def run(my):
Batch()
try:
my._run()
finally:
task = my.kwargs.get("task")
paths = task.get_paths()
for path in paths:
checkin_path = "%s.lock" % path
if os.path.exists(checkin_path):
os.unlink(checkin_path)
def _run(my):
task = my.kwargs.get("task")
paths = task.get_paths()
count = 0
restart = False
while True:
if not paths:
time.sleep(1)
continue
path = paths.pop(0)
checkin_path = "%s.checkin" % path
error_path = "%s.error" % path
if not os.path.exists(checkin_path):
print "ERROR: no lock path [%s]" % checkin_path
continue
try:
kwargs = {
"project_code": task.project_code,
"search_type": task.search_type,
"base_dir": task.base_dir,
"process": task.process,
"script_path":task.script_path,
"path": path
}
# create a "custom" command that will act on the file
cmd = CustomCmd(
**kwargs
)
cmd.execute()
# TEST
#time.sleep(1)
#if os.path.exists(path):
# os.unlink(path)
count += 1
if count > 50:
restart = True
break
except Exception, e:
print "Error: ", e
f = open(error_path,"w")
f.write(str(e))
f.close()
#raise
finally:
os.unlink(checkin_path)
# restart after 50 check-ins
if restart:
for path in paths:
checkin_path = "%s.checkin" % path
if os.path.exists(checkin_path):
os.unlink(checkin_path)
Common.restart()
class WatchFolderCheckFileThread(threading.Thread):
def __init__(my, **kwargs):
my.kwargs = kwargs
super(WatchFolderCheckFileThread, my).__init__()
path = my.kwargs.get("path")
my.lock_path = "%s.lock" % path
my.error_path = "%s.error" % path
my.checkin_path = "%s.checkin" % path
def run(my):
try:
path = my.kwargs.get("path")
if os.path.exists(my.lock_path):
return
f = open(my.lock_path, "w")
f.close()
changed = my.verify_file_size(path)
if changed:
if os.path.exists(my.lock_path):
os.unlink(my.lock_path)
return
f = open(my.checkin_path, "w")
f.close()
task = my.kwargs.get("task")
task.add_path(path)
except Exception, e:
print "Error: ", e
f = open(my.error_path, "w")
f.write(str(e))
f.close()
raise
finally:
if os.path.exists(my.lock_path):
os.unlink(my.lock_path)
def verify_file_size(my, file_path):
'''Check if the file size or mtime changes over several polling passes (5 checks, 2 seconds apart). If so, the file is not ready'''
# assume nothing has changed
changed = False
# Check whether the file_path still exists. If it has already been removed, treat it as changed (not ready).
if not os.path.exists(file_path):
return True
for i in range(0, 5):
file_size = os.path.getsize(file_path)
mtime = os.path.getmtime(file_path)
#print "file_size: ", file_size
#print "mtime: ", mtime
time.sleep(2)
if not os.path.exists(file_path):
changed = True
break
file_size2 = os.path.getsize(file_path)
mtime2 = os.path.getmtime(file_path)
#print "file_size2: ", file_size2
#print "mtime2: ", mtime2
#print
if file_size != file_size2:
changed = True
break
if mtime != mtime2:
changed = True
break
return changed
class CustomCmd(object):
def __init__(my, **kwargs):
my.kwargs = kwargs
def is_image(my, file_name):
base, ext = os.path.splitext(file_name)
ext = ext.lstrip(".").lower()
if ext in ['tif','tiff','jpg','jpeg','png','pic','bmp','gif','psd']:
return True
else:
return False
def is_movie(my, file_name):
base, ext = os.path.splitext(file_name)
ext = ext.lstrip(".").lower()
if ext in ['mov','wmv','mpg','mpeg','m1v','mp2','mpa','mpe','mp4','wma','asf','asx','avi','wax','wm','wvx']:
return True
else:
return False
def get_asset_type(my, file_path):
if my.is_movie(file_path):
return 'movie'
elif my.is_image(file_path):
return 'image'
else:
return 'other'
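# Illustrative results (not executed; file names are placeholders):
#   get_asset_type('clip.MOV')  -> 'movie'
#   get_asset_type('frame.png') -> 'image'
#   get_asset_type('notes.txt') -> 'other'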
def create_checkin_log(my):
base_dir = my.kwargs.get("base_dir")
log_path = '%s/TACTIC_log.txt' %(base_dir)
if not (os.path.isfile(log_path)):
file = open(log_path, 'w')
file.close()
title='File Name'+40*' '+'Checkin-Time'+20*' '+'Version#'+6*' ' +'Message\n'
f = open(log_path, 'a')
f.write(title)
f.close()
def execute(my):
file_path = my.kwargs.get("path")
project_code = my.kwargs.get("project_code")
base_dir = my.kwargs.get("base_dir")
search_type = my.kwargs.get("search_type")
process = my.kwargs.get("process")
watch_script_path = my.kwargs.get("script_path")
if not process:
process = "publish"
basename = os.path.basename(file_path)
context = my.kwargs.get("context")
if not context:
context = '%s/%s' % (process, basename)
# find the relative_dir and relative_path
relative_path = file_path.replace("%s/" % base_dir, "")
relative_dir = os.path.dirname(relative_path)
file_name = os.path.basename(file_path)
log_path = '%s/TACTIC_log.txt' %(base_dir)
my.create_checkin_log()
# Define asset type of the file
asset_type = my.get_asset_type(file_path)
description = "drop folder check-in of %s" %file_name
from client.tactic_client_lib import TacticServerStub
server = TacticServerStub.get(protocol='local')
server.set_project(project_code)
transaction = Transaction.get(create=True)
server.start(title='Check-in of media', description='Check-in of media')
server_return_value = {}
try:
filters = [
[ 'name', '=', file_name ],
#[ 'relative_dir', '=', relative_dir ]
]
sobj = server.query(search_type, filters=filters, single=True)
if not sobj:
# create sobject if it does not yet exist
sobj = SearchType.create(search_type)
if SearchType.column_exists(search_type, "name"):
sobj.set_value("name", basename)
if SearchType.column_exists(search_type, "media_type"):
sobj.set_value("media_type", asset_type)
if SearchType.column_exists(search_type, "relative_dir"):
sobj.set_value("relative_dir", relative_dir)
if SearchType.column_exists(search_type, "keywords"):
relative_path = relative_path
keywords = Common.get_keywords_from_path(relative_path)
keywords = " ".join( keywords )
sobj.set_value("keywords", keywords)
sobj.commit()
search_key = sobj.get_search_key()
else:
search_key = sobj.get("__search_key__")
#task = server.create_task(sobj.get('__search_key__'),process='publish')
#server.update(task, {'status': 'New'})
server_return_value = server.simple_checkin(search_key, context, file_path, description=description, mode='copy')
cmd = PythonCmd(script_path=watch_script_path,search_type=search_type,file_path=file_path,search_key=search_key)
cmd.execute()
except Exception, e:
print "Error occurred", e
error_message=str(e)
version_num='Error:'
system_time=strftime("%Y/%m/%d %H:%M", gmtime())
pre_log=file_name+(50-len(file_name))*' '+system_time+(33-len(system_time))*' '+version_num+(15-len(version_num))*' ' +error_message+'\n'
# Write data into TACTIC_log file under /tmp/drop
f = open(log_path, 'a')
f.write(pre_log)
f.close()
#server.abort()
transaction.rollback()
raise
else:
transaction.commit()
#server.finish()
if server_return_value:
# Create the TACTIC_log file to record every check-in.
# Search for all required data
checkin_time=server_return_value.get('timestamp')
version_nu=server_return_value.get('version')
version_num=str(version_nu)
try:
value = parser.parse(checkin_time)
value = value.strftime("%Y/%m/%d %H:%M")
except:
value = checkin_time
pre_log=file_name+(50-len(file_name))*' '+value+(33-len(value))*' '+version_num+(15-len(version_num))*' ' +'ok\n'
# Write data into TACTIC_log file under /tmp/drop
f = open(log_path, 'a')
f.write(pre_log)
f.close()
# Delete the source file after the check-in step.
print "File checked in. Source file [%s] deleted." % file_name
os.unlink(file_path)
class WatchDropFolderTask(SchedulerTask):
def __init__(my, **kwargs):
my.base_dir = kwargs.get("base_dir")
my.project_code = kwargs.get("project_code")
my.search_type = kwargs.get("search_type")
my.process = kwargs.get("process")
my.script_path = kwargs.get("script_path")
super(WatchDropFolderTask, my).__init__()
my.checkin_paths = []
def add_path(my, path):
my.checkin_paths.append(path)
def get_paths(my):
return my.checkin_paths
def _execute(my):
base_dir = my.base_dir
if not os.path.exists(base_dir):
os.makedirs(base_dir)
dirs = os.listdir(base_dir)
test_dirs = dirs[:]
for dirname in test_dirs:
base, ext = os.path.splitext(dirname)
if ext in [".lock", ".error", ".checkin"]:
dirs.remove(dirname)
try:
dirs.remove(base)
except:
pass
if not dirs:
return
# skip certain files like log
dir_set = set(dirs)
for dirname in dirs:
if dirname.startswith("TACTIC_log"):
dir_set.remove(dirname)
if dirname.startswith("."):
dir_set.remove(dirname)
dirs = list(dir_set)
if not dirs:
return
#print "Found new: ", dirs
# go thru the list to check each file
for file_name in dirs:
file_path = '%s/%s' %(my.base_dir, file_name)
thread = WatchFolderCheckFileThread(
task=my,
path=file_path
)
thread.daemon = True
thread.start()
def execute(my):
base_dir = my.base_dir
if not base_dir:
print "WARNING: No base dir defined."
return
# Start check-in thread
checkin = WatchFolderFileActionThread(
task=my,
)
checkin.start()
# execute and react based on a loop every second
mode = "loop"
if mode == "loop":
while True:
my._execute()
time.sleep(1)
elif mode == "event":
try:
event_handler = TestLoggingEventHandler()
observer = Observer()
print "base: ", my.base_dir
path = my.base_dir
observer.schedule(event_handler, path=path, recursive=True)
observer.start()
except Exception, e:
print "... skipping: ", e
raise
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
# only a single Observer instance is created above
observer.stop()
def start(cls):
print "Running Watch Folder ..."
# Check whether the user defined the drop folder path.
# Default drop folder path: /tmp/drop
parser = OptionParser()
parser.add_option("-p", "--project", dest="project", help="Define the project_name.")
parser.add_option("-d", "--drop_path", dest="drop_path", help="Define drop folder path")
parser.add_option("-s", "--search_type", dest="search_type", help="Define search_type.")
parser.add_option("-P", "--process", dest="process", help="Define process.")
parser.add_option("-S", "--script_path",dest="script_path", help="Define script_path.")
(options, args) = parser.parse_args()
if options.project != None :
project_code= options.project
else:
project_code= 'jobs'
if options.drop_path!=None :
drop_path= options.drop_path
else:
tmp_dir = Environment.get_tmp_dir()
drop_path = "%s/drop" % tmp_dir
print " using [%s]" % drop_path
if not os.path.exists(drop_path):
os.makedirs(drop_path)
if options.search_type!=None :
search_type = options.search_type
else:
search_type = 'jobs/media'
if options.process!=None :
process = options.process
else:
process= 'publish'
if options.script_path!=None :
script_path = options.script_path
else:
script_path="None"
task = WatchDropFolderTask(base_dir=drop_path, project_code=project_code,search_type=search_type, process=process,script_path=script_path)
scheduler = Scheduler.get()
scheduler.add_single_task(task, delay=1)
scheduler.start_thread()
return scheduler
start = classmethod(start)
if __name__ == '__main__':
Batch()
WatchDropFolderTask.start()
while 1:
try:
time.sleep(15)
except (KeyboardInterrupt, SystemExit), e:
scheduler = Scheduler.get()
scheduler.stop()
break
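# Illustrative usage sketch (assumed invocation, not taken from this script):
# the watcher can be launched with the options parsed in
# WatchDropFolderTask.start(); the script name, project code, paths and
# search type below are hypothetical examples.
#
#     python watch_drop_folder.py -p jobs -d /tmp/drop -s jobs/media \
#         -P publish -S /path/to/post_checkin_script.py
#
# Files copied into the drop folder are then checked in automatically and
# each result is appended to <drop_folder>/TACTIC_log.txt.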
|
epl-1.0
|
florian-dacosta/OCB
|
addons/hr_payroll_account/hr_payroll_account.py
|
240
|
10840
|
#-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from datetime import date, datetime, timedelta
from openerp.osv import fields, osv
from openerp.tools import float_compare, float_is_zero
from openerp.tools.translate import _
class hr_payslip(osv.osv):
'''
Pay Slip
'''
_inherit = 'hr.payslip'
_description = 'Pay Slip'
_columns = {
'period_id': fields.many2one('account.period', 'Force Period',states={'draft': [('readonly', False)]}, readonly=True, domain=[('state','<>','done')], help="Keep empty to use the period of the validation(Payslip) date."),
'journal_id': fields.many2one('account.journal', 'Salary Journal',states={'draft': [('readonly', False)]}, readonly=True, required=True),
'move_id': fields.many2one('account.move', 'Accounting Entry', readonly=True, copy=False),
}
def _get_default_journal(self, cr, uid, context=None):
model_data = self.pool.get('ir.model.data')
res = model_data.search(cr, uid, [('name', '=', 'expenses_journal')])
if res:
return model_data.browse(cr, uid, res[0]).res_id
return False
_defaults = {
'journal_id': _get_default_journal,
}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if 'journal_id' in context:
vals.update({'journal_id': context.get('journal_id')})
return super(hr_payslip, self).create(cr, uid, vals, context=context)
def onchange_contract_id(self, cr, uid, ids, date_from, date_to, employee_id=False, contract_id=False, context=None):
contract_obj = self.pool.get('hr.contract')
res = super(hr_payslip, self).onchange_contract_id(cr, uid, ids, date_from=date_from, date_to=date_to, employee_id=employee_id, contract_id=contract_id, context=context)
journal_id = contract_id and contract_obj.browse(cr, uid, contract_id, context=context).journal_id.id or False
res['value'].update({'journal_id': journal_id})
return res
def cancel_sheet(self, cr, uid, ids, context=None):
move_pool = self.pool.get('account.move')
move_ids = []
move_to_cancel = []
for slip in self.browse(cr, uid, ids, context=context):
if slip.move_id:
move_ids.append(slip.move_id.id)
if slip.move_id.state == 'posted':
move_to_cancel.append(slip.move_id.id)
move_pool.button_cancel(cr, uid, move_to_cancel, context=context)
move_pool.unlink(cr, uid, move_ids, context=context)
return super(hr_payslip, self).cancel_sheet(cr, uid, ids, context=context)
def process_sheet(self, cr, uid, ids, context=None):
move_pool = self.pool.get('account.move')
period_pool = self.pool.get('account.period')
precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Payroll')
timenow = time.strftime('%Y-%m-%d')
for slip in self.browse(cr, uid, ids, context=context):
line_ids = []
debit_sum = 0.0
credit_sum = 0.0
if not slip.period_id:
search_periods = period_pool.find(cr, uid, slip.date_to, context=context)
period_id = search_periods[0]
else:
period_id = slip.period_id.id
default_partner_id = slip.employee_id.address_home_id.id
name = _('Payslip of %s') % (slip.employee_id.name)
move = {
'narration': name,
'date': timenow,
'ref': slip.number,
'journal_id': slip.journal_id.id,
'period_id': period_id,
}
for line in slip.details_by_salary_rule_category:
amt = slip.credit_note and -line.total or line.total
if float_is_zero(amt, precision_digits=precision):
continue
partner_id = line.salary_rule_id.register_id.partner_id and line.salary_rule_id.register_id.partner_id.id or default_partner_id
debit_account_id = line.salary_rule_id.account_debit.id
credit_account_id = line.salary_rule_id.account_credit.id
if debit_account_id:
debit_line = (0, 0, {
'name': line.name,
'date': timenow,
'partner_id': (line.salary_rule_id.register_id.partner_id or line.salary_rule_id.account_debit.type in ('receivable', 'payable')) and partner_id or False,
'account_id': debit_account_id,
'journal_id': slip.journal_id.id,
'period_id': period_id,
'debit': amt > 0.0 and amt or 0.0,
'credit': amt < 0.0 and -amt or 0.0,
'analytic_account_id': line.salary_rule_id.analytic_account_id and line.salary_rule_id.analytic_account_id.id or False,
'tax_code_id': line.salary_rule_id.account_tax_id and line.salary_rule_id.account_tax_id.id or False,
'tax_amount': line.salary_rule_id.account_tax_id and amt or 0.0,
})
line_ids.append(debit_line)
debit_sum += debit_line[2]['debit'] - debit_line[2]['credit']
if credit_account_id:
credit_line = (0, 0, {
'name': line.name,
'date': timenow,
'partner_id': (line.salary_rule_id.register_id.partner_id or line.salary_rule_id.account_credit.type in ('receivable', 'payable')) and partner_id or False,
'account_id': credit_account_id,
'journal_id': slip.journal_id.id,
'period_id': period_id,
'debit': amt < 0.0 and -amt or 0.0,
'credit': amt > 0.0 and amt or 0.0,
'analytic_account_id': line.salary_rule_id.analytic_account_id and line.salary_rule_id.analytic_account_id.id or False,
'tax_code_id': line.salary_rule_id.account_tax_id and line.salary_rule_id.account_tax_id.id or False,
'tax_amount': line.salary_rule_id.account_tax_id and amt or 0.0,
})
line_ids.append(credit_line)
credit_sum += credit_line[2]['credit'] - credit_line[2]['debit']
if float_compare(credit_sum, debit_sum, precision_digits=precision) == -1:
acc_id = slip.journal_id.default_credit_account_id.id
if not acc_id:
raise osv.except_osv(_('Configuration Error!'),_('The Expense Journal "%s" has not properly configured the Credit Account!')%(slip.journal_id.name))
adjust_credit = (0, 0, {
'name': _('Adjustment Entry'),
'date': timenow,
'partner_id': False,
'account_id': acc_id,
'journal_id': slip.journal_id.id,
'period_id': period_id,
'debit': 0.0,
'credit': debit_sum - credit_sum,
})
line_ids.append(adjust_credit)
elif float_compare(debit_sum, credit_sum, precision_digits=precision) == -1:
acc_id = slip.journal_id.default_debit_account_id.id
if not acc_id:
raise osv.except_osv(_('Configuration Error!'),_('The Expense Journal "%s" has not properly configured the Debit Account!')%(slip.journal_id.name))
adjust_debit = (0, 0, {
'name': _('Adjustment Entry'),
'date': timenow,
'partner_id': False,
'account_id': acc_id,
'journal_id': slip.journal_id.id,
'period_id': period_id,
'debit': credit_sum - debit_sum,
'credit': 0.0,
})
line_ids.append(adjust_debit)
move.update({'line_id': line_ids})
move_id = move_pool.create(cr, uid, move, context=context)
self.write(cr, uid, [slip.id], {'move_id': move_id, 'period_id' : period_id}, context=context)
if slip.journal_id.entry_posted:
move_pool.post(cr, uid, [move_id], context=context)
return super(hr_payslip, self).process_sheet(cr, uid, [slip.id], context=context)
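# Note on the structure built above (illustrative, with hypothetical values):
# every entry appended to line_ids is an OpenERP one2many "create" triple
# (0, 0, values), so move_pool.create() writes the account.move together with
# all of its account.move.line records in a single call, e.g.
#
#     (0, 0, {'name': 'Basic Salary', 'account_id': 42, 'journal_id': 7,
#             'period_id': 3, 'debit': 1000.0, 'credit': 0.0})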
class hr_salary_rule(osv.osv):
_inherit = 'hr.salary.rule'
_columns = {
'analytic_account_id':fields.many2one('account.analytic.account', 'Analytic Account'),
'account_tax_id':fields.many2one('account.tax.code', 'Tax Code'),
'account_debit': fields.many2one('account.account', 'Debit Account'),
'account_credit': fields.many2one('account.account', 'Credit Account'),
}
class hr_contract(osv.osv):
_inherit = 'hr.contract'
_description = 'Employee Contract'
_columns = {
'analytic_account_id':fields.many2one('account.analytic.account', 'Analytic Account'),
'journal_id': fields.many2one('account.journal', 'Salary Journal'),
}
class hr_payslip_run(osv.osv):
_inherit = 'hr.payslip.run'
_description = 'Payslip Run'
_columns = {
'journal_id': fields.many2one('account.journal', 'Salary Journal', states={'draft': [('readonly', False)]}, readonly=True, required=True),
}
def _get_default_journal(self, cr, uid, context=None):
model_data = self.pool.get('ir.model.data')
res = model_data.search(cr, uid, [('name', '=', 'expenses_journal')])
if res:
return model_data.browse(cr, uid, res[0]).res_id
return False
_defaults = {
'journal_id': _get_default_journal,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
lexyan/SickBeard
|
lib/hachoir_metadata/filter.py
|
90
|
1668
|
from lib.hachoir_metadata.timezone import UTC
from datetime import date, datetime
# Year in 1850..2030
MIN_YEAR = 1850
MAX_YEAR = 2030
class Filter:
def __init__(self, valid_types, min=None, max=None):
self.types = valid_types
self.min = min
self.max = max
def __call__(self, value):
if not isinstance(value, self.types):
return True
if self.min is not None and value < self.min:
return False
if self.max is not None and self.max < value:
return False
return True
class NumberFilter(Filter):
def __init__(self, min=None, max=None):
Filter.__init__(self, (int, long, float), min, max)
class DatetimeFilter(Filter):
def __init__(self, min=None, max=None):
Filter.__init__(self, (date, datetime),
datetime(MIN_YEAR, 1, 1),
datetime(MAX_YEAR, 12, 31))
self.min_date = date(MIN_YEAR, 1, 1)
self.max_date = date(MAX_YEAR, 12, 31)
self.min_tz = datetime(MIN_YEAR, 1, 1, tzinfo=UTC)
self.max_tz = datetime(MAX_YEAR, 12, 31, tzinfo=UTC)
def __call__(self, value):
"""
Use different min/max values depending on value type
(datetime with timezone, datetime or date).
"""
if not isinstance(value, self.types):
return True
if hasattr(value, "tzinfo") and value.tzinfo:
return (self.min_tz <= value <= self.max_tz)
elif isinstance(value, datetime):
return (self.min <= value <= self.max)
else:
return (self.min_date <= value <= self.max_date)
DATETIME_FILTER = DatetimeFilter()
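# Illustrative usage sketch (sample values are arbitrary, not from the module):
#
#     year_filter = NumberFilter(min=MIN_YEAR, max=MAX_YEAR)
#     year_filter(1999)        # -> True, value inside the allowed range
#     year_filter(1700)        # -> False, below the minimum
#     year_filter("1999")      # -> True, non-numeric values are not filtered
#     DATETIME_FILTER(datetime(1990, 5, 1))   # -> True, within 1850..2030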
|
gpl-3.0
|
xyb994/housing
|
ssu_housing/contrib/sites/migrations/0003_set_site_domain_and_name.py
|
1
|
1162
|
"""
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'ssuhousing.com',
'name': 'Salem State Project'
}
)
def update_site_backward(apps, schema_editor):
"""Revert site domain and name to default."""
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'ssuhousing.com',
'name': 'ssuhousing.com'
}
)
class Migration(migrations.Migration):
dependencies = [
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.RunPython(update_site_forward, update_site_backward),
]
|
mit
|
diego-d5000/MisValesMd
|
env/lib/python2.7/site-packages/django/contrib/gis/geos/geometry.py
|
1
|
25383
|
"""
This module contains the 'base' GEOSGeometry object -- all GEOS Geometries
inherit from this object.
"""
from __future__ import unicode_literals
# Python, ctypes and types dependencies.
from ctypes import addressof, byref, c_double
from django.contrib.gis.gdal.error import SRSException
from django.contrib.gis.geometry.regex import hex_regex, json_regex, wkt_regex
from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.base import GEOSBase, gdal
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
from django.contrib.gis.geos.libgeos import GEOM_PTR
from django.contrib.gis.geos.mutable_list import ListMixin
# These functions provide access to a thread-local instance
# of their corresponding GEOS I/O class.
from django.contrib.gis.geos.prototypes.io import (
ewkb_w, wkb_r, wkb_w, wkt_r, wkt_w,
)
from django.utils import six
from django.utils.encoding import force_bytes, force_text
class GEOSGeometry(GEOSBase, ListMixin):
"A class that, generally, encapsulates a GEOS geometry."
# Raise GEOSIndexError instead of plain IndexError
# (see ticket #4740 and GEOSIndexError docstring)
_IndexError = GEOSIndexError
ptr_type = GEOM_PTR
def __init__(self, geo_input, srid=None):
"""
The base constructor for GEOS geometry objects. It may take the
following inputs:
* strings:
- WKT
- HEXEWKB (a PostGIS-specific canonical form)
- GeoJSON (requires GDAL)
* buffer:
- WKB
The `srid` keyword is used to specify the Spatial Reference Identifier
(SRID) number for this Geometry. If not set, the SRID will be None.
"""
if isinstance(geo_input, bytes):
geo_input = force_text(geo_input)
if isinstance(geo_input, six.string_types):
wkt_m = wkt_regex.match(geo_input)
if wkt_m:
# Handling WKT input.
if wkt_m.group('srid'):
srid = int(wkt_m.group('srid'))
g = wkt_r().read(force_bytes(wkt_m.group('wkt')))
elif hex_regex.match(geo_input):
# Handling HEXEWKB input.
g = wkb_r().read(force_bytes(geo_input))
elif json_regex.match(geo_input):
# Handling GeoJSON input.
if not gdal.HAS_GDAL:
raise ValueError('Initializing geometry from JSON input requires GDAL.')
g = wkb_r().read(gdal.OGRGeometry(geo_input).wkb)
else:
raise ValueError('String or unicode input unrecognized as WKT EWKT, and HEXEWKB.')
elif isinstance(geo_input, GEOM_PTR):
# When the input is a pointer to a geometry (GEOM_PTR).
g = geo_input
elif isinstance(geo_input, six.memoryview):
# When the input is a buffer (WKB).
g = wkb_r().read(geo_input)
elif isinstance(geo_input, GEOSGeometry):
g = capi.geom_clone(geo_input.ptr)
else:
# Invalid geometry type.
raise TypeError('Improper geometry input type: %s' % str(type(geo_input)))
if g:
# Setting the pointer object with a valid pointer.
self.ptr = g
else:
raise GEOSException('Could not initialize GEOS Geometry with given input.')
# Post-initialization setup.
self._post_init(srid)
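# Illustrative constructor usage (coordinates and SRID are arbitrary examples,
# not taken from this module):
#
#     pnt = GEOSGeometry('POINT(5 23)')               # WKT
#     pnt = GEOSGeometry('SRID=4326;POINT(5 23)')     # EWKT, sets srid=4326
#     pnt = GEOSGeometry('{"type": "Point", "coordinates": [5.0, 23.0]}')  # GeoJSON, needs GDAL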
def _post_init(self, srid):
"Helper routine for performing post-initialization setup."
# Setting the SRID, if given.
if srid and isinstance(srid, int):
self.srid = srid
# Setting the class type (e.g., Point, Polygon, etc.)
self.__class__ = GEOS_CLASSES[self.geom_typeid]
# Setting the coordinate sequence for the geometry (will be None on
# geometries that do not have coordinate sequences)
self._set_cs()
def __del__(self):
"""
Destroys this Geometry; in other words, frees the memory used by the
GEOS C++ object.
"""
if self._ptr and capi:
capi.destroy_geom(self._ptr)
def __copy__(self):
"""
Returns a clone because the copy of a GEOSGeometry may contain an
invalid pointer location if the original is garbage collected.
"""
return self.clone()
def __deepcopy__(self, memodict):
"""
The `deepcopy` routine is used by the `Node` class of django.utils.tree;
thus, the protocol routine needs to be implemented to return correct
copies (clones) of these GEOS objects, which use C pointers.
"""
return self.clone()
def __str__(self):
"EWKT is used for the string representation."
return self.ewkt
def __repr__(self):
"Short-hand representation because WKT may be very large."
return '<%s object at %s>' % (self.geom_type, hex(addressof(self.ptr)))
# Pickling support
def __getstate__(self):
# The pickled state is simply a tuple of the WKB (in string form)
# and the SRID.
return bytes(self.wkb), self.srid
def __setstate__(self, state):
# Instantiating from the tuple state that was pickled.
wkb, srid = state
ptr = wkb_r().read(six.memoryview(wkb))
if not ptr:
raise GEOSException('Invalid Geometry loaded from pickled state.')
self.ptr = ptr
self._post_init(srid)
# Comparison operators
def __eq__(self, other):
"""
Equivalence testing, a Geometry may be compared with another Geometry
or a WKT representation.
"""
if isinstance(other, six.string_types):
return self.wkt == other
elif isinstance(other, GEOSGeometry):
return self.equals_exact(other)
else:
return False
def __ne__(self, other):
"The not equals operator."
return not (self == other)
# ### Geometry set-like operations ###
# Thanks to Sean Gillies for inspiration:
# http://lists.gispython.org/pipermail/community/2007-July/001034.html
# g = g1 | g2
def __or__(self, other):
"Returns the union of this Geometry and the other."
return self.union(other)
# g = g1 & g2
def __and__(self, other):
"Returns the intersection of this Geometry and the other."
return self.intersection(other)
# g = g1 - g2
def __sub__(self, other):
"Return the difference this Geometry and the other."
return self.difference(other)
# g = g1 ^ g2
def __xor__(self, other):
"Return the symmetric difference of this Geometry and the other."
return self.sym_difference(other)
# #### Coordinate Sequence Routines ####
@property
def has_cs(self):
"Returns True if this Geometry has a coordinate sequence, False if not."
# Only these geometries are allowed to have coordinate sequences.
if isinstance(self, (Point, LineString, LinearRing)):
return True
else:
return False
def _set_cs(self):
"Sets the coordinate sequence for this Geometry."
if self.has_cs:
self._cs = GEOSCoordSeq(capi.get_cs(self.ptr), self.hasz)
else:
self._cs = None
@property
def coord_seq(self):
"Returns a clone of the coordinate sequence for this Geometry."
if self.has_cs:
return self._cs.clone()
# #### Geometry Info ####
@property
def geom_type(self):
"Returns a string representing the Geometry type, e.g. 'Polygon'"
return capi.geos_type(self.ptr).decode()
@property
def geom_typeid(self):
"Returns an integer representing the Geometry type."
return capi.geos_typeid(self.ptr)
@property
def num_geom(self):
"Returns the number of geometries in the Geometry."
return capi.get_num_geoms(self.ptr)
@property
def num_coords(self):
"Returns the number of coordinates in the Geometry."
return capi.get_num_coords(self.ptr)
@property
def num_points(self):
"Returns the number points, or coordinates, in the Geometry."
return self.num_coords
@property
def dims(self):
"Returns the dimension of this Geometry (0=point, 1=line, 2=surface)."
return capi.get_dims(self.ptr)
def normalize(self):
"Converts this Geometry to normal form (or canonical form)."
return capi.geos_normalize(self.ptr)
# #### Unary predicates ####
@property
def empty(self):
"""
Returns a boolean indicating whether the set of points in this Geometry
are empty.
"""
return capi.geos_isempty(self.ptr)
@property
def hasz(self):
"Returns whether the geometry has a 3D dimension."
return capi.geos_hasz(self.ptr)
@property
def ring(self):
"Returns whether or not the geometry is a ring."
return capi.geos_isring(self.ptr)
@property
def simple(self):
"Returns false if the Geometry not simple."
return capi.geos_issimple(self.ptr)
@property
def valid(self):
"This property tests the validity of this Geometry."
return capi.geos_isvalid(self.ptr)
@property
def valid_reason(self):
"""
Returns a string containing the reason for any invalidity.
"""
return capi.geos_isvalidreason(self.ptr).decode()
# #### Binary predicates. ####
def contains(self, other):
"Returns true if other.within(this) returns true."
return capi.geos_contains(self.ptr, other.ptr)
def crosses(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*T****** (for a point and a curve, a point and an area or a line and
an area) or 0******** (for two curves).
"""
return capi.geos_crosses(self.ptr, other.ptr)
def disjoint(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is FF*FF****.
"""
return capi.geos_disjoint(self.ptr, other.ptr)
def equals(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*F**FFF*.
"""
return capi.geos_equals(self.ptr, other.ptr)
def equals_exact(self, other, tolerance=0):
"""
Returns true if the two Geometries are exactly equal, up to a
specified tolerance.
"""
return capi.geos_equalsexact(self.ptr, other.ptr, float(tolerance))
def intersects(self, other):
"Returns true if disjoint returns false."
return capi.geos_intersects(self.ptr, other.ptr)
def overlaps(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*T***T** (for two points or two surfaces) 1*T***T** (for two curves).
"""
return capi.geos_overlaps(self.ptr, other.ptr)
def relate_pattern(self, other, pattern):
"""
Returns true if the elements in the DE-9IM intersection matrix for the
two Geometries match the elements in pattern.
"""
if not isinstance(pattern, six.string_types) or len(pattern) > 9:
raise GEOSException('invalid intersection matrix pattern')
return capi.geos_relatepattern(self.ptr, other.ptr, force_bytes(pattern))
def touches(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is FT*******, F**T***** or F***T****.
"""
return capi.geos_touches(self.ptr, other.ptr)
def within(self, other):
"""
Returns true if the DE-9IM intersection matrix for the two Geometries
is T*F**F***.
"""
return capi.geos_within(self.ptr, other.ptr)
# #### SRID Routines ####
def get_srid(self):
"Gets the SRID for the geometry, returns None if no SRID is set."
s = capi.geos_get_srid(self.ptr)
if s == 0:
return None
else:
return s
def set_srid(self, srid):
"Sets the SRID for the geometry."
capi.geos_set_srid(self.ptr, srid)
srid = property(get_srid, set_srid)
# #### Output Routines ####
@property
def ewkt(self):
"""
Returns the EWKT (SRID + WKT) of the Geometry. Note that Z values
are only included in this representation if GEOS >= 3.3.0.
"""
if self.get_srid():
return 'SRID=%s;%s' % (self.srid, self.wkt)
else:
return self.wkt
@property
def wkt(self):
"Returns the WKT (Well-Known Text) representation of this Geometry."
return wkt_w(3 if self.hasz else 2).write(self).decode()
@property
def hex(self):
"""
Returns the WKB of this Geometry in hexadecimal form. Please note
that the SRID is not included in this representation because it is not
a part of the OGC specification (use the `hexewkb` property instead).
"""
# A possible faster, all-python, implementation:
# str(self.wkb).encode('hex')
return wkb_w(3 if self.hasz else 2).write_hex(self)
@property
def hexewkb(self):
"""
Returns the EWKB of this Geometry in hexadecimal form. This is an
extension of the WKB specification that includes the SRID value that is
a part of this geometry.
"""
return ewkb_w(3 if self.hasz else 2).write_hex(self)
@property
def json(self):
"""
Returns GeoJSON representation of this Geometry if GDAL is installed.
"""
if gdal.HAS_GDAL:
return self.ogr.json
else:
raise GEOSException('GeoJSON output only supported when GDAL is installed.')
geojson = json
@property
def wkb(self):
"""
Returns the WKB (Well-Known Binary) representation of this Geometry
as a Python buffer. SRID and Z values are not included, use the
`ewkb` property instead.
"""
return wkb_w(3 if self.hasz else 2).write(self)
@property
def ewkb(self):
"""
Return the EWKB representation of this Geometry as a Python buffer.
This is an extension of the WKB specification that includes any SRID
value that is a part of this geometry.
"""
return ewkb_w(3 if self.hasz else 2).write(self)
@property
def kml(self):
"Returns the KML representation of this Geometry."
gtype = self.geom_type
return '<%s>%s</%s>' % (gtype, self.coord_seq.kml, gtype)
@property
def prepared(self):
"""
Returns a PreparedGeometry corresponding to this geometry -- it is
optimized for the contains, intersects, and covers operations.
"""
return PreparedGeometry(self)
# #### GDAL-specific output routines ####
@property
def ogr(self):
"Returns the OGR Geometry for this Geometry."
if not gdal.HAS_GDAL:
raise GEOSException('GDAL required to convert to an OGRGeometry.')
if self.srid:
try:
return gdal.OGRGeometry(self.wkb, self.srid)
except SRSException:
pass
return gdal.OGRGeometry(self.wkb)
@property
def srs(self):
"Returns the OSR SpatialReference for SRID of this Geometry."
if not gdal.HAS_GDAL:
raise GEOSException('GDAL required to return a SpatialReference object.')
if self.srid:
try:
return gdal.SpatialReference(self.srid)
except SRSException:
pass
return None
@property
def crs(self):
"Alias for `srs` property."
return self.srs
def transform(self, ct, clone=False):
"""
Requires GDAL. Transforms the geometry according to the given
transformation object, which may be an integer SRID, and WKT or
PROJ.4 string. By default, the geometry is transformed in-place and
nothing is returned. However if the `clone` keyword is set, then this
geometry will not be modified and a transformed clone will be returned
instead.
"""
srid = self.srid
if ct == srid:
# short-circuit where source & dest SRIDs match
if clone:
return self.clone()
else:
return
if (srid is None) or (srid < 0):
raise GEOSException("Calling transform() with no SRID set is not supported")
if not gdal.HAS_GDAL:
raise GEOSException("GDAL library is not available to transform() geometry.")
# Creating an OGR Geometry, which is then transformed.
g = self.ogr
g.transform(ct)
# Getting a new GEOS pointer
ptr = wkb_r().read(g.wkb)
if clone:
# User wants a cloned transformed geometry returned.
return GEOSGeometry(ptr, srid=g.srid)
if ptr:
# Reassigning pointer, and performing post-initialization setup
# again due to the reassignment.
capi.destroy_geom(self.ptr)
self.ptr = ptr
self._post_init(g.srid)
else:
raise GEOSException('Transformed WKB was invalid.')
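# Illustrative transform() usage (SRIDs and coordinates are arbitrary
# examples, not taken from this module):
#
#     g = GEOSGeometry('SRID=4326;POINT(-104.609 38.255)')
#     g.transform(3857)                   # reprojects in place (requires GDAL)
#     g2 = g.transform(4326, clone=True)  # leaves g untouched, returns a copy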
# #### Topology Routines ####
def _topology(self, gptr):
"Helper routine to return Geometry from the given pointer."
return GEOSGeometry(gptr, srid=self.srid)
@property
def boundary(self):
"Returns the boundary as a newly allocated Geometry object."
return self._topology(capi.geos_boundary(self.ptr))
def buffer(self, width, quadsegs=8):
"""
Returns a geometry that represents all points whose distance from this
Geometry is less than or equal to distance. Calculations are in the
Spatial Reference System of this Geometry. The optional third parameter sets
the number of segments used to approximate a quarter circle (defaults to 8).
(Text from PostGIS documentation at ch. 6.1.3)
"""
return self._topology(capi.geos_buffer(self.ptr, width, quadsegs))
@property
def centroid(self):
"""
The centroid is equal to the centroid of the set of component Geometries
of highest dimension (since the lower-dimension geometries contribute zero
"weight" to the centroid).
"""
return self._topology(capi.geos_centroid(self.ptr))
@property
def convex_hull(self):
"""
Returns the smallest convex Polygon that contains all the points
in the Geometry.
"""
return self._topology(capi.geos_convexhull(self.ptr))
def difference(self, other):
"""
Returns a Geometry representing the points making up this Geometry
that do not make up other.
"""
return self._topology(capi.geos_difference(self.ptr, other.ptr))
@property
def envelope(self):
"Return the envelope for this geometry (a polygon)."
return self._topology(capi.geos_envelope(self.ptr))
def interpolate(self, distance):
if not isinstance(self, (LineString, MultiLineString)):
raise TypeError('interpolate only works on LineString and MultiLineString geometries')
return self._topology(capi.geos_interpolate(self.ptr, distance))
def interpolate_normalized(self, distance):
if not isinstance(self, (LineString, MultiLineString)):
raise TypeError('interpolate only works on LineString and MultiLineString geometries')
return self._topology(capi.geos_interpolate_normalized(self.ptr, distance))
def intersection(self, other):
"Returns a Geometry representing the points shared by this Geometry and other."
return self._topology(capi.geos_intersection(self.ptr, other.ptr))
@property
def point_on_surface(self):
"Computes an interior point of this Geometry."
return self._topology(capi.geos_pointonsurface(self.ptr))
def project(self, point):
if not isinstance(point, Point):
raise TypeError('locate_point argument must be a Point')
if not isinstance(self, (LineString, MultiLineString)):
raise TypeError('locate_point only works on LineString and MultiLineString geometries')
return capi.geos_project(self.ptr, point.ptr)
def project_normalized(self, point):
if not isinstance(point, Point):
raise TypeError('locate_point argument must be a Point')
if not isinstance(self, (LineString, MultiLineString)):
raise TypeError('locate_point only works on LineString and MultiLineString geometries')
return capi.geos_project_normalized(self.ptr, point.ptr)
def relate(self, other):
"Returns the DE-9IM intersection matrix for this Geometry and the other."
return capi.geos_relate(self.ptr, other.ptr).decode()
def simplify(self, tolerance=0.0, preserve_topology=False):
"""
Returns the Geometry, simplified using the Douglas-Peucker algorithm
to the specified tolerance (higher tolerance => less points). If no
tolerance provided, defaults to 0.
By default, this function does not preserve topology - e.g. polygons can
be split, collapse to lines or disappear, holes can be created or
disappear, and lines can cross. By specifying preserve_topology=True,
the result will have the same dimension and number of components as the
input. This is significantly slower.
"""
if preserve_topology:
return self._topology(capi.geos_preservesimplify(self.ptr, tolerance))
else:
return self._topology(capi.geos_simplify(self.ptr, tolerance))
def sym_difference(self, other):
"""
Returns a set combining the points in this Geometry not in other,
and the points in other not in this Geometry.
"""
return self._topology(capi.geos_symdifference(self.ptr, other.ptr))
def union(self, other):
"Returns a Geometry representing all the points in this Geometry and other."
return self._topology(capi.geos_union(self.ptr, other.ptr))
# #### Other Routines ####
@property
def area(self):
"Returns the area of the Geometry."
return capi.geos_area(self.ptr, byref(c_double()))
def distance(self, other):
"""
Returns the distance between the closest points on this Geometry
and the other. Units will be in those of the coordinate system of
the Geometry.
"""
if not isinstance(other, GEOSGeometry):
raise TypeError('distance() works only on other GEOS Geometries.')
return capi.geos_distance(self.ptr, other.ptr, byref(c_double()))
@property
def extent(self):
"""
Returns the extent of this geometry as a 4-tuple, consisting of
(xmin, ymin, xmax, ymax).
"""
env = self.envelope
if isinstance(env, Point):
xmin, ymin = env.tuple
xmax, ymax = xmin, ymin
else:
xmin, ymin = env[0][0]
xmax, ymax = env[0][2]
return (xmin, ymin, xmax, ymax)
@property
def length(self):
"""
Returns the length of this Geometry (e.g., 0 for point, or the
circumference of a Polygon).
"""
return capi.geos_length(self.ptr, byref(c_double()))
def clone(self):
"Clones this Geometry."
return GEOSGeometry(capi.geom_clone(self.ptr), srid=self.srid)
# Class mapping dictionary. Has to be at the end to avoid import
# conflicts with GEOSGeometry.
from django.contrib.gis.geos.linestring import LineString, LinearRing # isort:skip
from django.contrib.gis.geos.point import Point # isort:skip
from django.contrib.gis.geos.polygon import Polygon # isort:skip
from django.contrib.gis.geos.collections import ( # isort:skip
GeometryCollection, MultiPoint, MultiLineString, MultiPolygon)
from django.contrib.gis.geos.prepared import PreparedGeometry # isort:skip
GEOS_CLASSES = {
0: Point,
1: LineString,
2: LinearRing,
3: Polygon,
4: MultiPoint,
5: MultiLineString,
6: MultiPolygon,
7: GeometryCollection,
}
|
mit
|
digimarc/django
|
django/contrib/sites/models.py
|
22
|
3243
|
from __future__ import unicode_literals
import string
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
SITE_CACHE = {}
def _simple_domain_name_validator(value):
"""
Validates that the given value contains no whitespace to prevent common
typos.
"""
if not value:
return
checks = ((s in value) for s in string.whitespace)
if any(checks):
raise ValidationError(
_("The domain name cannot contain any spaces or tabs."),
code='invalid',
)
class SiteManager(models.Manager):
use_in_migrations = True
def _get_site_by_id(self, site_id):
if site_id not in SITE_CACHE:
site = self.get(pk=site_id)
SITE_CACHE[site_id] = site
return SITE_CACHE[site_id]
def _get_site_by_request(self, request):
host = request.get_host()
if host not in SITE_CACHE:
site = self.get(domain__iexact=host)
SITE_CACHE[host] = site
return SITE_CACHE[host]
def get_current(self, request=None):
"""
Returns the current Site based on the SITE_ID in the project's settings.
If SITE_ID isn't defined, it returns the site with domain matching
request.get_host(). The ``Site`` object is cached the first time it's
retrieved from the database.
"""
from django.conf import settings
if getattr(settings, 'SITE_ID', ''):
site_id = settings.SITE_ID
return self._get_site_by_id(site_id)
elif request:
return self._get_site_by_request(request)
raise ImproperlyConfigured(
"You're using the Django \"sites framework\" without having "
"set the SITE_ID setting. Create a site in your database and "
"set the SITE_ID setting or pass a request to "
"Site.objects.get_current() to fix this error."
)
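# Illustrative usage (the settings module and request object are assumed to
# exist in the caller's context):
#
#     from django.contrib.sites.models import Site
#     site = Site.objects.get_current()          # uses settings.SITE_ID
#     site = Site.objects.get_current(request)   # falls back to request.get_host()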
def clear_cache(self):
"""Clears the ``Site`` object cache."""
global SITE_CACHE
SITE_CACHE = {}
@python_2_unicode_compatible
class Site(models.Model):
domain = models.CharField(_('domain name'), max_length=100,
validators=[_simple_domain_name_validator])
name = models.CharField(_('display name'), max_length=50)
objects = SiteManager()
class Meta:
db_table = 'django_site'
verbose_name = _('site')
verbose_name_plural = _('sites')
ordering = ('domain',)
def __str__(self):
return self.domain
def clear_site_cache(sender, **kwargs):
"""
Clears the cache (if primed) each time a site is saved or deleted
"""
instance = kwargs['instance']
using = kwargs['using']
try:
del SITE_CACHE[instance.pk]
except KeyError:
pass
try:
del SITE_CACHE[Site.objects.using(using).get(pk=instance.pk).domain]
except (KeyError, Site.DoesNotExist):
pass
pre_save.connect(clear_site_cache, sender=Site)
pre_delete.connect(clear_site_cache, sender=Site)
|
bsd-3-clause
|
LiuVII/Self-driving-RC-car
|
capture.py
|
1
|
2906
|
from __future__ import print_function
import os, sys, re
import time, datetime
import numpy as np
import signal, atexit
from subprocess import Popen, PIPE, STDOUT
import subprocess
def ctrl_c_handler(signum, frame):
print ("\rStopping... Time Elapsed: %s" % time.strftime("%H:%M:%S", time.gmtime(time.time() - start_time)))
sys.exit(1)
def cleanup():
global process_list
try:
for p in process_list:
if not p.poll():
p.terminate()
p.wait()
subprocess.Popen(['reset']).wait()
process_list = []
except:
pass
print("Stopped capturing...")
if __name__ == "__main__":
signal.signal(signal.SIGINT, ctrl_c_handler)
url = ["rtsp://admin:[email protected]/onvif2", \
"rtsp://admin:[email protected]/onvif2"]
time_last = 3600
fps = 20
outdir = "./st_dir"
outdir_sub = ["/left","/right"]
outdir_log = ["/left.log","/right.log"]
## Param for output directory
argc = len(sys.argv)
if argc > 1:
outdir = sys.argv[1]
if not os.path.exists(outdir):
os.mkdir(outdir)
outdir_sub = [outdir+x for x in outdir_sub]
for x in outdir_sub:
if not os.path.exists(x):
os.mkdir(x)
## Param for fps
if argc > 2:
fps = sys.argv[2]
if float(fps) <= 0.0:
print("Warning: FPS should be a rational number more than zero. Value set to 5")
exit(0)
## Param for total training time
if argc > 3:
time_last = sys.argv[3]
if int(time_last) <= 0:
print("Warning: Total time should be an integer more than zero. Value set to 3600")
time_last = 3600
## Calculated number of zeros in filename counter
num = int(np.log10(int(time_last) * float(fps))) + 1
command_list = []
# Save images to the folder
for i in range(len(url)):
command_str = "ffmpeg -i "+url[i]+" -vf fps="+str(fps)+" '"+outdir_sub[i]+"/IMG_%0"+str(num)+"d.bmp' &> " + outdir+outdir_log[i]
command_list.append(command_str)
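# With the defaults above (fps=20, time_last=3600, so num == 5), each entry in
# command_list looks roughly like the following (URL and paths as configured):
#
#     ffmpeg -i rtsp://admin:[email protected]/onvif2 -vf fps=20 \
#         './st_dir/left/IMG_%05d.bmp' &> ./st_dir/left.log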
process_list = []
atexit.register(cleanup)
process_list = [Popen(cmd, shell=True) for cmd in command_list]
print("Initializing...")
start_time = time.time()
time.sleep(5)
# if process_list[1].poll() or process_list[0].poll():
# print("Cameras Down")
print("Subprocesses Created:")
print("Capture Folder: %s" % outdir)
while True:
# if re.search(r".*timed out.*", process_list[1].stdout.readline()) or\
# re.search(r".*timed out.*", process_list[0].stdout.readline()):
# print("Cameras Down")
# exit(0)
for p in process_list:
if p.poll():
exit(1)
print("\rCapturing... Time Elapsed: %s" % time.strftime("%H:%M:%S", time.gmtime(time.time() - start_time)),end='\r')
print("Cameras Down")
exit(0)
|
mit
|
marshall007/rethinkdb
|
external/v8_3.30.33.16/testing/gmock/gtest/test/gtest_output_test.py
|
496
|
12051
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the text output of Google C++ Testing Framework.
SYNOPSIS
gtest_output_test.py --build_dir=BUILD/DIR --gengolden
# where BUILD/DIR contains the built gtest_output_test_ file.
gtest_output_test.py --gengolden
gtest_output_test.py
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
import sys
import gtest_test_utils
# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'
CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
IS_WINDOWS = os.name == 'nt'
# TODO([email protected]): remove the _lin suffix.
GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
# At least one command we exercise must not have the
# --gtest_internal_skip_environment_and_ad_hoc_tests flag.
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
'--gtest_print_time',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
COMMAND_WITH_DISABLED = (
{}, [PROGRAM_PATH,
'--gtest_also_run_disabled_tests',
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=*DISABLED_*'])
COMMAND_WITH_SHARDING = (
{'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
[PROGRAM_PATH,
'--gtest_internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=PassingTest.*'])
GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
def ToUnixLineEnding(s):
"""Changes all Windows/Mac line endings in s to UNIX line endings."""
return s.replace('\r\n', '\n').replace('\r', '\n')
def RemoveLocations(test_output):
"""Removes all file location info from a Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with all file location info (in the form of
'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or
'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
'FILE_NAME:#: '.
"""
return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\: ', r'\1:#: ', test_output)
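# For example (file name and line number below are made up), the substitution
# above rewrites both styles of location prefix:
#
#   '/path/to/foo_test.cc:123: Failure'   ->  'foo_test.cc:#: Failure'
#   'c:\\path\\foo_test.cc(123): Failure' ->  'foo_test.cc:#: Failure'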
def RemoveStackTraceDetails(output):
"""Removes all stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n',
'Stack trace: (omitted)\n\n', output)
def RemoveStackTraces(output):
"""Removes all traces of stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
def RemoveTime(output):
"""Removes all time information from a Google Test program's output."""
return re.sub(r'\(\d+ ms', '(? ms', output)
def RemoveTypeInfoDetails(test_output):
"""Removes compiler-specific type info from Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with type information normalized to canonical form.
"""
# some compilers output the name of type 'unsigned int' as 'unsigned'
return re.sub(r'unsigned int', 'unsigned', test_output)
def NormalizeToCurrentPlatform(test_output):
"""Normalizes platform specific output details for easier comparison."""
if IS_WINDOWS:
# Removes the color information that is not present on Windows.
test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
# Changes failure message headers into the Windows format.
test_output = re.sub(r': Failure\n', r': error: ', test_output)
# Changes file(line_number) to file:line_number.
test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
return test_output
def RemoveTestCounts(output):
"""Removes test counts from a Google Test program's output."""
output = re.sub(r'\d+ tests?, listed below',
'? tests, listed below', output)
output = re.sub(r'\d+ FAILED TESTS',
'? FAILED TESTS', output)
output = re.sub(r'\d+ tests? from \d+ test cases?',
'? tests from ? test cases', output)
output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
r'? tests from \1', output)
return re.sub(r'\d+ tests?\.', '? tests.', output)
def RemoveMatchingTests(test_output, pattern):
"""Removes output of specified tests from a Google Test program's output.
This function strips not only the beginning and the end of a test but also
all output in between.
Args:
test_output: A string containing the test output.
pattern: A regex string that matches names of test cases or
tests to remove.
Returns:
Contents of test_output with tests whose names match pattern removed.
"""
test_output = re.sub(
r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % (
pattern, pattern),
'',
test_output)
return re.sub(r'.*%s.*\n' % pattern, '', test_output)
def NormalizeOutput(output):
"""Normalizes output (the output of gtest_output_test_.exe)."""
output = ToUnixLineEnding(output)
output = RemoveLocations(output)
output = RemoveStackTraceDetails(output)
output = RemoveTime(output)
return output
def GetShellCommandOutput(env_cmd):
"""Runs a command in a sub-process, and returns its output in a string.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
Returns:
A string with the command's combined standard and diagnostic output.
"""
# Spawns cmd in a sub-process, and gets its standard I/O file objects.
# Set and save the environment properly.
environ = os.environ.copy()
environ.update(env_cmd[0])
p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
return p.output
def GetCommandOutput(env_cmd):
"""Runs a command and returns its output with all file location
info stripped off.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
"""
# Disables exception pop-ups on Windows.
environ, cmdline = env_cmd
environ = dict(environ) # Ensures we are modifying a copy.
environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
def GetOutputOfAllCommands():
"""Returns concatenated output from several representative commands."""
return (GetCommandOutput(COMMAND_WITH_COLOR) +
GetCommandOutput(COMMAND_WITH_TIME) +
GetCommandOutput(COMMAND_WITH_DISABLED) +
GetCommandOutput(COMMAND_WITH_SHARDING))
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
SUPPORTS_STACK_TRACES = False
CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
SUPPORTS_TYPED_TESTS and
SUPPORTS_THREADS and
not IS_WINDOWS)
class GTestOutputTest(gtest_test_utils.TestCase):
def RemoveUnsupportedTests(self, test_output):
if not SUPPORTS_DEATH_TESTS:
test_output = RemoveMatchingTests(test_output, 'DeathTest')
if not SUPPORTS_TYPED_TESTS:
test_output = RemoveMatchingTests(test_output, 'TypedTest')
test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
if not SUPPORTS_THREADS:
test_output = RemoveMatchingTests(test_output,
'ExpectFailureWithThreadsTest')
test_output = RemoveMatchingTests(test_output,
'ScopedFakeTestPartResultReporterTest')
test_output = RemoveMatchingTests(test_output,
'WorksConcurrently')
if not SUPPORTS_STACK_TRACES:
test_output = RemoveStackTraces(test_output)
return test_output
def testOutput(self):
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'rb')
# A mis-configured source control system can cause \r appear in EOL
# sequences when we read the golden file irrespective of an operating
# system used. Therefore, we need to strip those \r's from newlines
# unconditionally.
golden = ToUnixLineEnding(golden_file.read())
golden_file.close()
# We want the test to pass regardless of certain features being
# supported or not.
# We still have to remove type name specifics in all cases.
normalized_actual = RemoveTypeInfoDetails(output)
normalized_golden = RemoveTypeInfoDetails(golden)
if CAN_GENERATE_GOLDEN_FILE:
self.assertEqual(normalized_golden, normalized_actual)
else:
normalized_actual = NormalizeToCurrentPlatform(
RemoveTestCounts(normalized_actual))
normalized_golden = NormalizeToCurrentPlatform(
RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
# This code is very handy when debugging golden file differences:
if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_actual.txt'), 'wb').write(
normalized_actual)
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_golden.txt'), 'wb').write(
normalized_golden)
self.assertEqual(normalized_golden, normalized_actual)
if __name__ == '__main__':
if sys.argv[1:] == [GENGOLDEN_FLAG]:
if CAN_GENERATE_GOLDEN_FILE:
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'wb')
golden_file.write(output)
golden_file.close()
else:
message = (
"""Unable to write a golden file when compiled in an environment
that does not support all the required features (death tests, typed tests,
and multiple threads). Please generate the golden file using a binary built
with those features enabled.""")
sys.stderr.write(message)
sys.exit(1)
else:
gtest_test_utils.Main()
|
agpl-3.0
|
dc3-plaso/dfvfs
|
dfvfs/file_io/vhdi_file_io.py
|
1
|
4312
|
# -*- coding: utf-8 -*-
"""The VHD image file-like object."""
import pyvhdi
from dfvfs.file_io import file_object_io
from dfvfs.lib import errors
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver
class VHDIFile(file_object_io.FileObjectIO):
"""Class that implements a file-like object using pyvhdi."""
def __init__(self, resolver_context, file_object=None):
"""Initializes the file-like object.
Args:
resolver_context (Context): resolver context.
file_object (Optional[FileIO]): file-like object.
"""
super(VHDIFile, self).__init__(resolver_context, file_object=file_object)
self._parent_vhdi_files = []
self._sub_file_objects = []
def _Close(self):
"""Closes the file-like object."""
super(VHDIFile, self)._Close()
for vhdi_file in self._parent_vhdi_files:
vhdi_file.close()
for file_object in self._sub_file_objects:
file_object.close()
self._parent_vhdi_files = []
self._sub_file_objects = []
def _OpenFileObject(self, path_spec):
"""Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
A file-like object.
Raises:
PathSpecError: if the path specification is incorrect.
"""
if not path_spec.HasParent():
raise errors.PathSpecError(
u'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
vhdi_file = pyvhdi.file()
vhdi_file.open_file_object(file_object)
if vhdi_file.parent_identifier:
file_system = resolver.Resolver.OpenFileSystem(
path_spec.parent, resolver_context=self._resolver_context)
try:
self._OpenParentFile(file_system, path_spec.parent, vhdi_file)
finally:
file_system.Close()
self._sub_file_objects.append(file_object)
self._parent_vhdi_files.reverse()
self._sub_file_objects.reverse()
return vhdi_file
def _OpenParentFile(self, file_system, path_spec, vhdi_file):
"""Opens the parent file.
Args:
file_system (FileSystem): file system of the VHDI file.
path_spec (PathSpec): path specification of the VHDI file.
vhdi_file (pyvhdi.file): VHDI file.
Raises:
PathSpecError: if the path specification is incorrect.
"""
location = getattr(path_spec, u'location', None)
if not location:
raise errors.PathSpecError(
u'Unsupported path specification without location.')
location_path_segments = file_system.SplitPath(location)
parent_filename = vhdi_file.parent_filename
_, _, parent_filename = parent_filename.rpartition(u'\\')
location_path_segments.pop()
location_path_segments.append(parent_filename)
parent_file_location = file_system.JoinPath(location_path_segments)
# Note that we don't want to set the keyword arguments when not used
# because the path specification base class will check for unused
# keyword arguments and raise.
kwargs = path_spec_factory.Factory.GetProperties(path_spec)
kwargs[u'location'] = parent_file_location
if path_spec.parent is not None:
kwargs[u'parent'] = path_spec.parent
parent_file_path_spec = path_spec_factory.Factory.NewPathSpec(
path_spec.type_indicator, **kwargs)
if not file_system.FileEntryExistsByPathSpec(parent_file_path_spec):
return
file_object = resolver.Resolver.OpenFileObject(
parent_file_path_spec, resolver_context=self._resolver_context)
vhdi_parent_file = pyvhdi.file()
vhdi_parent_file.open_file_object(file_object)
if vhdi_parent_file.parent_identifier:
self._OpenParentFile(
file_system, parent_file_path_spec, vhdi_parent_file)
vhdi_file.set_parent(vhdi_parent_file)
self._parent_vhdi_files.append(vhdi_parent_file)
self._sub_file_objects.append(file_object)
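# Illustrative example (file names are hypothetical): for a differential image
# whose own location is '/images/child.vhd' and whose parent_filename is
# 'C:\\images\\base.vhd', the code above keeps only 'base.vhd' and joins it to
# the child's directory, producing the parent location '/images/base.vhd'.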
def get_size(self):
"""Retrieves the size of the file-like object.
Returns:
int: size of the file-like object data.
Raises:
IOError: if the file-like object has not been opened.
"""
if not self._is_open:
raise IOError(u'Not opened.')
return self._file_object.get_media_size()
|
apache-2.0
|
mszczodrak/openthread
|
tools/harness-automation/cases/router_5_1_4.py
|
16
|
1875
|
#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class Router_5_1_4(HarnessCase):
role = HarnessCase.ROLE_ROUTER
case = '5 1 4'
golden_devices_required = 2
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
houshengbo/nova_vmware_compute_driver
|
nova/tests/fake_crypto.py
|
29
|
5329
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def ensure_ca_filesystem():
pass
def fetch_ca(project_id=None):
rootca = """-----BEGIN CERTIFICATE-----
MIICyzCCAjSgAwIBAgIJAIJ/UoFWKoOUMA0GCSqGSIb3DQEBBAUAME4xEjAQBgNV
BAoTCU5PVkEgUk9PVDEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzETMBEGA1UECBMK
Q2FsaWZvcm5pYTELMAkGA1UEBhMCVVMwHhcNMTIxMDAyMTg1NzQ1WhcNMTMxMDAy
MTg1NzQ1WjBOMRIwEAYDVQQKEwlOT1ZBIFJPT1QxFjAUBgNVBAcTDU1vdW50YWlu
IFZpZXcxEzARBgNVBAgTCkNhbGlmb3JuaWExCzAJBgNVBAYTAlVTMIGfMA0GCSqG
SIb3DQEBAQUAA4GNADCBiQKBgQCg0Bn8WSqbJF3QNTZUxo1TzmFBxuqvhjZLKbnQ
IiShdVIWUK7RC8frq8FJI7dgJNmvkIBn9njABWDoZmurQRCzD65yCSbUc4R2ea5H
IK4wQIui0CJykvMBNjAe3bzztVVs8/ccDTsjtqq3F/KeQkKzQVfSWBrJSmYtG5tO
G+dOSwIDAQABo4GwMIGtMAwGA1UdEwQFMAMBAf8wHQYDVR0OBBYEFCljRfaNOsA/
9mHuq0io7Lt83FtaMH4GA1UdIwR3MHWAFCljRfaNOsA/9mHuq0io7Lt83FtaoVKk
UDBOMRIwEAYDVQQKEwlOT1ZBIFJPT1QxFjAUBgNVBAcTDU1vdW50YWluIFZpZXcx
EzARBgNVBAgTCkNhbGlmb3JuaWExCzAJBgNVBAYTAlVTggkAgn9SgVYqg5QwDQYJ
KoZIhvcNAQEEBQADgYEAEbpJOOlpKCh5omwfAwAfFg1ml4h/FJiCH3PETmOCc+3l
CtWTBd4MG8AoH7A3PU2JKAGVQ5XWo6+ihpW1RgfQpCnloI6vIeGcws+rSLnlzULt
IvfCJpRg7iQdR3jZGt3295behtP1GsCqipJEulOkOaEIs8iLlXgSOG94Mkwlb4Q=
-----END CERTIFICATE-----
"""
return rootca
def generate_x509_cert(user_id, project_id, bits=1024):
pk = """-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQC4h2d63ijt9l0fIBRY37D3Yj2FYajCMUlftSoHNA4lEw0uTXnH
Jjbd0j7HNlSADWeAMuaoSDNp7CIsXMt6iA/ASN5nFFTZlLRqIzYoI0RHiiSJjvSG
d1n4Yrar1eC8tK3Rld1Zo6rj6tOuIxfFVJajJVZykCAHjGNNvulgfhBXFwIDAQAB
AoGBAIjfxx4YU/vO1lwUC4OwyS92q3OYcPk6XdakJryZHDTb4NcLmNzjt6bqIK7b
2enyB2fMWdNRWvGiueZ2HmiRLDyOGsAVdEsHvL4qbr9EZGTqC8Qxx+zTevWWf6pB
F1zxzbXNQDFZDf9kVsSLCkbMHITnW1k4MrM++9gfCO3WrfehAkEA4nd8TyCCZazq
KMOQwFLTNaiVLeTXCtvGopl4ZNiKYZ1qI3KDXb2wbAyArFuERlotxFlylXpwtlMo
SlI/C/sYqwJBANCX1sdfRJq8DpdP44ThWqOkWFLB9rBiwyyBt8746fX8amwr8eyz
H44/z5GT/Vyp8qFsjkuDzeP93eeDnr2qE0UCP1zipRnPO6x4P5J4o+Y+EmLvwkAQ
nCLYAaCvUbILHrbq2Z2wWjEYnEO03RHUd2xjkGH4TgcBMTmW4e+ZzEIduwJACnIw
LVfWBbG5QVac3EC021EVoz9XbUnk4Eu2usS4Yrs7USN6QBJQWD1V1cKFg6h3ICJh
leKJ4wsJm9h5kKH9yQJBAN8CaX223MlTSuBOVuIOwNA+09iLfx4UCLiH1fGMKDpe
xVcmkM3qCnTqNxrAPSFdT9IyB3IXiaLWbvzl7MfiOwQ=
-----END RSA PRIVATE KEY-----
"""
csr = """Certificate:
Data:
Version: 1 (0x0)
Serial Number: 23 (0x17)
Signature Algorithm: md5WithRSAEncryption
Issuer: O=NOVA ROOT, L=Mountain View, ST=California, C=US
Validity
Not Before: Oct 2 19:31:45 2012 GMT
Not After : Oct 2 19:31:45 2013 GMT
Subject: C=US, ST=California, O=OpenStack, OU=NovaDev, """
"""CN=openstack-fake-2012-10-02T19:31:45Z
Subject Public Key Info:
Public Key Algorithm: rsaEncryption
RSA Public Key: (1024 bit)
Modulus (1024 bit):
00:b8:87:67:7a:de:28:ed:f6:5d:1f:20:14:58:df:
b0:f7:62:3d:85:61:a8:c2:31:49:5f:b5:2a:07:34:
0e:25:13:0d:2e:4d:79:c7:26:36:dd:d2:3e:c7:36:
54:80:0d:67:80:32:e6:a8:48:33:69:ec:22:2c:5c:
cb:7a:88:0f:c0:48:de:67:14:54:d9:94:b4:6a:23:
36:28:23:44:47:8a:24:89:8e:f4:86:77:59:f8:62:
b6:ab:d5:e0:bc:b4:ad:d1:95:dd:59:a3:aa:e3:ea:
d3:ae:23:17:c5:54:96:a3:25:56:72:90:20:07:8c:
63:4d:be:e9:60:7e:10:57:17
Exponent: 65537 (0x10001)
Signature Algorithm: md5WithRSAEncryption
32:82:ff:8b:92:0e:8d:9c:6b:ce:7e:fe:34:16:2a:4c:47:4f:
c7:28:a2:33:1e:48:56:2e:4b:e8:e8:e3:48:b1:3d:a3:43:21:
ef:83:e7:df:e2:10:91:7e:9a:c0:4d:1e:96:68:2b:b9:f7:84:
7f:ec:84:8a:bf:bc:5e:50:05:d9:ce:4a:1a:bf:d2:bf:0c:d1:
7e:ec:64:c3:a5:37:78:a3:a6:2b:a1:b7:1c:cc:c8:b9:78:61:
98:50:3c:e6:28:34:f1:0e:62:bb:b5:d7:a1:dd:1f:38:c6:0d:
58:9f:81:67:ff:9c:32:fc:52:7e:6d:8c:91:43:49:fe:e3:48:
bb:40
-----BEGIN CERTIFICATE-----
MIICMzCCAZwCARcwDQYJKoZIhvcNAQEEBQAwTjESMBAGA1UEChMJTk9WQSBST09U
MRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MRMwEQYDVQQIEwpDYWxpZm9ybmlhMQsw
CQYDVQQGEwJVUzAeFw0xMjEwMDIxOTMxNDVaFw0xMzEwMDIxOTMxNDVaMHYxCzAJ
BgNVBAYTAlVTMRMwEQYDVQQIEwpDYWxpZm9ybmlhMRIwEAYDVQQKEwlPcGVuU3Rh
Y2sxEDAOBgNVBAsTB05vdmFEZXYxLDAqBgNVBAMTI29wZW5zdGFjay1mYWtlLTIw
MTItMTAtMDJUMTk6MzE6NDVaMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC4
h2d63ijt9l0fIBRY37D3Yj2FYajCMUlftSoHNA4lEw0uTXnHJjbd0j7HNlSADWeA
MuaoSDNp7CIsXMt6iA/ASN5nFFTZlLRqIzYoI0RHiiSJjvSGd1n4Yrar1eC8tK3R
ld1Zo6rj6tOuIxfFVJajJVZykCAHjGNNvulgfhBXFwIDAQABMA0GCSqGSIb3DQEB
BAUAA4GBADKC/4uSDo2ca85+/jQWKkxHT8coojMeSFYuS+jo40ixPaNDIe+D59/i
EJF+msBNHpZoK7n3hH/shIq/vF5QBdnOShq/0r8M0X7sZMOlN3ijpiuhtxzMyLl4
YZhQPOYoNPEOYru116HdHzjGDVifgWf/nDL8Un5tjJFDSf7jSLtA
-----END CERTIFICATE-----
"""
return pk, csr
|
apache-2.0
|
elifesciences/elife-ga
|
setup.py
|
2
|
1126
|
from setuptools import setup
MODULE = 'elife_ga_metrics' # name of the subdirectory your code resides in
NAME = 'elife-ga-metrics' # project name
AUTHORS = ["Luke Skibinski <[email protected]>"] # list of all contributing authors
LICENCE = 'GPLv3' # licence short name
COPYRIGHT = 'eLife Sciences' # copyright owner
VERSION = '2016.02.29' # some sort of natural ordering key
DESCRIPTION = 'google analytics wrangling for elifesciences.org' # long description
def groupby(f, l):
x, y = [], []
for v in l:
(x if f(v) else y).append(v)
return x, y
def requirements():
requisites = open('requirements.txt', 'r').read().splitlines()
pypi, non_pypi = groupby(lambda r: not r.startswith('-e '), requisites)
non_pypi = map(lambda v: v[len('-e '):], non_pypi)
return {
'install_requires': pypi,
'dependency_links': non_pypi,
}
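# Illustrative example (not executed): how the helpers above behave. The two
# requirement lines are placeholders, not real dependencies of this project.
#
# >>> pypi, non_pypi = groupby(lambda r: not r.startswith('-e '),
# ...     ['requests==2.7.0', '-e git+https://example.org/pkg.git#egg=pkg'])
# >>> pypi
# ['requests==2.7.0']
# >>> non_pypi
# ['-e git+https://example.org/pkg.git#egg=pkg']
#
# requirements() then strips the '-e ' prefix from the second group and
# returns the pair as install_requires / dependency_links for setup().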
setup(
name = NAME,
version = VERSION,
description = DESCRIPTION,
long_description = open('README.md', 'r').read(),
packages = [MODULE],
license = open('LICENCE.txt', 'r').read(),
**requirements()
)
|
gpl-3.0
|
thumbimigwe/echorizr
|
lib/python2.7/site-packages/django/core/cache/__init__.py
|
230
|
3806
|
"""
Caching framework.
This package defines a set of cache backends that all conform to a simple API.
In a nutshell, a cache is a set of values -- which can be any object that
may be pickled -- identified by string keys. For the complete API, see
the abstract BaseCache class in django.core.cache.backends.base.
Client code should use the `cache` variable defined here to access the default
cache backend and look up non-default cache backends in the `caches` dict-like
object.
See docs/topics/cache.txt for information on the public API.
"""
from threading import local
from django.conf import settings
from django.core import signals
from django.core.cache.backends.base import (
BaseCache, CacheKeyWarning, InvalidCacheBackendError,
)
from django.utils.module_loading import import_string
__all__ = [
'cache', 'DEFAULT_CACHE_ALIAS', 'InvalidCacheBackendError',
'CacheKeyWarning', 'BaseCache',
]
DEFAULT_CACHE_ALIAS = 'default'
def _create_cache(backend, **kwargs):
try:
# Try to get the CACHES entry for the given backend name first
try:
conf = settings.CACHES[backend]
except KeyError:
try:
# Trying to import the given backend, in case it's a dotted path
import_string(backend)
except ImportError as e:
raise InvalidCacheBackendError("Could not find backend '%s': %s" % (
backend, e))
location = kwargs.pop('LOCATION', '')
params = kwargs
else:
params = conf.copy()
params.update(kwargs)
backend = params.pop('BACKEND')
location = params.pop('LOCATION', '')
backend_cls = import_string(backend)
except ImportError as e:
raise InvalidCacheBackendError(
"Could not find backend '%s': %s" % (backend, e))
return backend_cls(location, params)
class CacheHandler(object):
"""
A Cache Handler to manage access to Cache instances.
Ensures only one instance of each alias exists per thread.
"""
def __init__(self):
self._caches = local()
def __getitem__(self, alias):
try:
return self._caches.caches[alias]
except AttributeError:
self._caches.caches = {}
except KeyError:
pass
if alias not in settings.CACHES:
raise InvalidCacheBackendError(
"Could not find config for '%s' in settings.CACHES" % alias
)
cache = _create_cache(alias)
self._caches.caches[alias] = cache
return cache
def all(self):
return getattr(self._caches, 'caches', {}).values()
caches = CacheHandler()
class DefaultCacheProxy(object):
"""
Proxy access to the default Cache object's attributes.
This allows the legacy `cache` object to be thread-safe using the new
``caches`` API.
"""
def __getattr__(self, name):
return getattr(caches[DEFAULT_CACHE_ALIAS], name)
def __setattr__(self, name, value):
return setattr(caches[DEFAULT_CACHE_ALIAS], name, value)
def __delattr__(self, name):
return delattr(caches[DEFAULT_CACHE_ALIAS], name)
def __contains__(self, key):
return key in caches[DEFAULT_CACHE_ALIAS]
def __eq__(self, other):
return caches[DEFAULT_CACHE_ALIAS] == other
def __ne__(self, other):
return caches[DEFAULT_CACHE_ALIAS] != other
cache = DefaultCacheProxy()
def close_caches(**kwargs):
# Some caches -- python-memcached in particular -- need to do a cleanup at the
# end of a request cycle. If a particular backend does not implement it,
# cache.close is a no-op.
for cache in caches.all():
cache.close()
signals.request_finished.connect(close_caches)
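# Illustrative usage sketch (not part of this module): how client code is
# expected to use the objects defined above. The 'redis_cache' alias is a
# made-up example; any alias used must exist in settings.CACHES.
#
# from django.core.cache import cache, caches
#
# cache.set('greeting', 'hello', 30)   # default alias via the proxy
# other = caches['redis_cache']        # non-default alias, one per thread
# other.get('greeting', 'fallback')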
|
mit
|
foobnix/foobnix
|
foobnix/util/bean_utils.py
|
3
|
2333
|
#-*- coding: utf-8 -*-
'''
Created on 20 Oct 2010
@author: ivan
'''
import os
import logging
from foobnix.gui.model import FDModel, FModel
from foobnix.util.text_utils import normalize_text
from foobnix.fc.fc import FC
from foobnix.fc.fc_cache import FCache
def update_parent_for_beans(beans, parent):
for bean in beans:
if not bean.get_is_file():
bean.parent(parent)
"""update bean info form text if possible"""
def update_bean_from_normalized_text(bean):
if not bean.artist or not bean.title:
bean.text = normalize_text(bean.text)
text_artist = bean.get_artist_from_text()
text_title = bean.get_title_from_text()
if text_artist and text_title:
bean.artist, bean.title = text_artist, text_title
return bean
def get_bean_posible_paths(bean):
logging.debug("get bean path: %s" % bean)
path = get_bean_download_path(bean, path=FC().online_save_to_folder)
if path and os.path.exists(path):
return path
for paths in FCache().music_paths:
for path in paths:
path = get_bean_download_path(bean, path)
if path and os.path.exists(path):
return path
return None
def get_bean_download_path(bean, path=FC().online_save_to_folder, nosubfolder = FC().nosubfolder):
ext = ".mp3"
if nosubfolder:
name = bean.get_display_name()
name = name.replace("/", "-")
name = name.replace("\\", "-")
path = os.path.join(path, name + ext)
return path
elif bean.artist:
bean.artist = bean.artist.replace("/", "-")
bean.artist = bean.artist.replace("\\", "-")
path = os.path.join(path, bean.artist, bean.get_display_name() + ext)
logging.debug("bean path %s" % path)
return path
else:
logging.debug("get bean path: %s" % bean)
path = os.path.join(path, bean.get_display_name() + ext)
logging.debug("bean path %s" % path)
return path
def get_bean_from_file(f):
if not os.path.exists(f):
logging.debug("not exists" + str(f))
return None
bean = FDModel(text=os.path.basename(f), path=f)
is_file = True if os.path.isfile(f) else False
bean = bean.add_is_file(is_file)
if not is_file:
bean.add_font("bold")
return bean
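# Illustrative sketch (not used by foobnix): how get_bean_download_path()
# sanitizes a display name before joining it with the target folder when
# nosubfolder is enabled. The example name and folder are made up.
#
# name = "AC/DC - Back In Black".replace("/", "-").replace("\\", "-")
# os.path.join("/home/user/music", name + ".mp3")
# # -> "/home/user/music/AC-DC - Back In Black.mp3"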
|
gpl-3.0
|
andykimpe/chromium-test-npapi
|
tools/cygprofile/PRESUBMIT.py
|
52
|
1141
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for cygprofile.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
def CommonChecks(input_api, output_api):
output = []
blacklist = []
output.extend(input_api.canned_checks.RunPylint(
input_api, output_api, black_list=blacklist))
output.extend(input_api.canned_checks.RunUnitTests(
input_api,
output_api,
[input_api.os_path.join(input_api.PresubmitLocalPath(), 'run_tests')]))
if input_api.is_committing:
output.extend(input_api.canned_checks.PanProjectChecks(input_api,
output_api,
owners_check=False))
return output
def CheckChangeOnUpload(input_api, output_api):
return CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return CommonChecks(input_api, output_api)
|
bsd-3-clause
|
0xPoly/ooni-probe
|
ooni/resources/update.py
|
5
|
1416
|
import os
from twisted.internet import defer
from twisted.web.client import downloadPage
from ooni.resources import inputs, geoip, resources_directory
from ooni.utils import unzip, gunzip
@defer.inlineCallbacks
def download_resource(resources):
for filename, resource in resources.items():
print "Downloading %s" % filename
if resource["action"] in [unzip, gunzip] and resource["action_args"]:
dirname = resource["action_args"][0]
filename = os.path.join(dirname, filename)
else:
filename = os.path.join(resources_directory, filename)
if not os.path.exists(filename):
directory = os.path.dirname(filename)
if not os.path.isdir(directory):
os.makedirs(directory)
f = open(filename, 'w')
f.close()
elif not os.path.isfile(filename):
print "[!] %s must be a file." % filename
defer.returnValue(False)
yield downloadPage(resource['url'], filename)
if resource['action'] is not None:
yield defer.maybeDeferred(resource['action'],
filename,
*resource['action_args'])
print "%s written." % filename
def download_inputs():
return download_resource(inputs)
def download_geoip():
return download_resource(geoip)
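# Illustrative sketch (not part of ooni): the shape of the dictionaries that
# download_resource() expects. The filename, URL and values below are made
# up; the real entries live in ooni.resources.inputs and ooni.resources.geoip.
#
# example_resources = {
#     "example-list.txt": {
#         "url": "https://example.org/example-list.txt",
#         "action": None,       # or unzip / gunzip
#         "action_args": [],    # e.g. [target_directory] when unpacking
#     },
# }
# d = download_resource(example_resources)  # returns a Deferred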
|
bsd-2-clause
|
foufou55/Sick-Beard
|
lib/requests/packages/chardet/charsetprober.py
|
216
|
1914
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import constants, re
class CharSetProber:
def __init__(self):
pass
def reset(self):
self._mState = constants.eDetecting
def get_charset_name(self):
return None
def feed(self, aBuf):
pass
def get_state(self):
return self._mState
def get_confidence(self):
return 0.0
def filter_high_bit_only(self, aBuf):
aBuf = re.sub(r'([\x00-\x7F])+', ' ', aBuf)
return aBuf
def filter_without_english_letters(self, aBuf):
aBuf = re.sub(r'([A-Za-z])+', ' ', aBuf)
return aBuf
def filter_with_english_letters(self, aBuf):
# TODO
return aBuf
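# Illustrative example (not part of the prober API): what the filters above do
# to a byte string. filter_high_bit_only() collapses every run of ASCII bytes
# into a single space, so only high-bit bytes remain for frequency analysis.
#
# >>> CharSetProber().filter_high_bit_only('abc\xe4\xb8\xadxyz')
# ' \xe4\xb8\xad '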
|
gpl-3.0
|
JianyuWang/neutron
|
neutron/tests/unit/objects/test_base.py
|
8
|
13607
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import random
import string
import mock
from oslo_db import exception as obj_exc
from oslo_versionedobjects import base as obj_base
from oslo_versionedobjects import fields as obj_fields
from neutron.common import exceptions as n_exc
from neutron.common import utils as common_utils
from neutron import context
from neutron.db import api as db_api
from neutron.objects import base
from neutron.tests import base as test_base
SQLALCHEMY_COMMIT = 'sqlalchemy.engine.Connection._commit_impl'
OBJECTS_BASE_OBJ_FROM_PRIMITIVE = ('oslo_versionedobjects.base.'
'VersionedObject.obj_from_primitive')
class FakeModel(object):
def __init__(self, *args, **kwargs):
pass
@obj_base.VersionedObjectRegistry.register
class FakeNeutronObject(base.NeutronDbObject):
db_model = FakeModel
fields = {
'id': obj_fields.UUIDField(),
'field1': obj_fields.StringField(),
'field2': obj_fields.StringField()
}
fields_no_update = ['id']
synthetic_fields = ['field2']
def _random_string(n=10):
return ''.join(random.choice(string.ascii_lowercase) for _ in range(n))
def _random_boolean():
return bool(random.getrandbits(1))
def _random_integer():
return random.randint(0, 1000)
FIELD_TYPE_VALUE_GENERATOR_MAP = {
obj_fields.BooleanField: _random_boolean,
obj_fields.IntegerField: _random_integer,
obj_fields.StringField: _random_string,
obj_fields.UUIDField: _random_string,
obj_fields.ListOfObjectsField: lambda: []
}
def get_obj_db_fields(obj):
return {field: getattr(obj, field) for field in obj.fields
if field not in obj.synthetic_fields}
class _BaseObjectTestCase(object):
_test_class = FakeNeutronObject
def setUp(self):
super(_BaseObjectTestCase, self).setUp()
self.context = context.get_admin_context()
self.db_objs = list(self.get_random_fields() for _ in range(3))
self.db_obj = self.db_objs[0]
valid_field = [f for f in self._test_class.fields
if f not in self._test_class.synthetic_fields][0]
self.valid_field_filter = {valid_field: self.db_obj[valid_field]}
@classmethod
def get_random_fields(cls, obj_cls=None):
obj_cls = obj_cls or cls._test_class
fields = {}
for field, field_obj in obj_cls.fields.items():
if field not in obj_cls.synthetic_fields:
generator = FIELD_TYPE_VALUE_GENERATOR_MAP[type(field_obj)]
fields[field] = generator()
return fields
def get_updatable_fields(self, fields):
return base.get_updatable_fields(self._test_class, fields)
@classmethod
def _is_test_class(cls, obj):
return isinstance(obj, cls._test_class)
class BaseObjectIfaceTestCase(_BaseObjectTestCase, test_base.BaseTestCase):
def test_get_by_id(self):
with mock.patch.object(db_api, 'get_object',
return_value=self.db_obj) as get_object_mock:
obj = self._test_class.get_by_id(self.context, id='fake_id')
self.assertTrue(self._is_test_class(obj))
self.assertEqual(self.db_obj, get_obj_db_fields(obj))
get_object_mock.assert_called_once_with(
self.context, self._test_class.db_model, id='fake_id')
def test_get_by_id_missing_object(self):
with mock.patch.object(db_api, 'get_object', return_value=None):
obj = self._test_class.get_by_id(self.context, id='fake_id')
self.assertIsNone(obj)
def test_get_objects(self):
with mock.patch.object(db_api, 'get_objects',
return_value=self.db_objs) as get_objects_mock:
objs = self._test_class.get_objects(self.context)
self._validate_objects(self.db_objs, objs)
get_objects_mock.assert_called_once_with(
self.context, self._test_class.db_model)
def test_get_objects_valid_fields(self):
with mock.patch.object(
db_api, 'get_objects',
return_value=[self.db_obj]) as get_objects_mock:
objs = self._test_class.get_objects(self.context,
**self.valid_field_filter)
self._validate_objects([self.db_obj], objs)
get_objects_mock.assert_called_with(
self.context, self._test_class.db_model,
**self.valid_field_filter)
def test_get_objects_mixed_fields(self):
synthetic_fields = self._test_class.synthetic_fields
if not synthetic_fields:
self.skipTest('No synthetic fields found in test class %r' %
self._test_class)
filters = copy.copy(self.valid_field_filter)
filters[synthetic_fields[0]] = 'xxx'
with mock.patch.object(db_api, 'get_objects',
return_value=self.db_objs):
self.assertRaises(base.exceptions.InvalidInput,
self._test_class.get_objects, self.context,
**filters)
def test_get_objects_synthetic_fields(self):
synthetic_fields = self._test_class.synthetic_fields
if not synthetic_fields:
self.skipTest('No synthetic fields found in test class %r' %
self._test_class)
with mock.patch.object(db_api, 'get_objects',
return_value=self.db_objs):
self.assertRaises(base.exceptions.InvalidInput,
self._test_class.get_objects, self.context,
**{synthetic_fields[0]: 'xxx'})
def test_get_objects_invalid_fields(self):
with mock.patch.object(db_api, 'get_objects',
return_value=self.db_objs):
self.assertRaises(base.exceptions.InvalidInput,
self._test_class.get_objects, self.context,
fake_field='xxx')
def _validate_objects(self, expected, observed):
self.assertTrue(all(self._is_test_class(obj) for obj in observed))
self.assertEqual(
sorted(expected,
key=common_utils.safe_sort_key),
sorted([get_obj_db_fields(obj) for obj in observed],
key=common_utils.safe_sort_key))
def _check_equal(self, obj, db_obj):
self.assertEqual(
sorted(db_obj),
sorted(get_obj_db_fields(obj)))
def test_create(self):
with mock.patch.object(db_api, 'create_object',
return_value=self.db_obj) as create_mock:
obj = self._test_class(self.context, **self.db_obj)
self._check_equal(obj, self.db_obj)
obj.create()
self._check_equal(obj, self.db_obj)
create_mock.assert_called_once_with(
self.context, self._test_class.db_model, self.db_obj)
def test_create_updates_from_db_object(self):
with mock.patch.object(db_api, 'create_object',
return_value=self.db_obj):
obj = self._test_class(self.context, **self.db_objs[1])
self._check_equal(obj, self.db_objs[1])
obj.create()
self._check_equal(obj, self.db_obj)
def test_create_duplicates(self):
with mock.patch.object(db_api, 'create_object',
side_effect=obj_exc.DBDuplicateEntry):
obj = self._test_class(self.context, **self.db_obj)
self.assertRaises(base.NeutronDbObjectDuplicateEntry, obj.create)
@mock.patch.object(db_api, 'update_object')
def test_update_no_changes(self, update_mock):
with mock.patch.object(base.NeutronDbObject,
'_get_changed_persistent_fields',
return_value={}):
obj = self._test_class(self.context)
obj.update()
self.assertFalse(update_mock.called)
@mock.patch.object(db_api, 'update_object')
def test_update_changes(self, update_mock):
fields_to_update = self.get_updatable_fields(self.db_obj)
with mock.patch.object(base.NeutronDbObject,
'_get_changed_persistent_fields',
return_value=fields_to_update):
obj = self._test_class(self.context, **self.db_obj)
obj.update()
update_mock.assert_called_once_with(
self.context, self._test_class.db_model,
self.db_obj['id'], fields_to_update)
@mock.patch.object(base.NeutronDbObject,
'_get_changed_persistent_fields',
return_value={'a': 'a', 'b': 'b', 'c': 'c'})
def test_update_changes_forbidden(self, *mocks):
with mock.patch.object(
self._test_class,
'fields_no_update',
new_callable=mock.PropertyMock(return_value=['a', 'c']),
create=True):
obj = self._test_class(self.context, **self.db_obj)
self.assertRaises(base.NeutronObjectUpdateForbidden, obj.update)
def test_update_updates_from_db_object(self):
with mock.patch.object(db_api, 'update_object',
return_value=self.db_obj):
obj = self._test_class(self.context, **self.db_objs[1])
fields_to_update = self.get_updatable_fields(self.db_objs[1])
with mock.patch.object(base.NeutronDbObject,
'_get_changed_persistent_fields',
return_value=fields_to_update):
obj.update()
self._check_equal(obj, self.db_obj)
@mock.patch.object(db_api, 'delete_object')
def test_delete(self, delete_mock):
obj = self._test_class(self.context, **self.db_obj)
self._check_equal(obj, self.db_obj)
obj.delete()
self._check_equal(obj, self.db_obj)
delete_mock.assert_called_once_with(
self.context, self._test_class.db_model, self.db_obj['id'])
@mock.patch(OBJECTS_BASE_OBJ_FROM_PRIMITIVE)
def test_clean_obj_from_primitive(self, get_prim_m):
expected_obj = get_prim_m.return_value
observed_obj = self._test_class.clean_obj_from_primitive('foo', 'bar')
self.assertIs(expected_obj, observed_obj)
self.assertTrue(observed_obj.obj_reset_changes.called)
class BaseDbObjectTestCase(_BaseObjectTestCase):
def test_get_by_id_create_update_delete(self):
obj = self._test_class(self.context, **self.db_obj)
obj.create()
new = self._test_class.get_by_id(self.context, id=obj.id)
self.assertEqual(obj, new)
obj = new
for key, val in self.get_updatable_fields(self.db_objs[1]).items():
setattr(obj, key, val)
obj.update()
new = self._test_class.get_by_id(self.context, id=obj.id)
self.assertEqual(obj, new)
obj = new
new.delete()
new = self._test_class.get_by_id(self.context, id=obj.id)
self.assertIsNone(new)
def test_update_non_existent_object_raises_not_found(self):
obj = self._test_class(self.context, **self.db_obj)
obj.obj_reset_changes()
for key, val in self.get_updatable_fields(self.db_obj).items():
setattr(obj, key, val)
self.assertRaises(n_exc.ObjectNotFound, obj.update)
def test_delete_non_existent_object_raises_not_found(self):
obj = self._test_class(self.context, **self.db_obj)
self.assertRaises(n_exc.ObjectNotFound, obj.delete)
@mock.patch(SQLALCHEMY_COMMIT)
def test_create_single_transaction(self, mock_commit):
obj = self._test_class(self.context, **self.db_obj)
obj.create()
self.assertEqual(1, mock_commit.call_count)
def test_update_single_transaction(self):
obj = self._test_class(self.context, **self.db_obj)
obj.create()
for key, val in self.get_updatable_fields(self.db_obj).items():
setattr(obj, key, val)
with mock.patch(SQLALCHEMY_COMMIT) as mock_commit:
obj.update()
self.assertEqual(1, mock_commit.call_count)
def test_delete_single_transaction(self):
obj = self._test_class(self.context, **self.db_obj)
obj.create()
with mock.patch(SQLALCHEMY_COMMIT) as mock_commit:
obj.delete()
self.assertEqual(1, mock_commit.call_count)
@mock.patch(SQLALCHEMY_COMMIT)
def test_get_objects_single_transaction(self, mock_commit):
self._test_class.get_objects(self.context)
self.assertEqual(1, mock_commit.call_count)
@mock.patch(SQLALCHEMY_COMMIT)
def test_get_by_id_single_transaction(self, mock_commit):
obj = self._test_class(self.context, **self.db_obj)
obj.create()
obj = self._test_class.get_by_id(self.context, obj.id)
self.assertEqual(2, mock_commit.call_count)
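# Illustrative sketch (not an actual test in this module): how a concrete
# object test is expected to reuse the base classes above. The class and
# field names below are placeholders.
#
# @obj_base.VersionedObjectRegistry.register
# class AnotherFakeObject(base.NeutronDbObject):
#     db_model = FakeModel
#     fields = {'id': obj_fields.UUIDField(),
#               'name': obj_fields.StringField()}
#
# class AnotherFakeObjectIfaceTestCase(BaseObjectIfaceTestCase):
#     _test_class = AnotherFakeObject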
|
apache-2.0
|
alazaro/tennis_tournament
|
django/contrib/sessions/backends/cache.py
|
55
|
1979
|
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.cache import cache
KEY_PREFIX = "django.contrib.sessions.cache"
class SessionStore(SessionBase):
"""
A cache-based session store.
"""
def __init__(self, session_key=None):
self._cache = cache
super(SessionStore, self).__init__(session_key)
def load(self):
session_data = self._cache.get(KEY_PREFIX + self.session_key)
if session_data is not None:
return session_data
self.create()
return {}
def create(self):
# Because a cache can fail silently (e.g. memcache), we don't know if
# we are failing to create a new session because of a key collision or
# because the cache is missing. So we try for a (large) number of times
# and then raise an exception. That's the risk you shoulder if using
# cache backing.
for i in xrange(10000):
self.session_key = self._get_new_session_key()
try:
self.save(must_create=True)
except CreateError:
continue
self.modified = True
return
raise RuntimeError("Unable to create a new session key.")
def save(self, must_create=False):
if must_create:
func = self._cache.add
else:
func = self._cache.set
result = func(KEY_PREFIX + self.session_key, self._get_session(no_load=must_create),
self.get_expiry_age())
if must_create and not result:
raise CreateError
def exists(self, session_key):
if self._cache.has_key(KEY_PREFIX + session_key):
return True
return False
def delete(self, session_key=None):
if session_key is None:
if self._session_key is None:
return
session_key = self._session_key
self._cache.delete(KEY_PREFIX + session_key)
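# Illustrative usage sketch (not part of this module): the store behaves like
# a dict persisted to the configured cache backend. The key name is made up.
#
# store = SessionStore()
# store['visits'] = 1            # marks the session as modified
# store.create()                 # picks an unused key, save(must_create=True)
# same = SessionStore(store.session_key)
# same.load()                    # -> the saved session data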
|
gpl-3.0
|
cloudera/hue
|
desktop/core/ext-py/Django-1.11.29/tests/contenttypes_tests/test_models.py
|
5
|
10780
|
from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType, ContentTypeManager
from django.contrib.contenttypes.views import shortcut
from django.contrib.sites.shortcuts import get_current_site
from django.http import Http404, HttpRequest
from django.test import TestCase, override_settings
from django.utils import six
from .models import (
ConcreteModel, FooWithBrokenAbsoluteUrl, FooWithoutUrl, FooWithUrl,
ProxyModel,
)
class ContentTypesTests(TestCase):
def setUp(self):
ContentType.objects.clear_cache()
def tearDown(self):
ContentType.objects.clear_cache()
def test_lookup_cache(self):
"""
Make sure that the content type cache (see ContentTypeManager)
works correctly. Lookups for a particular content type -- by model, ID
or natural key -- should hit the database only on the first lookup.
"""
# At this point, a lookup for a ContentType should hit the DB
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# A second hit, though, won't hit the DB, nor will a lookup by ID
# or natural key
with self.assertNumQueries(0):
ct = ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(0):
ContentType.objects.get_for_id(ct.id)
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key('contenttypes', 'contenttype')
# Once we clear the cache, another lookup will again hit the DB
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# The same should happen with a lookup by natural key
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_by_natural_key('contenttypes', 'contenttype')
# And a second hit shouldn't hit the DB
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key('contenttypes', 'contenttype')
def test_get_for_models_creation(self):
ContentType.objects.all().delete()
with self.assertNumQueries(4):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl, ProxyModel, ConcreteModel)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
ProxyModel: ContentType.objects.get_for_model(ProxyModel),
ConcreteModel: ContentType.objects.get_for_model(ConcreteModel),
})
def test_get_for_models_empty_cache(self):
# Empty cache.
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl, ProxyModel, ConcreteModel)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
ProxyModel: ContentType.objects.get_for_model(ProxyModel),
ConcreteModel: ContentType.objects.get_for_model(ConcreteModel),
})
def test_get_for_models_partial_cache(self):
# Partial cache
ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
})
def test_get_for_models_full_cache(self):
# Full cache
ContentType.objects.get_for_model(ContentType)
ContentType.objects.get_for_model(FooWithUrl)
with self.assertNumQueries(0):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
})
def test_get_for_concrete_model(self):
"""
Make sure the `for_concrete_model` kwarg correctly works
with concrete, proxy and deferred models
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(ProxyModel))
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(ConcreteModel, for_concrete_model=False))
proxy_model_ct = ContentType.objects.get_for_model(ProxyModel, for_concrete_model=False)
self.assertNotEqual(concrete_model_ct, proxy_model_ct)
# Make sure deferred model are correctly handled
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(DeferredConcreteModel))
self.assertEqual(
concrete_model_ct,
ContentType.objects.get_for_model(DeferredConcreteModel, for_concrete_model=False)
)
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(DeferredProxyModel))
self.assertEqual(
proxy_model_ct,
ContentType.objects.get_for_model(DeferredProxyModel, for_concrete_model=False)
)
def test_get_for_concrete_models(self):
"""
Make sure the `for_concrete_models` kwarg correctly works
with concrete, proxy and deferred models.
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel)
self.assertEqual(cts, {
ConcreteModel: concrete_model_ct,
ProxyModel: concrete_model_ct,
})
proxy_model_ct = ContentType.objects.get_for_model(ProxyModel, for_concrete_model=False)
cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel, for_concrete_models=False)
self.assertEqual(cts, {
ConcreteModel: concrete_model_ct,
ProxyModel: proxy_model_ct,
})
# Make sure deferred model are correctly handled
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
cts = ContentType.objects.get_for_models(DeferredConcreteModel, DeferredProxyModel)
self.assertEqual(cts, {
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: concrete_model_ct,
})
cts = ContentType.objects.get_for_models(
DeferredConcreteModel, DeferredProxyModel, for_concrete_models=False
)
self.assertEqual(cts, {
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: proxy_model_ct,
})
def test_cache_not_shared_between_managers(self):
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(0):
ContentType.objects.get_for_model(ContentType)
other_manager = ContentTypeManager()
other_manager.model = ContentType
with self.assertNumQueries(1):
other_manager.get_for_model(ContentType)
with self.assertNumQueries(0):
other_manager.get_for_model(ContentType)
@override_settings(ALLOWED_HOSTS=['example.com'])
def test_shortcut_view(self):
"""
The shortcut view (used for the admin "view on site" functionality)
returns a complete URL regardless of whether the sites framework is
installed.
"""
request = HttpRequest()
request.META = {
"SERVER_NAME": "Example.com",
"SERVER_PORT": "80",
}
user_ct = ContentType.objects.get_for_model(FooWithUrl)
obj = FooWithUrl.objects.create(name="john")
with self.modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'}):
response = shortcut(request, user_ct.id, obj.id)
self.assertEqual(
"http://%s/users/john/" % get_current_site(request).domain,
response._headers.get("location")[1]
)
with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
response = shortcut(request, user_ct.id, obj.id)
self.assertEqual("http://Example.com/users/john/", response._headers.get("location")[1])
def test_shortcut_view_without_get_absolute_url(self):
"""
The shortcut view (used for the admin "view on site" functionality)
returns 404 when get_absolute_url is not defined.
"""
request = HttpRequest()
request.META = {
"SERVER_NAME": "Example.com",
"SERVER_PORT": "80",
}
user_ct = ContentType.objects.get_for_model(FooWithoutUrl)
obj = FooWithoutUrl.objects.create(name="john")
with self.assertRaises(Http404):
shortcut(request, user_ct.id, obj.id)
def test_shortcut_view_with_broken_get_absolute_url(self):
"""
The shortcut view does not catch an AttributeError raised by
the model's get_absolute_url() method (#8997).
"""
request = HttpRequest()
request.META = {
"SERVER_NAME": "Example.com",
"SERVER_PORT": "80",
}
user_ct = ContentType.objects.get_for_model(FooWithBrokenAbsoluteUrl)
obj = FooWithBrokenAbsoluteUrl.objects.create(name="john")
with self.assertRaises(AttributeError):
shortcut(request, user_ct.id, obj.id)
def test_missing_model(self):
"""
Displaying content types in admin (or anywhere) doesn't break on
leftover content type records in the DB for which no model is defined
anymore.
"""
ct = ContentType.objects.create(
app_label='contenttypes',
model='OldModel',
)
self.assertEqual(six.text_type(ct), 'OldModel')
self.assertIsNone(ct.model_class())
# Make sure stale ContentTypes can be fetched like any other object.
# Before Django 1.6 this caused a NoneType error in the caching mechanism.
# Instead, just return the ContentType object and let the app detect stale states.
ct_fetched = ContentType.objects.get_for_id(ct.pk)
self.assertIsNone(ct_fetched.model_class())
|
apache-2.0
|
ceeblet/OST_PythonCertificationTrack
|
Python2/IntroGUI/src/tkdemo.py
|
1
|
1298
|
from tkinter import *
class Application(Frame):
def say_hi(self):
print("Hi there, everyone!")
def createWidgets(self):
self.hi_there = Button(self, text="Hello", fg="blue", command=self.say_hi)
#be careful of this. the () at the end of the command will call the command right away
# and not when the button is pushed
#self.hi_there = Button(self, text="Hello", fg="blue", command=self.say_hi())
# self.hi_there["text"] = "Hello"
# self.hi_there["fg"] = "blue"
# self.hi_there["command"] = self.say_hi
self.hi_there.pack({"side": "left"})
self.QUIT = Button(self, text="Goodbye", fg="red", command=self.quit)
#be careful of this. the () at the end of the command calls self.quit() immediately
# when the Button is created, so nothing is executed when the button is pushed
#self.QUIT = Button(self, text="Goodbye", fg="red", command=self.quit())
# self.QUIT["text"] = "Goodbye"
# self.QUIT["fg"] = "red"
# self.QUIT["command"] = self.quit
self.QUIT.pack({"side": "left"})
def __init__(self, master=None):
Frame.__init__(self, master)
self.pack()
self.createWidgets()
root = Tk()
app = Application(master=root)
app.mainloop()
|
mit
|
waltBB/neutron_read
|
neutron/agent/l2population_rpc.py
|
6
|
11961
|
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from oslo_config import cfg
from oslo_log import log as logging
import six
from neutron.common import constants as n_const
from neutron.common import log
from neutron.plugins.ml2.drivers.l2pop import rpc as l2pop_rpc
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class L2populationRpcCallBackMixin(object):
'''General mixin class of L2-population RPC call back.
The following methods are called through RPC.
add_fdb_entries(), remove_fdb_entries(), update_fdb_entries()
The following methods are used in an agent as internal methods.
fdb_add(), fdb_remove(), fdb_update()
'''
@log.log
def add_fdb_entries(self, context, fdb_entries, host=None):
if not host or host == cfg.CONF.host:
self.fdb_add(context, self._unmarshall_fdb_entries(fdb_entries))
@log.log
def remove_fdb_entries(self, context, fdb_entries, host=None):
if not host or host == cfg.CONF.host:
self.fdb_remove(context, self._unmarshall_fdb_entries(fdb_entries))
@log.log
def update_fdb_entries(self, context, fdb_entries, host=None):
if not host or host == cfg.CONF.host:
self.fdb_update(context, self._unmarshall_fdb_entries(fdb_entries))
@staticmethod
def _unmarshall_fdb_entries(fdb_entries):
"""Prepares fdb_entries from JSON.
All methods in this class that receive messages should call this to
unmarshall fdb_entries from the wire.
:param fdb_entries: Original fdb_entries data-structure. Looks like:
{
<uuid>: {
...,
'ports': {
<ip address>: [ [<mac>, <ip>], ... ],
...
:returns: Deep copy with [<mac>, <ip>] converted to PortInfo
"""
unmarshalled = dict(fdb_entries)
for value in unmarshalled.values():
if 'ports' in value:
value['ports'] = dict(
(address, [l2pop_rpc.PortInfo(*pi) for pi in port_infos])
for address, port_infos in value['ports'].items()
)
return unmarshalled
@abc.abstractmethod
def fdb_add(self, context, fdb_entries):
pass
@abc.abstractmethod
def fdb_remove(self, context, fdb_entries):
pass
@abc.abstractmethod
def fdb_update(self, context, fdb_entries):
pass
class L2populationRpcCallBackTunnelMixin(L2populationRpcCallBackMixin):
'''Mixin class of L2-population call back for Tunnel.
The following methods are all used in agents as internal methods.
Some of the methods in this class use Local VLAN Mapping, aka lvm.
It's a python object with at least the following attributes:
============ =========================================================
Attribute Description
============ =========================================================
vlan An identifier used by the agent to identify a neutron
network.
network_type A network type found in neutron.plugins.common.constants.
============ =========================================================
NOTE(yamamoto): "Local VLAN" is an OVS-agent term. OVS-agent internally
uses 802.1q VLAN tagging to isolate networks. While this class inherited
the terms from OVS-agent, it does not assume the specific underlying
technologies. E.g. this class is also used by ofagent, where a different
mechanism is used.
'''
@abc.abstractmethod
def add_fdb_flow(self, br, port_info, remote_ip, lvm, ofport):
'''Add flow for fdb
This method is assumed to be used by method fdb_add_tun.
We expect to add a flow entry to send a packet to specified port
on bridge.
And you may edit some information for local arp response.
:param br: represent the bridge on which add_fdb_flow should be
applied.
:param port_info: PortInfo instance to include mac and ip.
.mac_address
.ip_address
:remote_ip: remote ip address.
:param lvm: a local VLAN map of network.
:param ofport: a port to add.
'''
pass
@abc.abstractmethod
def del_fdb_flow(self, br, port_info, remote_ip, lvm, ofport):
'''Delete flow for fdb
This method is assumed to be used by method fdb_remove_tun.
We expect to delete a flow entry to send a packet to specified port
from bridge.
And you may delete some information for local arp response.
:param br: represent the bridge on which del_fdb_flow should be
applied.
:param port_info: PortInfo instance to include mac and ip.
.mac_address
.ip_address
:remote_ip: remote ip address.
:param lvm: local VLAN map of a network. See add_fdb_flow for
more explanation.
:param ofport: a port to delete.
'''
pass
@abc.abstractmethod
def setup_tunnel_port(self, br, remote_ip, network_type):
'''Setup an added tunnel port.
This method is assumed to be used by method fdb_add_tun.
We expect to prepare to call add_fdb_flow. It will be mainly adding
a port to a bridge.
If you need, you may do some preparations for a bridge.
:param br: represent the bridge on which setup_tunnel_port should be
applied.
:param remote_ip: an ip for a port to setup.
:param network_type: a type of a network.
:returns: an ofport value. value 0 means the port is unavailable.
'''
pass
@abc.abstractmethod
def cleanup_tunnel_port(self, br, tun_ofport, tunnel_type):
'''Clean up a deleted tunnel port.
This method is assumed to be used by method fdb_remove_tun.
We expect to clean up after calling del_fdb_flow. It will be mainly
deleting a port from a bridge.
If you need, you may do some cleanup for a bridge.
:param br: represent the bridge on which cleanup_tunnel_port should be
applied.
:param tun_ofport: a port value to cleanup.
:param tunnel_type: a type of a tunnel.
'''
pass
@abc.abstractmethod
def setup_entry_for_arp_reply(self, br, action, local_vid, mac_address,
ip_address):
'''Operate the ARP respond information.
Update MAC/IPv4 associations, which is typically used by
the local ARP responder. For example, OVS-agent sets up
flow entries to perform ARP responses.
:param br: represent the bridge on which setup_entry_for_arp_reply
should be applied.
:param action: add/remove flow for arp response information.
:param local_vid: id in local VLAN map of network's ARP entry.
:param mac_address: MAC string value.
:param ip_address: IP string value.
'''
pass
def get_agent_ports(self, fdb_entries, local_vlan_map):
"""Generator to yield port info.
For each known (i.e. found in local_vlan_map) network in
fdb_entries, yield (lvm, fdb_entries[network_id]['ports']) pair.
:param fdb_entries: l2pop fdb entries
:param local_vlan_map: A dict to map network_id to
the corresponding lvm entry.
"""
for network_id, values in fdb_entries.items():
lvm = local_vlan_map.get(network_id)
if lvm is None:
continue
agent_ports = values.get('ports')
yield (lvm, agent_ports)
@log.log
def fdb_add_tun(self, context, br, lvm, agent_ports, lookup_port):
for remote_ip, ports in agent_ports.items():
# Ensure we have a tunnel port with this remote agent
ofport = lookup_port(lvm.network_type, remote_ip)
if not ofport:
ofport = self.setup_tunnel_port(br, remote_ip,
lvm.network_type)
if ofport == 0:
continue
for port in ports:
self.add_fdb_flow(br, port, remote_ip, lvm, ofport)
@log.log
def fdb_remove_tun(self, context, br, lvm, agent_ports, lookup_port):
for remote_ip, ports in agent_ports.items():
ofport = lookup_port(lvm.network_type, remote_ip)
if not ofport:
continue
for port in ports:
self.del_fdb_flow(br, port, remote_ip, lvm, ofport)
if port == n_const.FLOODING_ENTRY:
# Check if this tunnel port is still used
self.cleanup_tunnel_port(br, ofport, lvm.network_type)
@log.log
def fdb_update(self, context, fdb_entries):
'''Call methods named '_fdb_<action>'.
This method assumes that methods '_fdb_<action>' are defined in class.
Currently the following actions are available.
chg_ip
'''
for action, values in fdb_entries.items():
method = '_fdb_' + action
if not hasattr(self, method):
raise NotImplementedError()
getattr(self, method)(context, values)
@log.log
def fdb_chg_ip_tun(self, context, br, fdb_entries, local_ip,
local_vlan_map):
'''fdb update when an IP of a port is updated.
The ML2 l2-pop mechanism driver sends an fdb update rpc message when an
IP of a port is updated.
:param context: RPC context.
:param br: represent the bridge on which fdb_chg_ip_tun should be
applied.
:param fdb_entries: fdb dicts that contain all mac/IP information per
agent and network.
{'net1':
{'agent_ip':
{'before': PortInfo,
'after': PortInfo
}
}
'net2':
...
}
PortInfo has .mac_address and .ip_address attrs.
:param local_ip: local IP address of this agent.
:param local_vlan_map: A dict to map network_id to
the corresponding lvm entry.
'''
for network_id, agent_ports in fdb_entries.items():
lvm = local_vlan_map.get(network_id)
if not lvm:
continue
for agent_ip, state in agent_ports.items():
if agent_ip == local_ip:
continue
after = state.get('after', [])
for mac_ip in after:
self.setup_entry_for_arp_reply(br, 'add', lvm.vlan,
mac_ip.mac_address,
mac_ip.ip_address)
before = state.get('before', [])
for mac_ip in before:
self.setup_entry_for_arp_reply(br, 'remove', lvm.vlan,
mac_ip.mac_address,
mac_ip.ip_address)
|
apache-2.0
|
masood-anwer/Aspose_Words_Java
|
Plugins/Aspose_Words_Java_for_Jython/asposewords/programming_documents/ProcessComments.py
|
4
|
2560
|
from asposewords import Settings
from com.aspose.words import Document
from com.aspose.words import NodeType
from com.aspose.words import SaveFormat
#from java.util import ArrayList
class ProcessComments:
def __init__(self):
dataDir = Settings.dataDir + 'programming_documents/'
# Open the document.
doc = Document(dataDir + "TestFile.doc")
#ExStart
#ExId:ProcessComments_Main
#ExSummary: Demo code that illustrates the methods for extracting and removing comments.
# Extract the information about the comments of all the authors.
comments = self.extract_comments(doc)
for comment in comments:
print comment
# Remove all comments.
self.remove_comments(doc)
print "All comments are removed!"
# Save the document.
doc.save(dataDir + "Comments.doc")
def extract_comments(self, *args):
doc = args[0]
collectedComments = []
# Collect all comments in the document
comments = doc.getChildNodes(NodeType.COMMENT, True)
# Look through all comments and gather information about them.
for comment in comments :
if 1 < len(args) and args[1] is not None :
authorName = args[1]
if str(comment.getAuthor()) == authorName:
collectedComments.append(str(comment.getAuthor()) + " " + str(comment.getDateTime()) + " " + comment.toString(SaveFormat.TEXT))
else:
collectedComments.append(str(comment.getAuthor()) + " " + str(comment.getDateTime()) + " " + comment.toString(SaveFormat.TEXT))
return collectedComments
def remove_comments(self,*args):
doc = args[0]
if 1 < len(args) and args[1] is not None :
authorName = args[1]
# Collect all comments in the document
comments = doc.getChildNodes(NodeType.COMMENT, True)
comments_count = comments.getCount()
# Look through all comments and remove those written by the authorName author.
i = comments_count
i = i - 1
while i >= 0 :
comment = comments.get(i)
if 1 < len(args) and args[1] is not None :
authorName = args[1]
if str(comment.getAuthor()) == authorName:
comment.remove()
else:
comment.remove()
i = i - 1
if __name__ == '__main__':
ProcessComments()
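# Illustrative note (not executed by this demo): both helpers also accept an
# author name as an optional second argument, e.g.
#   comments = self.extract_comments(doc, "John Doe")   # "John Doe" is made up
#   self.remove_comments(doc, "John Doe")
# which restricts extraction or removal to comments written by that author.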
|
mit
|
jkDesignDE/ScriptumX
|
ScriptumX/X/tests.py
|
9
|
1045
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
"""
import django
from django.test import TestCase
# TODO: Configure your database in settings.py and sync before running tests.
class ViewTest(TestCase):
"""Tests for the application views."""
if django.VERSION[:2] >= (1, 7):
# Django 1.7 requires an explicit setup() when running tests in PTVS
@classmethod
def setUpClass(cls):
super(ViewTest, cls).setUpClass()
django.setup()
def test_home(self):
"""Tests the home page."""
response = self.client.get('/')
self.assertContains(response, 'Polls.', 1, 200)
def test_contact(self):
"""Tests the contact page."""
response = self.client.get('/contact')
self.assertContains(response, 'Contact', 3, 200)
def test_about(self):
"""Tests the about page."""
response = self.client.get('/about')
self.assertContains(response, 'About', 3, 200)
|
gpl-3.0
|
dyoung418/tensorflow
|
tensorflow/contrib/factorization/examples/mnist.py
|
136
|
11854
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Example mnist model with jointly computed k-means clustering.
This is a toy example of how clustering can be embedded into larger tensorflow
graphs. In this case, we learn a clustering on-the-fly and transform the input
into the 'distance to clusters' space. These are then fed into hidden layers to
learn the supervised objective.
To train this model on real mnist data, run this model as follows:
mnist --fake_data=False --max_steps=2000
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import math
import sys
import tempfile
import time
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.examples.tutorials.mnist import mnist
FLAGS = None
# The MNIST dataset has 10 classes, representing the digits 0 through 9.
NUM_CLASSES = 10
# The MNIST images are always 28x28 pixels.
IMAGE_SIZE = 28
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
def placeholder_inputs():
"""Generate placeholder variables to represent the input tensors.
Returns:
images_placeholder: Images placeholder.
labels_placeholder: Labels placeholder.
"""
images_placeholder = tf.placeholder(tf.float32, shape=(None,
mnist.IMAGE_PIXELS))
labels_placeholder = tf.placeholder(tf.int32, shape=(None))
return images_placeholder, labels_placeholder
def fill_feed_dict(data_set, images_pl, labels_pl, batch_size):
"""Fills the feed_dict for training the given step.
Args:
data_set: The set of images and labels, from input_data.read_data_sets()
images_pl: The images placeholder, from placeholder_inputs().
labels_pl: The labels placeholder, from placeholder_inputs().
batch_size: Batch size of data to feed.
Returns:
feed_dict: The feed dictionary mapping from placeholders to values.
"""
# Create the feed_dict for the placeholders filled with the next
# `batch size ` examples.
images_feed, labels_feed = data_set.next_batch(batch_size, FLAGS.fake_data)
feed_dict = {
images_pl: images_feed,
labels_pl: labels_feed,
}
return feed_dict
def do_eval(sess,
eval_correct,
images_placeholder,
labels_placeholder,
data_set):
"""Runs one evaluation against the full epoch of data.
Args:
sess: The session in which the model has been trained.
eval_correct: The Tensor that returns the number of correct predictions.
images_placeholder: The images placeholder.
labels_placeholder: The labels placeholder.
data_set: The set of images and labels to evaluate, from
input_data.read_data_sets().
Returns:
Precision value on the dataset.
"""
# And run one epoch of eval.
true_count = 0 # Counts the number of correct predictions.
batch_size = min(FLAGS.batch_size, data_set.num_examples)
steps_per_epoch = data_set.num_examples // batch_size
num_examples = steps_per_epoch * batch_size
for _ in xrange(steps_per_epoch):
feed_dict = fill_feed_dict(data_set,
images_placeholder,
labels_placeholder,
batch_size)
true_count += sess.run(eval_correct, feed_dict=feed_dict)
precision = true_count / num_examples
print(' Num examples: %d Num correct: %d Precision @ 1: %0.04f' %
(num_examples, true_count, precision))
return precision
def inference(inp, num_clusters, hidden1_units, hidden2_units):
"""Build the MNIST model up to where it may be used for inference.
Args:
inp: input data
num_clusters: number of clusters of input features to train.
hidden1_units: Size of the first hidden layer.
hidden2_units: Size of the second hidden layer.
Returns:
logits: Output tensor with the computed logits.
clustering_loss: Clustering loss.
kmeans_training_op: An op to train the clustering.
"""
# Clustering
kmeans = tf.contrib.factorization.KMeans(
inp,
num_clusters,
distance_metric=tf.contrib.factorization.COSINE_DISTANCE,
# TODO(agarwal): kmeans++ is currently causing crash in dbg mode.
# Enable this after fixing.
# initial_clusters=tf.contrib.factorization.KMEANS_PLUS_PLUS_INIT,
use_mini_batch=True)
(all_scores, _, clustering_scores, _, kmeans_init,
kmeans_training_op) = kmeans.training_graph()
# Some heuristics to approximately whiten this output.
all_scores = (all_scores[0] - 0.5) * 5
# Here we avoid passing the gradients from the supervised objective back to
# the clusters by creating a stop_gradient node.
all_scores = tf.stop_gradient(all_scores)
clustering_loss = tf.reduce_sum(clustering_scores[0])
# Hidden 1
with tf.name_scope('hidden1'):
weights = tf.Variable(
tf.truncated_normal([num_clusters, hidden1_units],
stddev=1.0 / math.sqrt(float(IMAGE_PIXELS))),
name='weights')
biases = tf.Variable(tf.zeros([hidden1_units]),
name='biases')
hidden1 = tf.nn.relu(tf.matmul(all_scores, weights) + biases)
# Hidden 2
with tf.name_scope('hidden2'):
weights = tf.Variable(
tf.truncated_normal([hidden1_units, hidden2_units],
stddev=1.0 / math.sqrt(float(hidden1_units))),
name='weights')
biases = tf.Variable(tf.zeros([hidden2_units]),
name='biases')
hidden2 = tf.nn.relu(tf.matmul(hidden1, weights) + biases)
# Linear
with tf.name_scope('softmax_linear'):
weights = tf.Variable(
tf.truncated_normal([hidden2_units, NUM_CLASSES],
stddev=1.0 / math.sqrt(float(hidden2_units))),
name='weights')
biases = tf.Variable(tf.zeros([NUM_CLASSES]),
name='biases')
logits = tf.matmul(hidden2, weights) + biases
return logits, clustering_loss, kmeans_init, kmeans_training_op
def run_training():
"""Train MNIST for a number of steps."""
# Get the sets of images and labels for training, validation, and
# test on MNIST.
train_dir = tempfile.mkdtemp()
data_sets = input_data.read_data_sets(train_dir, FLAGS.fake_data)
# Tell TensorFlow that the model will be built into the default Graph.
with tf.Graph().as_default():
# Generate placeholders for the images and labels.
images_placeholder, labels_placeholder = placeholder_inputs()
# Build a Graph that computes predictions from the inference model.
logits, clustering_loss, kmeans_init, kmeans_training_op = inference(
images_placeholder,
FLAGS.num_clusters,
FLAGS.hidden1,
FLAGS.hidden2)
# Add to the Graph the Ops for loss calculation.
loss = mnist.loss(logits, labels_placeholder)
# Add to the Graph the Ops that calculate and apply gradients.
train_op = tf.group(mnist.training(loss, FLAGS.learning_rate),
kmeans_training_op)
# Add the Op to compare the logits to the labels during evaluation.
eval_correct = mnist.evaluation(logits, labels_placeholder)
# Add the variable initializer Op.
init = tf.global_variables_initializer()
# Create a session for running Ops on the Graph.
sess = tf.Session()
# Run the Op to initialize the variables.
sess.run(init)
feed_dict = fill_feed_dict(data_sets.train,
images_placeholder,
labels_placeholder,
batch_size=max(FLAGS.batch_size, 5000))
# Run the Op to initialize the clusters.
sess.run(kmeans_init, feed_dict=feed_dict)
# Start the training loop.
max_test_prec = 0
for step in xrange(FLAGS.max_steps):
start_time = time.time()
# Fill a feed dictionary with the actual set of images and labels
# for this particular training step.
feed_dict = fill_feed_dict(data_sets.train,
images_placeholder,
labels_placeholder,
FLAGS.batch_size)
# Run one step of the model.
_, loss_value, clustering_loss_value = sess.run([train_op,
loss,
clustering_loss],
feed_dict=feed_dict)
duration = time.time() - start_time
if step % 100 == 0:
# Print status to stdout.
print('Step %d: loss = %.2f, clustering_loss = %.2f (%.3f sec)' % (
step, loss_value, clustering_loss_value, duration))
# Save a checkpoint and evaluate the model periodically.
if (step + 1) % 1000 == 0 or (step + 1) == FLAGS.max_steps:
# Evaluate against the training set.
print('Training Data Eval:')
do_eval(sess,
eval_correct,
images_placeholder,
labels_placeholder,
data_sets.train)
# Evaluate against the validation set.
print('Validation Data Eval:')
do_eval(sess,
eval_correct,
images_placeholder,
labels_placeholder,
data_sets.validation)
# Evaluate against the test set.
print('Test Data Eval:')
test_prec = do_eval(sess,
eval_correct,
images_placeholder,
labels_placeholder,
data_sets.test)
max_test_prec = max(max_test_prec, test_prec)
return max_test_prec
class MnistTest(tf.test.TestCase):
def test_train(self):
self.assertTrue(run_training() > 0.6)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Basic model parameters as external flags.'
)
parser.register('type', 'bool', lambda v: v.lower() == 'true')
parser.add_argument(
'--learning_rate',
type=float,
default=0.3,
help='Initial learning rate.'
)
parser.add_argument(
'--max_steps',
type=int,
default=200,
help='Number of steps to run trainer.'
)
parser.add_argument(
'--num_clusters',
type=int,
default=384,
help='Number of input feature clusters'
)
parser.add_argument(
'--hidden1',
type=int,
default=256,
help='Number of units in hidden layer 1.'
)
parser.add_argument(
'--hidden2',
type=int,
default=32,
help='Number of units in hidden layer 2.'
)
parser.add_argument(
'--batch_size',
type=int,
default=100,
help='Batch size. Must divide evenly into the dataset sizes.'
)
parser.add_argument(
'--train_dir',
type=str,
default='data',
help='Directory to put the training data.'
)
parser.add_argument(
'--fake_data',
type='bool',
default=True,
help='Use fake input data.'
)
FLAGS, unparsed = parser.parse_known_args()
sys.argv = [sys.argv[0]] + unparsed
tf.test.main()
|
apache-2.0
|
pedroma/python-linkedin
|
tests/linkedin_test.py
|
8
|
2542
|
from tests import *
from linkedin import linkedin
from random import randint
class LinkedInMethodsTestBase:
@classmethod
def set_up_class(cls, gae):
cls._gae = gae
cls.api = linkedin.LinkedIn(API_KEY, SECRET_KEY, RETURN_URL, gae)
cls.api.request_token()
print "----------------"
print "Go to this address please and fill in the details"
print cls.api.get_authorize_url()
print "----------------"
result = []
httpd = cls._create_http_server(result)
httpd.handle_request()
httpd.server_close()
cls.api._verifier = result[0]
cls.api.access_token()
def _generate_email(self):
return "mail_for_tests" + str(randint(1, 1000)) + "@nothing.com"
def test_get_profile(self):
self.assertTrue(self.api.get_profile(fields=['first-name', 'last-name']))
def test_default_get_profile(self):
p = self.api.get_profile()
self.assertTrue(p)
self.assertTrue(p.first_name)
self.assertTrue(p.last_name)
self.assertTrue(p.headline)
self.assertTrue(p.private_url)
def test_get_connections(self):
self.assertTrue(self.api.get_connections())
def test_get_search(self):
self.assertTrue(self.api.get_search({"name" : "Iftach Bar"}))
self.assertTrue(self.api.get_search({}))
def test_send_message(self):
"send_message doesn't raise error"
self.api.send_message("python linkedin integration test",
"This is the message. GAE : " + str(self._gae),
send_yourself = True)
def test_send_invitation(self):
"send_invitation doesn't raise error"
self.api.send_invitation("python linkedin integration test",
"This is the message. GAE : " + str(self._gae),
"first",
"last",
self._generate_email())
def test_set_and_clear_status(self):
"set_status and clear_status don't raise error"
self.api.set_status("Testing linkedin API")
self.api.clear_status()
def test_share_comment(self):
"share_update and clear_status don't raise error"
self.api.share_update(comment = "Testing linkedin API")
self.api.clear_status()
class LinkedInRegularTest(LinkedInMethodsTestBase, LinkedInTestBase):
@classmethod
def setUpClass(cls):
cls.set_up_class(False)
|
mit
|
PeterWangIntel/chromium-crosswalk
|
tools/perf/measurements/page_cycler.py
|
17
|
6434
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The page cycler measurement.
This measurement registers a window load handler in which it forces a layout and
then records the value of performance.now(). This call to now() measures the
time from navigationStart (immediately after the previous page's beforeunload
event) until after the layout in the page's load event. In addition, two garbage
collections are performed in between the page loads (in the beforeunload event).
This extra garbage collection time is not included in the measurement times.
Finally, various memory and IO statistics are gathered at the very end of
cycling all pages.
"""
import collections
import os
from telemetry.core import util
from telemetry.page import page_test
from telemetry.value import scalar
from metrics import cpu
from metrics import keychain_metric
from metrics import memory
from metrics import power
from metrics import speedindex
class PageCycler(page_test.PageTest):
def __init__(self, page_repeat, pageset_repeat, cold_load_percent=50,
report_speed_index=False, clear_cache_before_each_run=False):
super(PageCycler, self).__init__(
clear_cache_before_each_run=clear_cache_before_each_run)
with open(os.path.join(os.path.dirname(__file__),
'page_cycler.js'), 'r') as f:
self._page_cycler_js = f.read()
self._report_speed_index = report_speed_index
self._speedindex_metric = speedindex.SpeedIndexMetric()
self._memory_metric = None
self._power_metric = None
self._cpu_metric = None
self._has_loaded_page = collections.defaultdict(int)
self._initial_renderer_url = None # to avoid cross-renderer navigation
cold_runs_percent_set = (cold_load_percent != None)
# Handle requests for cold cache runs
if (cold_runs_percent_set and
(cold_load_percent < 0 or cold_load_percent > 100)):
raise Exception('cold-load-percent must be in the range [0-100]')
# Make sure _cold_run_start_index is an integer multiple of page_repeat.
# Without this, --pageset_shuffle + --page_repeat could lead to
# assertion failures on _started_warm in WillNavigateToPage.
if cold_runs_percent_set:
number_warm_pageset_runs = int(
(int(pageset_repeat) - 1) * (100 - cold_load_percent) / 100)
number_warm_runs = number_warm_pageset_runs * page_repeat
self._cold_run_start_index = number_warm_runs + page_repeat
else:
self._cold_run_start_index = pageset_repeat * page_repeat
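# Worked example (illustrative numbers only): with pageset_repeat=10,
# page_repeat=2 and cold_load_percent=50, number_warm_pageset_runs =
# int(9 * 50 / 100) = 4, number_warm_runs = 4 * 2 = 8 and
# _cold_run_start_index = 8 + 2 = 10, so ShouldRunCold() starts returning
# True once a URL has already been loaded 10 times.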
def WillStartBrowser(self, platform):
"""Initialize metrics once right before the browser has been launched."""
self._power_metric = power.PowerMetric(platform)
def DidStartBrowser(self, browser):
"""Initialize metrics once right after the browser has been launched."""
self._memory_metric = memory.MemoryMetric(browser)
self._cpu_metric = cpu.CpuMetric(browser)
def WillNavigateToPage(self, page, tab):
if page.is_file:
# For legacy page cyclers which use the filesystem, do an initial
# navigate to avoid paying for a cross-renderer navigation.
initial_url = tab.browser.http_server.UrlOf('nonexistent.html')
if self._initial_renderer_url != initial_url:
self._initial_renderer_url = initial_url
tab.Navigate(self._initial_renderer_url)
page.script_to_evaluate_on_commit = self._page_cycler_js
if self.ShouldRunCold(page.url):
tab.ClearCache(force=True)
if self._report_speed_index:
self._speedindex_metric.Start(page, tab)
self._cpu_metric.Start(page, tab)
self._power_metric.Start(page, tab)
def DidNavigateToPage(self, page, tab):
self._memory_metric.Start(page, tab)
def CustomizeBrowserOptions(self, options):
memory.MemoryMetric.CustomizeBrowserOptions(options)
power.PowerMetric.CustomizeBrowserOptions(options)
options.AppendExtraBrowserArgs('--js-flags=--expose_gc')
if self._report_speed_index:
self._speedindex_metric.CustomizeBrowserOptions(options)
keychain_metric.KeychainMetric.CustomizeBrowserOptions(options)
def ValidateAndMeasurePage(self, page, tab, results):
tab.WaitForJavaScriptExpression('__pc_load_time', 60)
chart_name_prefix = ('cold_' if self.IsRunCold(page.url) else
'warm_')
results.AddValue(scalar.ScalarValue(
results.current_page, '%stimes.page_load_time' % chart_name_prefix,
'ms', tab.EvaluateJavaScript('__pc_load_time'),
description='Average page load time. Measured from '
'performance.timing.navigationStart until the completion '
'time of a layout after the window.load event. Cold times '
'are the times when the page is loaded cold, i.e. without '
'loading it before, and warm times are times when the '
'page is loaded after being loaded previously.'))
self._has_loaded_page[page.url] += 1
self._power_metric.Stop(page, tab)
self._memory_metric.Stop(page, tab)
self._memory_metric.AddResults(tab, results)
self._power_metric.AddResults(tab, results)
self._cpu_metric.Stop(page, tab)
self._cpu_metric.AddResults(tab, results)
if self._report_speed_index:
def SpeedIndexIsFinished():
return self._speedindex_metric.IsFinished(tab)
util.WaitFor(SpeedIndexIsFinished, 60)
self._speedindex_metric.Stop(page, tab)
self._speedindex_metric.AddResults(
tab, results, chart_name=chart_name_prefix+'speed_index')
keychain_metric.KeychainMetric().AddResults(tab, results)
def IsRunCold(self, url):
return self.ShouldRunCold(url) or self._has_loaded_page[url] == 0
def ShouldRunCold(self, url):
# We do the warm runs first for two reasons. The first is so we can
# preserve any initial profile cache for as long as possible.
# The second is that, if we did cold runs first, we'd have a transition
# page set during which we wanted the run for each URL to both
# contribute to the cold data and warm the cache for the following
# warm run, and clearing the cache before the load of the following
# URL would eliminate the intended warmup for the previous URL.
return self._has_loaded_page[url] >= self._cold_run_start_index
|
bsd-3-clause
|
sjshank/spotEmployee
|
node_modules/npm/node_modules/node-gyp/gyp/tools/graphviz.py
|
2679
|
2878
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Using the JSON dumped by the dump-dependency-json generator,
generate input suitable for graphviz to render a dependency graph of
targets."""
import collections
import json
import sys
def ParseTarget(target):
target, _, suffix = target.partition('#')
filename, _, target = target.partition(':')
return filename, target, suffix
def LoadEdges(filename, targets):
"""Load the edges map from the dump file, and filter it to only
show targets in |targets| and their dependents."""
file = open(filename)
edges = json.load(file)
file.close()
# Copy out only the edges we're interested in from the full edge list.
target_edges = {}
to_visit = targets[:]
while to_visit:
src = to_visit.pop()
if src in target_edges:
continue
target_edges[src] = edges[src]
to_visit.extend(edges[src])
return target_edges
def WriteGraph(edges):
"""Print a graphviz graph to stdout.
|edges| is a map of target to a list of other targets it depends on."""
# Bucket targets by file.
files = collections.defaultdict(list)
for src, dst in edges.items():
build_file, target_name, toolset = ParseTarget(src)
files[build_file].append(src)
print 'digraph D {'
print ' fontsize=8' # Used by subgraphs.
print ' node [fontsize=8]'
# Output nodes by file. We must first write out each node within
# its file grouping before writing out any edges that may refer
# to those nodes.
for filename, targets in files.items():
if len(targets) == 1:
# If there's only one node for this file, simplify
# the display by making it a box without an internal node.
target = targets[0]
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name)
else:
# Group multiple nodes together in a subgraph.
print ' subgraph "cluster_%s" {' % filename
print ' label = "%s"' % filename
for target in targets:
build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [label="%s"]' % (target, target_name)
print ' }'
# Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes.
for src, dsts in edges.items():
for dst in dsts:
print ' "%s" -> "%s"' % (src, dst)
print '}'
def main():
if len(sys.argv) < 2:
print >>sys.stderr, __doc__
print >>sys.stderr
print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
return 1
edges = LoadEdges('dump.json', sys.argv[1:])
WriteGraph(edges)
return 0
if __name__ == '__main__':
sys.exit(main())
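# Example invocation (a hedged sketch; the flag spelling and the exact target
# key format should be checked against your gyp version and the contents of
# dump.json):
#
#   gyp --format=dump_dependency_json myproject.gyp    # produces dump.json
#   python graphviz.py "myproject.gyp:mytarget#target" | dot -Tpng -o deps.png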
|
epl-1.0
|
abacuspix/NFV_project
|
Mastering Flask_Code Bundle/chapter_8/webapp/controllers/rest/parsers.py
|
11
|
1408
|
from flask.ext.restful import reqparse
user_post_parser = reqparse.RequestParser()
user_post_parser.add_argument('username', type=str, required=True)
user_post_parser.add_argument('password', type=str, required=True)
post_get_parser = reqparse.RequestParser()
post_get_parser.add_argument('page', type=int, location=['args', 'headers'])
post_get_parser.add_argument('user', type=str, location=['args', 'headers'])
post_post_parser = reqparse.RequestParser()
post_post_parser.add_argument(
'token',
type=str,
required=True,
help="Auth Token is required to edit posts"
)
post_post_parser.add_argument(
'title',
type=str,
required=True,
help="Title is required"
)
post_post_parser.add_argument(
'text',
type=str,
required=True,
help="Body text is required"
)
post_post_parser.add_argument(
'tags',
type=str,
action='append'
)
post_put_parser = reqparse.RequestParser()
post_put_parser.add_argument(
'token',
type=str,
required=True,
help="Auth Token is required to create posts"
)
post_put_parser.add_argument(
'title',
type=str
)
post_put_parser.add_argument(
'text',
type=str
)
post_put_parser.add_argument(
'tags',
type=str
)
post_delete_parser = reqparse.RequestParser()
post_delete_parser.add_argument(
'token',
type=str,
required=True,
help="Auth Token is required to delete posts"
)
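# A minimal usage sketch (hedged; PostApi is a hypothetical resource name and
# is not part of this module):
#
#   from flask.ext.restful import Resource
#
#   class PostApi(Resource):
#       def post(self):
#           args = post_post_parser.parse_args()
#           # args['token'], args['title'] and args['text'] are now validated;
#           # args['tags'] is a list because of action='append'.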
|
mit
|
joeyt83/androguard
|
androguard/decompiler/dad/basic_blocks.py
|
34
|
10681
|
# This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <[email protected]>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import defaultdict
from androguard.decompiler.dad.opcode_ins import INSTRUCTION_SET
from androguard.decompiler.dad.node import Node
logger = logging.getLogger('dad.basic_blocks')
class BasicBlock(Node):
def __init__(self, name, block_ins):
super(BasicBlock, self).__init__(name)
self.ins = block_ins
self.ins_range = None
self.loc_ins = None
self.var_to_declare = set()
def get_ins(self):
return self.ins
def get_loc_with_ins(self):
if self.loc_ins is None:
self.loc_ins = zip(range(*self.ins_range), self.ins)
return self.loc_ins
def remove_ins(self, loc, ins):
self.ins.remove(ins)
self.loc_ins.remove((loc, ins))
def add_ins(self, new_ins_list):
for new_ins in new_ins_list:
self.ins.append(new_ins)
def add_variable_declaration(self, variable):
self.var_to_declare.add(variable)
def number_ins(self, num):
last_ins_num = num + len(self.ins)
self.ins_range = [num, last_ins_num]
self.loc_ins = None
return last_ins_num
class StatementBlock(BasicBlock):
def __init__(self, name, block_ins):
super(StatementBlock, self).__init__(name, block_ins)
self.type.is_stmt = True
def visit(self, visitor):
return visitor.visit_statement_node(self)
def __str__(self):
return '%d-Statement(%s)' % (self.num, self.name)
class ReturnBlock(BasicBlock):
def __init__(self, name, block_ins):
super(ReturnBlock, self).__init__(name, block_ins)
self.type.is_return = True
def visit(self, visitor):
return visitor.visit_return_node(self)
def __str__(self):
return '%d-Return(%s)' % (self.num, self.name)
class ThrowBlock(BasicBlock):
def __init__(self, name, block_ins):
super(ThrowBlock, self).__init__(name, block_ins)
self.type.is_throw = True
def visit(self, visitor):
return visitor.visit_throw_node(self)
def __str__(self):
return '%d-Throw(%s)' % (self.num, self.name)
class SwitchBlock(BasicBlock):
def __init__(self, name, switch, block_ins):
super(SwitchBlock, self).__init__(name, block_ins)
self.switch = switch
self.cases = []
self.default = None
self.node_to_case = defaultdict(list)
self.type.is_switch = True
def add_case(self, case):
self.cases.append(case)
def visit(self, visitor):
return visitor.visit_switch_node(self)
def copy_from(self, node):
super(SwitchBlock, self).copy_from(node)
self.cases = node.cases[:]
self.switch = node.switch[:]
def update_attribute_with(self, n_map):
super(SwitchBlock, self).update_attribute_with(n_map)
self.cases = [n_map.get(n, n) for n in self.cases]
for node1, node2 in n_map.iteritems():
if node1 in self.node_to_case:
self.node_to_case[node2] = self.node_to_case.pop(node1)
def order_cases(self):
values = self.switch.get_values()
if len(values) < len(self.cases):
self.default = self.cases.pop(0)
for case, node in zip(values, self.cases):
self.node_to_case[node].append(case)
def __str__(self):
return '%d-Switch(%s)' % (self.num, self.name)
class CondBlock(BasicBlock):
def __init__(self, name, block_ins):
super(CondBlock, self).__init__(name, block_ins)
self.true = None
self.false = None
self.type.is_cond = True
def update_attribute_with(self, n_map):
super(CondBlock, self).update_attribute_with(n_map)
self.true = n_map.get(self.true, self.true)
self.false = n_map.get(self.false, self.false)
def neg(self):
if len(self.ins) != 1:
raise RuntimeWarning('Condition should have only 1 instruction !')
self.ins[-1].neg()
def visit(self, visitor):
return visitor.visit_cond_node(self)
def visit_cond(self, visitor):
if len(self.ins) != 1:
raise RuntimeWarning('Condition should have only 1 instruction !')
return visitor.visit_ins(self.ins[-1])
def __str__(self):
return '%d-If(%s)' % (self.num, self.name)
class Condition(object):
def __init__(self, cond1, cond2, isand, isnot):
self.cond1 = cond1
self.cond2 = cond2
self.isand = isand
self.isnot = isnot
def neg(self):
self.isand = not self.isand
self.cond1.neg()
self.cond2.neg()
def get_ins(self):
lins = []
lins.extend(self.cond1.get_ins())
lins.extend(self.cond2.get_ins())
return lins
def get_loc_with_ins(self):
loc_ins = []
loc_ins.extend(self.cond1.get_loc_with_ins())
loc_ins.extend(self.cond2.get_loc_with_ins())
return loc_ins
def visit(self, visitor):
return visitor.visit_short_circuit_condition(self.isnot, self.isand,
self.cond1, self.cond2)
def __str__(self):
if self.isnot:
ret = '!%s %s %s'
else:
ret = '%s %s %s'
return ret % (self.cond1, ['||', '&&'][self.isand], self.cond2)
class ShortCircuitBlock(CondBlock):
def __init__(self, name, cond):
super(ShortCircuitBlock, self).__init__(name, None)
self.cond = cond
def get_ins(self):
return self.cond.get_ins()
def get_loc_with_ins(self):
return self.cond.get_loc_with_ins()
def neg(self):
self.cond.neg()
def visit_cond(self, visitor):
return self.cond.visit(visitor)
def __str__(self):
return '%d-SC(%s)' % (self.num, self.cond)
class LoopBlock(CondBlock):
def __init__(self, name, cond):
super(LoopBlock, self).__init__(name, None)
self.cond = cond
def get_ins(self):
return self.cond.get_ins()
def neg(self):
self.cond.neg()
def get_loc_with_ins(self):
return self.cond.get_loc_with_ins()
def visit(self, visitor):
return visitor.visit_loop_node(self)
def visit_cond(self, visitor):
return self.cond.visit_cond(visitor)
def update_attribute_with(self, n_map):
super(LoopBlock, self).update_attribute_with(n_map)
self.cond.update_attribute_with(n_map)
def __str__(self):
if self.looptype.is_pretest:
if self.false in self.loop_nodes:
return '%d-While(!%s)[%s]' % (self.num, self.name, self.cond)
return '%d-While(%s)[%s]' % (self.num, self.name, self.cond)
elif self.looptype.is_posttest:
return '%d-DoWhile(%s)[%s]' % (self.num, self.name, self.cond)
elif self.looptype.is_endless:
return '%d-WhileTrue(%s)[%s]' % (self.num, self.name, self.cond)
return '%d-WhileNoType(%s)' % (self.num, self.name)
class TryBlock(BasicBlock):
def __init__(self, node):
super(TryBlock, self).__init__('Try-%s' % node.name, None)
self.try_start = node
self.catch = []
# FIXME:
@property
def num(self):
return self.try_start.num
@num.setter
def num(self, value):
pass
def add_catch_node(self, node):
self.catch.append(node)
def visit(self, visitor):
visitor.visit_try_node(self)
def __str__(self):
return 'Try(%s)[%s]' % (self.name, self.catch)
class CatchBlock(BasicBlock):
def __init__(self, node):
self.exception = node.ins[0]
node.ins.pop(0)
super(CatchBlock, self).__init__('Catch-%s' % node.name, node.ins)
self.catch_start = node
def visit(self, visitor):
visitor.visit_catch_node(self)
def visit_exception(self, visitor):
visitor.visit_ins(self.exception)
def __str__(self):
return 'Catch(%s)' % self.name
def build_node_from_block(block, vmap, gen_ret, exception_type=None):
ins, lins = None, []
idx = block.get_start()
for ins in block.get_instructions():
opcode = ins.get_op_value()
if opcode == -1: # FIXME? or opcode in (0x0300, 0x0200, 0x0100):
idx += ins.get_length()
continue
try:
_ins = INSTRUCTION_SET[opcode]
except IndexError:
logger.error('Unknown instruction : %s.', ins.get_name().lower())
_ins = INSTRUCTION_SET[0]
# fill-array-data
if opcode == 0x26:
fillaray = block.get_special_ins(idx)
lins.append(_ins(ins, vmap, fillaray))
# invoke-kind[/range]
elif (0x6e <= opcode <= 0x72 or 0x74 <= opcode <= 0x78):
lins.append(_ins(ins, vmap, gen_ret))
# filled-new-array[/range]
elif 0x24 <= opcode <= 0x25:
lins.append(_ins(ins, vmap, gen_ret.new()))
# move-result*
elif 0xa <= opcode <= 0xc:
lins.append(_ins(ins, vmap, gen_ret.last()))
# move-exception
elif opcode == 0xd:
lins.append(_ins(ins, vmap, exception_type))
# monitor-{enter,exit}
elif 0x1d <= opcode <= 0x1e:
idx += ins.get_length()
continue
else:
lins.append(_ins(ins, vmap))
idx += ins.get_length()
name = block.get_name()
# return*
if 0xe <= opcode <= 0x11:
node = ReturnBlock(name, lins)
# {packed,sparse}-switch
elif 0x2b <= opcode <= 0x2c:
idx -= ins.get_length()
values = block.get_special_ins(idx)
node = SwitchBlock(name, values, lins)
# if-test[z]
elif 0x32 <= opcode <= 0x3d:
node = CondBlock(name, lins)
node.off_last_ins = ins.get_ref_off()
# throw
elif opcode == 0x27:
node = ThrowBlock(name, lins)
else:
# goto*
if 0x28 <= opcode <= 0x2a:
lins.pop()
node = StatementBlock(name, lins)
return node
|
apache-2.0
|
phillipwei/crazyflie-clients-python
|
lib/cflib/crazyflie/log.py
|
22
|
22076
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Enables logging of variables from the Crazyflie.
When a Crazyflie is connected it's possible to download a TableOfContent of all
the variables that can be logged. Using this it's possible to add logging
configurations where selected variables are sent to the client at a
specified period.
Terminology:
Log configuration - A configuration with a period and a number of variables
that are present in the TOC.
Stored as - The size and type of the variable as declared in the
Crazyflie firmware
Fetch as - The size and type that a variable should be fetched as.
This does not have to be the same as the size and type
it's stored as.
States of a configuration:
Created on host - When a configuration is created the contents are checked
so that all the variables are present in the TOC. If not
then the configuration cannot be created.
Created on CF - When the configuration is deemed valid it is added to the
Crazyflie. At this time the memory constraint is checked
and the status returned.
Started on CF - Any added block that is not started can be started. Once
started the Crazyflie will send back logdata periodically
according to the specified period when it's created.
Stopped on CF - Any started configuration can be stopped. The memory taken
by the configuration on the Crazyflie is NOT freed, the
only effect is that the Crazyflie will stop sending
logdata back to the host.
Deleted on CF - Any block that is added can be deleted. When this is done
the memory taken by the configuration is freed on the
Crazyflie. The configuration will have to be re-added to
be used again.
"""
__author__ = 'Bitcraze AB'
__all__ = ['Log', 'LogTocElement']
import struct
import errno
from cflib.crtp.crtpstack import CRTPPacket, CRTPPort
from cflib.utils.callbacks import Caller
from .toc import Toc, TocFetcher
# Channels used for the logging port
CHAN_TOC = 0
CHAN_SETTINGS = 1
CHAN_LOGDATA = 2
# Commands used when accessing the Table of Contents
CMD_TOC_ELEMENT = 0
CMD_TOC_INFO = 1
# Commands used when accessing the Log configurations
CMD_CREATE_BLOCK = 0
CMD_APPEND_BLOCK = 1
CMD_DELETE_BLOCK = 2
CMD_START_LOGGING = 3
CMD_STOP_LOGGING = 4
CMD_RESET_LOGGING = 5
# Possible states when receiving TOC
IDLE = "IDLE"
GET_TOC_INF = "GET_TOC_INFO"
GET_TOC_ELEMENT = "GET_TOC_ELEMENT"
# The max size of a CRTP packet payload
MAX_LOG_DATA_PACKET_SIZE = 30
import logging
logger = logging.getLogger(__name__)
class LogVariable():
"""A logging variable"""
TOC_TYPE = 0
MEM_TYPE = 1
def __init__(self, name="", fetchAs="uint8_t", varType=TOC_TYPE,
storedAs="", address=0):
self.name = name
self.fetch_as = LogTocElement.get_id_from_cstring(fetchAs)
if (len(storedAs) == 0):
self.stored_as = self.fetch_as
else:
self.stored_as = LogTocElement.get_id_from_cstring(storedAs)
self.address = address
self.type = varType
self.stored_as_string = storedAs
self.fetch_as_string = fetchAs
def is_toc_variable(self):
"""
Return true if the variable should be in the TOC, false if raw memory
variable
"""
return self.type == LogVariable.TOC_TYPE
def get_storage_and_fetch_byte(self):
"""Return what the variable is stored as and fetched as"""
return (self.fetch_as | (self.stored_as << 4))
def __str__(self):
return ("LogVariable: name=%s, store=%s, fetch=%s" %
(self.name, LogTocElement.get_cstring_from_id(self.stored_as),
LogTocElement.get_cstring_from_id(self.fetch_as)))
class LogConfig(object):
"""Representation of one log configuration that enables logging
from the Crazyflie"""
_config_id_counter = 1
def __init__(self, name, period_in_ms):
"""Initialize the entry"""
self.data_received_cb = Caller()
self.error_cb = Caller()
self.started_cb = Caller()
self.added_cb = Caller()
self.err_no = 0
self.id = LogConfig._config_id_counter
LogConfig._config_id_counter = (LogConfig._config_id_counter + 1) % 255
self.cf = None
self.period = period_in_ms / 10
self.period_in_ms = period_in_ms
self._added = False
self._started = False
self.valid = False
self.variables = []
self.default_fetch_as = []
self.name = name
def add_variable(self, name, fetch_as=None):
"""Add a new variable to the configuration.
name - Complete name of the variable in the form group.name
fetch_as - String representation of the type the variable should be
fetched as (i.e uint8_t, float, FP16, etc)
If no fetch_as type is supplied, then the stored as type will be used
(i.e the type of the fetched variable is the same as it's stored in the
Crazyflie)."""
if fetch_as:
self.variables.append(LogVariable(name, fetch_as))
else:
# We cannot determine the default type until we have connected. So
# save the name and we will add these once we are connected.
self.default_fetch_as.append(name)
def add_memory(self, name, fetch_as, stored_as, address):
"""Add a raw memory position to log.
name - Arbitrary name of the variable
fetch_as - String representation of the type of the data the memory
should be fetch as (i.e uint8_t, float, FP16)
stored_as - String representation of the type the data is stored as
in the Crazyflie
address - The address of the data
"""
self.variables.append(LogVariable(name, fetch_as, LogVariable.MEM_TYPE,
stored_as, address))
def _set_added(self, added):
self._added = added
self.added_cb.call(added)
def _get_added(self):
return self._added
def _set_started(self, started):
self._started = started
self.started_cb.call(started)
def _get_started(self):
return self._started
added = property(_get_added, _set_added)
started = property(_get_started, _set_started)
def start(self):
"""Start the logging for this entry"""
if (self.cf.link is not None):
if (self._added is False):
logger.debug("First time block is started, add block")
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_CREATE_BLOCK, self.id)
for var in self.variables:
if (var.is_toc_variable() is False): # Memory location
logger.debug("Logging to raw memory %d, 0x%04X",
var.get_storage_and_fetch_byte(), var.address)
pk.data += struct.pack('<B', var.get_storage_and_fetch_byte())
pk.data += struct.pack('<I', var.address)
else: # Item in TOC
logger.debug("Adding %s with id=%d and type=0x%02X",
var.name,
self.cf.log._toc.get_element_id(
var.name), var.get_storage_and_fetch_byte())
pk.data += struct.pack('<B', var.get_storage_and_fetch_byte())
pk.data += struct.pack('<B', self.cf.log._toc.
get_element_id(var.name))
logger.debug("Adding log block id {}".format(self.id))
self.cf.send_packet(pk, expected_reply=(CMD_CREATE_BLOCK, self.id))
else:
logger.debug("Block already registered, starting logging"
" for id=%d", self.id)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_START_LOGGING, self.id, self.period)
self.cf.send_packet(pk, expected_reply=(CMD_START_LOGGING, self.id))
def stop(self):
"""Stop the logging for this entry"""
if (self.cf.link is not None):
if (self.id is None):
logger.warning("Stopping block, but no block registered")
else:
logger.debug("Sending stop logging for block id=%d", self.id)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_STOP_LOGGING, self.id)
self.cf.send_packet(pk, expected_reply=(CMD_STOP_LOGGING, self.id))
def delete(self):
"""Delete this entry in the Crazyflie"""
if (self.cf.link is not None):
if (self.id is None):
logger.warning("Delete block, but no block registered")
else:
logger.debug("LogEntry: Sending delete logging for block id=%d"
% self.id)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_DELETE_BLOCK, self.id)
self.cf.send_packet(pk, expected_reply=(CMD_DELETE_BLOCK, self.id))
def unpack_log_data(self, log_data, timestamp):
"""Unpack received logging data so it represent real values according
to the configuration in the entry"""
ret_data = {}
data_index = 0
for var in self.variables:
size = LogTocElement.get_size_from_id(var.fetch_as)
name = var.name
unpackstring = LogTocElement.get_unpack_string_from_id(
var.fetch_as)
value = struct.unpack(unpackstring,
log_data[data_index:data_index + size])[0]
data_index += size
ret_data[name] = value
self.data_received_cb.call(timestamp, ret_data, self)
class LogTocElement:
"""An element in the Log TOC."""
types = {0x01: ("uint8_t", '<B', 1),
0x02: ("uint16_t", '<H', 2),
0x03: ("uint32_t", '<L', 4),
0x04: ("int8_t", '<b', 1),
0x05: ("int16_t", '<h', 2),
0x06: ("int32_t", '<i', 4),
0x08: ("FP16", '<h', 2),
0x07: ("float", '<f', 4)}
@staticmethod
def get_id_from_cstring(name):
"""Return variable type id given the C-storage name"""
for key in LogTocElement.types.keys():
if (LogTocElement.types[key][0] == name):
return key
raise KeyError("Type [%s] not found in LogTocElement.types!" % name)
@staticmethod
def get_cstring_from_id(ident):
"""Return the C-storage name given the variable type id"""
try:
return LogTocElement.types[ident][0]
except KeyError:
raise KeyError("Type [%d] not found in LogTocElement.types"
"!" % ident)
@staticmethod
def get_size_from_id(ident):
"""Return the size in bytes given the variable type id"""
try:
return LogTocElement.types[ident][2]
except KeyError:
raise KeyError("Type [%d] not found in LogTocElement.types"
"!" % ident)
@staticmethod
def get_unpack_string_from_id(ident):
"""Return the Python unpack string given the variable type id"""
try:
return LogTocElement.types[ident][1]
except KeyError:
raise KeyError("Type [%d] not found in LogTocElement.types"
"!" % ident)
def __init__(self, data=None):
"""TocElement creator. Data is the binary payload of the element."""
if (data):
strs = struct.unpack("s" * len(data[2:]), data[2:])
strs = ("{}" * len(strs)).format(*strs).split("\0")
self.group = strs[0]
self.name = strs[1]
self.ident = ord(data[0])
self.ctype = LogTocElement.get_cstring_from_id(ord(data[1]))
self.pytype = LogTocElement.get_unpack_string_from_id(ord(data[1]))
self.access = ord(data[1]) & 0x10
class Log():
"""Create log configuration"""
# These codes can be decoded using os.strerror, but
# some of the text messages will look very strange
# in the UI, so they are redefined here
_err_codes = {
errno.ENOMEM: "No more memory available",
errno.ENOEXEC: "Command not found",
errno.ENOENT: "No such block id",
errno.E2BIG: "Block too large",
errno.EEXIST: "Block already exists"
}
def __init__(self, crazyflie=None):
self.log_blocks = []
# Called with newly created blocks
self.block_added_cb = Caller()
self.cf = crazyflie
self._toc = None
self.cf.add_port_callback(CRTPPort.LOGGING, self._new_packet_cb)
self.toc_updated = Caller()
self.state = IDLE
self.fake_toc_crc = 0xDEADBEEF
self._refresh_callback = None
self._toc_cache = None
def add_config(self, logconf):
"""Add a log configuration to the logging framework.
When doing this the contents of the log configuration will be validated
and listeners for new log configurations will be notified. When
validating the configuration the variables are checked against the TOC
to see that they actually exist. If they don't then the configuration
cannot be used. Since a valid TOC is required, a Crazyflie has to be
connected when calling this method, otherwise it will fail."""
if not self.cf.link:
logger.error("Cannot add configs without being connected to a "
"Crazyflie!")
return
# If the log configuration contains variables that we added without
# type (i.e we want the stored as type for fetching as well) then
# resolve this now and add them to the block again.
for name in logconf.default_fetch_as:
var = self._toc.get_element_by_complete_name(name)
if not var:
logger.warning("%s not in TOC, this block cannot be"
" used!", name)
logconf.valid = False
return
# Now that we know what type this variable has, add it to the log
# config again with the correct type
logconf.add_variable(name, var.ctype)
# Now check that all the added variables are in the TOC and that
# the total size constraint of a data packet with logging data is
# not exceeded
size = 0
for var in logconf.variables:
size += LogTocElement.get_size_from_id(var.fetch_as)
# Check that we are able to find the variable in the TOC so
# we can return error already now and not when the config is sent
if var.is_toc_variable():
if (self._toc.get_element_by_complete_name(
var.name) is None):
logger.warning("Log: %s not in TOC, this block cannot be"
" used!", var.name)
logconf.valid = False
return
if (size <= MAX_LOG_DATA_PACKET_SIZE and
(logconf.period > 0 and logconf.period < 0xFF)):
logconf.valid = True
logconf.cf = self.cf
self.log_blocks.append(logconf)
self.block_added_cb.call(logconf)
else:
logconf.valid = False
def refresh_toc(self, refresh_done_callback, toc_cache):
"""Start refreshing the table of loggale variables"""
self._toc_cache = toc_cache
self._refresh_callback = refresh_done_callback
self._toc = None
pk = CRTPPacket()
pk.set_header(CRTPPort.LOGGING, CHAN_SETTINGS)
pk.data = (CMD_RESET_LOGGING, )
self.cf.send_packet(pk, expected_reply=(CMD_RESET_LOGGING,))
def _find_block(self, id):
for block in self.log_blocks:
if block.id == id:
return block
return None
def _new_packet_cb(self, packet):
"""Callback for newly arrived packets with TOC information"""
chan = packet.channel
cmd = packet.datal[0]
payload = struct.pack("B" * (len(packet.datal) - 1), *packet.datal[1:])
if (chan == CHAN_SETTINGS):
id = ord(payload[0])
error_status = ord(payload[1])
block = self._find_block(id)
if (cmd == CMD_CREATE_BLOCK):
if (block is not None):
if error_status == 0 or error_status == errno.EEXIST:
if not block.added:
logger.debug("Have successfully added id=%d",
id)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_START_LOGGING, id,
block.period)
self.cf.send_packet(pk, expected_reply=(CMD_START_LOGGING, id))
block.added = True
else:
msg = self._err_codes[error_status]
logger.warning("Error %d when adding id=%d (%s)"
, error_status, id, msg)
block.err_no = error_status
block.added_cb.call(False)
block.error_cb.call(block, msg)
else:
logger.warning("No LogEntry to assign block to !!!")
if (cmd == CMD_START_LOGGING):
if (error_status == 0x00):
logger.info("Have successfully started logging for id=%d",
id)
if block:
block.started = True
else:
msg = self._err_codes[error_status]
logger.warning("Error %d when starting id=%d (%s)"
, error_status, id, msg)
if block:
block.err_no = error_status
block.started_cb.call(False)
# This is a temporary fix, we are adding a new issue
# for this. For some reason we get an error back after
# the block has been started and added. This will show
# an error in the UI, but everything is still working.
#block.error_cb.call(block, msg)
if (cmd == CMD_STOP_LOGGING):
if (error_status == 0x00):
logger.info("Have successfully stopped logging for id=%d",
id)
if block:
block.started = False
if (cmd == CMD_DELETE_BLOCK):
# Accept deletion of a block that isn't added. This could
# happen due to timing (i.e add/start/delete in fast sequence)
if error_status == 0x00 or error_status == errno.ENOENT:
logger.info("Have successfully deleted id=%d",
id)
if block:
block.started = False
block.added = False
if (cmd == CMD_RESET_LOGGING):
# Guard against multiple responses due to re-sending
if not self._toc:
logger.debug("Logging reset, continue with TOC download")
self.log_blocks = []
self._toc = Toc()
toc_fetcher = TocFetcher(self.cf, LogTocElement,
CRTPPort.LOGGING,
self._toc, self._refresh_callback,
self._toc_cache)
toc_fetcher.start()
if (chan == CHAN_LOGDATA):
chan = packet.channel
id = packet.datal[0]
block = self._find_block(id)
timestamps = struct.unpack("<BBB", packet.data[1:4])
timestamp = (timestamps[0] | timestamps[1] << 8 | timestamps[2] << 16)
logdata = packet.data[4:]
if (block is not None):
block.unpack_log_data(logdata, timestamp)
else:
logger.warning("Error no LogEntry to handle id=%d", id)
|
gpl-2.0
|
NeuralEnsemble/neuroConstruct
|
lib/jython/Lib/test/test_zipfile64.py
|
158
|
4421
|
# Tests of the full ZIP64 functionality of zipfile
# The test_support.requires call is the only reason for keeping this separate
# from test_zipfile
from test import test_support
# XXX(nnorwitz): disable this test by looking for extra largefile resource
# which doesn't exist. This test takes over 30 minutes to run in general
# and requires more disk space than most of the buildbots.
test_support.requires(
'extralargefile',
'test requires loads of disk-space bytes and a long time to run'
)
# We can test part of the module without zlib.
try:
import zlib
except ImportError:
zlib = None
import zipfile, os, unittest
import time
import sys
from tempfile import TemporaryFile
from test.test_support import TESTFN, run_unittest
TESTFN2 = TESTFN + "2"
# How much time in seconds can pass before we print a 'Still working' message.
_PRINT_WORKING_MSG_INTERVAL = 5 * 60
class TestsWithSourceFile(unittest.TestCase):
def setUp(self):
# Create test data.
# xrange() is important here -- don't want to create immortal space
# for a million ints.
line_gen = ("Test of zipfile line %d." % i for i in xrange(1000000))
self.data = '\n'.join(line_gen)
# And write it to a file.
fp = open(TESTFN, "wb")
fp.write(self.data)
fp.close()
def zipTest(self, f, compression):
# Create the ZIP archive.
zipfp = zipfile.ZipFile(f, "w", compression, allowZip64=True)
# It will contain enough copies of self.data to reach about 6GB of
# raw data to store.
filecount = 6*1024**3 // len(self.data)
next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
for num in range(filecount):
zipfp.writestr("testfn%d" % num, self.data)
# Print still working message since this test can be really slow
if next_time <= time.time():
next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
print >>sys.__stdout__, (
' zipTest still writing %d of %d, be patient...' %
(num, filecount))
sys.__stdout__.flush()
zipfp.close()
# Read the ZIP archive
zipfp = zipfile.ZipFile(f, "r", compression)
for num in range(filecount):
self.assertEqual(zipfp.read("testfn%d" % num), self.data)
# Print still working message since this test can be really slow
if next_time <= time.time():
next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
print >>sys.__stdout__, (
' zipTest still reading %d of %d, be patient...' %
(num, filecount))
sys.__stdout__.flush()
zipfp.close()
def testStored(self):
# Try the temp file first. If we do TESTFN2 first, then it hogs
# gigabytes of disk space for the duration of the test.
for f in TemporaryFile(), TESTFN2:
self.zipTest(f, zipfile.ZIP_STORED)
if zlib:
def testDeflated(self):
# Try the temp file first. If we do TESTFN2 first, then it hogs
# gigabytes of disk space for the duration of the test.
for f in TemporaryFile(), TESTFN2:
self.zipTest(f, zipfile.ZIP_DEFLATED)
def tearDown(self):
for fname in TESTFN, TESTFN2:
if os.path.exists(fname):
os.remove(fname)
class OtherTests(unittest.TestCase):
def testMoreThan64kFiles(self):
# This test checks that more than 64k files can be added to an archive,
# and that the resulting archive can be read properly by ZipFile
zipf = zipfile.ZipFile(TESTFN, mode="w")
zipf.debug = 100
numfiles = (1 << 16) * 3/2
for i in xrange(numfiles):
zipf.writestr("foo%08d" % i, "%d" % (i**3 % 57))
self.assertEqual(len(zipf.namelist()), numfiles)
zipf.close()
zipf2 = zipfile.ZipFile(TESTFN, mode="r")
self.assertEqual(len(zipf2.namelist()), numfiles)
for i in xrange(numfiles):
self.assertEqual(zipf2.read("foo%08d" % i), "%d" % (i**3 % 57))
zipf.close()
def tearDown(self):
test_support.unlink(TESTFN)
test_support.unlink(TESTFN2)
def test_main():
run_unittest(TestsWithSourceFile, OtherTests)
if __name__ == "__main__":
test_main()
|
gpl-2.0
|