| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 2–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int32 2–1.05M |
# Test just the SSL support in the socket module, in a moderately bogus way.
from test import test_support
import socket
import time
# Optionally test SSL support. This requires the 'network' resource as given
# on the regrtest command line.
skip_expected = not (test_support.is_resource_enabled('network') and
hasattr(socket, "ssl"))
def test_basic():
test_support.requires('network')
import urllib
socket.RAND_status()
try:
socket.RAND_egd(1)
except TypeError:
pass
else:
print "didn't raise TypeError"
socket.RAND_add("this is a random string", 75.0)
f = urllib.urlopen('https://sf.net')
buf = f.read()
f.close()
def test_rude_shutdown():
try:
import thread
except ImportError:
return
# some random port to connect to
PORT = 9934
def listener():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', PORT))
s.listen(5)
s.accept()
del s
thread.exit()
def connector():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('localhost', PORT))
try:
ssl_sock = socket.ssl(s)
except socket.sslerror:
pass
else:
raise test_support.TestFailed, \
'connecting to closed SSL socket should have failed'
thread.start_new_thread(listener, ())
time.sleep(1)
connector()
def test_main():
if not hasattr(socket, "ssl"):
raise test_support.TestSkipped("socket module has no ssl support")
test_rude_shutdown()
test_basic()
if __name__ == "__main__":
test_main()
| trivoldus28/pulsarch-verilog | tools/local/bas-release/bas,3.9/lib/python/lib/python2.3/test/test_socket_ssl.py | Python | gpl-2.0 | 1,680 |
#!/usr/bin/env python3
# Copyright (c) 2016-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Wallet commands for signing and verifying messages."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class SignMessagesWithAddressTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-addresstype=legacy"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
message = 'This is just a test message'
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert self.nodes[0].verifymessage(address, signature, message)
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert not self.nodes[0].verifymessage(other_address, signature, message)
assert not self.nodes[0].verifymessage(address, other_signature, message)
self.log.info('test parameter validity and error codes')
# signmessage has two required parameters
for num_params in [0, 1, 3, 4, 5]:
param_list = ["dummy"]*num_params
assert_raises_rpc_error(-1, "signmessage", self.nodes[0].signmessage, *param_list)
# invalid key or address provided
assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].signmessage, "invalid_addr", message)
if __name__ == '__main__':
SignMessagesWithAddressTest().main()
| particl/particl-core | test/functional/wallet_signmessagewithaddress.py | Python | mit | 1,904 |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# The library uses Python logging; you can enable it if the corresponding
# option is set in your Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_admin
short_description: Configure admin users in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS device by allowing the
user to set and modify the system feature and admin category.
Examples include all parameters; values need to be adjusted to data sources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
system_admin:
description:
- Configure admin users.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
accprofile:
description:
- Access profile for this administrator. Access profiles control administrator access to FortiGate features. Source system.accprofile.name.
accprofile-override:
description:
- Enable to use the name of an access profile provided by the remote authentication server to control the FortiGate features that this
administrator can access.
choices:
- enable
- disable
allow-remove-admin-session:
description:
- Enable/disable allow admin session to be removed by privileged admin users.
choices:
- enable
- disable
comments:
description:
- Comment.
email-to:
description:
- This administrator's email address.
force-password-change:
description:
- Enable/disable force password change on next login.
choices:
- enable
- disable
fortitoken:
description:
- This administrator's FortiToken serial number.
guest-auth:
description:
- Enable/disable guest authentication.
choices:
- disable
- enable
guest-lang:
description:
- Guest management portal language. Source system.custom-language.name.
guest-usergroups:
description:
- Select guest user groups.
suboptions:
name:
description:
- Select guest user groups.
required: true
gui-dashboard:
description:
- GUI dashboards.
suboptions:
columns:
description:
- Number of columns.
id:
description:
- Dashboard ID.
required: true
layout-type:
description:
- Layout type.
choices:
- responsive
- fixed
name:
description:
- Dashboard name.
scope:
description:
- Dashboard scope.
choices:
- global
- vdom
widget:
description:
- Dashboard widgets.
suboptions:
fabric-device:
description:
- Fabric device to monitor.
filters:
description:
- FortiView filters.
suboptions:
id:
description:
- FortiView Filter ID.
required: true
key:
description:
- Filter key.
value:
description:
- Filter value.
height:
description:
- Height.
id:
description:
- Widget ID.
required: true
industry:
description:
- Security Audit Rating industry.
choices:
- default
- custom
interface:
description:
- Interface to monitor. Source system.interface.name.
region:
description:
- Security Audit Rating region.
choices:
- default
- custom
report-by:
description:
- Field to aggregate the data by.
choices:
- source
- destination
- country
- intfpair
- srcintf
- dstintf
- policy
- wificlient
- shaper
- endpoint-vulnerability
- endpoint-device
- application
- cloud-app
- cloud-user
- web-domain
- web-category
- web-search-phrase
- threat
- system
- unauth
- admin
- vpn
sort-by:
description:
- Field to sort the data by.
timeframe:
description:
- Timeframe period of reported data.
choices:
- realtime
- 5min
- hour
- day
- week
title:
description:
- Widget title.
type:
description:
- Widget type.
choices:
- sysinfo
- licinfo
- vminfo
- forticloud
- cpu-usage
- memory-usage
- disk-usage
- log-rate
- sessions
- session-rate
- tr-history
- analytics
- usb-modem
- admins
- security-fabric
- security-fabric-ranking
- ha-status
- vulnerability-summary
- host-scan-summary
- fortiview
- botnet-activity
- fortimail
visualization:
description:
- Visualization to use.
choices:
- table
- bubble
- country
- chord
width:
description:
- Width.
x-pos:
description:
- X position.
y-pos:
description:
- Y position.
gui-global-menu-favorites:
description:
- Favorite GUI menu IDs for the global VDOM.
suboptions:
id:
description:
- Select menu ID.
required: true
gui-vdom-menu-favorites:
description:
- Favorite GUI menu IDs for VDOMs.
suboptions:
id:
description:
- Select menu ID.
required: true
hidden:
description:
- Admin user hidden attribute.
history0:
description:
- history0
history1:
description:
- history1
ip6-trusthost1:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost10:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost2:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost3:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost4:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost5:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost6:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost7:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost8:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
ip6-trusthost9:
description:
- Any IPv6 address from which the administrator can connect to the FortiGate unit. Default allows access from any IPv6 address.
login-time:
description:
- Record user login time.
suboptions:
last-failed-login:
description:
- Last failed login time.
last-login:
description:
- Last successful login time.
usr-name:
description:
- User name.
required: true
name:
description:
- User name.
required: true
password:
description:
- Admin user password.
password-expire:
description:
- Password expire time.
peer-auth:
description:
- Set to enable peer certificate authentication (for HTTPS admin access).
choices:
- enable
- disable
peer-group:
description:
- Name of peer group defined under config user group which has PKI members. Used for peer certificate authentication (for HTTPS admin
access).
radius-vdom-override:
description:
- Enable to use the names of VDOMs provided by the remote authentication server to control the VDOMs that this administrator can access.
choices:
- enable
- disable
remote-auth:
description:
- Enable/disable authentication using a remote RADIUS, LDAP, or TACACS+ server.
choices:
- enable
- disable
remote-group:
description:
- User group name used for remote auth.
schedule:
description:
- Firewall schedule used to restrict when the administrator can log in. No schedule means no restrictions.
sms-custom-server:
description:
- Custom SMS server to send SMS messages to. Source system.sms-server.name.
sms-phone:
description:
- Phone number on which the administrator receives SMS messages.
sms-server:
description:
- Send SMS messages using the FortiGuard SMS server or a custom server.
choices:
- fortiguard
- custom
ssh-certificate:
description:
- Select the certificate to be used by the FortiGate for authentication with an SSH client. Source certificate.local.name.
ssh-public-key1:
description:
- Public key of an SSH client. The client is authenticated without being asked for credentials. Create the public-private key pair in the
SSH client application.
ssh-public-key2:
description:
- Public key of an SSH client. The client is authenticated without being asked for credentials. Create the public-private key pair in the
SSH client application.
ssh-public-key3:
description:
- Public key of an SSH client. The client is authenticated without being asked for credentials. Create the public-private key pair in the
SSH client application.
trusthost1:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost10:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost2:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost3:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost4:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost5:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost6:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost7:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost8:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
trusthost9:
description:
- Any IPv4 address or subnet address and netmask from which the administrator can connect to the FortiGate unit. Default allows access
from any IPv4 address.
two-factor:
description:
- Enable/disable two-factor authentication.
choices:
- disable
- fortitoken
- email
- sms
vdom:
description:
- Virtual domain(s) that the administrator can access.
suboptions:
name:
description:
- Virtual domain name. Source system.vdom.name.
required: true
wildcard:
description:
- Enable/disable wildcard RADIUS authentication.
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure admin users.
fortios_system_admin:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
system_admin:
state: "present"
accprofile: "<your_own_value> (source system.accprofile.name)"
accprofile-override: "enable"
allow-remove-admin-session: "enable"
comments: "<your_own_value>"
email-to: "<your_own_value>"
force-password-change: "enable"
fortitoken: "<your_own_value>"
guest-auth: "disable"
guest-lang: "<your_own_value> (source system.custom-language.name)"
guest-usergroups:
-
name: "default_name_13"
gui-dashboard:
-
columns: "15"
id: "16"
layout-type: "responsive"
name: "default_name_18"
scope: "global"
widget:
-
fabric-device: "<your_own_value>"
filters:
-
id: "23"
key: "<your_own_value>"
value: "<your_own_value>"
height: "26"
id: "27"
industry: "default"
interface: "<your_own_value> (source system.interface.name)"
region: "default"
report-by: "source"
sort-by: "<your_own_value>"
timeframe: "realtime"
title: "<your_own_value>"
type: "sysinfo"
visualization: "table"
width: "37"
x-pos: "38"
y-pos: "39"
gui-global-menu-favorites:
-
id: "41"
gui-vdom-menu-favorites:
-
id: "43"
hidden: "44"
history0: "<your_own_value>"
history1: "<your_own_value>"
ip6-trusthost1: "<your_own_value>"
ip6-trusthost10: "<your_own_value>"
ip6-trusthost2: "<your_own_value>"
ip6-trusthost3: "<your_own_value>"
ip6-trusthost4: "<your_own_value>"
ip6-trusthost5: "<your_own_value>"
ip6-trusthost6: "<your_own_value>"
ip6-trusthost7: "<your_own_value>"
ip6-trusthost8: "<your_own_value>"
ip6-trusthost9: "<your_own_value>"
login-time:
-
last-failed-login: "<your_own_value>"
last-login: "<your_own_value>"
usr-name: "<your_own_value>"
name: "default_name_61"
password: "<your_own_value>"
password-expire: "<your_own_value>"
peer-auth: "enable"
peer-group: "<your_own_value>"
radius-vdom-override: "enable"
remote-auth: "enable"
remote-group: "<your_own_value>"
schedule: "<your_own_value>"
sms-custom-server: "<your_own_value> (source system.sms-server.name)"
sms-phone: "<your_own_value>"
sms-server: "fortiguard"
ssh-certificate: "<your_own_value> (source certificate.local.name)"
ssh-public-key1: "<your_own_value>"
ssh-public-key2: "<your_own_value>"
ssh-public-key3: "<your_own_value>"
trusthost1: "<your_own_value>"
trusthost10: "<your_own_value>"
trusthost2: "<your_own_value>"
trusthost3: "<your_own_value>"
trusthost4: "<your_own_value>"
trusthost5: "<your_own_value>"
trusthost6: "<your_own_value>"
trusthost7: "<your_own_value>"
trusthost8: "<your_own_value>"
trusthost9: "<your_own_value>"
two-factor: "disable"
vdom:
-
name: "default_name_89 (source system.vdom.name)"
wildcard: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
host = data['host']
username = data['username']
password = data['password']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password)
def filter_system_admin_data(json):
option_list = ['accprofile', 'accprofile-override', 'allow-remove-admin-session',
'comments', 'email-to', 'force-password-change',
'fortitoken', 'guest-auth', 'guest-lang',
'guest-usergroups', 'gui-dashboard', 'gui-global-menu-favorites',
'gui-vdom-menu-favorites', 'hidden', 'history0',
'history1', 'ip6-trusthost1', 'ip6-trusthost10',
'ip6-trusthost2', 'ip6-trusthost3', 'ip6-trusthost4',
'ip6-trusthost5', 'ip6-trusthost6', 'ip6-trusthost7',
'ip6-trusthost8', 'ip6-trusthost9', 'login-time',
'name', 'password', 'password-expire',
'peer-auth', 'peer-group', 'radius-vdom-override',
'remote-auth', 'remote-group', 'schedule',
'sms-custom-server', 'sms-phone', 'sms-server',
'ssh-certificate', 'ssh-public-key1', 'ssh-public-key2',
'ssh-public-key3', 'trusthost1', 'trusthost10',
'trusthost2', 'trusthost3', 'trusthost4',
'trusthost5', 'trusthost6', 'trusthost7',
'trusthost8', 'trusthost9', 'two-factor',
'vdom', 'wildcard']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def flatten_multilists_attributes(data):
multilist_attrs = []
for attr in multilist_attrs:
try:
path = "data['" + "']['".join(elem for elem in attr) + "']"
current_val = eval(path)
flattened_val = ' '.join(elem for elem in current_val)
exec(path + '= flattened_val')
except BaseException:
pass
return data
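# Illustrative sketch (not part of the original module): for a hypothetical
# entry multilist_attrs = [['vdom']], the eval/exec above would turn
# data = {'vdom': ['root', 'test']} into data = {'vdom': 'root test'},
# i.e. each listed path is flattened into a single space-joined string.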
def system_admin(data, fos):
vdom = data['vdom']
system_admin_data = data['system_admin']
flattened_data = flatten_multilists_attributes(system_admin_data)
filtered_data = filter_system_admin_data(flattened_data)
if system_admin_data['state'] == "present":
return fos.set('system',
'admin',
data=filtered_data,
vdom=vdom)
elif system_admin_data['state'] == "absent":
return fos.delete('system',
'admin',
mkey=filtered_data['name'],
vdom=vdom)
def fortios_system(data, fos):
login(data)
if data['system_admin']:
resp = system_admin(data, fos)
fos.logout()
return not resp['status'] == "success", resp['status'] == "success", resp
def main():
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"system_admin": {
"required": False, "type": "dict",
"options": {
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"accprofile": {"required": False, "type": "str"},
"accprofile-override": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"allow-remove-admin-session": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"comments": {"required": False, "type": "str"},
"email-to": {"required": False, "type": "str"},
"force-password-change": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"fortitoken": {"required": False, "type": "str"},
"guest-auth": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"guest-lang": {"required": False, "type": "str"},
"guest-usergroups": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"gui-dashboard": {"required": False, "type": "list",
"options": {
"columns": {"required": False, "type": "int"},
"id": {"required": True, "type": "int"},
"layout-type": {"required": False, "type": "str",
"choices": ["responsive", "fixed"]},
"name": {"required": False, "type": "str"},
"scope": {"required": False, "type": "str",
"choices": ["global", "vdom"]},
"widget": {"required": False, "type": "list",
"options": {
"fabric-device": {"required": False, "type": "str"},
"filters": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"},
"key": {"required": False, "type": "str"},
"value": {"required": False, "type": "str"}
}},
"height": {"required": False, "type": "int"},
"id": {"required": True, "type": "int"},
"industry": {"required": False, "type": "str",
"choices": ["default", "custom"]},
"interface": {"required": False, "type": "str"},
"region": {"required": False, "type": "str",
"choices": ["default", "custom"]},
"report-by": {"required": False, "type": "str",
"choices": ["source", "destination", "country",
"intfpair", "srcintf", "dstintf",
"policy", "wificlient", "shaper",
"endpoint-vulnerability", "endpoint-device", "application",
"cloud-app", "cloud-user", "web-domain",
"web-category", "web-search-phrase", "threat",
"system", "unauth", "admin",
"vpn"]},
"sort-by": {"required": False, "type": "str"},
"timeframe": {"required": False, "type": "str",
"choices": ["realtime", "5min", "hour",
"day", "week"]},
"title": {"required": False, "type": "str"},
"type": {"required": False, "type": "str",
"choices": ["sysinfo", "licinfo", "vminfo",
"forticloud", "cpu-usage", "memory-usage",
"disk-usage", "log-rate", "sessions",
"session-rate", "tr-history", "analytics",
"usb-modem", "admins", "security-fabric",
"security-fabric-ranking", "ha-status", "vulnerability-summary",
"host-scan-summary", "fortiview", "botnet-activity",
"fortimail"]},
"visualization": {"required": False, "type": "str",
"choices": ["table", "bubble", "country",
"chord"]},
"width": {"required": False, "type": "int"},
"x-pos": {"required": False, "type": "int"},
"y-pos": {"required": False, "type": "int"}
}}
}},
"gui-global-menu-favorites": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "str"}
}},
"gui-vdom-menu-favorites": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "str"}
}},
"hidden": {"required": False, "type": "int"},
"history0": {"required": False, "type": "str"},
"history1": {"required": False, "type": "str"},
"ip6-trusthost1": {"required": False, "type": "str"},
"ip6-trusthost10": {"required": False, "type": "str"},
"ip6-trusthost2": {"required": False, "type": "str"},
"ip6-trusthost3": {"required": False, "type": "str"},
"ip6-trusthost4": {"required": False, "type": "str"},
"ip6-trusthost5": {"required": False, "type": "str"},
"ip6-trusthost6": {"required": False, "type": "str"},
"ip6-trusthost7": {"required": False, "type": "str"},
"ip6-trusthost8": {"required": False, "type": "str"},
"ip6-trusthost9": {"required": False, "type": "str"},
"login-time": {"required": False, "type": "list",
"options": {
"last-failed-login": {"required": False, "type": "str"},
"last-login": {"required": False, "type": "str"},
"usr-name": {"required": True, "type": "str"}
}},
"name": {"required": True, "type": "str"},
"password": {"required": False, "type": "str"},
"password-expire": {"required": False, "type": "str"},
"peer-auth": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"peer-group": {"required": False, "type": "str"},
"radius-vdom-override": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"remote-auth": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"remote-group": {"required": False, "type": "str"},
"schedule": {"required": False, "type": "str"},
"sms-custom-server": {"required": False, "type": "str"},
"sms-phone": {"required": False, "type": "str"},
"sms-server": {"required": False, "type": "str",
"choices": ["fortiguard", "custom"]},
"ssh-certificate": {"required": False, "type": "str"},
"ssh-public-key1": {"required": False, "type": "str"},
"ssh-public-key2": {"required": False, "type": "str"},
"ssh-public-key3": {"required": False, "type": "str"},
"trusthost1": {"required": False, "type": "str"},
"trusthost10": {"required": False, "type": "str"},
"trusthost2": {"required": False, "type": "str"},
"trusthost3": {"required": False, "type": "str"},
"trusthost4": {"required": False, "type": "str"},
"trusthost5": {"required": False, "type": "str"},
"trusthost6": {"required": False, "type": "str"},
"trusthost7": {"required": False, "type": "str"},
"trusthost8": {"required": False, "type": "str"},
"trusthost9": {"required": False, "type": "str"},
"two-factor": {"required": False, "type": "str",
"choices": ["disable", "fortitoken", "email",
"sms"]},
"vdom": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"wildcard": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
global fos
fos = FortiOSAPI()
is_error, has_changed, result = fortios_system(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| aperigault/ansible | lib/ansible/modules/network/fortios/fortios_system_admin.py | Python | gpl-3.0 | 41,930 |
VERSION = "0.12rc3"
VERSION_NAME = "Anderssen"
| Karlon/pychess | lib/pychess/__init__.py | Python | gpl-3.0 | 47 |
"""Ask tankerkoenig.de for petrol price information."""
from datetime import timedelta
import logging
from math import ceil
import pytankerkoenig
import voluptuous as vol
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_RADIUS,
CONF_SCAN_INTERVAL,
CONF_SHOW_ON_MAP,
)
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from .const import CONF_FUEL_TYPES, CONF_STATIONS, DOMAIN, FUEL_TYPES
_LOGGER = logging.getLogger(__name__)
DEFAULT_RADIUS = 2
DEFAULT_SCAN_INTERVAL = timedelta(minutes=30)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): cv.time_period,
vol.Optional(CONF_FUEL_TYPES, default=FUEL_TYPES): vol.All(
cv.ensure_list, [vol.In(FUEL_TYPES)]
),
vol.Inclusive(
CONF_LATITUDE,
"coordinates",
"Latitude and longitude must exist together",
): cv.latitude,
vol.Inclusive(
CONF_LONGITUDE,
"coordinates",
"Latitude and longitude must exist together",
): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): vol.All(
cv.positive_int, vol.Range(min=1)
),
vol.Optional(CONF_STATIONS, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_SHOW_ON_MAP, default=True): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
"""Set the tankerkoenig component up."""
if DOMAIN not in config:
return True
conf = config[DOMAIN]
_LOGGER.debug("Setting up integration")
tankerkoenig = TankerkoenigData(hass, conf)
latitude = conf.get(CONF_LATITUDE, hass.config.latitude)
longitude = conf.get(CONF_LONGITUDE, hass.config.longitude)
radius = conf[CONF_RADIUS]
additional_stations = conf[CONF_STATIONS]
setup_ok = await hass.async_add_executor_job(
tankerkoenig.setup, latitude, longitude, radius, additional_stations
)
if not setup_ok:
_LOGGER.error("Could not setup integration")
return False
hass.data[DOMAIN] = tankerkoenig
hass.async_create_task(
async_load_platform(
hass,
SENSOR_DOMAIN,
DOMAIN,
discovered=tankerkoenig.stations,
hass_config=conf,
)
)
return True
class TankerkoenigData:
"""Get the latest data from the API."""
def __init__(self, hass, conf):
"""Initialize the data object."""
self._api_key = conf[CONF_API_KEY]
self.stations = {}
self.fuel_types = conf[CONF_FUEL_TYPES]
self.update_interval = conf[CONF_SCAN_INTERVAL]
self.show_on_map = conf[CONF_SHOW_ON_MAP]
self._hass = hass
def setup(self, latitude, longitude, radius, additional_stations):
"""Set up the tankerkoenig API.
Read the initial data from the server, to initialize the list of fuel stations to monitor.
"""
_LOGGER.debug("Fetching data for (%s, %s) rad: %s", latitude, longitude, radius)
try:
data = pytankerkoenig.getNearbyStations(
self._api_key, latitude, longitude, radius, "all", "dist"
)
except pytankerkoenig.customException as err:
data = {"ok": False, "message": err, "exception": True}
_LOGGER.debug("Received data: %s", data)
if not data["ok"]:
_LOGGER.error(
"Setup for sensors was unsuccessful. Error occurred while fetching data from tankerkoenig.de: %s",
data["message"],
)
return False
# Add stations found via location + radius
nearby_stations = data["stations"]
if not nearby_stations:
if not additional_stations:
_LOGGER.error(
"Could not find any station in range."
"Try with a bigger radius or manually specify stations in additional_stations"
)
return False
_LOGGER.warning(
"Could not find any station in range. Will only use manually specified stations"
)
else:
for station in data["stations"]:
self.add_station(station)
# Add manually specified additional stations
for station_id in additional_stations:
try:
additional_station_data = pytankerkoenig.getStationData(
self._api_key, station_id
)
except pytankerkoenig.customException as err:
additional_station_data = {
"ok": False,
"message": err,
"exception": True,
}
if not additional_station_data["ok"]:
_LOGGER.error(
"Error when adding station %s:\n %s",
station_id,
additional_station_data["message"],
)
return False
self.add_station(additional_station_data["station"])
if len(self.stations) > 10:
_LOGGER.warning(
"Found more than 10 stations to check. "
"This might invalidate your api-key on the long run. "
"Try using a smaller radius"
)
return True
async def fetch_data(self):
"""Get the latest data from tankerkoenig.de."""
_LOGGER.debug("Fetching new data from tankerkoenig.de")
station_ids = list(self.stations)
prices = {}
# The API seems to only return at most 10 results, so split the list in chunks of 10
# and merge it together.
for index in range(ceil(len(station_ids) / 10)):
data = await self._hass.async_add_executor_job(
pytankerkoenig.getPriceList,
self._api_key,
station_ids[index * 10 : (index + 1) * 10],
)
_LOGGER.debug("Received data: %s", data)
if not data["ok"]:
_LOGGER.error(
"Error fetching data from tankerkoenig.de: %s", data["message"]
)
raise TankerkoenigError(data["message"])
if "prices" not in data:
_LOGGER.error("Did not receive price information from tankerkoenig.de")
raise TankerkoenigError("No prices in data")
prices.update(data["prices"])
return prices
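# Chunking sketch (illustrative numbers): with 23 monitored stations the loop
# above runs ceil(23 / 10) == 3 times and slices station_ids[0:10], [10:20]
# and [20:30], merging the three price dicts into a single result.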
def add_station(self, station: dict):
"""Add fuel station to the entity list."""
station_id = station["id"]
if station_id in self.stations:
_LOGGER.warning(
"Sensor for station with id %s was already created", station_id
)
return
self.stations[station_id] = station
_LOGGER.debug("add_station called for station: %s", station)
class TankerkoenigError(HomeAssistantError):
"""An error occurred while contacting tankerkoenig.de."""
| sander76/home-assistant | homeassistant/components/tankerkoenig/__init__.py | Python | apache-2.0 | 7,623 |
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import re
import sys
from collections import defaultdict
import git_common as git
FOOTER_PATTERN = re.compile(r'^\s*([\w-]+): (.*)$')
CHROME_COMMIT_POSITION_PATTERN = re.compile(r'^([\w/-]+)@{#(\d+)}$')
GIT_SVN_ID_PATTERN = re.compile(r'^([^\s@]+)@(\d+)')
def normalize_name(header):
return '-'.join([ word.title() for word in header.strip().split('-') ])
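# For example, normalize_name('cr-commit-position') returns 'Cr-Commit-Position':
# each dash-separated word is title-cased and re-joined with '-'.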
def parse_footer(line):
match = FOOTER_PATTERN.match(line)
if match:
return (match.group(1), match.group(2))
else:
return None
def parse_footers(message):
"""Parses a git commit message into a multimap of footers."""
footer_lines = []
for line in reversed(message.splitlines()):
if line == '' or line.isspace():
break
footer_lines.append(line)
footers = map(parse_footer, footer_lines)
if not all(footers):
return defaultdict(list)
footer_map = defaultdict(list)
for (k, v) in footers:
footer_map[normalize_name(k)].append(v.strip())
return footer_map
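# Example (hypothetical commit message, shown for illustration):
#   parse_footers('Fix thing\n\nBug: 123\nCr-Commit-Position: refs/heads/master@{#42}')
# returns a multimap equivalent to
#   {'Bug': ['123'], 'Cr-Commit-Position': ['refs/heads/master@{#42}']}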
def get_footer_svn_id(branch=None):
if not branch:
branch = git.root()
svn_id = None
message = git.run('log', '-1', '--format=%B', branch)
footers = parse_footers(message)
git_svn_id = get_unique(footers, 'git-svn-id')
if git_svn_id:
match = GIT_SVN_ID_PATTERN.match(git_svn_id)
if match:
svn_id = match.group(1)
return svn_id
def get_unique(footers, key):
key = normalize_name(key)
values = footers[key]
assert len(values) <= 1, 'Multiple %s footers' % key
if values:
return values[0]
else:
return None
def get_position(footers):
"""Get the commit position from the footers multimap using a heuristic.
Returns:
A tuple of the branch and the position on that branch. For example,
Cr-Commit-Position: refs/heads/master@{#292272}
would give the return value ('refs/heads/master', 292272). If
Cr-Commit-Position is not defined, we try to infer the ref and position
from git-svn-id. The position number can be None if it was not inferrable.
"""
position = get_unique(footers, 'Cr-Commit-Position')
if position:
match = CHROME_COMMIT_POSITION_PATTERN.match(position)
assert match, 'Invalid Cr-Commit-Position value: %s' % position
return (match.group(1), match.group(2))
svn_commit = get_unique(footers, 'git-svn-id')
if svn_commit:
match = GIT_SVN_ID_PATTERN.match(svn_commit)
assert match, 'Invalid git-svn-id value: %s' % svn_commit
# V8 has different semantics than Chromium.
if re.match(r'.*https?://v8\.googlecode\.com/svn/trunk',
match.group(1)):
return ('refs/heads/candidates', match.group(2))
if re.match(r'.*https?://v8\.googlecode\.com/svn/branches/bleeding_edge',
match.group(1)):
return ('refs/heads/master', match.group(2))
# Assume that any trunk svn revision will match the commit-position
# semantics.
if re.match('.*/trunk.*$', match.group(1)):
return ('refs/heads/master', match.group(2))
# But for now only support faking branch-heads for chrome.
branch_match = re.match(r'.*/chrome/branches/([\w/-]+)/src$', match.group(1))
if branch_match:
# svn commit numbers do not map to branches.
return ('refs/branch-heads/%s' % branch_match.group(1), None)
raise ValueError('Unable to infer commit position from footers')
def main(args):
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument('ref')
g = parser.add_mutually_exclusive_group()
g.add_argument('--key', metavar='KEY',
help='Get all values for the given footer name, one per '
'line (case insensitive)')
g.add_argument('--position', action='store_true')
g.add_argument('--position-ref', action='store_true')
g.add_argument('--position-num', action='store_true')
opts = parser.parse_args(args)
message = git.run('log', '-1', '--format=%B', opts.ref)
footers = parse_footers(message)
if opts.key:
for v in footers.get(normalize_name(opts.key), []):
print v
elif opts.position:
pos = get_position(footers)
print '%s@{#%s}' % (pos[0], pos[1] or '?')
elif opts.position_ref:
print get_position(footers)[0]
elif opts.position_num:
pos = get_position(footers)
assert pos[1], 'No valid position for commit'
print pos[1]
else:
for k in footers.keys():
for v in footers[k]:
print '%s: %s' % (k, v)
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
sys.stderr.write('interrupted\n')
sys.exit(1)
| Teamxrtc/webrtc-streaming-node | third_party/depot_tools/git_footers.py | Python | mit | 4,798 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class HrEmployee(models.Model):
_name = 'hr.employee'
_inherit = ['hr.employee', 'website.published.mixin']
public_info = fields.Char(string='Public Info')
@api.multi
def _compute_website_url(self):
super(HrEmployee, self)._compute_website_url()
for employee in self:
employee.website_url = '/page/website.aboutus#team'
| chienlieu2017/it_management | odoo/addons/website_hr/models/hr_employee.py | Python | gpl-3.0 | 508 |
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
| cloudify-cosmo/cloudify-cli | cloudify_cli/config/__init__.py | Python | apache-2.0 | 641 |
"""
Provides various throttling policies.
"""
from __future__ import unicode_literals
import time
from django.core.cache import cache as default_cache
from django.core.exceptions import ImproperlyConfigured
from rest_framework.compat import is_authenticated
from rest_framework.settings import api_settings
class BaseThrottle(object):
"""
Rate throttling of requests.
"""
def allow_request(self, request, view):
"""
Return `True` if the request should be allowed, `False` otherwise.
"""
raise NotImplementedError('.allow_request() must be overridden')
def get_ident(self, request):
"""
Identify the machine making the request by parsing HTTP_X_FORWARDED_FOR
if present and the number of proxies is > 0. Otherwise use all of
HTTP_X_FORWARDED_FOR if it is available, falling back to REMOTE_ADDR.
"""
xff = request.META.get('HTTP_X_FORWARDED_FOR')
remote_addr = request.META.get('REMOTE_ADDR')
num_proxies = api_settings.NUM_PROXIES
if num_proxies is not None:
if num_proxies == 0 or xff is None:
return remote_addr
addrs = xff.split(',')
client_addr = addrs[-min(num_proxies, len(addrs))]
return client_addr.strip()
return ''.join(xff.split()) if xff else remote_addr
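# Worked example (assumed values): with HTTP_X_FORWARDED_FOR set to
# 'client, proxy1, proxy2' and NUM_PROXIES = 2, the last two entries are
# treated as trusted proxies, so the address returned is 'proxy1'.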
def wait(self):
"""
Optionally, return a recommended number of seconds to wait before
the next request.
"""
return None
class SimpleRateThrottle(BaseThrottle):
"""
A simple cache implementation, that only requires `.get_cache_key()`
to be overridden.
The rate (requests / seconds) is set by a `throttle` attribute on the View
class. The attribute is a string of the form 'number_of_requests/period'.
Period should be one of: ('s', 'sec', 'm', 'min', 'h', 'hour', 'd', 'day')
Previous request information used for throttling is stored in the cache.
"""
cache = default_cache
timer = time.time
cache_format = 'throttle_%(scope)s_%(ident)s'
scope = None
THROTTLE_RATES = api_settings.DEFAULT_THROTTLE_RATES
def __init__(self):
if not getattr(self, 'rate', None):
self.rate = self.get_rate()
self.num_requests, self.duration = self.parse_rate(self.rate)
def get_cache_key(self, request, view):
"""
Should return a unique cache-key which can be used for throttling.
Must be overridden.
May return `None` if the request should not be throttled.
"""
raise NotImplementedError('.get_cache_key() must be overridden')
def get_rate(self):
"""
Determine the string representation of the allowed request rate.
"""
if not getattr(self, 'scope', None):
msg = ("You must set either `.scope` or `.rate` for '%s' throttle" %
self.__class__.__name__)
raise ImproperlyConfigured(msg)
try:
return self.THROTTLE_RATES[self.scope]
except KeyError:
msg = "No default throttle rate set for '%s' scope" % self.scope
raise ImproperlyConfigured(msg)
def parse_rate(self, rate):
"""
Given the request rate string, return a two tuple of:
<allowed number of requests>, <period of time in seconds>
"""
if rate is None:
return (None, None)
num, period = rate.split('/')
num_requests = int(num)
duration = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}[period[0]]
return (num_requests, duration)
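# For example, parse_rate('100/day') returns (100, 86400); only the first
# character of the period is inspected, so '100/d' behaves identically.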
def allow_request(self, request, view):
"""
Implement the check to see if the request should be throttled.
On success calls `throttle_success`.
On failure calls `throttle_failure`.
"""
if self.rate is None:
return True
self.key = self.get_cache_key(request, view)
if self.key is None:
return True
self.history = self.cache.get(self.key, [])
self.now = self.timer()
# Drop any requests from the history which have now passed the
# throttle duration
while self.history and self.history[-1] <= self.now - self.duration:
self.history.pop()
if len(self.history) >= self.num_requests:
return self.throttle_failure()
return self.throttle_success()
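# Sliding-window sketch (assumed values): with rate '3/min', a cached history
# [t-10, t-30, t-70] first drops t-70 (older than the 60s window), leaving
# two timestamps; 2 < 3, so the request is allowed and t is recorded.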
def throttle_success(self):
"""
Inserts the current request's timestamp along with the key
into the cache.
"""
self.history.insert(0, self.now)
self.cache.set(self.key, self.history, self.duration)
return True
def throttle_failure(self):
"""
Called when a request to the API has failed due to throttling.
"""
return False
def wait(self):
"""
Returns the recommended next request time in seconds.
"""
if self.history:
remaining_duration = self.duration - (self.now - self.history[-1])
else:
remaining_duration = self.duration
available_requests = self.num_requests - len(self.history) + 1
if available_requests <= 0:
return None
return remaining_duration / float(available_requests)
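# Worked example (assumed state): with rate '3/min' (num_requests=3,
# duration=60) and a history of two requests whose oldest entry is 20s old,
# remaining_duration = 60 - 20 = 40 and available_requests = 3 - 2 + 1 = 2,
# so wait() recommends 40 / 2 = 20.0 seconds.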
class AnonRateThrottle(SimpleRateThrottle):
"""
Limits the rate of API calls that may be made by anonymous users.
The IP address of the request will be used as the unique cache key.
"""
scope = 'anon'
def get_cache_key(self, request, view):
if is_authenticated(request.user):
return None # Only throttle unauthenticated requests.
return self.cache_format % {
'scope': self.scope,
'ident': self.get_ident(request)
}
class UserRateThrottle(SimpleRateThrottle):
"""
Limits the rate of API calls that may be made by a given user.
The user id will be used as a unique cache key if the user is
authenticated. For anonymous requests, the IP address of the request will
be used.
"""
scope = 'user'
def get_cache_key(self, request, view):
if is_authenticated(request.user):
ident = request.user.pk
else:
ident = self.get_ident(request)
return self.cache_format % {
'scope': self.scope,
'ident': ident
}
class ScopedRateThrottle(SimpleRateThrottle):
"""
Limits the rate of API calls by different amounts for various parts of
the API. Any view that has the `throttle_scope` property set will be
throttled. The unique cache key will be generated by concatenating the
user id of the request, and the scope of the view being accessed.
"""
scope_attr = 'throttle_scope'
def __init__(self):
# Override the usual SimpleRateThrottle, because we can't determine
# the rate until called by the view.
pass
def allow_request(self, request, view):
# We can only determine the scope once we're called by the view.
self.scope = getattr(view, self.scope_attr, None)
# If a view does not have a `throttle_scope` always allow the request
if not self.scope:
return True
# Determine the allowed request rate as we normally would during
# the `__init__` call.
self.rate = self.get_rate()
self.num_requests, self.duration = self.parse_rate(self.rate)
# We can now proceed as normal.
return super(ScopedRateThrottle, self).allow_request(request, view)
def get_cache_key(self, request, view):
"""
If `view.throttle_scope` is not set, don't apply this throttle.
Otherwise generate the unique cache key by concatenating the user id
with the `.throttle_scope` property of the view.
"""
if is_authenticated(request.user):
ident = request.user.pk
else:
ident = self.get_ident(request)
return self.cache_format % {
'scope': self.scope,
'ident': ident
}
| OpenWinCon/OpenWinNet | web-gui/myvenv/lib/python3.4/site-packages/rest_framework/throttling.py | Python | apache-2.0 | 8,143 |
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from .initialise import init, deinit, reinit, colorama_text
from .ansi import Fore, Back, Style, Cursor
from .ansitowin32 import AnsiToWin32
__version__ = '0.4.4'
| google/material-design-icons | update/venv/lib/python3.9/site-packages/pip/_vendor/colorama/__init__.py | Python | apache-2.0 | 239 |
from __future__ import unicode_literals
from django.contrib import auth
from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager
from django.contrib.auth.signals import user_logged_in
from django.contrib.contenttypes.models import ContentType
from django.core import validators
from django.core.exceptions import PermissionDenied
from django.core.mail import send_mail
from django.db import models
from django.db.models.manager import EmptyManager
from django.utils import six, timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
def update_last_login(sender, user, **kwargs):
"""
A signal receiver which updates the last_login date for
the user logging in.
"""
user.last_login = timezone.now()
user.save(update_fields=['last_login'])
user_logged_in.connect(update_last_login)
class PermissionManager(models.Manager):
use_in_migrations = True
def get_by_natural_key(self, codename, app_label, model):
return self.get(
codename=codename,
content_type=ContentType.objects.db_manager(self.db).get_by_natural_key(app_label, model),
)
@python_2_unicode_compatible
class Permission(models.Model):
"""
The permissions system provides a way to assign permissions to specific
users and groups of users.
The permission system is used by the Django admin site, but may also be
useful in your own code. The Django admin site uses permissions as follows:
- The "add" permission limits the user's ability to view the "add" form
and add an object.
- The "change" permission limits a user's ability to view the change
list, view the "change" form and change an object.
- The "delete" permission limits the ability to delete an object.
Permissions are set globally per type of object, not per specific object
instance. It is possible to say "Mary may change news stories," but it's
not currently possible to say "Mary may change news stories, but only the
ones she created herself" or "Mary may only change news stories that have a
certain status or publication date."
Three basic permissions -- add, change and delete -- are automatically
created for each Django model.
"""
name = models.CharField(_('name'), max_length=255)
content_type = models.ForeignKey(
ContentType,
models.CASCADE,
verbose_name=_('content type'),
)
codename = models.CharField(_('codename'), max_length=100)
objects = PermissionManager()
class Meta:
verbose_name = _('permission')
verbose_name_plural = _('permissions')
unique_together = (('content_type', 'codename'),)
ordering = ('content_type__app_label', 'content_type__model',
'codename')
def __str__(self):
return "%s | %s | %s" % (
six.text_type(self.content_type.app_label),
six.text_type(self.content_type),
six.text_type(self.name))
def natural_key(self):
return (self.codename,) + self.content_type.natural_key()
natural_key.dependencies = ['contenttypes.contenttype']
class GroupManager(models.Manager):
"""
The manager for the auth's Group model.
"""
use_in_migrations = True
def get_by_natural_key(self, name):
return self.get(name=name)
@python_2_unicode_compatible
class Group(models.Model):
"""
Groups are a generic way of categorizing users to apply permissions, or
some other label, to those users. A user can belong to any number of
groups.
A user in a group automatically has all the permissions granted to that
group. For example, if the group Site editors has the permission
can_edit_home_page, any user in that group will have that permission.
Beyond permissions, groups are a convenient way to categorize users to
apply some label, or extended functionality, to them. For example, you
could create a group 'Special users', and you could write code that would
do special things to those users -- such as giving them access to a
members-only portion of your site, or sending them members-only email
messages.
"""
name = models.CharField(_('name'), max_length=80, unique=True)
permissions = models.ManyToManyField(
Permission,
verbose_name=_('permissions'),
blank=True,
)
objects = GroupManager()
class Meta:
verbose_name = _('group')
verbose_name_plural = _('groups')
def __str__(self):
return self.name
def natural_key(self):
return (self.name,)
class UserManager(BaseUserManager):
use_in_migrations = True
def _create_user(self, username, email, password, **extra_fields):
"""
Creates and saves a User with the given username, email and password.
"""
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
user = self.model(username=username, email=email, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
extra_fields.setdefault('is_staff', False)
extra_fields.setdefault('is_superuser', False)
return self._create_user(username, email, password, **extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
extra_fields.setdefault('is_staff', True)
extra_fields.setdefault('is_superuser', True)
if extra_fields.get('is_staff') is not True:
raise ValueError('Superuser must have is_staff=True.')
if extra_fields.get('is_superuser') is not True:
raise ValueError('Superuser must have is_superuser=True.')
return self._create_user(username, email, password, **extra_fields)
# A few helper functions for common logic between User and AnonymousUser.
def _user_get_all_permissions(user, obj):
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_all_permissions"):
permissions.update(backend.get_all_permissions(user, obj))
return permissions
def _user_has_perm(user, perm, obj):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_perm'):
continue
try:
if backend.has_perm(user, perm, obj):
return True
except PermissionDenied:
return False
return False
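# Note (illustrative): a backend that raises PermissionDenied here
# short-circuits the loop and denies access immediately, even if a later
# backend would have granted the permission.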
def _user_has_module_perms(user, app_label):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_module_perms'):
continue
try:
if backend.has_module_perms(user, app_label):
return True
except PermissionDenied:
return False
return False
class PermissionsMixin(models.Model):
"""
A mixin class that adds the fields and methods necessary to support
Django's Group and Permission model using the ModelBackend.
"""
is_superuser = models.BooleanField(
_('superuser status'),
default=False,
help_text=_(
'Designates that this user has all permissions without '
'explicitly assigning them.'
),
)
groups = models.ManyToManyField(
Group,
verbose_name=_('groups'),
blank=True,
help_text=_(
'The groups this user belongs to. A user will get all permissions '
'granted to each of their groups.'
),
related_name="user_set",
related_query_name="user",
)
user_permissions = models.ManyToManyField(
Permission,
verbose_name=_('user permissions'),
blank=True,
help_text=_('Specific permissions for this user.'),
related_name="user_set",
related_query_name="user",
)
class Meta:
abstract = True
def get_group_permissions(self, obj=None):
"""
Returns a list of permission strings that this user has through their
groups. This method queries all available auth backends. If an object
is passed in, only permissions matching this object are returned.
"""
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_group_permissions"):
permissions.update(backend.get_group_permissions(self, obj))
return permissions
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj)
def has_perm(self, perm, obj=None):
"""
Returns True if the user has the specified permission. This method
queries all available auth backends, but returns immediately if any
backend returns True. Thus, a user who has permission from a single
auth backend is assumed to have permission in general. If an object is
provided, permissions for this specific object are checked.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
# Otherwise we need to check the backends.
return _user_has_perm(self, perm, obj)
def has_perms(self, perm_list, obj=None):
"""
Returns True if the user has each of the specified permissions. If
object is passed, it checks if the user has all required perms for this
object.
"""
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, app_label):
"""
Returns True if the user has any permissions in the given app label.
Uses pretty much the same logic as has_perm, above.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
return _user_has_module_perms(self, app_label)
class AbstractUser(AbstractBaseUser, PermissionsMixin):
"""
An abstract base class implementing a fully featured User model with
admin-compliant permissions.
Username and password are required. Other fields are optional.
"""
username = models.CharField(
_('username'),
max_length=254,
unique=True,
help_text=_('Required. 254 characters or fewer. Letters, digits and @/./+/-/_ only.'),
validators=[
validators.RegexValidator(
r'^[\w.@+-]+$',
_('Enter a valid username. This value may contain only '
'letters, numbers ' 'and @/./+/-/_ characters.')
),
],
error_messages={
'unique': _("A user with that username already exists."),
},
)
first_name = models.CharField(_('first name'), max_length=30, blank=True)
last_name = models.CharField(_('last name'), max_length=30, blank=True)
email = models.EmailField(_('email address'), blank=True)
is_staff = models.BooleanField(
_('staff status'),
default=False,
help_text=_('Designates whether the user can log into this admin site.'),
)
is_active = models.BooleanField(
_('active'),
default=True,
help_text=_(
'Designates whether this user should be treated as active. '
'Unselect this instead of deleting accounts.'
),
)
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
objects = UserManager()
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
abstract = True
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"Returns the short name for the user."
return self.first_name
def email_user(self, subject, message, from_email=None, **kwargs):
"""
Sends an email to this User.
"""
send_mail(subject, message, from_email, [self.email], **kwargs)
class User(AbstractUser):
"""
Users within the Django authentication system are represented by this
model.
Username, password and email are required. Other fields are optional.
"""
class Meta(AbstractUser.Meta):
swappable = 'AUTH_USER_MODEL'
@python_2_unicode_compatible
class AnonymousUser(object):
id = None
pk = None
username = ''
is_staff = False
is_active = False
is_superuser = False
_groups = EmptyManager(Group)
_user_permissions = EmptyManager(Permission)
def __init__(self):
pass
def __str__(self):
return 'AnonymousUser'
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return 1 # instances always return the same hash value
def save(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def delete(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def set_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def check_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def _get_groups(self):
return self._groups
groups = property(_get_groups)
def _get_user_permissions(self):
return self._user_permissions
user_permissions = property(_get_user_permissions)
def get_group_permissions(self, obj=None):
return set()
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj=obj)
def has_perm(self, perm, obj=None):
return _user_has_perm(self, perm, obj=obj)
def has_perms(self, perm_list, obj=None):
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, module):
return _user_has_module_perms(self, module)
def is_anonymous(self):
return True
def is_authenticated(self):
return False
def get_username(self):
return self.username
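# Example usage (a sketch, not part of Django itself; assumes a project that
# sets AUTH_USER_MODEL to the subclass below):
#
#   from django.contrib.auth.models import AbstractUser
#
#   class Employee(AbstractUser):
#       department = models.CharField(max_length=50, blank=True)
#
#   # Permission checks then flow through PermissionsMixin and every
#   # configured authentication backend:
#   #   employee.has_perm('payroll.view_payslip')
#   #   employee.has_module_perms('payroll')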
| MounirMesselmeni/django | django/contrib/auth/models.py | Python | bsd-3-clause | 14,881 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import survey
| richard-willowit/odoo | addons/survey_crm/models/__init__.py | Python | gpl-3.0 | 121 |
from typing import List, <warning descr="Unused import statement">Optional</warning>
def f(x, y):
# type: (int, List[int]) -> str
y.append(x)
return 'foo' | asedunov/intellij-community | python/testData/inspections/PyUnresolvedReferencesInspection/functionTypeCommentUsesImportsFromTyping.py | Python | apache-2.0 | 168 |
from couchpotato.core.providers.userscript.base import UserscriptBase
class Trakt(UserscriptBase):
includes = ['http://trakt.tv/movie/*', 'http://*.trakt.tv/movie/*']
excludes = ['http://trakt.tv/movie/*/*', 'http://*.trakt.tv/movie/*/*']
| coolbombom/CouchPotatoServer | couchpotato/core/providers/userscript/trakt/main.py | Python | gpl-3.0 | 250 |
#!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from apcupsd import ApcupsdCollector
##########################################################################
class TestApcupsdCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('ApcupsdCollector', {
'interval': 10
})
self.collector = ApcupsdCollector(config, None)
def test_import(self):
self.assertTrue(ApcupsdCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_synthetic_data(self, publish_mock):
patch_getdata = patch.object(ApcupsdCollector, 'getData', Mock(
return_value=(
'APC : 001,039,1056\n\x00' +
'\'DATE : 2012-07-16 12:53:58 -0700 \n\x00' +
' HOSTNAME : localhost\n\x00' +
'+VERSION : 3.14.8 (16 January 2010) redhat\n\x00' +
' UPSNAME : localhost\n\x00' +
'\x15CABLE : USB Cable\n\x00' +
'\x1dMODEL : Back-UPS BX1300G \n\x00' +
'\x17UPSMODE : Stand Alone\n\x00' +
'\'STARTTIME: 2011-12-07 10:28:24 -0800 \n\x00' +
'\x13STATUS : ONLINE \n\x00' +
'\x17LINEV : 124.0 Volts\n\x00' +
'\'LOADPCT : 5.0 Percent Load Capacity\n\x00' +
'\x19BCHARGE : 100.0 Percent\n\x00' +
'\x19TIMELEFT : 73.9 Minutes\n\x00' +
'\x15MBATTCHG : 5 Percent\n\x00' +
'\x15MINTIMEL : 3 Minutes\n\x00' +
'\x15MAXTIME : 0 Seconds\n\x00' +
'\x12SENSE : Medium\n\x00' +
'\x17LOTRANS : 088.0 Volts\n\x00' +
'\x17HITRANS : 139.0 Volts\n\x00' +
'\x12ALARMDEL : Always\n\x00' +
'\x16BATTV : 27.3 Volts\n\x00' +
'+LASTXFER : Automatic or explicit self test\n\x00' +
'\x0eNUMXFERS : 19\n\x00' +
'\'XONBATT : 2012-07-13 09:11:52 -0700 \n\x00' +
'\x15TONBATT : 0 seconds\n\x00' +
'\x17CUMONBATT: 130 seconds\n\x00' +
'\'XOFFBATT : 2012-07-13 09:12:01 -0700 \n\x00' +
'\'LASTSTEST: 2012-07-13 09:11:52 -0700 \n\x00' +
'\x0eSELFTEST : NO\n\x00' +
'"STATFLAG : 0x07000008 Status Flag\n\x00' +
'\x16MANDATE : 2009-10-08\n\x00' +
'\x1aSERIALNO : 3B0941X40219 \n\x00' +
'\x16BATTDATE : 2009-10-08\n\x00' +
'\x15NOMINV : 120 Volts\n\x00' +
'\x17NOMBATTV : 24.0 ')))
patch_getdata.start()
self.collector.collect()
patch_getdata.stop()
metrics = {
'localhost.LINEV': 124.000000,
'localhost.LOADPCT': 5.000000,
'localhost.BCHARGE': 100.000000,
'localhost.TIMELEFT': 73.900000,
'localhost.BATTV': 27.300000,
'localhost.NUMXFERS': 0.000000,
'localhost.TONBATT': 0.000000,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
##########################################################################
if __name__ == "__main__":
unittest.main()
| EzyInsights/Diamond | src/collectors/apcupsd/test/testapcupsd.py | Python | mit | 3,682 |
# Package init for fixes.numpy.testing
| JohnGriffiths/nipype | nipype/fixes/numpy/testing/__init__.py | Python | bsd-3-clause | 39 |
from statsmodels.stats.descriptivestats import sign_test
from numpy.testing import assert_almost_equal, assert_equal
def test_sign_test():
x = [7.8, 6.6, 6.5, 7.4, 7.3, 7., 6.4, 7.1, 6.7, 7.6, 6.8]
M, p = sign_test(x, mu0=6.5)
# from R SIGN.test(x, md=6.5)
# from R
assert_almost_equal(p, 0.02148, 5)
# not from R, we use a different convention
assert_equal(M, 4)
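def test_sign_test_counts():
    # A hand-checked sketch (not part of the original test suite) showing how
    # M and p arise for the sample above: 9 observations exceed 6.5, one lies
    # below and one ties, so M = (9 - 1) / 2 = 4, and the two-sided binomial
    # p-value over the 10 non-tied observations is
    # 2 * (C(10, 9) + C(10, 10)) / 2**10.
    p_expected = 2.0 * (10 + 1) / 2 ** 10
    assert_almost_equal(p_expected, 0.02148, 5)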
| hlin117/statsmodels | statsmodels/stats/tests/test_descriptivestats.py | Python | bsd-3-clause | 393 |
import warnings
from django.contrib.gis.db.models.fields import (
GeometryField, LineStringField, PointField, get_srid_info,
)
from django.contrib.gis.db.models.lookups import GISLookup
from django.contrib.gis.db.models.sql import (
AreaField, DistanceField, GeomField, GMLField,
)
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.measure import Area, Distance
from django.db import connections
from django.db.models.expressions import RawSQL
from django.db.models.fields import Field
from django.db.models.query import QuerySet
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
class GeoQuerySet(QuerySet):
"The Geographic QuerySet."
# ### GeoQuerySet Methods ###
def area(self, tolerance=0.05, **kwargs):
"""
Returns the area of the geographic field in an `area` attribute on
each element of this GeoQuerySet.
"""
# Performing setup here rather than in `_spatial_attribute` so that
# we can get the units for `AreaField`.
procedure_args, geo_field = self._spatial_setup(
'area', field_name=kwargs.get('field_name'))
s = {'procedure_args': procedure_args,
'geo_field': geo_field,
'setup': False,
}
connection = connections[self.db]
backend = connection.ops
if backend.oracle:
s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s'
s['procedure_args']['tolerance'] = tolerance
s['select_field'] = AreaField('sq_m') # Oracle returns area in units of meters.
elif backend.postgis or backend.spatialite:
if backend.geography:
# Geography fields support area calculation, returns square meters.
s['select_field'] = AreaField('sq_m')
elif not geo_field.geodetic(connection):
# Getting the area units of the geographic field.
s['select_field'] = AreaField(Area.unit_attname(geo_field.units_name(connection)))
else:
# TODO: Do we want to support raw number areas for geodetic fields?
raise Exception('Area on geodetic coordinate systems not supported.')
return self._spatial_attribute('area', s, **kwargs)
def centroid(self, **kwargs):
"""
Returns the centroid of the geographic field in a `centroid`
attribute on each element of this GeoQuerySet.
"""
return self._geom_attribute('centroid', **kwargs)
def difference(self, geom, **kwargs):
"""
Returns the spatial difference of the geographic field in a `difference`
attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('difference', geom, **kwargs)
def distance(self, geom, **kwargs):
"""
Returns the distance from the given geographic field name to the
given geometry in a `distance` attribute on each element of the
GeoQuerySet.
Keyword Arguments:
`spheroid` => If the geometry field is geodetic and PostGIS is
the spatial database, then the more accurate
spheroid calculation will be used instead of the
quicker sphere calculation.
`tolerance` => Used only for Oracle. The tolerance is
in meters -- a default of 5 centimeters (0.05)
is used.
"""
return self._distance_attribute('distance', geom, **kwargs)
def envelope(self, **kwargs):
"""
Returns a Geometry representing the bounding box of the
Geometry field in an `envelope` attribute on each element of
the GeoQuerySet.
"""
return self._geom_attribute('envelope', **kwargs)
def force_rhr(self, **kwargs):
"""
Returns a modified version of the Polygon/MultiPolygon in which
all of the vertices follow the Right-Hand-Rule. By default,
this is attached as the `force_rhr` attribute on each element
of the GeoQuerySet.
"""
return self._geom_attribute('force_rhr', **kwargs)
def geojson(self, precision=8, crs=False, bbox=False, **kwargs):
"""
Returns a GeoJSON representation of the geometry field in a `geojson`
attribute on each element of the GeoQuerySet.
The `crs` and `bbox` keywords may be set to True if the user wants
the coordinate reference system and the bounding box to be included
in the GeoJSON representation of the geometry.
"""
backend = connections[self.db].ops
if not backend.geojson:
raise NotImplementedError('Only PostGIS 1.3.4+ and SpatiaLite 3.0+ '
'support GeoJSON serialization.')
if not isinstance(precision, six.integer_types):
raise TypeError('Precision keyword must be set with an integer.')
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
s = {'desc': 'GeoJSON',
'procedure_args': {'precision': precision, 'options': options},
'procedure_fmt': '%(geo_col)s,%(precision)s,%(options)s',
}
return self._spatial_attribute('geojson', s, **kwargs)
def geohash(self, precision=20, **kwargs):
"""
Returns a GeoHash representation of the given field in a `geohash`
attribute on each element of the GeoQuerySet.
        The `precision` keyword may be used to customize the number of
        _characters_ used in the output GeoHash; the default is 20.
"""
s = {'desc': 'GeoHash',
'procedure_args': {'precision': precision},
'procedure_fmt': '%(geo_col)s,%(precision)s',
}
return self._spatial_attribute('geohash', s, **kwargs)
def gml(self, precision=8, version=2, **kwargs):
"""
Returns GML representation of the given field in a `gml` attribute
on each element of the GeoQuerySet.
"""
backend = connections[self.db].ops
s = {'desc': 'GML', 'procedure_args': {'precision': precision}}
if backend.postgis:
s['procedure_fmt'] = '%(version)s,%(geo_col)s,%(precision)s'
s['procedure_args'] = {'precision': precision, 'version': version}
if backend.oracle:
s['select_field'] = GMLField()
return self._spatial_attribute('gml', s, **kwargs)
def intersection(self, geom, **kwargs):
"""
Returns the spatial intersection of the Geometry field in
an `intersection` attribute on each element of this
GeoQuerySet.
"""
return self._geomset_attribute('intersection', geom, **kwargs)
def kml(self, **kwargs):
"""
Returns KML representation of the geometry field in a `kml`
attribute on each element of this GeoQuerySet.
"""
s = {'desc': 'KML',
'procedure_fmt': '%(geo_col)s,%(precision)s',
'procedure_args': {'precision': kwargs.pop('precision', 8)},
}
return self._spatial_attribute('kml', s, **kwargs)
def length(self, **kwargs):
"""
Returns the length of the geometry field as a `Distance` object
stored in a `length` attribute on each element of this GeoQuerySet.
"""
return self._distance_attribute('length', None, **kwargs)
def mem_size(self, **kwargs):
"""
Returns the memory size (number of bytes) that the geometry field takes
in a `mem_size` attribute on each element of this GeoQuerySet.
"""
return self._spatial_attribute('mem_size', {}, **kwargs)
def num_geom(self, **kwargs):
"""
Returns the number of geometries if the field is a
GeometryCollection or Multi* Field in a `num_geom`
attribute on each element of this GeoQuerySet; otherwise
        sets the attribute to None.
"""
return self._spatial_attribute('num_geom', {}, **kwargs)
def num_points(self, **kwargs):
"""
Returns the number of points in the first linestring in the
Geometry field in a `num_points` attribute on each element of
        this GeoQuerySet; otherwise sets the attribute to None.
"""
return self._spatial_attribute('num_points', {}, **kwargs)
def perimeter(self, **kwargs):
"""
Returns the perimeter of the geometry field as a `Distance` object
stored in a `perimeter` attribute on each element of this GeoQuerySet.
"""
return self._distance_attribute('perimeter', None, **kwargs)
def point_on_surface(self, **kwargs):
"""
Returns a Point geometry guaranteed to lie on the surface of the
Geometry field in a `point_on_surface` attribute on each element
of this GeoQuerySet; otherwise sets with None.
"""
return self._geom_attribute('point_on_surface', **kwargs)
def reverse_geom(self, **kwargs):
"""
Reverses the coordinate order of the geometry, and attaches as a
`reverse` attribute on each element of this GeoQuerySet.
"""
s = {'select_field': GeomField()}
kwargs.setdefault('model_att', 'reverse_geom')
if connections[self.db].ops.oracle:
s['geo_field_type'] = LineStringField
return self._spatial_attribute('reverse', s, **kwargs)
def scale(self, x, y, z=0.0, **kwargs):
"""
Scales the geometry to a new size by multiplying the ordinates
with the given x,y,z scale factors.
"""
if connections[self.db].ops.spatialite:
if z != 0.0:
raise NotImplementedError('SpatiaLite does not support 3D scaling.')
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s',
'procedure_args': {'x': x, 'y': y},
'select_field': GeomField(),
}
else:
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s',
'procedure_args': {'x': x, 'y': y, 'z': z},
'select_field': GeomField(),
}
return self._spatial_attribute('scale', s, **kwargs)
def snap_to_grid(self, *args, **kwargs):
"""
Snap all points of the input geometry to the grid. How the
geometry is snapped to the grid depends on how many arguments
were given:
- 1 argument : A single size to snap both the X and Y grids to.
- 2 arguments: X and Y sizes to snap the grid to.
- 4 arguments: X, Y sizes and the X, Y origins.
"""
if False in [isinstance(arg, (float,) + six.integer_types) for arg in args]:
            raise TypeError('Size argument(s) for the grid must be float or integer values.')
nargs = len(args)
if nargs == 1:
size = args[0]
procedure_fmt = '%(geo_col)s,%(size)s'
procedure_args = {'size': size}
elif nargs == 2:
xsize, ysize = args
procedure_fmt = '%(geo_col)s,%(xsize)s,%(ysize)s'
procedure_args = {'xsize': xsize, 'ysize': ysize}
elif nargs == 4:
xsize, ysize, xorigin, yorigin = args
procedure_fmt = '%(geo_col)s,%(xorigin)s,%(yorigin)s,%(xsize)s,%(ysize)s'
procedure_args = {'xsize': xsize, 'ysize': ysize,
'xorigin': xorigin, 'yorigin': yorigin}
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `snap_to_grid`.')
s = {'procedure_fmt': procedure_fmt,
'procedure_args': procedure_args,
'select_field': GeomField(),
}
return self._spatial_attribute('snap_to_grid', s, **kwargs)
def svg(self, relative=False, precision=8, **kwargs):
"""
Returns SVG representation of the geographic field in a `svg`
attribute on each element of this GeoQuerySet.
Keyword Arguments:
`relative` => If set to True, this will evaluate the path in
terms of relative moves (rather than absolute).
`precision` => May be used to set the maximum number of decimal
digits used in output (defaults to 8).
"""
relative = int(bool(relative))
if not isinstance(precision, six.integer_types):
raise TypeError('SVG precision keyword argument must be an integer.')
s = {
'desc': 'SVG',
'procedure_fmt': '%(geo_col)s,%(rel)s,%(precision)s',
'procedure_args': {
'rel': relative,
'precision': precision,
}
}
return self._spatial_attribute('svg', s, **kwargs)
def sym_difference(self, geom, **kwargs):
"""
Returns the symmetric difference of the geographic field in a
`sym_difference` attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('sym_difference', geom, **kwargs)
def translate(self, x, y, z=0.0, **kwargs):
"""
Translates the geometry to a new location using the given numeric
parameters as offsets.
"""
if connections[self.db].ops.spatialite:
if z != 0.0:
raise NotImplementedError('SpatiaLite does not support 3D translation.')
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s',
'procedure_args': {'x': x, 'y': y},
'select_field': GeomField(),
}
else:
s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s',
'procedure_args': {'x': x, 'y': y, 'z': z},
'select_field': GeomField(),
}
return self._spatial_attribute('translate', s, **kwargs)
def transform(self, srid=4326, **kwargs):
"""
Transforms the given geometry field to the given SRID. If no SRID is
provided, the transformation will default to using 4326 (WGS84).
"""
if not isinstance(srid, six.integer_types):
raise TypeError('An integer SRID must be provided.')
field_name = kwargs.get('field_name')
self._spatial_setup('transform', field_name=field_name)
self.query.add_context('transformed_srid', srid)
return self._clone()
def union(self, geom, **kwargs):
"""
Returns the union of the geographic field with the given
Geometry in a `union` attribute on each element of this GeoQuerySet.
"""
return self._geomset_attribute('union', geom, **kwargs)
# ### Private API -- Abstracted DRY routines. ###
def _spatial_setup(self, att, desc=None, field_name=None, geo_field_type=None):
"""
Performs set up for executing the spatial function.
"""
# Does the spatial backend support this?
connection = connections[self.db]
func = getattr(connection.ops, att, False)
if desc is None:
desc = att
if not func:
raise NotImplementedError('%s stored procedure not available on '
'the %s backend.' %
(desc, connection.ops.name))
# Initializing the procedure arguments.
procedure_args = {'function': func}
# Is there a geographic field in the model to perform this
# operation on?
geo_field = self._geo_field(field_name)
if not geo_field:
raise TypeError('%s output only available on GeometryFields.' % func)
# If the `geo_field_type` keyword was used, then enforce that
# type limitation.
if geo_field_type is not None and not isinstance(geo_field, geo_field_type):
raise TypeError('"%s" stored procedures may only be called on %ss.' % (func, geo_field_type.__name__))
# Setting the procedure args.
procedure_args['geo_col'] = self._geocol_select(geo_field, field_name)
return procedure_args, geo_field
def _spatial_attribute(self, att, settings, field_name=None, model_att=None):
"""
DRY routine for calling a spatial stored procedure on a geometry column
and attaching its output as an attribute of the model.
Arguments:
att:
The name of the spatial attribute that holds the spatial
SQL function to call.
settings:
        Dictionary of internal settings to customize for the spatial procedure.
Public Keyword Arguments:
field_name:
The name of the geographic field to call the spatial
function on. May also be a lookup to a geometry field
as part of a foreign key relation.
model_att:
The name of the model attribute to attach the output of
the spatial function to.
"""
warnings.warn(
"The %s GeoQuerySet method is deprecated. See GeoDjango Functions "
"documentation to find the expression-based replacement." % att,
RemovedInDjango20Warning, stacklevel=2
)
# Default settings.
settings.setdefault('desc', None)
settings.setdefault('geom_args', ())
settings.setdefault('geom_field', None)
settings.setdefault('procedure_args', {})
settings.setdefault('procedure_fmt', '%(geo_col)s')
settings.setdefault('select_params', [])
connection = connections[self.db]
# Performing setup for the spatial column, unless told not to.
if settings.get('setup', True):
default_args, geo_field = self._spatial_setup(
att, desc=settings['desc'], field_name=field_name,
geo_field_type=settings.get('geo_field_type'))
for k, v in six.iteritems(default_args):
settings['procedure_args'].setdefault(k, v)
else:
geo_field = settings['geo_field']
# The attribute to attach to the model.
if not isinstance(model_att, six.string_types):
model_att = att
# Special handling for any argument that is a geometry.
for name in settings['geom_args']:
# Using the field's get_placeholder() routine to get any needed
# transformation SQL.
geom = geo_field.get_prep_value(settings['procedure_args'][name])
params = geo_field.get_db_prep_lookup('contains', geom, connection=connection)
geom_placeholder = geo_field.get_placeholder(geom, None, connection)
# Replacing the procedure format with that of any needed
# transformation SQL.
old_fmt = '%%(%s)s' % name
new_fmt = geom_placeholder % '%%s'
settings['procedure_fmt'] = settings['procedure_fmt'].replace(old_fmt, new_fmt)
settings['select_params'].extend(params)
# Getting the format for the stored procedure.
fmt = '%%(function)s(%s)' % settings['procedure_fmt']
# If the result of this function needs to be converted.
if settings.get('select_field'):
select_field = settings['select_field']
if connection.ops.oracle:
select_field.empty_strings_allowed = False
else:
select_field = Field()
# Finally, setting the extra selection attribute with
# the format string expanded with the stored procedure
# arguments.
self.query.add_annotation(
RawSQL(fmt % settings['procedure_args'], settings['select_params'], select_field),
model_att)
return self
def _distance_attribute(self, func, geom=None, tolerance=0.05, spheroid=False, **kwargs):
"""
DRY routine for GeoQuerySet distance attribute routines.
"""
# Setting up the distance procedure arguments.
procedure_args, geo_field = self._spatial_setup(func, field_name=kwargs.get('field_name'))
# If geodetic defaulting distance attribute to meters (Oracle and
# PostGIS spherical distances return meters). Otherwise, use the
# units of the geometry field.
connection = connections[self.db]
geodetic = geo_field.geodetic(connection)
geography = geo_field.geography
if geodetic:
dist_att = 'm'
else:
dist_att = Distance.unit_attname(geo_field.units_name(connection))
# Shortcut booleans for what distance function we're using and
# whether the geometry field is 3D.
distance = func == 'distance'
length = func == 'length'
perimeter = func == 'perimeter'
if not (distance or length or perimeter):
raise ValueError('Unknown distance function: %s' % func)
geom_3d = geo_field.dim == 3
# The field's get_db_prep_lookup() is used to get any
# extra distance parameters. Here we set up the
# parameters that will be passed in to field's function.
lookup_params = [geom or 'POINT (0 0)', 0]
# Getting the spatial backend operations.
backend = connection.ops
# If the spheroid calculation is desired, either by the `spheroid`
# keyword or when calculating the length of geodetic field, make
# sure the 'spheroid' distance setting string is passed in so we
# get the correct spatial stored procedure.
if spheroid or (backend.postgis and geodetic and
(not geography) and length):
lookup_params.append('spheroid')
lookup_params = geo_field.get_prep_value(lookup_params)
params = geo_field.get_db_prep_lookup('distance_lte', lookup_params, connection=connection)
# The `geom_args` flag is set to true if a geometry parameter was
# passed in.
geom_args = bool(geom)
if backend.oracle:
if distance:
procedure_fmt = '%(geo_col)s,%(geom)s,%(tolerance)s'
elif length or perimeter:
procedure_fmt = '%(geo_col)s,%(tolerance)s'
procedure_args['tolerance'] = tolerance
else:
# Getting whether this field is in units of degrees since the field may have
# been transformed via the `transform` GeoQuerySet method.
srid = self.query.get_context('transformed_srid')
if srid:
u, unit_name, s = get_srid_info(srid, connection)
geodetic = unit_name.lower() in geo_field.geodetic_units
if geodetic and not connection.features.supports_distance_geodetic:
raise ValueError(
'This database does not support linear distance '
'calculations on geodetic coordinate systems.'
)
if distance:
if srid:
# Setting the `geom_args` flag to false because we want to handle
# transformation SQL here, rather than the way done by default
# (which will transform to the original SRID of the field rather
# than to what was transformed to).
geom_args = False
procedure_fmt = '%s(%%(geo_col)s, %s)' % (backend.transform, srid)
if geom.srid is None or geom.srid == srid:
# If the geom parameter srid is None, it is assumed the coordinates
# are in the transformed units. A placeholder is used for the
# geometry parameter. `GeomFromText` constructor is also needed
# to wrap geom placeholder for SpatiaLite.
if backend.spatialite:
procedure_fmt += ', %s(%%%%s, %s)' % (backend.from_text, srid)
else:
procedure_fmt += ', %%s'
else:
# We need to transform the geom to the srid specified in `transform()`,
# so wrapping the geometry placeholder in transformation SQL.
# SpatiaLite also needs geometry placeholder wrapped in `GeomFromText`
# constructor.
if backend.spatialite:
procedure_fmt += (', %s(%s(%%%%s, %s), %s)' % (
backend.transform, backend.from_text,
geom.srid, srid))
else:
procedure_fmt += ', %s(%%%%s, %s)' % (backend.transform, srid)
else:
# `transform()` was not used on this GeoQuerySet.
procedure_fmt = '%(geo_col)s,%(geom)s'
if not geography and geodetic:
# Spherical distance calculation is needed (because the geographic
# field is geodetic). However, the PostGIS ST_distance_sphere/spheroid()
            # procedures may only do queries from point columns to point
            # geometries, so some error checking is required.
if not backend.geography:
if not isinstance(geo_field, PointField):
raise ValueError('Spherical distance calculation only supported on PointFields.')
if not str(Geometry(six.memoryview(params[0].ewkb)).geom_type) == 'Point':
raise ValueError(
'Spherical distance calculation only supported with '
'Point Geometry parameters'
)
# The `function` procedure argument needs to be set differently for
# geodetic distance calculations.
if spheroid:
# Call to distance_spheroid() requires spheroid param as well.
procedure_fmt += ",'%(spheroid)s'"
procedure_args.update({'function': backend.distance_spheroid, 'spheroid': params[1]})
else:
procedure_args.update({'function': backend.distance_sphere})
elif length or perimeter:
procedure_fmt = '%(geo_col)s'
if not geography and geodetic and length:
# There's no `length_sphere`, and `length_spheroid` also
# works on 3D geometries.
procedure_fmt += ",'%(spheroid)s'"
procedure_args.update({'function': backend.length_spheroid, 'spheroid': params[1]})
elif geom_3d and connection.features.supports_3d_functions:
# Use 3D variants of perimeter and length routines on supported backends.
if perimeter:
procedure_args.update({'function': backend.perimeter3d})
elif length:
procedure_args.update({'function': backend.length3d})
# Setting up the settings for `_spatial_attribute`.
s = {'select_field': DistanceField(dist_att),
'setup': False,
'geo_field': geo_field,
'procedure_args': procedure_args,
'procedure_fmt': procedure_fmt,
}
if geom_args:
s['geom_args'] = ('geom',)
s['procedure_args']['geom'] = geom
elif geom:
# The geometry is passed in as a parameter because we handled
# transformation conditions in this routine.
s['select_params'] = [backend.Adapter(geom)]
return self._spatial_attribute(func, s, **kwargs)
def _geom_attribute(self, func, tolerance=0.05, **kwargs):
"""
DRY routine for setting up a GeoQuerySet method that attaches a
Geometry attribute (e.g., `centroid`, `point_on_surface`).
"""
s = {'select_field': GeomField()}
if connections[self.db].ops.oracle:
s['procedure_fmt'] = '%(geo_col)s,%(tolerance)s'
s['procedure_args'] = {'tolerance': tolerance}
return self._spatial_attribute(func, s, **kwargs)
def _geomset_attribute(self, func, geom, tolerance=0.05, **kwargs):
"""
DRY routine for setting up a GeoQuerySet method that attaches a
        Geometry attribute and takes a Geometry parameter. This is used
for geometry set-like operations (e.g., intersection, difference,
union, sym_difference).
"""
s = {
'geom_args': ('geom',),
'select_field': GeomField(),
'procedure_fmt': '%(geo_col)s,%(geom)s',
'procedure_args': {'geom': geom},
}
if connections[self.db].ops.oracle:
s['procedure_fmt'] += ',%(tolerance)s'
s['procedure_args']['tolerance'] = tolerance
return self._spatial_attribute(func, s, **kwargs)
def _geocol_select(self, geo_field, field_name):
"""
Helper routine for constructing the SQL to select the geographic
column. Takes into account if the geographic field is in a
ForeignKey relation to the current model.
"""
compiler = self.query.get_compiler(self.db)
opts = self.model._meta
if geo_field not in opts.fields:
# Is this operation going to be on a related geographic field?
# If so, it'll have to be added to the select related information
# (e.g., if 'location__point' was given as the field name).
# Note: the operation really is defined as "must add select related!"
self.query.add_select_related([field_name])
# Call pre_sql_setup() so that compiler.select gets populated.
compiler.pre_sql_setup()
for col, _, _ in compiler.select:
if col.output_field == geo_field:
return col.as_sql(compiler, compiler.connection)[0]
raise ValueError("%r not in compiler's related_select_cols" % geo_field)
elif geo_field not in opts.local_fields:
# This geographic field is inherited from another model, so we have to
# use the db table for the _parent_ model instead.
parent_model = geo_field.model._meta.concrete_model
return self._field_column(compiler, geo_field, parent_model._meta.db_table)
else:
return self._field_column(compiler, geo_field)
# Private API utilities, subject to change.
def _geo_field(self, field_name=None):
"""
Returns the first Geometry field encountered or the one specified via
the `field_name` keyword. The `field_name` may be a string specifying
the geometry field on this GeoQuerySet's model, or a lookup string
to a geometry field via a ForeignKey relation.
"""
if field_name is None:
# Incrementing until the first geographic field is found.
for field in self.model._meta.fields:
if isinstance(field, GeometryField):
return field
return False
else:
# Otherwise, check by the given field name -- which may be
# a lookup to a _related_ geographic field.
return GISLookup._check_geo_field(self.model._meta, field_name)
def _field_column(self, compiler, field, table_alias=None, column=None):
"""
Helper function that returns the database column for the given field.
The table and column are returned (quoted) in the proper format, e.g.,
`"geoapp_city"."point"`. If `table_alias` is not specified, the
database table associated with the model of this `GeoQuerySet` will be
used. If `column` is specified, it will be used instead of the value
in `field.column`.
"""
if table_alias is None:
table_alias = compiler.query.get_meta().db_table
return "%s.%s" % (compiler.quote_name_unless_alias(table_alias),
compiler.connection.ops.quote_name(column or field.column))
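# Example usage (a sketch, not part of Django; assumes a PostGIS-backed model
# such as ``class City(models.Model): point = models.PointField()``):
#
#   qs = City.objects.transform(3857).geojson(precision=6)
#   for city in qs:
#       city.geojson  # GeoJSON string for the transformed geometry
#
# Each GeoQuerySet method above attaches its result as an attribute named
# after the method (or the ``model_att`` keyword) on every returned instance.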
| jylaxp/django | django/contrib/gis/db/models/query.py | Python | bsd-3-clause | 32,348 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Code Aurora Forum. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Code Aurora nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
allowed_warnings = set([
"alignment.c:720",
"async.c:127",
"async.c:283",
"decompress_bunzip2.c:511",
"dm.c:1118",
"dm.c:1146",
"dm-table.c:1065",
"dm-table.c:1071",
"ehci-dbg.c:44",
"ehci-dbg.c:88",
"ehci-hcd.c:1048",
"ehci-hcd.c:423",
"ehci-hcd.c:614",
"ehci-hub.c:109",
"ehci-hub.c:1265",
"ehci-msm.c:156",
"ehci-msm.c:201",
"ehci-msm.c:455",
"eventpoll.c:1118",
"gspca.c:1509",
"ioctl.c:4673",
"main.c:305",
"main.c:734",
"nf_conntrack_netlink.c:762",
"nf_nat_standalone.c:118",
"return_address.c:61",
"scan.c:749",
"smsc.c:257",
"yaffs_guts.c:1571",
"yaffs_guts.c:600",
])
# Capture the name of the object file, so we can find it later.
ofile = None
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
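# A hypothetical line the pattern above matches (for illustration only):
#   drivers/net/foo.c:123:45: warning: unused variable 'bar'
# group(2) is then "foo.c:123", the file:line key that interpret_warning()
# below compares against allowed_warnings.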
def interpret_warning(line):
"""Decode the message from gcc. The messages we care about have a filename, and a warning"""
line = line.rstrip('\n')
m = warning_re.match(line)
if m and m.group(2) not in allowed_warnings:
print "error, forbidden warning:", m.group(2)
# If there is a warning, remove any object if it exists.
if ofile:
try:
os.remove(ofile)
except OSError:
pass
sys.exit(1)
def run_gcc():
args = sys.argv[1:]
# Look for -o
try:
i = args.index('-o')
global ofile
ofile = args[i+1]
except (ValueError, IndexError):
pass
compiler = sys.argv[0]
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
for line in proc.stderr:
print line,
interpret_warning(line)
result = proc.wait()
return result
if __name__ == '__main__':
status = run_gcc()
sys.exit(status)
| jdheiner/SGH-T769_Kernel | scripts/gcc-wrapper.py | Python | gpl-2.0 | 3,735 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Savoir-faire Linux
# (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" QWeb usertime addon """
from . import qweb
| MackZxh/OCA-Choice | server-tools/qweb_usertime/__init__.py | Python | lgpl-3.0 | 1,067 |
import numpy as np
from .base import Plugin
from ..widgets import ComboBox, Slider
from ..canvastools import PaintTool
__all__ = ['LabelPainter']
rad2deg = 180 / np.pi
class LabelPainter(Plugin):
name = 'LabelPainter'
def __init__(self, max_radius=20, **kwargs):
super(LabelPainter, self).__init__(**kwargs)
# These widgets adjust plugin properties instead of an image filter.
self._radius_widget = Slider('radius', low=1, high=max_radius,
value=5, value_type='int', ptype='plugin')
labels = [str(i) for i in range(6)]
labels[0] = 'Erase'
self._label_widget = ComboBox('label', labels, ptype='plugin')
self.add_widget(self._radius_widget)
self.add_widget(self._label_widget)
print(self.help())
def help(self):
helpstr = ("Label painter",
"Hold left-mouse button and paint on canvas.")
return '\n'.join(helpstr)
def attach(self, image_viewer):
super(LabelPainter, self).attach(image_viewer)
image = image_viewer.original_image
self.paint_tool = PaintTool(image_viewer, image.shape,
on_enter=self.on_enter)
self.paint_tool.radius = self.radius
self.paint_tool.label = self._label_widget.index = 1
self.artists.append(self.paint_tool)
def _on_new_image(self, image):
"""Update plugin for new images."""
self.paint_tool.shape = image.shape
def on_enter(self, overlay):
pass
@property
def radius(self):
return self._radius_widget.val
@radius.setter
def radius(self, val):
self.paint_tool.radius = val
@property
def label(self):
return self._label_widget.val
@label.setter
def label(self, val):
self.paint_tool.label = val
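# Example usage (a sketch, not part of scikit-image; assumes the optional Qt
# viewer dependencies are installed):
#
#   from skimage import data
#   from skimage.viewer import ImageViewer
#
#   viewer = ImageViewer(data.coins())
#   viewer += LabelPainter(max_radius=15)
#   viewer.show()  # paint labels by dragging with the left mouse button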
| pratapvardhan/scikit-image | skimage/viewer/plugins/labelplugin.py | Python | bsd-3-clause | 1,876 |
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Checks WebKit style for test_expectations files."""
import logging
import optparse
import os
import re
import sys
from common import TabChecker
from webkitpy.common.host import Host
from webkitpy.layout_tests.models.test_expectations import TestExpectationParser
_log = logging.getLogger(__name__)
class TestExpectationsChecker(object):
"""Processes TestExpectations lines for validating the syntax."""
categories = set(['test/expectations'])
def __init__(self, file_path, handle_style_error, host=None):
self._file_path = file_path
self._handle_style_error = handle_style_error
self._tab_checker = TabChecker(file_path, handle_style_error)
# FIXME: host should be a required parameter, not an optional one.
host = host or Host()
host.initialize_scm()
self._port_obj = host.port_factory.get()
# Suppress error messages of test_expectations module since they will be reported later.
log = logging.getLogger("webkitpy.layout_tests.layout_package.test_expectations")
log.setLevel(logging.CRITICAL)
def _handle_error_message(self, lineno, message, confidence):
pass
def check_test_expectations(self, expectations_str, tests=None):
parser = TestExpectationParser(self._port_obj, tests, is_lint_mode=True)
expectations = parser.parse('expectations', expectations_str)
level = 5
for expectation_line in expectations:
for warning in expectation_line.warnings:
self._handle_style_error(expectation_line.line_numbers, 'test/expectations', level, warning)
def check_tabs(self, lines):
self._tab_checker.check(lines)
def check(self, lines):
expectations = '\n'.join(lines)
if self._port_obj:
self.check_test_expectations(expectations_str=expectations, tests=None)
# Warn tabs in lines as well
self.check_tabs(lines)
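# Example usage (a sketch, not part of WebKit; the checker reports problems
# through the supplied callback):
#
#   def handle_error(line_number, category, confidence, message):
#       print '%s: %s (%s)' % (line_number, message, category)
#
#   checker = TestExpectationsChecker('LayoutTests/TestExpectations',
#                                     handle_error)
#   checker.check(open('LayoutTests/TestExpectations').read().split('\n'))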
| weolar/miniblink49 | third_party/WebKit/Tools/Scripts/webkitpy/style/checkers/test_expectations.py | Python | apache-2.0 | 3,482 |
#!/usr/bin/python
#
# Copyright 2011 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .select import Select
from .wait import WebDriverWait
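# Example usage (a sketch, not part of this module; assumes a live WebDriver
# session with a page containing a <select> element):
#
#   from selenium import webdriver
#   from selenium.webdriver.support.ui import Select, WebDriverWait
#
#   driver = webdriver.Firefox()
#   element = WebDriverWait(driver, 10).until(
#       lambda d: d.find_element_by_tag_name('select'))
#   Select(element).select_by_index(0)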
| ktan2020/legacy-automation | win/Lib/site-packages/selenium/webdriver/support/ui.py | Python | mit | 674 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from TProtocol import *
from struct import pack, unpack
__all__ = ['TCompactProtocol', 'TCompactProtocolFactory']
CLEAR = 0
FIELD_WRITE = 1
VALUE_WRITE = 2
CONTAINER_WRITE = 3
BOOL_WRITE = 4
FIELD_READ = 5
CONTAINER_READ = 6
VALUE_READ = 7
BOOL_READ = 8
def make_helper(v_from, container):
def helper(func):
def nested(self, *args, **kwargs):
assert self.state in (v_from, container), (self.state, v_from, container)
return func(self, *args, **kwargs)
return nested
return helper
writer = make_helper(VALUE_WRITE, CONTAINER_WRITE)
reader = make_helper(VALUE_READ, CONTAINER_READ)
def makeZigZag(n, bits):
return (n << 1) ^ (n >> (bits - 1))
def fromZigZag(n):
return (n >> 1) ^ -(n & 1)
def writeVarint(trans, n):
out = []
while True:
if n & ~0x7f == 0:
out.append(n)
break
else:
out.append((n & 0xff) | 0x80)
n = n >> 7
trans.write(''.join(map(chr, out)))
def readVarint(trans):
result = 0
shift = 0
while True:
x = trans.readAll(1)
byte = ord(x)
result |= (byte & 0x7f) << shift
if byte >> 7 == 0:
return result
shift += 7
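# A hand-checked sketch of the encoding above (not part of the original
# source): zigzag interleaves signed values so small magnitudes encode small,
# and varint emits seven bits per byte, least-significant group first:
#   makeZigZag(0, 32)  -> 0        fromZigZag(0) -> 0
#   makeZigZag(-1, 32) -> 1        fromZigZag(1) -> -1
#   makeZigZag(1, 32)  -> 2        fromZigZag(2) -> 1
#   writeVarint(trans, 300) writes the two bytes 0xAC 0x02.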
class CompactType:
STOP = 0x00
TRUE = 0x01
FALSE = 0x02
BYTE = 0x03
I16 = 0x04
I32 = 0x05
I64 = 0x06
DOUBLE = 0x07
BINARY = 0x08
LIST = 0x09
SET = 0x0A
MAP = 0x0B
STRUCT = 0x0C
CTYPES = {TType.STOP: CompactType.STOP,
TType.BOOL: CompactType.TRUE, # used for collection
TType.BYTE: CompactType.BYTE,
TType.I16: CompactType.I16,
TType.I32: CompactType.I32,
TType.I64: CompactType.I64,
TType.DOUBLE: CompactType.DOUBLE,
TType.STRING: CompactType.BINARY,
TType.STRUCT: CompactType.STRUCT,
TType.LIST: CompactType.LIST,
TType.SET: CompactType.SET,
TType.MAP: CompactType.MAP
}
TTYPES = {}
for k, v in CTYPES.items():
TTYPES[v] = k
TTYPES[CompactType.FALSE] = TType.BOOL
del k
del v
class TCompactProtocol(TProtocolBase):
"Compact implementation of the Thrift protocol driver."
PROTOCOL_ID = 0x82
VERSION = 1
VERSION_MASK = 0x1f
TYPE_MASK = 0xe0
TYPE_SHIFT_AMOUNT = 5
def __init__(self, trans):
TProtocolBase.__init__(self, trans)
self.state = CLEAR
self.__last_fid = 0
self.__bool_fid = None
self.__bool_value = None
self.__structs = []
self.__containers = []
def __writeVarint(self, n):
writeVarint(self.trans, n)
def writeMessageBegin(self, name, type, seqid):
assert self.state == CLEAR
self.__writeUByte(self.PROTOCOL_ID)
self.__writeUByte(self.VERSION | (type << self.TYPE_SHIFT_AMOUNT))
self.__writeVarint(seqid)
self.__writeString(name)
self.state = VALUE_WRITE
def writeMessageEnd(self):
assert self.state == VALUE_WRITE
self.state = CLEAR
def writeStructBegin(self, name):
assert self.state in (CLEAR, CONTAINER_WRITE, VALUE_WRITE), self.state
self.__structs.append((self.state, self.__last_fid))
self.state = FIELD_WRITE
self.__last_fid = 0
def writeStructEnd(self):
assert self.state == FIELD_WRITE
self.state, self.__last_fid = self.__structs.pop()
def writeFieldStop(self):
self.__writeByte(0)
def __writeFieldHeader(self, type, fid):
delta = fid - self.__last_fid
if 0 < delta <= 15:
self.__writeUByte(delta << 4 | type)
else:
self.__writeByte(type)
self.__writeI16(fid)
self.__last_fid = fid
def writeFieldBegin(self, name, type, fid):
assert self.state == FIELD_WRITE, self.state
if type == TType.BOOL:
self.state = BOOL_WRITE
self.__bool_fid = fid
else:
self.state = VALUE_WRITE
self.__writeFieldHeader(CTYPES[type], fid)
def writeFieldEnd(self):
assert self.state in (VALUE_WRITE, BOOL_WRITE), self.state
self.state = FIELD_WRITE
def __writeUByte(self, byte):
self.trans.write(pack('!B', byte))
def __writeByte(self, byte):
self.trans.write(pack('!b', byte))
def __writeI16(self, i16):
self.__writeVarint(makeZigZag(i16, 16))
def __writeSize(self, i32):
self.__writeVarint(i32)
def writeCollectionBegin(self, etype, size):
assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state
if size <= 14:
self.__writeUByte(size << 4 | CTYPES[etype])
else:
self.__writeUByte(0xf0 | CTYPES[etype])
self.__writeSize(size)
self.__containers.append(self.state)
self.state = CONTAINER_WRITE
writeSetBegin = writeCollectionBegin
writeListBegin = writeCollectionBegin
def writeMapBegin(self, ktype, vtype, size):
assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state
if size == 0:
self.__writeByte(0)
else:
self.__writeSize(size)
self.__writeUByte(CTYPES[ktype] << 4 | CTYPES[vtype])
self.__containers.append(self.state)
self.state = CONTAINER_WRITE
def writeCollectionEnd(self):
assert self.state == CONTAINER_WRITE, self.state
self.state = self.__containers.pop()
writeMapEnd = writeCollectionEnd
writeSetEnd = writeCollectionEnd
writeListEnd = writeCollectionEnd
def writeBool(self, bool):
if self.state == BOOL_WRITE:
if bool:
ctype = CompactType.TRUE
else:
ctype = CompactType.FALSE
self.__writeFieldHeader(ctype, self.__bool_fid)
elif self.state == CONTAINER_WRITE:
if bool:
self.__writeByte(CompactType.TRUE)
else:
self.__writeByte(CompactType.FALSE)
else:
raise AssertionError, "Invalid state in compact protocol"
writeByte = writer(__writeByte)
writeI16 = writer(__writeI16)
@writer
def writeI32(self, i32):
self.__writeVarint(makeZigZag(i32, 32))
@writer
def writeI64(self, i64):
self.__writeVarint(makeZigZag(i64, 64))
@writer
def writeDouble(self, dub):
self.trans.write(pack('!d', dub))
def __writeString(self, s):
self.__writeSize(len(s))
self.trans.write(s)
writeString = writer(__writeString)
def readFieldBegin(self):
assert self.state == FIELD_READ, self.state
type = self.__readUByte()
if type & 0x0f == TType.STOP:
return (None, 0, 0)
delta = type >> 4
if delta == 0:
fid = self.__readI16()
else:
fid = self.__last_fid + delta
self.__last_fid = fid
type = type & 0x0f
if type == CompactType.TRUE:
self.state = BOOL_READ
self.__bool_value = True
elif type == CompactType.FALSE:
self.state = BOOL_READ
self.__bool_value = False
else:
self.state = VALUE_READ
return (None, self.__getTType(type), fid)
def readFieldEnd(self):
assert self.state in (VALUE_READ, BOOL_READ), self.state
self.state = FIELD_READ
def __readUByte(self):
result, = unpack('!B', self.trans.readAll(1))
return result
def __readByte(self):
result, = unpack('!b', self.trans.readAll(1))
return result
def __readVarint(self):
return readVarint(self.trans)
def __readZigZag(self):
return fromZigZag(self.__readVarint())
def __readSize(self):
result = self.__readVarint()
if result < 0:
raise TException("Length < 0")
return result
def readMessageBegin(self):
assert self.state == CLEAR
proto_id = self.__readUByte()
if proto_id != self.PROTOCOL_ID:
raise TProtocolException(TProtocolException.BAD_VERSION,
'Bad protocol id in the message: %d' % proto_id)
ver_type = self.__readUByte()
type = (ver_type & self.TYPE_MASK) >> self.TYPE_SHIFT_AMOUNT
version = ver_type & self.VERSION_MASK
if version != self.VERSION:
raise TProtocolException(TProtocolException.BAD_VERSION,
'Bad version: %d (expect %d)' % (version, self.VERSION))
seqid = self.__readVarint()
name = self.__readString()
return (name, type, seqid)
def readMessageEnd(self):
assert self.state == CLEAR
assert len(self.__structs) == 0
def readStructBegin(self):
assert self.state in (CLEAR, CONTAINER_READ, VALUE_READ), self.state
self.__structs.append((self.state, self.__last_fid))
self.state = FIELD_READ
self.__last_fid = 0
def readStructEnd(self):
assert self.state == FIELD_READ
self.state, self.__last_fid = self.__structs.pop()
def readCollectionBegin(self):
assert self.state in (VALUE_READ, CONTAINER_READ), self.state
size_type = self.__readUByte()
size = size_type >> 4
type = self.__getTType(size_type)
if size == 15:
size = self.__readSize()
self.__containers.append(self.state)
self.state = CONTAINER_READ
return type, size
readSetBegin = readCollectionBegin
readListBegin = readCollectionBegin
def readMapBegin(self):
assert self.state in (VALUE_READ, CONTAINER_READ), self.state
size = self.__readSize()
types = 0
if size > 0:
types = self.__readUByte()
vtype = self.__getTType(types)
ktype = self.__getTType(types >> 4)
self.__containers.append(self.state)
self.state = CONTAINER_READ
return (ktype, vtype, size)
def readCollectionEnd(self):
assert self.state == CONTAINER_READ, self.state
self.state = self.__containers.pop()
readSetEnd = readCollectionEnd
readListEnd = readCollectionEnd
readMapEnd = readCollectionEnd
def readBool(self):
if self.state == BOOL_READ:
return self.__bool_value == CompactType.TRUE
elif self.state == CONTAINER_READ:
return self.__readByte() == CompactType.TRUE
else:
raise AssertionError, "Invalid state in compact protocol: %d" % self.state
readByte = reader(__readByte)
__readI16 = __readZigZag
readI16 = reader(__readZigZag)
readI32 = reader(__readZigZag)
readI64 = reader(__readZigZag)
@reader
def readDouble(self):
buff = self.trans.readAll(8)
val, = unpack('!d', buff)
return val
def __readString(self):
len = self.__readSize()
return self.trans.readAll(len)
readString = reader(__readString)
def __getTType(self, byte):
return TTYPES[byte & 0x0f]
class TCompactProtocolFactory:
def __init__(self):
pass
def getProtocol(self, trans):
return TCompactProtocol(trans)
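# Example usage (a sketch, not part of the original source; assumes the
# sibling TTransport module of this Thrift distribution, whose TMemoryBuffer
# collects the written bytes in memory):
#
#   from TTransport import TMemoryBuffer
#
#   buf = TMemoryBuffer()
#   proto = TCompactProtocolFactory().getProtocol(buf)
#   proto.writeMessageBegin('ping', TMessageType.CALL, 0)
#   proto.writeMessageEnd()
#   payload = buf.getvalue()  # compact-encoded message header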
| YinYanfei/CadalWorkspace | thrift/src/thrift-0.8.0/lib/py/src/protocol/TCompactProtocol.py | Python | gpl-3.0 | 10,926 |
"""
This middleware is used for adjusting the headers in a response before it is sent to the end user.
This middleware is intended to sit as close as possible to the top of the middleare list as possible,
so that it is one of the last pieces of middleware to touch the response, and thus can most accurately
adjust/control the headers of the response.
"""
def remove_headers_from_response(response, *headers):
"""Removes the given headers from the response using the header_control middleware."""
response.remove_headers = headers
def force_header_for_response(response, header, value):
"""Forces the given header for the given response using the header_control middleware."""
force_headers = {}
if hasattr(response, 'force_headers'):
force_headers = response.force_headers
force_headers[header] = value
response.force_headers = force_headers
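# Example usage (a sketch, not part of this package; assumes a Django view
# returning an HttpResponse):
#
#   def my_view(request):
#       response = HttpResponse('ok')
#       remove_headers_from_response(response, 'Vary')
#       force_header_for_response(response, 'Cache-Control', 'no-cache')
#       return response
#
# The header_control middleware then applies ``remove_headers`` and
# ``force_headers`` just before the response is sent to the client.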
| ahmedaljazzar/edx-platform | openedx/core/djangoapps/header_control/__init__.py | Python | agpl-3.0 | 885 |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Package containing optional and-on functionality.
"""
| jamesmcm/luigi | luigi/contrib/__init__.py | Python | apache-2.0 | 661 |
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Issue a series of GetHash requests to the SafeBrowsing servers and measure
the response times.
Usage:
$ ./gethash_timer.py --period=600 --samples=20 --output=resp.csv
--period (or -p): The amount of time (in seconds) to wait between GetHash
requests. Using a value of more than 300 (5 minutes) to
include the effect of DNS.
--samples (or -s): The number of requests to issue. If this parameter is not
specified, the test will run indefinitely.
--output (or -o): The path to a file where the output will be written in
CSV format: sample_number,response_code,elapsed_time_ms
"""
import getopt
import httplib
import sys
import time
_GETHASH_HOST = 'safebrowsing.clients.google.com'
_GETHASH_REQUEST = (
'/safebrowsing/gethash?client=googleclient&appver=1.0&pver=2.1')
# Global logging file handle.
g_file_handle = None
def IssueGetHash(prefix):
'''Issue one GetHash request to the safebrowsing servers.
Args:
prefix: A 4 byte value to look up on the server.
Returns:
The HTTP response code for the GetHash request.
'''
body = '4:4\n' + prefix
h = httplib.HTTPConnection(_GETHASH_HOST)
h.putrequest('POST', _GETHASH_REQUEST)
h.putheader('content-length', str(len(body)))
h.endheaders()
h.send(body)
response_code = h.getresponse().status
h.close()
return response_code
def TimedGetHash(prefix):
'''Measure the amount of time it takes to receive a GetHash response.
Args:
    prefix: A 4 byte value to look up on the server.
Returns:
    A tuple of the HTTP response code and the response time (in milliseconds).
'''
start = time.time()
response_code = IssueGetHash(prefix)
return response_code, (time.time() - start) * 1000
def RunTimedGetHash(period, samples=None):
'''Runs an experiment to measure the amount of time it takes to receive
multiple responses from the GetHash servers.
Args:
period: A floating point value that indicates (in seconds) the delay
between requests.
samples: An integer value indicating the number of requests to make.
If 'None', the test continues indefinitely.
Returns:
None.
'''
global g_file_handle
prefix = '\x50\x61\x75\x6c'
sample_count = 1
while True:
response_code, elapsed_time = TimedGetHash(prefix)
LogResponse(sample_count, response_code, elapsed_time)
sample_count += 1
if samples is not None and sample_count == samples:
break
time.sleep(period)
def LogResponse(sample_count, response_code, elapsed_time):
'''Output the response for one GetHash query.
Args:
sample_count: The current sample number.
response_code: The HTTP response code for the GetHash request.
elapsed_time: The round-trip time (in milliseconds) for the
GetHash request.
Returns:
None.
'''
global g_file_handle
output_list = (sample_count, response_code, elapsed_time)
print 'Request: %d, status: %d, elapsed time: %f ms' % output_list
if g_file_handle is not None:
g_file_handle.write(('%d,%d,%f' % output_list) + '\n')
g_file_handle.flush()
def SetupOutputFile(file_name):
'''Open a file for logging results.
Args:
file_name: A path to a file to store the output.
Returns:
None.
'''
global g_file_handle
g_file_handle = open(file_name, 'w')
def main():
  period = 10
  samples = None
  file_name = None
options, args = getopt.getopt(sys.argv[1:],
's:p:o:',
['samples=', 'period=', 'output='])
for option, value in options:
if option == '-s' or option == '--samples':
samples = int(value)
elif option == '-p' or option == '--period':
period = float(value)
elif option == '-o' or option == '--output':
file_name = value
else:
print 'Bad option: %s' % option
return 1
try:
print 'Starting Timed GetHash ----------'
    if file_name is not None:
      SetupOutputFile(file_name)
RunTimedGetHash(period, samples)
except KeyboardInterrupt:
pass
print 'Timed GetHash complete ----------'
  if g_file_handle is not None:
    g_file_handle.close()
if __name__ == '__main__':
sys.exit(main())
| goddino/libjingle | trunk/tools/python/google/gethash_timer.py | Python | bsd-3-clause | 4,366 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_invoice(osv.osv):
_inherit = 'account.invoice'
def action_number(self, cr, uid, ids, *args):
result = super(account_invoice, self).action_number(cr, uid, ids, *args)
for inv in self.browse(cr, uid, ids):
self.pool.get('account.invoice.line').asset_create(cr, uid, inv.invoice_line)
return result
def line_get_convert(self, cr, uid, x, part, date, context=None):
res = super(account_invoice, self).line_get_convert(cr, uid, x, part, date, context=context)
res['asset_id'] = x.get('asset_id', False)
return res
class account_invoice_line(osv.osv):
_inherit = 'account.invoice.line'
_columns = {
'asset_category_id': fields.many2one('account.asset.category', 'Asset Category'),
}
def asset_create(self, cr, uid, lines, context=None):
context = context or {}
asset_obj = self.pool.get('account.asset.asset')
for line in lines:
if line.asset_category_id:
vals = {
'name': line.name,
'code': line.invoice_id.number or False,
'category_id': line.asset_category_id.id,
'purchase_value': line.price_subtotal,
'period_id': line.invoice_id.period_id.id,
'partner_id': line.invoice_id.partner_id.id,
'company_id': line.invoice_id.company_id.id,
'currency_id': line.invoice_id.currency_id.id,
'purchase_date' : line.invoice_id.date_invoice,
}
changed_vals = asset_obj.onchange_category_id(cr, uid, [], vals['category_id'], context=context)
vals.update(changed_vals['value'])
asset_id = asset_obj.create(cr, uid, vals, context=context)
if line.asset_category_id.open_asset:
asset_obj.validate(cr, uid, [asset_id], context=context)
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| diogocs1/comps | web/addons/account_asset/account_asset_invoice.py | Python | apache-2.0 | 3,070 |
import warnings
from django.utils.deprecation import RemovedInDjango110Warning
from . import engines
from .backends.django import DjangoTemplates
from .engine import (
_context_instance_undefined, _dictionary_undefined, _dirs_undefined,
)
from .exceptions import TemplateDoesNotExist
from .loaders import base
def get_template(template_name, dirs=_dirs_undefined, using=None):
"""
Loads and returns a template for the given name.
Raises TemplateDoesNotExist if no such template exists.
"""
chain = []
engines = _engine_list(using)
for engine in engines:
try:
# This is required for deprecating the dirs argument. Simply
# return engine.get_template(template_name) in Django 1.10.
if isinstance(engine, DjangoTemplates):
return engine.get_template(template_name, dirs)
elif dirs is not _dirs_undefined:
warnings.warn(
"Skipping template backend %s because its get_template "
"method doesn't support the dirs argument." % engine.name,
stacklevel=2)
else:
return engine.get_template(template_name)
except TemplateDoesNotExist as e:
chain.append(e)
raise TemplateDoesNotExist(template_name, chain=chain)
def select_template(template_name_list, dirs=_dirs_undefined, using=None):
"""
Loads and returns a template for one of the given names.
Tries names in order and returns the first template found.
Raises TemplateDoesNotExist if no such template exists.
"""
chain = []
engines = _engine_list(using)
for template_name in template_name_list:
for engine in engines:
try:
# This is required for deprecating the dirs argument. Simply
# use engine.get_template(template_name) in Django 1.10.
if isinstance(engine, DjangoTemplates):
return engine.get_template(template_name, dirs)
elif dirs is not _dirs_undefined:
warnings.warn(
"Skipping template backend %s because its get_template "
"method doesn't support the dirs argument." % engine.name,
stacklevel=2)
else:
return engine.get_template(template_name)
except TemplateDoesNotExist as e:
chain.append(e)
if template_name_list:
raise TemplateDoesNotExist(', '.join(template_name_list), chain=chain)
else:
raise TemplateDoesNotExist("No template names provided")
def render_to_string(template_name, context=None,
context_instance=_context_instance_undefined,
dirs=_dirs_undefined,
dictionary=_dictionary_undefined,
request=None, using=None):
"""
Loads a template and renders it with a context. Returns a string.
template_name may be a string or a list of strings.
"""
if (context_instance is _context_instance_undefined
and dirs is _dirs_undefined
and dictionary is _dictionary_undefined):
# No deprecated arguments were passed - use the new code path
if isinstance(template_name, (list, tuple)):
template = select_template(template_name, using=using)
else:
template = get_template(template_name, using=using)
return template.render(context, request)
else:
chain = []
# Some deprecated arguments were passed - use the legacy code path
for engine in _engine_list(using):
try:
# This is required for deprecating properly arguments specific
# to Django templates. Remove Engine.render_to_string() at the
# same time as this code path in Django 1.10.
if isinstance(engine, DjangoTemplates):
if request is not None:
raise ValueError(
"render_to_string doesn't support the request argument "
"when some deprecated arguments are passed.")
# Hack -- use the internal Engine instance of DjangoTemplates.
return engine.engine.render_to_string(
template_name, context, context_instance, dirs, dictionary)
elif context_instance is not _context_instance_undefined:
warnings.warn(
"Skipping template backend %s because its render_to_string "
"method doesn't support the context_instance argument." %
engine.name, stacklevel=2)
elif dirs is not _dirs_undefined:
warnings.warn(
"Skipping template backend %s because its render_to_string "
"method doesn't support the dirs argument." % engine.name,
stacklevel=2)
elif dictionary is not _dictionary_undefined:
warnings.warn(
"Skipping template backend %s because its render_to_string "
"method doesn't support the dictionary argument." %
engine.name, stacklevel=2)
except TemplateDoesNotExist as e:
chain.append(e)
continue
if template_name:
if isinstance(template_name, (list, tuple)):
template_name = ', '.join(template_name)
raise TemplateDoesNotExist(template_name, chain=chain)
else:
raise TemplateDoesNotExist("No template names provided")
def _engine_list(using=None):
return engines.all() if using is None else [engines[using]]
class BaseLoader(base.Loader):
_accepts_engine_in_init = False
def __init__(self, *args, **kwargs):
warnings.warn(
"django.template.loader.BaseLoader was superseded by "
"django.template.loaders.base.Loader.",
RemovedInDjango110Warning, stacklevel=2)
super(BaseLoader, self).__init__(*args, **kwargs)
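def _example_usage(obj, request=None):
    """Illustrative sketch (editorial addition, not part of Django): typical
    calls into the loader functions defined above. The template names and
    context values are placeholders."""
    # Load one template by name and render it with a context dict.
    template = get_template('app/detail.html')
    html = template.render({'object': obj}, request)
    # Try several names in order; the first template that exists wins.
    template = select_template(['app/custom.html', 'app/detail.html'])
    fallback = template.render({'object': obj}, request)
    # Convenience wrapper that loads and renders in one call.
    summary = render_to_string('app/summary.txt', {'object': obj})
    return html, fallback, summary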
| DONIKAN/django | django/template/loader.py | Python | bsd-3-clause | 6,232 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Matching functions"""
import numpy as np
import numba
from .exceptions import ParameterError
from .utils import valid_intervals
__all__ = ["match_intervals", "match_events"]
@numba.jit(nopython=True, cache=True)
def __jaccard(int_a, int_b): # pragma: no cover
"""Jaccard similarity between two intervals
Parameters
----------
int_a, int_b : np.ndarrays, shape=(2,)
Returns
-------
Jaccard similarity between intervals
"""
ends = [int_a[1], int_b[1]]
if ends[1] < ends[0]:
ends.reverse()
starts = [int_a[0], int_b[0]]
if starts[1] < starts[0]:
starts.reverse()
intersection = ends[0] - starts[1]
if intersection < 0:
intersection = 0.0
union = ends[1] - starts[0]
if union > 0:
return intersection / union
return 0.0
@numba.jit(nopython=True, cache=True)
def __match_interval_overlaps(query, intervals_to, candidates): # pragma: no cover
"""Find the best Jaccard match from query to candidates"""
best_score = -1
best_idx = -1
for idx in candidates:
score = __jaccard(query, intervals_to[idx])
if score > best_score:
best_score, best_idx = score, idx
return best_idx
@numba.jit(nopython=True, cache=True)
def __match_intervals(intervals_from, intervals_to, strict=True): # pragma: no cover
"""Numba-accelerated interval matching algorithm.
"""
# sort index of the interval starts
start_index = np.argsort(intervals_to[:, 0])
# sort index of the interval ends
end_index = np.argsort(intervals_to[:, 1])
# and sorted values of starts
start_sorted = intervals_to[start_index, 0]
# and ends
end_sorted = intervals_to[end_index, 1]
search_ends = np.searchsorted(start_sorted, intervals_from[:, 1], side="right")
search_starts = np.searchsorted(end_sorted, intervals_from[:, 0], side="left")
output = np.empty(len(intervals_from), dtype=numba.uint32)
for i in range(len(intervals_from)):
query = intervals_from[i]
# Find the intervals that start after our query ends
after_query = search_ends[i]
# And the intervals that end after our query begins
before_query = search_starts[i]
# Candidates for overlapping have to (end after we start) and (begin before we end)
candidates = set(start_index[:after_query]) & set(end_index[before_query:])
# Proceed as before
if len(candidates) > 0:
output[i] = __match_interval_overlaps(query, intervals_to, candidates)
elif strict:
# Numba only lets us use compile-time constants in exception messages
raise ParameterError
else:
# Find the closest interval
            # (start_sorted[search_ends[i] + 1] - query[1]) is the distance to the next interval
            # (query[0] - end_sorted[search_starts[i] - 1]) is the distance to the previous interval
dist_before = np.inf
dist_after = np.inf
if search_starts[i] > 0:
dist_before = query[0] - end_sorted[search_starts[i] - 1]
if search_ends[i] + 1 < len(intervals_to):
dist_after = start_sorted[search_ends[i] + 1] - query[1]
if dist_before < dist_after:
output[i] = end_index[search_starts[i] - 1]
else:
output[i] = start_index[search_ends[i] + 1]
return output
def match_intervals(intervals_from, intervals_to, strict=True):
"""Match one set of time intervals to another.
This can be useful for tasks such as mapping beat timings
to segments.
Each element ``[a, b]`` of ``intervals_from`` is matched to the
element ``[c, d]`` of ``intervals_to`` which maximizes the
Jaccard similarity between the intervals::
max(0, |min(b, d) - max(a, c)|) / |max(d, b) - min(a, c)|
In ``strict=True`` mode, if there is no interval with positive
intersection with ``[a,b]``, an exception is thrown.
In ``strict=False`` mode, any interval ``[a, b]`` that has no
intersection with any element of ``intervals_to`` is instead
matched to the interval ``[c, d]`` which minimizes::
min(|b - c|, |a - d|)
that is, the disjoint interval [c, d] with a boundary closest
to [a, b].
.. note:: An element of ``intervals_to`` may be matched to multiple
entries of ``intervals_from``.
Parameters
----------
intervals_from : np.ndarray [shape=(n, 2)]
The time range for source intervals.
The ``i`` th interval spans time ``intervals_from[i, 0]``
to ``intervals_from[i, 1]``.
``intervals_from[0, 0]`` should be 0, ``intervals_from[-1, 1]``
should be the track duration.
intervals_to : np.ndarray [shape=(m, 2)]
Analogous to ``intervals_from``.
strict : bool
If ``True``, intervals can only match if they intersect.
If ``False``, disjoint intervals can match.
Returns
-------
interval_mapping : np.ndarray [shape=(n,)]
For each interval in ``intervals_from``, the
corresponding interval in ``intervals_to``.
See Also
--------
match_events
Raises
------
ParameterError
If either array of input intervals is not the correct shape
If ``strict=True`` and some element of ``intervals_from`` is disjoint from
every element of ``intervals_to``.
Examples
--------
>>> ints_from = np.array([[3, 5], [1, 4], [4, 5]])
>>> ints_to = np.array([[0, 2], [1, 3], [4, 5], [6, 7]])
>>> librosa.util.match_intervals(ints_from, ints_to)
array([2, 1, 2], dtype=uint32)
>>> # [3, 5] => [4, 5] (ints_to[2])
>>> # [1, 4] => [1, 3] (ints_to[1])
>>> # [4, 5] => [4, 5] (ints_to[2])
The reverse matching of the above is not possible in ``strict`` mode
because ``[6, 7]`` is disjoint from all intervals in ``ints_from``.
With ``strict=False``, we get the following:
>>> librosa.util.match_intervals(ints_to, ints_from, strict=False)
array([1, 1, 2, 2], dtype=uint32)
>>> # [0, 2] => [1, 4] (ints_from[1])
>>> # [1, 3] => [1, 4] (ints_from[1])
>>> # [4, 5] => [4, 5] (ints_from[2])
>>> # [6, 7] => [4, 5] (ints_from[2])
"""
if len(intervals_from) == 0 or len(intervals_to) == 0:
raise ParameterError("Attempting to match empty interval list")
# Verify that the input intervals has correct shape and size
valid_intervals(intervals_from)
valid_intervals(intervals_to)
try:
return __match_intervals(intervals_from, intervals_to, strict=strict)
except ParameterError as exc:
raise ParameterError(
"Unable to match intervals with strict={}".format(strict)
) from exc
def match_events(events_from, events_to, left=True, right=True):
"""Match one set of events to another.
This is useful for tasks such as matching beats to the nearest
detected onsets, or frame-aligned events to the nearest zero-crossing.
.. note:: A target event may be matched to multiple source events.
Examples
--------
>>> # Sources are multiples of 7
>>> s_from = np.arange(0, 100, 7)
>>> s_from
array([ 0, 7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91,
98])
>>> # Targets are multiples of 10
>>> s_to = np.arange(0, 100, 10)
>>> s_to
array([ 0, 10, 20, 30, 40, 50, 60, 70, 80, 90])
>>> # Find the matching
>>> idx = librosa.util.match_events(s_from, s_to)
>>> idx
array([0, 1, 1, 2, 3, 3, 4, 5, 6, 6, 7, 8, 8, 9, 9])
>>> # Print each source value to its matching target
>>> zip(s_from, s_to[idx])
[(0, 0), (7, 10), (14, 10), (21, 20), (28, 30), (35, 30),
(42, 40), (49, 50), (56, 60), (63, 60), (70, 70), (77, 80),
(84, 80), (91, 90), (98, 90)]
Parameters
----------
events_from : ndarray [shape=(n,)]
Array of events (eg, times, sample or frame indices) to match from.
events_to : ndarray [shape=(m,)]
Array of events (eg, times, sample or frame indices) to
match against.
left : bool
right : bool
If ``False``, then matched events cannot be to the left (or right)
of source events.
Returns
-------
event_mapping : np.ndarray [shape=(n,)]
For each event in ``events_from``, the corresponding event
index in ``events_to``::
event_mapping[i] == arg min |events_from[i] - events_to[:]|
See Also
--------
match_intervals
Raises
------
ParameterError
If either array of input events is not the correct shape
"""
if len(events_from) == 0 or len(events_to) == 0:
raise ParameterError("Attempting to match empty event list")
# If we can't match left or right, then only strict equivalence
# counts as a match.
if not (left or right) and not np.all(np.in1d(events_from, events_to)):
raise ParameterError(
"Cannot match events with left=right=False "
"and events_from is not contained "
"in events_to"
)
# If we can't match to the left, then there should be at least one
# target event greater-equal to every source event
if (not left) and max(events_to) < max(events_from):
raise ParameterError(
"Cannot match events with left=False "
"and max(events_to) < max(events_from)"
)
# If we can't match to the right, then there should be at least one
# target event less-equal to every source event
if (not right) and min(events_to) > min(events_from):
raise ParameterError(
"Cannot match events with right=False "
"and min(events_to) > min(events_from)"
)
# array of matched items
    output = np.empty_like(events_from, dtype=int)
return __match_events_helper(output, events_from, events_to, left, right)
@numba.jit(nopython=True, cache=True)
def __match_events_helper(
output, events_from, events_to, left=True, right=True
): # pragma: no cover
# mock dictionary for events
from_idx = np.argsort(events_from)
sorted_from = events_from[from_idx]
to_idx = np.argsort(events_to)
sorted_to = events_to[to_idx]
# find the matching indices
matching_indices = np.searchsorted(sorted_to, sorted_from)
# iterate over indices in matching_indices
for ind, middle_ind in enumerate(matching_indices):
left_flag = False
right_flag = False
left_ind = -1
right_ind = len(matching_indices)
left_diff = 0
right_diff = 0
mid_diff = 0
middle_ind = matching_indices[ind]
sorted_from_num = sorted_from[ind]
# Prevent oob from chosen index
if middle_ind == len(sorted_to):
middle_ind -= 1
# Permitted to look to the left
if left and middle_ind > 0:
left_ind = middle_ind - 1
left_flag = True
# Permitted to look to right
if right and middle_ind < len(sorted_to) - 1:
right_ind = middle_ind + 1
right_flag = True
mid_diff = abs(sorted_to[middle_ind] - sorted_from_num)
if left and left_flag:
left_diff = abs(sorted_to[left_ind] - sorted_from_num)
if right and right_flag:
right_diff = abs(sorted_to[right_ind] - sorted_from_num)
if left_flag and (
not right
and (sorted_to[middle_ind] > sorted_from_num)
or (not right_flag and left_diff < mid_diff)
or (left_diff < right_diff and left_diff < mid_diff)
):
output[ind] = to_idx[left_ind]
# Check if right should be chosen
elif right_flag and (right_diff < mid_diff):
output[ind] = to_idx[right_ind]
# Selected index wins
else:
output[ind] = to_idx[middle_ind]
# Undo sorting
solutions = np.empty_like(output)
solutions[from_idx] = output
return solutions
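def _example_beats_to_onsets():
    """Illustrative sketch (editorial addition): snap estimated beat times to
    the nearest detected onset, a typical use of match_events(). The input
    arrays below are synthetic placeholders, not real analysis output."""
    beats = np.array([0.50, 1.02, 1.49, 2.03])          # estimated beats (s)
    onsets = np.array([0.48, 1.00, 1.50, 2.00, 2.52])   # detected onsets (s)
    idx = match_events(beats, onsets)
    # onsets[idx] gives, for each beat, the closest onset time.
    return onsets[idx]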
| bmcfee/librosa | librosa/util/matching.py | Python | isc | 12,055 |
#!/usr/bin/python
"""Hamaker's analytic antenna pattern model."""
# TobiaC 2015-11-29 (2015-07-31)
import sys
import math
import re
import pickle
import numpy
import antpat
from antpat import dualpolelem, radfarfield
from antpat.reps.sphgridfun import tvecfun, pntsonsphere
HA_LBAfile_default = ''
PICKLE_PROTO = pickle.HIGHEST_PROTOCOL
class HamakerPolarimeter(object):
"""This is the Hamaker polarimeter model class.
It is the default LOFAR pipeline model for LOFAR beams.
Note
----
The Hamaker model suffers from two main problems:
1) In polar angle it is best near the pole (boresight) and not so good
away from it
2) Discontinuities at the pole (boresight) can arise if coefficients
with ki>0, ti=0 are nonzero.
"""
nr_pols = 2 # Number of polarization channels
def __init__(self, artsdata):
"""Objects are created based on a Arts coefficient C++ header
file. There is current one default set for the HBA and one for
LBA."""
self.coefs = artsdata['coefs']
self.HAcoefversion = artsdata['HAcoefversion']
self.HAcoefband = artsdata['HAcoefband']
self.HAcoefnrelem = artsdata['HAcoefnrelem']
self.freq_center = artsdata['freq_center']
self.freq_range = artsdata['freq_range']
self.channels = artsdata['channels']
self.nr_bands = len(self.coefs)
self.freqintervs = (self.freq_center-self.freq_range,
self.freq_center+self.freq_range)
def getfreqs(self):
"""Returns nominals channel center frequencies"""
return self.channels
def getJonesAlong(self, freqvals, theta_phi):
"""Compute Jones matrix for given frequencies and directions.
Note
----
Formula used here is based on the Hamaker model for crossed dipole
pairs.
P[comp, ki, freq, theta] = \
sum_ti, fi coefs[ki, ti, fi, comp]*freq^fi*theta^ti
J[freq, theta_phi, pol, comp] = \
sum_ki R(ang(ki,phi))[pol, comp] * P[comp, ki, freq, theta]
R(ki*phi) = [cos(ang(ki,phi)) -sin(ang(ki,phi))
sin(ang(ki,phi)) cos(ang(ki,phi))]
ang(ki,phi) = (-1)^ki*(2*ki+1)*phi
Parameters
----------
freqvals : list
list of frequencies in Hz.
theta_phi : tuple
tuple, with first element an array of theta, and second element
array of phi, both in radians.
Returns
-------
response : ndarray
Jones matrix over frequencies and directions. The indices are
response[freq, dir, polchan, comp] where
freq is frequency,
dir is a theta, phi direction,
polchan is polarization channel,
comp is component.
"""
mask_horizon = True
(theta, phi) = theta_phi
theta = numpy.array(theta)
phi = numpy.array(phi)
freqvals = numpy.array(freqvals)
(k_ord, TH_ord, FR_ord, nr_pol) = self.coefs.shape
freqn = (freqvals-self.freq_center)/self.freq_range
if len(freqvals) > 1:
frqXdrn_shp = freqvals.shape+theta.shape
else:
frqXdrn_shp = theta.shape
response = numpy.zeros(frqXdrn_shp+(2, 2), dtype=complex)
for ki in range(k_ord):
P = numpy.zeros((nr_pol,)+frqXdrn_shp, dtype=complex)
for THi in range(TH_ord):
for FRi in range(FR_ord):
fac = numpy.multiply.outer(freqn**FRi,
theta**THi).squeeze()
P[0, ...] += self.coefs[ki, THi, FRi, 0]*fac
P[1, ...] += self.coefs[ki, THi, FRi, 1]*fac
ang = (-1)**ki*(2*ki+1)*phi
response[..., 0, 0] += +numpy.cos(ang)*P[0, ...]
response[..., 0, 1] += -numpy.sin(ang)*P[1, ...]
response[..., 1, 0] += +numpy.sin(ang)*P[0, ...]
response[..., 1, 1] += +numpy.cos(ang)*P[1, ...]
# numpy.array([[math.cos(ang)*P[0],-math.sin(ang)*P[1]],
# [math.sin(ang)*P[0], math.cos(ang)*P[1]]])
# Mask beam below horizon
if mask_horizon:
mh = numpy.ones(frqXdrn_shp+(1, 1))
mh[..., numpy.where(theta > numpy.pi/2), 0, 0] = 0.0
response = mh*response
return response
def scale(self, scalefac):
"""Scale Hamaker model by a multiplicative factor scalefac."""
self.coefs = scalefac*self.coefs
def _getJonesAlong_alt(self, freqvals, theta_phi):
"""Alternative calculation of JonesAlong using _basefunc."""
(theta, phi) = theta_phi
theta = numpy.array(theta)
phi = numpy.array(phi)
freqvals = numpy.array(freqvals)
(k_ord, TH_ord, FR_ord, nr_pol) = self.coefs.shape
freqn = (freqvals-self.freq_center)/self.freq_range
if len(freqvals) > 1:
frqXdrn_shp = freqvals.shape+theta.shape
else:
frqXdrn_shp = theta.shape
res = numpy.zeros(frqXdrn_shp+(2, 2), dtype=complex)
for ki in range(k_ord):
for THi in range(TH_ord):
for FRi in range(FR_ord):
bf = _basefunc(ki, THi, FRi, freqn, theta_phi)
res[..., 0, 0] += self.coefs[ki, THi, FRi, 0]*bf[0, ...]
res[..., 0, 1] += self.coefs[ki, THi, FRi, 1]*bf[1, ...]
res[..., 1, 0] += self.coefs[ki, THi, FRi, 0]*(-bf[1, ...])
res[..., 1, 1] += self.coefs[ki, THi, FRi, 1]*bf[0, ...]
return res
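def _example_hamaker_eval():
    """Illustrative sketch (editorial addition): evaluate the Hamaker model
    for a toy coefficient set. The artsdata values below are made up solely
    to exercise getJonesAlong(); they are not calibrated LOFAR coefficients."""
    kord, tord, ford = 2, 5, 5
    artsdata = {
        'coefs': numpy.zeros((kord, tord, ford, 2), dtype=complex),
        'HAcoefversion': 'demo', 'HAcoefband': 'LBA',
        'HAcoefnrelem': kord * tord * ford * 2,
        'freq_center': 55e6, 'freq_range': 45e6,
        'channels': numpy.linspace(10e6, 90e6, 512),
    }
    # A single constant term gives a flat, direction-independent response.
    artsdata['coefs'][0, 0, 0, :] = 1.0
    hp = HamakerPolarimeter(artsdata)
    jones = hp.getJonesAlong([55e6], ([[0.3]], [[1.0]]))
    return jones  # per-direction 2x2 Jones matrices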
def _basefunc(ki, ti, fi, frqn, theta_phi):
"""Computes a basis function for Hamaker expansion.
A Hamaker basis function is a monomial in theta, freqn multiplied by
a sinusoid in phi. The order of the monomial is given by fi, ti and
the sinusoid is order is given by ki. The X-directed Hamaker basis
functions are:
ham_X[0, frqn_idx, theta_phi_idx] = +cos(ang)*(frqn**fi)*(tht**ti)
ham_X[1, frqn_idx, theta_phi_idx] = -sin(ang)*(frqn**fi)*(tht**ti)
where
ang = (-1)**ki*(2*ki+1)*phi
and
tht, phi = theta_phi
0, 1: are components of incoming field components.
There is also an additional pair of Hamaker basis functions for
Y-directed dipoles:
ham_Y[0, ...] = -ham_X[1, ...]
ham_Y[1, ...] = +ham_X[0, ...]
Note
----
Hamaker
Parameters
----------
ki : int
Order of phi in basis function.
ti : int
Order of theta in basis function.
fi : int
Order of frqn in basis function.
frqn : array_like
Normalized (interval=(-1, 1)) array of real frequencies.
theta_phi : tuple
(tht, phi) where tht is an array of theta and phi is array of azi.
Returns
-------
ham_x : array
Hamaker X basis function with ham_X[comp, frqn_idx, theta_phi_idx].
"""
tht, phi = theta_phi
tht = numpy.array(tht)
phi = numpy.array(phi)
fac = numpy.multiply.outer(frqn**fi, tht**ti)
ang = (-1)**ki*(2*ki+1)*phi
r_x = numpy.array([+numpy.cos(ang), -numpy.sin(ang)])
r_x = r_x[:, numpy.newaxis, ...]
# fac: frqord x thphord
# r_x: 2 x 1 x thphord
# * : --------------------
# ham_x: 2 x frqord x thphord
ham_x = fac*r_x
return ham_x
def hamaker_coefs(patobj, freq_center, freq_range, kord=2, tord=5, ford=5):
"""Estimate the coefficients of the Hamaker for a far-field pattern given
by patobj. One should specify the frequency center [Hz], the frequency
range [Hz] and the order of the azimuthal (kord), theta angle (tord) and
frequency (ford).
Note
----
One should try to put the freq_center value as close the global maximum
of the frequency response, i.e. it should the frequency where the antenna
has a resonance.
Returns a numpy array of complex coefficients indexed as
coefs[k][t][f][p]
with shape (kord, tord, ford, 2). These can then be used in the ArtData
dictionary in the specification of the HamakerPolarimeter class.
"""
from antpat.reps.sphgridfun.pntsonsphere import ZenHemisphGrid
from numpy.linalg import lstsq
nfreq, ntheta, nphi = ford*3, tord*3, kord*4
freqsmp = numpy.linspace(freq_center-freq_range, freq_center+freq_range,
nfreq)
freqsnrm = (freqsmp - freq_center)/freq_range
thetamsh, phimsh = ZenHemisphGrid(ntheta, nphi, incl_equator=False)
if isinstance(patobj, tvecfun.TVecFields):
Etheta, Ephi = patobj.getFalong(thetamsh, phimsh, freqsmp)
elif isinstance(patobj, radfarfield.RadFarField) \
or isinstance(patobj, dualpolelem.DualPolElem):
Etheta, Ephi = patobj.getFFalong(freqsmp, (thetamsh, phimsh))
ff0 = Etheta.flatten()
ff1 = Ephi.flatten()
bidx_shp = (kord, tord, ford)
bidx_ord = numpy.prod(bidx_shp)
ivar_shp = (nfreq, ntheta, nphi)
ivar_ord = numpy.prod(ivar_shp)
bfnd0 = numpy.zeros((ivar_ord, bidx_ord), dtype=float)
bfnd1 = numpy.zeros((ivar_ord, bidx_ord), dtype=float)
for ki in range(kord):
for ti in range(tord):
for fi in range(ford):
ham_x = _basefunc(ki, ti, fi, freqsnrm, (thetamsh, phimsh))
bidx_idx = numpy.ravel_multi_index(([ki], [ti], [fi]),
bidx_shp).squeeze()
bfnd0[:, bidx_idx] = ham_x[0].flatten()
bfnd1[:, bidx_idx] = ham_x[1].flatten()
sol0 = lstsq(bfnd0, ff0)[0].reshape(bidx_shp)
sol1 = lstsq(bfnd1, ff1)[0].reshape(bidx_shp)
coefs = numpy.moveaxis(numpy.array([sol0, sol1]), 0, -1)
return coefs
def _write_LOFAR_HAcc(artsdata):
"""Write Arts data to a LOFAR .cc file.
The filename will be '<HAcoefversion>Coeff<HAcoefband>.cc',
where <HAcoefversion> is the version name of the coefficients and
<HAcoefband> is the band (typically LBA or HBA); both are keys in the
artsdata dict argument.
"""
coefs = artsdata['coefs']
(kord, tord, ford, pord) = coefs.shape
varprefix = "{}_{}".format(artsdata['HAcoefversion'],
artsdata['HAcoefband'].lower())
filename = "{}Coeff{}.cc".format(artsdata['HAcoefversion'],
artsdata['HAcoefband'].upper())
with open(filename, 'w') as fp:
fp.write("//Created by AntPat version {}\n".format(antpat.__version__))
fp.write("#include <complex>\n")
fp.write("const double {}_freq_center = {};\n".format(
varprefix, artsdata['freq_center']))
fp.write("const double {}_freq_range = {};\n".format(
varprefix, artsdata['freq_range']))
fp.write("const unsigned int {}_coeff_shape[3] = {{{}, {}, {}}};\
\n".format(varprefix, kord, tord, ford))
fp.write("const std::complex<double> {}_coeff[{}] = {{\
\n".format(varprefix, kord*tord*ford*pord))
for ki in range(kord):
for ti in range(tord):
for fi in range(ford):
fp.write(" ")
                for pi in range(pord):
                    cf = coefs[ki, ti, fi, pi]
                    fp.write(" std::complex<double>(")
                    fp.write("{}, {})".format(cf.real, cf.imag))
                    # Comma-separate every initializer entry except the
                    # very last one, so the emitted C++ stays valid.
                    if not (ki == kord - 1 and ti == tord - 1
                            and fi == ford - 1 and pi == pord - 1):
                        fp.write(",")
                fp.write("\n")
fp.write("};\n")
# Add frequency channels (not part of original format)
fp.write("const double {}_channels[{}] = {{\n ".format(
varprefix, len(artsdata['channels'])))
fp.write("{}".format(",\n ".join(
[str(frq) for frq in artsdata['channels']])))
fp.write("\n};\n")
return filename
def convDPE2LOFARcc(antpat, freq_center, freq_range, HAcoefband=None,
HAcoefversion="def0", kord=2, tord=5, ford=5,
channels=None):
"""Convert a DualPolElem (or TVecFields or RadFarField) to a Hamaker-Arts
LOFAR .cc file."""
if channels is None:
if isinstance(antpat, tvecfun.TVecFields):
channels = antpat.getRs()
elif isinstance(antpat, radfarfield.RadFarField) \
or isinstance(antpat, dualpolelem.DualPolElem):
channels = antpat.getfreqs()
coefs = hamaker_coefs(antpat, freq_center, freq_range, kord=kord,
tord=tord, ford=ford)
HAcoefnrelem = coefs.size
artsdata = {'coefs': coefs, 'HAcoefversion': HAcoefversion,
'HAcoefband': HAcoefband, 'HAcoefnrelem': HAcoefnrelem,
'freq_center': freq_center, 'freq_range': freq_range,
'channels': channels}
filename = _write_LOFAR_HAcc(artsdata)
return artsdata, filename
def _read_LOFAR_HAcc(coefsccfilename):
"""Read Hamaker-Arts coefficients from c++ header files used in the
"lofar_element_response" code developed at ASTRON for LOFAR.
These header files contains LOFAR specific constructs such as reference
to "lba" and "hba", so it is not suitable for other projects.
"""
NR_POLS = 2
re_fcenter = r'[lh]ba_freq_center\s*=\s*(?P<centerstr>.*);'
re_frange = r'[lh]ba_freq_range\s*=\s*(?P<rangestr>.*);'
re_shape = r'[lh]ba_coeff_shape\[3\]\s*=\s*\{(?P<lstshp>[^\}]*)\};'
re_hl_ba_coeffs_lst = \
r'(?P<version>\w+)_(?P<band>[hl]ba)_coeff\s*\[\s*(?P<nrelem>\d+)\s*\]\s*=\s*\{(?P<cmplstr>[^\}]*)\}'
re_cc_cmpl_coef = r'std::complex<double>\((.*?)\)'
re_channels = r'[lh]ba_channels\[(?P<nrfrqs>\d+)\]\s*=\s*\{(?P<chnls>[^\}]*)\};'
with open(coefsccfilename, 'r') as coefsccfile:
coefsfile_content = coefsccfile.read()
searchres = re.search(re_fcenter, coefsfile_content)
freq_center = float(searchres.group('centerstr'))
searchres = re.search(re_frange, coefsfile_content)
freq_range = float(searchres.group('rangestr'))
searchres = re.search(re_shape, coefsfile_content)
lstshp = [int(lstshpel) for lstshpel in
searchres.group('lstshp').split(',')]
lstshp.append(NR_POLS)
searchres = re.search(re_hl_ba_coeffs_lst, coefsfile_content, re.M)
HAcoefversion = searchres.group('version')
HAcoefband = searchres.group('band')
HAcoefnrelem = searchres.group('nrelem')
lstofCmpl = re.findall(re_cc_cmpl_coef, searchres.group('cmplstr'))
cmplx_lst = []
for reimstr in lstofCmpl:
reimstrs = reimstr.split(',')
cmplx_lst.append(complex(float(reimstrs[0]), float(reimstrs[1])))
coefs = numpy.reshape(numpy.array(cmplx_lst), lstshp)
searchres = re.search(re_channels, coefsfile_content)
if searchres:
channels = [float(frq) for frq in
searchres.group('chnls').split(',')]
else:
channels = None
# The coefficients are order now as follows:
# coefs[k,theta,freq,spherical-component].shape == (2,5,5,2)
artsdata = {'coefs': coefs, 'HAcoefversion': HAcoefversion,
'HAcoefband': HAcoefband, 'HAcoefnrelem': HAcoefnrelem,
'freq_center': freq_center, 'freq_range': freq_range,
'channels': channels}
return artsdata
def convLOFARcc2DPE(inpfile, dpe_outfile=None):
"""Convert a LOFAR .cc file of a Hamaker-Arts model named inpfile to a
a DualPolElem object.
The channels argument specifies the nominal subband frequencies of the
data.
If dpe_outfile is given, a pickled instance is created with this name.
"""
artsdata = _read_LOFAR_HAcc(inpfile)
#artsdata['channels'] = channels
HLBA = HamakerPolarimeter(artsdata)
stnDPolel = dualpolelem.DualPolElem(HLBA)
if dpe_outfile is not None:
pickle.dump(stnDPolel, open(dpe_outfile, 'wb'), PICKLE_PROTO)
return stnDPolel
def plotElemPat(artsdata, frequency=55.0e6):
"""Plots the HA antenna pattern over the entire Hemisphere."""
THETA, PHI = pntsonsphere.ZenHemisphGrid() # theta=0.2rad for zeni anomaly
hp = HamakerPolarimeter(artsdata)
jones = hp.getJonesAlong([frequency], (THETA, PHI))
EsTh = numpy.squeeze(jones[..., 0, 0])
EsPh = numpy.squeeze(jones[..., 0, 1])
tvecfun.plotvfonsph(THETA, PHI, EsTh, EsPh, freq=frequency,
vcoordlist=['sph'], projection='azimuthal-equidistant',
vfname='Hamaker')
EsTh = numpy.squeeze(jones[..., 1, 0])
EsPh = numpy.squeeze(jones[..., 1, 1])
tvecfun.plotvfonsph(THETA, PHI, EsTh, EsPh, freq=frequency,
vcoordlist=['sph'], projection='equirectangular',
vfname='Hamaker') # vcoordlist=['Ludwig3']
def getJones(freq, az, el):
"""Print the Jones matrix of the HA model for a frequency and direction."""
hp = HamakerPolarimeter(HA_LBAfile_default)
    # Convert elevation to zenith angle (theta); use azimuth as phi.
    jones = hp.getJonesAlong([freq], (math.pi / 2 - el, az))
print("Jones:")
print(jones)
print("J.J^H:")
print(numpy.dot(jones, jones.conj().transpose()).real)
IXRJ = dualpolelem.getIXRJ(jones)
print("IXRJ:", 10*numpy.log10(IXRJ), "[dB]")
def _getargs():
freq = float(sys.argv[1])
az = float(sys.argv[2])
el = float(sys.argv[3])
return freq, az, el
if __name__ == "__main__":
#artsdata = _read_LOFAR_HAcc('../../example_FF_files/DefaultCoeffHBA.cc')
artsdata = _read_LOFAR_HAcc('../../../dreamBeam/dreambeam/telescopes/LOFAR/share/defaultCoeffHBA.cc')
print(artsdata)
exit()
freq = 55e6
SAMPFREQ = 100e6
NR_CHANNELS = 512
artsdata["channels"] = numpy.linspace(SAMPFREQ, 3*SAMPFREQ, 2*NR_CHANNELS, endpoint=False)
_write_LOFAR_HAcc(artsdata)
exit()
LBAmod = HamakerPolarimeter(artsdata)
freqarg = [freq]
phiarg = [[0.1-5*math.pi/4]]
thtarg = [[math.pi/2-1.1]]
jones = LBAmod.getJonesAlong(freqarg, (thtarg, phiarg))
# jones_1 = LBAmod._getJonesAlong_alt(freqarg, (thtarg, phiarg))
print(jones)
exit()
plotElemPat(artsdata, freq)
| 2baOrNot2ba/AntPat | antpat/reps/hamaker.py | Python | isc | 18,149 |
"""
Django and project specific settings for usage during development.
Everything should be ready-to-go for a common development environment, but you may of course tweak some
options.
"""
# pylint: disable=wildcard-import, unused-wildcard-import
from .base_settings import *
CSP_POLICIES = {
# The debug error page uses inline JavaScript and CSS
'script-src': ["'self'", "'unsafe-inline'"],
'style-src': ["'self'", "'unsafe-inline'"],
'object-src': ["'self'"],
'connect-src': ["'self'"]
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'dev-db.sqlite3'),
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache'
}
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
DEFAULT_FROM_EMAIL = 'ctf-gameserver.web@localhost'
MEDIA_ROOT = os.path.join(BASE_DIR, 'uploads')
SESSION_ENGINE = 'django.contrib.sessions.backends.db'
SECRET_KEY = 'OnlySuitableForDevelopment' # nosec
TIME_ZONE = 'UTC'
FIRST_DAY_OF_WEEK = 1
DEBUG = True
INTERNAL_IPS = ('127.0.0.1',)
GRAYLOG_SEARCH_URL = 'http://localhost:9000/search'
| fausecteam/ctf-gameserver | src/ctf_gameserver/web/dev_settings.py | Python | isc | 1,175 |
# -*- coding: utf-8 -*-
"""
controlbeast.ssh.shell
~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2014 by the ControlBeast team, see AUTHORS.
:license: ISC, see LICENSE for details.
"""
import time
from controlbeast.ssh.exception import CbSSHConnectionError
from controlbeast.ssh.session import CbSSHSession
from controlbeast.ssh.api import SSH_OK
from controlbeast.utils.convert import to_str, to_bytes
class CbSSHShell(CbSSHSession):
"""
Class providing an interactive SSH Shell session. Connection management is handled
transparently, so no explicit ``connect()`` or ``disconnect()`` methods exist. As
soon as one tries reading or writing to the shell session, the connection to the
remote host will be established. When an SSH shell session object gets de-referenced,
its connection will be closed and the session context will be cleaned up.
:param str hostname: remote ip address or hostname
:param str port: remote SSH port
:param str username: remote username to be used for authentication
:param str password: remote user's password
:param str passphrase: passphrase for accessing a (local) private key for authentication
:param str private_key_file: path to the private key file to be used for authentication
"""
#: boolean channel status (True = channel exists, False = channel destroyed)
_channel_status = False
#: libssh channel object
_channel = None
def __init__(self, hostname='localhost', port='22', username='', password='', passphrase='', private_key_file=''):
"""
Construct an SSH Shell session object.
"""
super(CbSSHShell, self).__init__(
hostname=hostname,
port=port,
username=username,
password=password,
passphrase=passphrase,
private_key_file=private_key_file
)
def write(self, data):
"""
Write data to remote shell.
.. note::
This method will automatically establish an SSH connection and spawn a remote
shell, should this not already have happened.
:param str data: data to be written to remote shell.
"""
if not self._channel_status:
self._channel_init()
bytes_written = self._libssh.ssh_channel_write(self._channel, to_bytes(data))
if bytes_written != len(data):
raise RuntimeError("Error writing data to SSH socket.")
def read(self, max_bytes=0):
"""
Read data from remote shell. If a limit has been specified, a maximum of ``max_bytes`` bytes will
be read and returned. Otherwise, all data until EoF will be read and returned.
.. note::
This method will automatically establish an SSH connection and spawn a remote
shell, should this not already have happened.
:param int max_bytes: maximum number of bytes to be read from remote shell
:return: data read from remote connection
:rtype: str
"""
if not self._channel_status:
self._channel_init()
if not self._libssh.ssh_channel_is_open(self._channel):
raise RuntimeError("SSH remote shell seems to be closed.")
if max_bytes:
return to_str(self._libssh.ssh_channel_read_nonblocking(self._channel, max_bytes))
else:
buffer = ""
while not self._libssh.ssh_channel_is_eof(self._channel):
buffer += to_str(self._libssh.ssh_channel_read_nonblocking(self._channel, 80))
return buffer
# noinspection PyMethodOverriding
def execute(self, command):
"""
Execute the command on the remote host.
:param str command: command string
:return: output from the remote shell
:rtype: str
"""
# empty queue
buffer = self.read(1024)
while len(buffer) > 0:
time.sleep(0.5)
buffer = self.read(1024)
# Send command
self.write("{cmd}\n".format(cmd=command))
# Retrieve output
result = ""
time.sleep(0.5)
buffer = self.read(1024)
result += buffer
while len(buffer) > 0:
time.sleep(0.5)
buffer = self.read(1024)
result += buffer
return result
def _do_or_die(self, function, *args, **kwargs):
"""
Execute function with corresponding arguments. If the function returns anything but
``SSH_OK``, an exception is raised.
:param function: pointer to function or method to be executed
:param args: positional arguments for this function or method
:param kwargs: key word arguments for this function or method
"""
return_code = function(*args, **kwargs)
if return_code != SSH_OK:
raise CbSSHConnectionError(
hostname=self.hostname,
port=self.port,
return_code=return_code,
message=to_str(self._libssh.get_error(self._session))
)
def _channel_init(self):
"""
(Re-)Initialise the libssh channel object
"""
if not self._connection_status:
self._connect()
if self._channel_status:
self._channel_terminate()
self._channel = self._libssh.ssh_channel_new(self._session)
self._do_or_die(self._libssh.ssh_channel_open_session, self._channel)
self._do_or_die(self._libssh.ssh_channel_request_pty, self._channel)
self._do_or_die(self._libssh.ssh_channel_request_shell, self._channel)
self._channel_status = True
def _channel_terminate(self):
"""
Close initialised channel and clean up.
"""
if self._channel_status and self._channel is not None:
if self._libssh.ssh_channel_is_open(self._channel):
self._libssh.ssh_channel_close(self._channel)
self._libssh.ssh_channel_send_eof(self._channel)
self._libssh.ssh_channel_free(self._channel)
self._channel = None
self._channel_status = False
def __exit__(self, exc_type, exc_val, exc_tb):
self._channel_terminate()
super(CbSSHShell, self)._terminate() | daemotron/controlbeast | controlbeast/ssh/shell.py | Python | isc | 6,307 |
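# Illustrative usage sketch (editorial addition) for the CbSSHShell class
# above; hostname and credentials are placeholders. The connection is opened
# lazily on the first read/write and torn down when the object is released:
#
#     shell = CbSSHShell(hostname='203.0.113.10', port='22',
#                        username='deploy', private_key_file='~/.ssh/id_rsa')
#     print(shell.execute('uname -a'))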
import hashlib
import json
import os
import StringIO
import mock
import pytest
import confu
@pytest.fixture(scope='function')
def locations(request):
paths = {
os.path.expanduser('~/.confu.cfg'): """\
[cfn]
parameters[KeyName] = ai-gazelle
""",
os.path.abspath('.confu.cfg'): """\
[default]
profile = julius
[aws]
regions = us-west-1
default_region = us-west-1
[cfn]
bucket_format = {profile}-confu-cfn-{region}
bucket_key = vault
stack_name_format = {Prefix}-{AppEnv}-{random}
parameters[InfraSilo] = vault
parameters[ConfName] = infra-julius
parameters[ConfSource] = {profile}-confu-pkg
stack_tags[infra-silo] = vault
[pkg]
bucket_format = {profile}-confu-pkg
includes =
infras/
!infras/global/mq.yml
!infras/global/site.yml
!infras/global/.confu.cfg
!infras/global/inventories/
!infras/global/formations/
!infras/global/roles/
inventories/
ops/
[atlas]
source_dir = infras/global/atlas
"""
}
def _exists(path):
return path in paths
patch = mock.patch('confu.settings.os.path.exists', _exists)
patch.start()
request.addfinalizer(patch.stop)
def _open(path, *args, **kwargs):
return StringIO.StringIO(paths[path])
patch = mock.patch('__builtin__.open', _open, create=True)
patch.start()
request.addfinalizer(patch.stop)
def hash_dict(d):
return hashlib.sha1(json.dumps(d, sort_keys=True)).hexdigest()
def test_merge(locations):
expected = {
'atlas': {
'source_dir': 'infras/global/atlas'
},
'aws': {'default_region': 'us-west-1', 'regions': ['us-west-1']},
'cfn': {
'bucket_format': '{profile}-confu-cfn-{region}',
'bucket_key': 'vault',
'parameters': {
'ConfName': 'infra-julius',
'ConfSource': '{profile}-confu-pkg',
'InfraSilo': 'vault',
'KeyName': 'ai-gazelle'
},
'stack_name_format': '{Prefix}-{AppEnv}-{random}',
'stack_tags': {'infra-silo': 'vault'}},
'pkg': {
'bucket_format': '{profile}-confu-pkg',
'default_includes': [
'group_vars/',
'host_vars/',
'roles/',
'/ansible.cfg',
'!*/ansible.cfg',
'*.yml',
'!.project',
'!*.git',
'!*.pyc',
'!*.pyo',
'!*.git*',
'!*.travis.yml',
'!*.md',
'!Vagrantfile',
'!*/test/',
'!test.yml'
],
'includes': [
'infras/',
'!infras/global/mq.yml',
'!infras/global/site.yml',
'!infras/global/.confu.cfg',
'!infras/global/inventories/',
'!infras/global/formations/',
'!infras/global/roles/',
'inventories/',
'ops/',
],
'name': '{source.dir_name}',
'source_dir': './',
'stage_dir': '/tmp/confu/{package.name}-{package.version}',
'version': '{source.git_version}'
},
'profile': 'julius',
'region': 'us-west-1'
}
actual = confu.settings.load(globalize=False)
assert hash_dict(actual) == hash_dict(expected)
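def test_hash_dict_is_order_insensitive():
    """Illustrative sketch (editorial addition): hash_dict() serializes with
    json.dumps(..., sort_keys=True), so key order must not affect the hash."""
    a = {'x': 1, 'y': [1, 2]}
    b = {'y': [1, 2], 'x': 1}
    assert hash_dict(a) == hash_dict(b)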
| bninja/confu | tests/test_settings.py | Python | isc | 3,386 |
from distutils.core import setup
DISTNAME='ts_charting'
FULLVERSION='0.1'
setup(name=DISTNAME,
version=FULLVERSION,
packages=['ts_charting',
]
)
| dalejung/ts-charting | setup.py | Python | mit | 726 |
#
# File: scripts/lipsync_test.nut
# Author: Astrofra
#
import gs
import os
import json
from globals import *
from compat import *
# Include(g_viseme_set + "phoneme_to_viseme.nut")
# !
# @short lipsync_test
# @author Astrofra
#
class LipSyncTrack:
def __init__(self):
self.ui = None # SceneGetUI(g_scene)
self.external_material_list = []
self.current_clip = 0
self.text = 0
self.current_phoneme_index = 0
self.current_phoneme = 0
self.lipsync_clock = 0.0
self.duration = -1.0
self.all_done = False
self.current_viseme_sprite = 0
self.mouth_2d = False
self.disable_narrator = False
self.visemes = None
def LipSyncNutInclude(self, _key):
global g_story
_nut_fname = "voice_" + g_story + "_" + _key + ".json"
print("LipSyncNutInclude() : loading '" + _nut_fname + "'.")
json_file = None
if os.path.exists("tmp/" + _nut_fname):
print("Loading from 'tmp/'")
json_file = open("tmp/" + _nut_fname)
else:
print("Loading from 'archived_files/'")
json_file = open("archived_files/" + _nut_fname)
if json_file is not None:
self.visemes = json.loads(json_file.read())
else:
self.visemes = {}
# !
# @short OnUpdate
# Called each frame.
#
def Update(self):
# _clock = g_clock - self.lipsync_clock
# if self.all_done:
# return
# if _clock > self.current_phoneme.last_time:
# self.GetNextPhoneme()
pass
def GetNextPhoneme(self):
if self.current_phoneme_index < len(list_phoneme):
if self.text != "pause":
self.current_phoneme = list_phoneme[self.current_phoneme_index]
# print("GetNextPhoneme() : '" + self.current_phoneme.phoneme_type + "'.")
self.GetVisemeFromPhoneme(self.current_phoneme.phoneme_type)
else:
self.current_phoneme = list_phoneme[self.current_phoneme_index]
self.GetVisemeFromPhoneme("_")
self.current_phoneme_index += 1
else:
# print("LipSyncTrack::GetNextPhoneme() All done!")
self.GetVisemeFromPhoneme("_")
if self.duration < 0 or g_clock - self.lipsync_clock >= self.duration:
self.all_done = True
def GetVisemeFromPhoneme(self, pho):
# if ("video" in self.current_clip) # FIX ME!!!
# return
if pho == "_":
pho = "closed"
pho = pho.upper()
if pho in self.visemes:
vi = self.visemes[pho]
if self.disable_narrator:
return
mouth_tex = EngineLoadTexture(g_engine, g_viseme_set + vi + ".png")
if self.mouth_2d:
if self.current_viseme_sprite != 0:
UIDeleteSprite(self.ui, self.current_viseme_sprite)
self.current_viseme_sprite = UIAddSprite(self.ui, -1, mouth_tex, 10, 10, 150, 150)
SpriteSetScale(self.current_viseme_sprite, 2, 2)
else:
# local geo, mat
geo = ItemGetGeometry(SceneFindItem(g_scene, "monitor_screen"))
mat = GeometryGetMaterialFromIndex(geo, 0)
MaterialSetTexture(mat, 0, mouth_tex)
for _mat in self.external_material_list:
MaterialSetTexture(_mat, 0, mouth_tex)
def RegisterExternalMaterial(self, _mat):
self.external_material_list.append(_mat)
def AddPauseAtEnd(self):
print("LipSyncTrack::AddPauseAtEnd()")
_last_idx = len(list_phoneme) - 1
clip_duration = 0.0
if _last_idx >= 0:
list_phoneme[_last_idx].last_time += FixedSecToTick(Sec(1.0))
clip_duration = list_phoneme[_last_idx].last_time
return clip_duration
# !
# @short OnSetup
# Called when the scene is about to be setup.
#
def Feed(self, _current_clip):
print("LipSyncTrack::Feed()")
# local clip_duration, key
clip_duration = -1.0
self.duration = -1.0
self.current_clip = _current_clip
self.text = self.current_clip['text']
key = SHA1(self.text)
self.lipsync_clock = g_clock
self.all_done = False
self.current_phoneme_index = 0
self.current_phoneme = 0
print("LipSyncTrack::Feed(" + key + ")")
self.LipSyncNutInclude(key)
if "self.duration" in _current_clip:
clip_duration = FixedSecToTick(Sec(_current_clip.self.duration))
self.duration = clip_duration
else:
clip_duration = self.AddPauseAtEnd()
if "emulator" in _current_clip and not ("narrator_command" in _current_clip.emulator):
self.disable_narrator = True
else:
self.disable_narrator = False
if self.text != "pause":
SceneGetScriptInstance(g_scene).audio_mixer.PlaySound("voice_" + g_story + "_" + key + ".ogg", g_clock, "voice_over")
self.GetNextPhoneme()
return clip_duration | astrofra/amiga-memories | app/tracker_lip_sync.py | Python | mit | 4,345 |
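# Illustrative sketch (editorial addition): the core of the viseme lookup that
# GetVisemeFromPhoneme() above performs. The mapping values are made-up
# placeholders, not the project's real viseme table:
#
#     visemes = {"A": "mouth_open", "M": "mouth_closed", "CLOSED": "mouth_rest"}
#
#     def viseme_for(phoneme):
#         key = ("closed" if phoneme == "_" else phoneme).upper()
#         return visemes.get(key)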
def create_middleware(app, log_filename='repozeprofile.log',
discard_first_request=True, flush_at_shutdown=True,
path='/__profile__'):
from repoze.profile.profiler import AccumulatingProfileMiddleware
return AccumulatingProfileMiddleware(app,
log_filename=log_filename,
discard_first_request=discard_first_request,
flush_at_shutdown=flush_at_shutdown,
path=path
)
def setup_repozeprofile(app):
app.add_middleware(create_middleware)
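def _example_wrap_wsgi():
    """Illustrative sketch (editorial addition): wrapping a bare WSGI app with
    the profiler factory above, outside the Glashammer setup hook. The inner
    app is a placeholder."""
    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['hello']
    # Profile data accumulates in the log file and is browsable at /__profile__.
    return create_middleware(app, log_filename='repozeprofile.log')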
| passy/glashammer-rdrei | glashammer/bundles/contrib/dev/repozeprofile.py | Python | mit | 503 |
"""
Written by Nathan Fritz and Lance Stout. Copyright 2011 by &yet, LLC.
Released under the terms of the MIT License
"""
import uuid
from thoonk.exceptions import Empty
from thoonk.feeds import Feed
class Queue(Feed):
"""
A Thoonk queue is a typical FIFO structure, but with an
optional priority override for inserting to the head
of the queue.
Thoonk Standard API:
publish -- Alias for put()
put -- Add an item to the queue, with optional priority.
get -- Retrieve the next item from the queue.
"""
def publish(self, item, priority=False):
"""
Add a new item to the queue.
(Same as self.put())
Arguments:
item -- The content to add to the queue.
priority -- Optional priority; if equal to True then
the item will be inserted at the head of the
queue instead of the end.
"""
self.put(item, priority)
def put(self, item, priority=False):
"""
Add a new item to the queue.
(Same as self.publish())
Arguments:
item -- The content to add to the queue (string).
priority -- Optional priority; if equal to True then
the item will be inserted at the head of the
queue instead of the end.
"""
id = uuid.uuid4().hex
pipe = self.redis.pipeline()
if priority:
pipe.rpush(self.feed_ids, id)
pipe.hset(self.feed_items, id, item)
            pipe.incr(self.feed_publishes)
else:
pipe.lpush(self.feed_ids, id)
pipe.hset(self.feed_items, id, item)
pipe.incr(self.feed_publishes)
pipe.execute()
return id
def get(self, timeout=0):
"""
Retrieve the next item from the queue.
Raises an Empty exception if the request times out.
Arguments:
timeout -- Optional time in seconds to wait before
raising an exception.
"""
result = self.redis.brpop(self.feed_ids, timeout)
if result is None:
raise Empty
id = result[1]
pipe = self.redis.pipeline()
pipe.hget(self.feed_items, id)
pipe.hdel(self.feed_items, id)
results = pipe.execute()
return results[0]
def get_ids(self):
"""Return the set of IDs used by jobs in the queue."""
return self.redis.lrange(self.feed_ids, 0, -1)
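def _example_fifo_ordering():
    """Illustrative sketch (editorial addition): FIFO vs. priority ordering of
    the Queue above. Assumes a reachable local Redis and a thoonk.Thoonk
    factory exposing queue(), which is the usual way feeds are obtained."""
    import thoonk
    pubsub = thoonk.Thoonk(host='localhost', port=6379)
    queue = pubsub.queue('jobs')
    queue.put('first')
    queue.put('second')
    queue.put('urgent', priority=True)  # inserted at the head of the queue
    assert queue.get(timeout=1) == 'urgent'
    assert queue.get(timeout=1) == 'first'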
| andyet/thoonk.py | thoonk/feeds/queue.py | Python | mit | 2,574 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_create_or_update_request_initial(
resource_group_name: str,
image_name: str,
subscription_id: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"imageName": _SERIALIZER.url("image_name", image_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_delete_request_initial(
resource_group_name: str,
image_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"imageName": _SERIALIZER.url("image_name", image_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_request(
resource_group_name: str,
image_name: str,
subscription_id: str,
*,
expand: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"imageName": _SERIALIZER.url("image_name", image_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if expand is not None:
query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_by_resource_group_request(
resource_group_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/images')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class ImagesOperations(object):
"""ImagesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2017_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name: str,
image_name: str,
parameters: "_models.Image",
**kwargs: Any
) -> "_models.Image":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Image"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'Image')
request = build_create_or_update_request_initial(
resource_group_name=resource_group_name,
image_name=image_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Image', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Image', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'} # type: ignore
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
image_name: str,
parameters: "_models.Image",
**kwargs: Any
) -> LROPoller["_models.Image"]:
"""Create or update an image.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param image_name: The name of the image.
:type image_name: str
:param parameters: Parameters supplied to the Create Image operation.
:type parameters: ~azure.mgmt.compute.v2017_03_30.models.Image
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either Image or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2017_03_30.models.Image]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Image"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
image_name=image_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Image', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'} # type: ignore
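    # Illustrative sketch, not part of the generated client: a typical
    # caller drives this long-running operation through an authenticated
    # ComputeManagementClient (names below are placeholders):
    #
    #     poller = client.images.begin_create_or_update(
    #         "my-resource-group", "my-image", image_parameters)
    #     image = poller.result()  # blocks until the service finishes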
def _delete_initial(
self,
resource_group_name: str,
image_name: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
resource_group_name=resource_group_name,
image_name=image_name,
subscription_id=self._config.subscription_id,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'} # type: ignore
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
image_name: str,
**kwargs: Any
) -> LROPoller["_models.OperationStatusResponse"]:
"""Deletes an Image.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param image_name: The name of the image.
:type image_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either OperationStatusResponse or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationStatusResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
image_name=image_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
image_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.Image":
"""Gets an image.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param image_name: The name of the image.
:type image_name: str
:param expand: The expand expression to apply on the operation.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Image, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2017_03_30.models.Image
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Image"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
image_name=image_name,
subscription_id=self._config.subscription_id,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Image', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images/{imageName}'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> Iterable["_models.ImageListResult"]:
"""Gets the list of images under a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ImageListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2017_03_30.models.ImageListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImageListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ImageListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/images'} # type: ignore
@distributed_trace
def list(
self,
**kwargs: Any
) -> Iterable["_models.ImageListResult"]:
"""Gets the list of Images in the subscription. Use nextLink property in the response to get the
next page of Images. Do this till nextLink is null to fetch all the Images.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ImageListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2017_03_30.models.ImageListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ImageListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ImageListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/images'} # type: ignore
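    # Illustrative sketch, assuming an authenticated client: the returned
    # ItemPaged is lazy and follows nextLink transparently, so callers can
    # simply iterate it:
    #
    #     for image in client.images.list():
    #         print(image.name)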
| Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/operations/_images_operations.py | Python | mit | 26,662 |
from setuptools import setup, find_packages
from os.path import join, dirname
import re
with open('hikvisionapi/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
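# Illustrative example: a line such as __version__ = '0.3' in
# hikvisionapi/__init__.py makes `version` == '0.3' here (the value shown
# is a placeholder, not the actual release number).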
setup(name='hikvisionapi',
version=version,
description='The client for HIKVISION cameras, DVR',
url='https://github.com/MissiaL/hikvision-client',
author='Petr Alekseev',
author_email='[email protected]',
packages=find_packages(),
long_description_content_type="text/markdown",
long_description=open(join(dirname(__file__), 'README.md')).read(),
download_url='https://github.com/MissiaL/hikvision-client/tarball/{}'.format(version),
keywords=['api', 'hikvision', 'hikvision-client'],
install_requires=['xmltodict', 'requests', 'httpx'],
python_requires='>3.5',
)
| MissiaL/hikvision-client | setup.py | Python | mit | 911 |
#codeskulptor url: http://www.codeskulptor.org/#user40_rXnVfx7qI2_0.py
# Rock-paper-scissors-lizard-Spock template
import random
# The key idea of this program is to equate the strings
# "rock", "paper", "scissors", "lizard", "Spock" to numbers
# as follows:
#
# 0 - rock
# 1 - Spock
# 2 - paper
# 3 - lizard
# 4 - scissors
# helper functions
def name_to_number(name):
# delete the following pass statement and fill in your code below
if name == "rock":
return 0
elif name == "Spock":
return 1
elif name == "paper":
return 2
elif name == "lizard":
return 3
elif name == "scissors":
return 4
else:
print "name_to_number recieved invalid input"
# convert name to number using if/elif/else
# don't forget to return the result!
def number_to_name(number):
# delete the following pass statement and fill in your code below
if number == 0:
return "rock"
elif number == 1:
return "Spock"
elif number == 2:
return "paper"
elif number == 3:
return "lizard"
elif number == 4:
return "scissors"
else:
print "number_to_name recieved invalid input"
# convert number to a name using if/elif/else
# don't forget to return the result!
def rpsls(player_choice):
# delete the following pass statement and fill in your code below
# print a blank line to separate consecutive games
print ""
# print out the message for the player's choice
print "Player chooses ", player_choice
# convert the player's choice to player_number using the function name_to_number()
player_number = name_to_number(player_choice)
# compute random guess for comp_number using random.randrange()
    comp_number = random.randrange(0, 5)
# convert comp_number to comp_choice using the function number_to_name()
comp_choice = number_to_name(comp_number)
# print out the message for computer's choice
print "Computer chooses ", comp_choice
# compute difference of comp_number and player_number modulo five
difference = (comp_number - player_number) % 5
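    # worked example: player "rock" (0) vs computer "scissors" (4) gives
    # (4 - 0) % 5 == 4, a player win; player "paper" (2) vs computer
    # "rock" (0) gives (0 - 2) % 5 == 3, again a player win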
# use if/elif/else to determine winner, print winner message
if difference == 1 or difference == 2:
print "Computer wins!"
elif difference == 3 or difference == 4:
print "Player wins!"
elif difference == 0:
print "Player and computer tie!"
else:
print "Something went wrong!"
# test your code - THESE CALLS MUST BE PRESENT IN YOUR SUBMITTED CODE
rpsls("rock")
rpsls("Spock")
rpsls("paper")
rpsls("lizard")
rpsls("scissors")
# always remember to check your completed program against the grading rubric
| Ustabil/Python-part-one | mini_projects/rpsls/rpsls.py | Python | mit | 2,712 |
from .BaseTerminal import BaseTerminal
from eventlet.queue import Queue
class EchoTerminal(BaseTerminal):
def __init__(self):
super().__init__()
self._queue = Queue()
def send(self, data):
self._queue.put(data)
def recv(self, count=None):
return self._queue.get()
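# Minimal usage sketch (assumes an eventlet-friendly context). The `count`
# argument of recv() is accepted only for interface compatibility and is
# ignored; each recv() returns one send() payload, blocking while the
# queue is empty:
#
#     term = EchoTerminal()
#     term.send(b'hello')
#     assert term.recv() == b'hello'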
| sorgloomer/websocket_terminal | server-python3/wspty/EchoTerminal.py | Python | mit | 312 |
from default_object import Objects
class Wall(Objects):
def set_hierarchy(self):
self.hierarchy_level = 10
def __str__(self):
return '#'
def __repr__(self):
return '#'
def interact(self, obj):
return False | TRBaldim/Q-Learning | elements/wall.py | Python | mit | 259 |
"""
Django settings for lister project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os, secrets
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = secrets.SECRET_KEY
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'lists.apps.ListsConfig',
'api.apps.ApiConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework.authtoken',
]
if 'OPENSHIFT_APP_NAME' not in os.environ:
INSTALLED_APPS.append('sslserver')
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'ssl_redirect.middleware.SSLRedirectMiddleware',
]
ROOT_URLCONF = 'lister.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'lister.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
if 'OPENSHIFT_POSTGRESQL_DB_PASSWORD' in os.environ:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['PGDATABASE'],
'USER': os.environ['OPENSHIFT_POSTGRESQL_DB_USERNAME'],
'PASSWORD': os.environ['OPENSHIFT_POSTGRESQL_DB_PASSWORD'],
'HOST': os.environ['OPENSHIFT_POSTGRESQL_DB_HOST'],
'PORT': os.environ['OPENSHIFT_POSTGRESQL_DB_PORT'],
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, '../db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/New_York'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, '../wsgi/static/')
SERIALIZATION_MODULES = {'filtered-json': 'api.filtered_json_serializer'}
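# Usage sketch for the custom serializer registered above (assuming
# api.filtered_json_serializer implements Django's serializer module
# contract):
#
#     from django.core import serializers
#     data = serializers.serialize('filtered-json', SomeModel.objects.all())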
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
)
}
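# Client-side sketch for the token authentication configured above (the
# token value is a placeholder): API requests authenticate with a header
# of the form
#
#     Authorization: Token 9944b09199c62bcf9418ad846dd0e4bbdfc6ee4b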
SSL_ON = True
SSL_ALWAYS = True
| bolecki/lister | lister/lister/settings.py | Python | mit | 4,193 |
"""
add rxnorm and ndfrt ids on things
"""
import os
from collections import defaultdict, Counter
from functools import lru_cache
from itertools import chain
import requests
import json
import time
from tqdm import tqdm
from wikidataintegrator import wdi_helpers, wdi_core, wdi_login
from scheduled_bots.local import WDPASS, WDUSER
from scheduled_bots.drugs.pharma.ndfrt import get_roles, get_props
login = wdi_login.WDLogin(WDUSER, WDPASS)
def make_ref(nui):
refs = [[
wdi_core.WDItemID(value='Q21008030', prop_nr='P248', is_reference=True), # stated in ndfrt
wdi_core.WDExternalID(value=nui, prop_nr='P2115', is_reference=True), # NDF-RT ID
wdi_core.WDTime(time=time.strftime('+%Y-%m-%dT00:00:00Z'), prop_nr='P813', is_reference=True) # retrieved
]]
return refs
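# Illustrative call (the NUI below is a placeholder): make_ref("N0000000001")
# returns one reference group citing NDF-RT (Q21008030) with that NUI and
# today's retrieval date, in the nested-list shape wikidataintegrator expects.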
rxnorm_qid = wdi_helpers.id_mapper("P3345", return_as_set=True)
print("{} rxnorms have duplicate qids".format(len({k: v for k, v in rxnorm_qid.items() if len(v) > 1})))
rxnorm_qid = {k: list(v)[0] for k, v in rxnorm_qid.items() if len(v) == 1}
nui_qid = wdi_helpers.id_mapper("P2115", return_as_set=True)
print("{} nuis have duplicate qids".format(len({k: v for k, v in nui_qid.items() if len(v) > 1})))
nui_qid = {k: list(v)[0] for k, v in nui_qid.items() if len(v) == 1}
mesh_qid = wdi_helpers.id_mapper("P486", return_as_set=True)
nuis_info = json.load(open("nuis_info.json")) if os.path.exists("nuis_info.json") else dict()
nuis_info = {k: v for k, v in nuis_info.items() if v}
nuis_info = {k: v for k, v in nuis_info.items() if get_roles(v)}
for nui in tqdm(nuis_info):
s = []
qid = None
rxcui = get_props(nuis_info[nui]).get("RxNorm_CUI")
if nui in nui_qid and rxcui in rxnorm_qid:
if nui_qid[nui] != rxnorm_qid[rxcui]:
print("there's something wrong with me!!")
print(nuis_info[nui]['conceptName'], nui, nui_qid[nui], rxcui, rxnorm_qid[rxcui])
continue
if nui in nui_qid and rxcui not in rxnorm_qid:
# add the rxnorm rxcui onto this qid
print(nuis_info[nui]['conceptName'], nui, nui_qid[nui], rxcui)
s = [wdi_core.WDExternalID(rxcui, "P3345", references=make_ref(nui))]
qid = nui_qid[nui]
if nui not in nui_qid and rxcui in rxnorm_qid:
# add the ndfrt nui onto this qid
print(nuis_info[nui]['conceptName'], nui, rxcui, rxnorm_qid[rxcui])
s = [wdi_core.WDExternalID(nui, "P2115", references=make_ref(nui))]
qid = rxnorm_qid[rxcui]
if s:
item = wdi_core.WDItemEngine(wd_item_id=qid, data=s, append_value=['P2115','P3345'])
item.write(login)
for nui in tqdm(nuis_info):
rxcui = get_props(nuis_info[nui]).get("RxNorm_CUI")
mesh = get_props(nuis_info[nui]).get("MeSH_DUI")
if rxcui in rxnorm_qid and mesh and mesh not in mesh_qid:
print(nui, mesh, rxcui)
qid = rxnorm_qid[rxcui]
s = [wdi_core.WDExternalID(mesh, "P486", references=make_ref(nui)),
wdi_core.WDExternalID(nui, "P2115", references=make_ref(nui))]
item = wdi_core.WDItemEngine(wd_item_id=qid, data=s, append_value=['P486','P2115','P3345'])
item.write(login) | SuLab/scheduled-bots | scheduled_bots/drugs/pharma/3-ndfrt-rxnorm.py | Python | mit | 3,151 |
from codecs import open
from os import path
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='aws_list_all',
version='0.8.0',
description='List all your AWS resources, all regions, all services.',
long_description=long_description,
url='https://github.com/JohannesEbke/aws_list_all',
author='Johannes Ebke',
author_email='[email protected]',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
],
keywords='aws boto3 listings resources region services',
packages=['aws_list_all'],
install_requires=['boto3>=1.16.57', 'app_json_file_cache>=0.2.2'],
entry_points={
'console_scripts': [
'aws_list_all=aws_list_all.__main__:main',
'aws-list-all=aws_list_all.__main__:main',
],
},
include_package_data=True,
)
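# Installation sketch: `pip install .` exposes both console-script
# spellings declared above, so `aws_list_all ...` and `aws-list-all ...`
# invoke the same aws_list_all.__main__:main entry point.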
| JohannesEbke/aws_list_all | setup.py | Python | mit | 1,352 |
import numpy as np
import os, fnmatch
from sys import argv
from scipy import ndimage
from skimage import measure, morphology, segmentation
from skimage.feature import peak_local_max
from tocmfastpy import *
from h_transform_globalsync import *
import pylab as plt
import seaborn as sns
import ws3d_gpu, edt_cuda
from joblib import Parallel, delayed
from IO_utils import *
import optparse
#possibly useful
#morphology.remove_small_objects
def local_maxima_debug(arr, ionized, threshold_h=0.7, connectivity=2, try_loading=False, outfile='smoothed_11.npy', smoothing='hmax'):
neighborhood = ndimage.morphology.generate_binary_structure(len(arr.shape), connectivity)
#maxima = None
maxima = peak_local_max(arr, labels=ionized, footprint=neighborhood, indices=False, exclude_border=False)
if smoothing == 'hmax': #smoothing with h-max transform
if try_loading:
try:
print "loading h_max_transform"
smoothed_arr = np.load('smoothed.npy')
except:
smoothed_arr = h_max_cpu(arr, neighborhood, maxima, threshold_h, mask=ionized, connectivity=connectivity)
np.save(outfile, smoothed_arr)
else:
smoothed_arr = h_max_cpu(arr, neighborhood, maxima, threshold_h, mask=ionized, connectivity=2, max_iterations=5)
np.save(outfile, smoothed_arr)
maxima = peak_local_max(smoothed_arr, labels=ionized, footprint=neighborhood, indices=False, exclude_border=False)
# elif smoothing == 'bin':
# print 'Smoothing field with binary dilation'
# n_reg = 0
# m_reg = 1000
# while True:
# maxima = ionized & ndimage.binary_dilation(maxima, structure=neighborhood, iterations=1) #smoothing with binary dilation
# tmp_labels = measure.label(maxima, connectivity=connectivity)
# m_reg = len(measure.regionprops(tmp_labels))
# print m_reg
# if m_reg == n_reg: break
# n_reg = m_reg
return maxima #np.where(detected_maxima)
def local_maxima_cpu(arr, ionized, threshold_h=0.7, connectivity=2, save=False, outfile='smoothed_11.npy'):
neighborhood = ndimage.morphology.generate_binary_structure(len(arr.shape), connectivity)
maxima = peak_local_max(arr, labels=ionized, footprint=neighborhood, indices=False, exclude_border=False)
if threshold_h > 0:
arr = h_max_cpu(arr, neighborhood, maxima, threshold_h, mask=ionized, connectivity=2, max_iterations=50)
maxima = peak_local_max(arr, labels=ionized, footprint=neighborhood, indices=False, exclude_border=False)
return maxima, arr
def local_maxima_gpu(arr, ionized, threshold_h=0.7, connectivity=2):
s_arr, maxima = h_max_gpu(arr=arr,mask=ionized, maxima=None, h=threshold_h, n_iter=1000)
return maxima, s_arr
def watershed_3d(image, connectivity=2, h=0.7, target='cuda', edtfile=None):
ionized = (image == 1.)
#ionized = ionized*morphology.remove_small_objects(ionized, 3) #speeds up later process
if target == 'cuda' or target == 'gpu':
print 'Computing EDT'
EDT = None
try:
EDT = np.load(edtfile)['EDT']
except:
EDT = ndimage.distance_transform_edt(ionized)
#EDT_c = edt_cuda.distance_transform_edt(arr=ionized)
#
maxima, smEDT = local_maxima_gpu(EDT.copy(), ionized, connectivity=connectivity, threshold_h=h)
#import IPython; IPython.embed()
print 'Computing watershed'
if True:
labels = ws3d_gpu.watershed(-smEDT, mask=ionized)
#import IPython; IPython.embed()
markers = measure.label(maxima, connectivity=connectivity)
else:
markers = measure.label(maxima, connectivity=connectivity)
labels = morphology.watershed(-smEDT, markers, mask=ionized)
#flabels = morphology.watershed(-smEDT, markers, mask=np.ones_like(ionized))
import IPython; IPython.embed()
elif target == 'cpu':
print 'Computing EDT'
EDT = ndimage.distance_transform_edt(ionized)
maxima, smEDT = local_maxima_cpu(EDT.copy(), ionized, connectivity=connectivity, threshold_h=h)
print 'Computing watershed'
markers = measure.label(maxima, connectivity=connectivity)
labels = morphology.watershed(-EDT, markers, mask=ionized)
return labels, markers, EDT, smEDT
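# Usage sketch (argument values are assumptions): `image` is a binary
# ionization cube with 1 marking ionized cells, e.g.
#
#     labels, markers, EDT, smEDT = watershed_3d(xH_cube, h=0.35,
#                                                target='gpu', connectivity=3)
#
# `labels` then assigns each ionized voxel to a bubble region.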
def _get_var(Q, logR):
R = np.exp(logR)
return -Q/4/np.pi/R**3
def get_size_dist(labels, Q, scale=1, log=True, n_bins=20):
R = measure.regionprops(labels)
R_eff = scale*np.array([r.equivalent_diameter/2 for r in R])
#R_eff = (3*volumes/4/np.pi)**(1./3)
#dn/dr*(4pi*r**4/3Q) = dn/d(r**(-3))
if not log:
hist,bins = np.histogram(R_eff, normed=True, bins=100)
else:
logR = np.log(R_eff)
# var = -Q/4/np.pi/R_eff**3
log_edges = np.linspace(np.min(logR)-1,np.max(logR)+1,n_bins)
#var_edges = _get_var(Q, log_edges)
hist,bin_edges = np.histogram(logR, bins=log_edges, normed=True)
bws = (log_edges[1:]-log_edges[:-1])/2
bins = np.exp((log_edges[1:]+log_edges[:-1])/2)
hist *= 4*np.pi*bins**3/3/Q
hist /= np.dot(hist, bws)
#hist = hist/Q*4*np.pi*(bins)**3/3
return hist, bins
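# Note on the weighting above: multiplying the per-ln(R) histogram by
# V(R)/Q = (4*pi*R**3/3)/Q converts number-weighted counts into the
# volume-weighted bubble size distribution dP/dln(R); the final division
# by np.dot(hist, bws) then renormalizes the curve.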
def plot_zscroll_dist(fn1='watershed_z10.npz', fn2='watershed_z11.npz', fn3='watershed_z12.npz'):
plt.figure()
for fn in [fn1,fn2,fn3]:
f = np.load(fn)
hist, bins = get_size_dist(f['labels'], f['Q'], f['scale'])
plt.plot(bins, hist, label=fn.split('.')[0].split('_')[1])
plt.xscale('log')
plt.xlabel('R(Mpc)')
plt.ylabel(r'\frac{dP}{d\ln r}')
plt.legend()
sns.set_context("talk", font_scale=1.4)
def plot_dist(labels, Q, scale=1, n_bins=20):
    """
    scale is Mpc/pixel; Q is the global ionized fraction, n_bins the
    number of logarithmic size bins.
    """
R = measure.regionprops(labels)
R_eff = scale*np.array([r.equivalent_diameter/2 for r in R])
logR = np.log(R_eff)
var = -Q/4/np.pi/R_eff**3
log_edges = np.linspace(np.min(logR)-1,np.max(logR)+1,n_bins)
var_edges = _get_var(Q, log_edges)
hist,bin_edges = np.histogram(var, bins=var_edges, normed=True)
bins = np.exp((log_edges[1:]+log_edges[:-1])/2)
sns.distplot(R_eff, hist=False, bins=bins)
def watershed_21cmBox(path):
box = boxio.readbox(path)
return watershed_3d(box.box_data)
def mc_test(N=1000,SIZE=200):
x, y, z = np.indices((SIZE, SIZE,SIZE))
image = np.zeros_like(x)
print image.shape
for n in xrange(N):
print n
x1, y1, z1 = np.random.randint(0,SIZE, size=3)
r1 = np.random.randint(1,SIZE/10)
mask_circle1 = (x - x1)**2 + (y - y1)**2 + (z - z1)**2< r1**2
image = np.logical_or(mask_circle1, image)
distance = ndimage.distance_transform_edt(image)
# local_maxi = peak_local_max(distance, labels=image,
# footprint=np.ones((3, 3, 3)),
# indices=False)
# markers = ndimage.label(local_maxi)[0]
# labels = morphology.watershed(-distance, markers, mask=image)
sd, maxima = h_max_gpu(arr=distance,mask=image, maxima=None, h=1.0, n_iter=150, connectivity=3)
labels = ws3d_gpu.watershed(-sd, mask=image)
markers = measure.label(maxima, connectivity=3)
flabels = morphology.watershed(-sd, markers, mask=image)
import matplotlib
carr = np.random.rand(256, 3); carr[0,:] = 0
cmap = matplotlib.colors.ListedColormap(carr)
plt.subplot(121)
plt.imshow(labels[SIZE/2], cmap=cmap)
plt.subplot(122)
plt.imshow(flabels[SIZE/2], cmap=cmap)
import IPython; IPython.embed()
def circle_test():
x, y, z = np.indices((80, 80,80))
x1, y1, z1, x2, y2, z2 = 28, 28,50, 44, 52,54
r1, r2 = 26, 40
mask_circle1 = (x - x1)**2 + (y - y1)**2 + (z - z1)**2< r1**2
mask_circle2 = (x - x2)**2 + (y - y2)**2 + (z - z2)**2< r2**2
image = np.logical_or(mask_circle1, mask_circle2)
# Now we want to separate the two objects in image
# Generate the markers as local maxima of the distance
# to the background
distance = ndimage.distance_transform_edt(image)
local_maxi = peak_local_max(distance, labels=image,
footprint=np.ones((3, 3, 3)),
indices=False)
markers = ndimage.label(local_maxi)[0]
#labels = morphology.watershed(-distance, markers, mask=image)
flabels = ws3d_gpu.watershed(-distance, mask=image)
import matplotlib
carr = np.random.rand(256, 3); carr[0,:] = 0
cmap = matplotlib.colors.ListedColormap(carr)
fig, axes = plt.subplots(1,1)
#axes[0].imshow(labels[40],cmap=cmap)
axes.imshow(flabels[40],cmap=cmap)
import IPython; IPython.embed()
if __name__ == '__main__':
o = optparse.OptionParser()
o.add_option('-d','--dir', dest='DIR', default='/home/yunfanz/Data/21cmFast/Boxes/')
o.add_option('-p','--pat', dest='PAT', default='*xH_nohalos_*')
o.add_option('-o','--out', dest='OUTDIR', default='./NPZ/')
(opts, args) = o.parse_args()
files = find_files(opts.DIR, pattern=opts.PAT)
for path in [files[0]]:
print 'Processing', path
b1 = boxio.readbox(path)
d1 = 1 - b1.box_data
#d1 = 1 - b1.box_data#[:252,:252,:252]
scale = float(b1.param_dict['dim']/b1.param_dict['BoxSize'])
#OUTFILE = b1.param_dict['basedir']+'/watershed_z{0}.npz'.format(b1.z)
OUTFILE = opts.OUTDIR+'dwatershed_z{0}_L{1}_Iter{2}.npz'.format(b1.z, b1.param_dict['BoxSize'], b1.param_dict['Iteration'])
labels, markers, EDT, smEDT = watershed_3d(d1, h=0.35, target='gpu', connectivity=3, edtfile=OUTFILE)
Q_a = 1 - b1.param_dict['nf']
print 'Q', Q_a
print 'saving', OUTFILE
np.savez(OUTFILE, Q=Q_a, scale=scale, labels=labels, markers=markers, EDT=EDT, smEDT=smEDT)
#hist, bins = get_size_dist(labels, Q, scale=scale)
# import matplotlib
# carr = np.random.rand(256, 3); carr[0,:] = 0
# cmap = matplotlib.colors.ListedColormap(carr)
# import IPython; IPython.embed()
| yunfanz/ReionBub | watershed.py | Python | mit | 10,091 |
from django.conf.urls import url
from matches.views.matches import MatchView, ControlView, CreateMatchView, ListMatchView
urlpatterns = [
url(r'^create/$', CreateMatchView.as_view(), name='create'),
url(r'^list/$', ListMatchView.as_view(), name='list'),
url(r'^(?P<pk>\d+)/$', MatchView.as_view(), name='overlay'),
url(r'^(?P<pk>\d+)/control$', ControlView.as_view(), name='control'),
]
| sheepeatingtaz/xwingoverlayer | matches/urls/matches.py | Python | mit | 406 |
#!/usr/bin/env python3
"""Client for ETNA's APIs"""
# TODO: Implement a Session upon instantiation?
# TODO: CLI.
from datetime import datetime
from typing import Union, List
import requests
from .constants import (
AUTH_URL,
IDENTITY_URL,
USER_INFO_URL,
PROMOTION_URL,
USER_PROMO_URL,
ACTIVITY_URL,
NOTIF_URL,
GRADES_URL,
PICTURE_URL,
SEARCH_URL,
ACTIVITIES_URL,
GROUPS_URL,
GSA_EVENTS_URL,
GSA_LOGS_URL,
EVENTS_URL,
DECLARATION_URL,
DECLARATIONS_URL,
CONVERSATIONS_URL,
TICKET_URL,
TICKETS_URL,
)
__author__ = "Theo Massard <[email protected]>"
class EtnaWrapper:
""""""
def __init__(
self,
login: str = None,
password: str = None,
cookies: dict = None,
use_session: bool = False,
headers: dict = None,
):
self.login = login
self._cookies = cookies
if cookies is None:
self._cookies = self.get_cookies(login, password)
# XXX: be careful about this one
if use_session:
self._req = requests.Session()
else:
self._req = requests
self.headers = headers
def __repr__(self):
return "<etnawrapper.etna.EtnaWrapper(login='{}', cookies={})>".format(
self.login, self._cookies
)
def __eq__(self, obj):
if not isinstance(obj, EtnaWrapper):
            return NotImplemented
return (
self.login == obj.login
and self._cookies == obj._cookies
and isinstance(self._req, type(obj._req))
)
    def __ne__(self, obj):
return not self == obj
def _query(
self, url: str, method="GET", raw: bool = False, data=None, params=None,
) -> Union[dict, requests.Response]:
"""Perform a request using the `self._req` HTTP client.
Upon requesting a non-standard URL (not returning JSON),
the `raw` flag allow to return a `requests.Response` object
instead of a dictionnary.
"""
response = self._req.request(
method,
url,
cookies=self._cookies,
json=data,
params=params,
headers=self.headers,
timeout=50,
)
if raw:
return response # type: requests.Response
return response.json() # type: dict
@staticmethod
    def get_cookies(login: str = None, password: str = None) -> dict:
        """Fetch authentication cookies as a dict."""
if login is None:
raise ValueError("missing login, can not authenticate")
if password is None:
raise ValueError("missing password, can not authenticate")
data = {"login": login, "password": password}
resp = requests.post(AUTH_URL, data=data)
return resp.cookies.get_dict()
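    # Usage sketch (credentials are placeholders):
    #
    #     cookies = EtnaWrapper.get_cookies('login_x', 'secret')
    #     wrapper = EtnaWrapper(login='login_x', cookies=cookies)
    #     infos = wrapper.get_user_info()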
def get_user_info(self, user_id: int = None) -> dict:
"""Return a user's informations. Defaults to self.login."""
# TODO: Docstring -> show example
url = IDENTITY_URL
if user_id is not None:
url = USER_INFO_URL.format(user_id=user_id)
result = self._query(url)
return result
def get_promotion(self, promotion_id: int = None) -> dict:
"""Return a user's informations. Defaults to self.login."""
# TODO: Docstring -> show example
# NOTE: Is it actually the same output?
url = USER_PROMO_URL
if promotion_id is not None:
url = PROMOTION_URL.format(promo_id=promotion_id)
result = self._query(url)
return result
def get_user_promotion(self, login: str = None) -> dict:
"""Return user's promotions."""
url = USER_PROMO_URL
if login is not None:
url = USER_PROMO_URL + "?login=" + login
result = self._query(url)
return result
def get_current_activities(self, login: str = None) -> dict:
"""Return a user's current activities.
Defaults to self.login.
"""
url = ACTIVITY_URL.format(login=login or self.login)
result = self._query(url)
return result
def get_notifications(self, login: str = None) -> dict:
"""Return `login`'s notifications.
If login is not set, defaults to self.login.
"""
url = NOTIF_URL.format(login=login or self.login)
result = self._query(url)
return result
def get_grades(self, promotion_id: int, login: str = None) -> dict:
"""Fetch a student's grades, based on the promotion."""
url = GRADES_URL.format(login=login or self.login, promo_id=promotion_id)
result = self._query(url)
return result
    def get_picture(self, login: str = None) -> bytes:
url = PICTURE_URL.format(login=login or self.login)
result = self._query(url, raw=True)
return result.content
def get_projects(self, login: str = None, date: datetime = None) -> dict:
"""Fetch a student's projects base on the login."""
url = SEARCH_URL.format(login=login or self.login)
params = dict()
if date is not None:
_date = date.strftime('%Y-%m-%d')
params["date"] = _date
result = self._query(url, params=params)
return result
def get_project_activites(self, module: str) -> dict:
"""Fetch activities related to `module`."""
url = ACTIVITIES_URL.format(module_id=module)
result = self._query(url)
return result
def get_group_for_activity(self, module: str, project: str) -> dict:
"""Return group composition for the module/project tuple."""
url = GROUPS_URL.format(module_id=module, project_id=project)
result = self._query(url)
return result
def get_students(self, promotion_id: int) -> dict:
"""Fetch every student bsaed on `promotion_id`."""
url = PROMOTION_URL.format(promo_id=promotion_id)
result = self._query(url)
return result
def get_log_events(self, login: str = None) -> dict:
"""Get a user's log event, defaults to self.login."""
url = GSA_EVENTS_URL.format(login=login or self.login)
result = self._query(url)
return result
def get_logs(self, login: str = None) -> dict:
"""Fetch a user's logs, defaults to self.login."""
url = GSA_LOGS_URL.format(login=login or self.login)
result = self._query(url)
return result
def get_events(
self, start_date: datetime, end_date: datetime, login: str = None
) -> dict:
"""Fetch a user's events, defaults to self.login."""
url = EVENTS_URL.format(
login=login or self.login,
start_date=start_date.isoformat(),
end_date=end_date.isoformat(),
)
result = self._query(url)
return result
def get_conversations(self, user_id: int, start: int = None, size: int = None) -> dict:
"""Return the list of conversations for a user.
Requires read permission for this user_id.
Use this method with a user_id corresponding to your login
to ensure readability.
"""
url = CONVERSATIONS_URL.format(user_id=user_id)
params = dict()
if start is not None:
params['from'] = start
if size is not None:
params['size'] = size
result = self._query(url, params=params)
return result
def get_declarations(self, start: str = None, end: str = None) -> dict:
"""Return the list of declarations for a user.
Requires read permission for this login.
"""
url = DECLARATIONS_URL.format(login=self.login)
params = dict()
if start is not None:
params['start'] = start
if end is not None:
params['end'] = end
result = self._query(url, params=params)
return result
def declare_log(self, module_id: int, content: dict):
"""Send a log declaration for module_id with `content`.
Content should be of the following form:
>>> content = {
"module": 1111,
"declaration": {
"start": "2019-05-6 10:00",
"end": "2019-05-6 10:00",
"content": "Objectifs: do things\n" \
"Actions: Did stuff\n" \
"Resultats: Got stuff done\n"
},
}
"""
url = DECLARATION_URL.format(
login=self.login,
module_id=module_id,
)
result = self._query(url, method='OPTIONS', raw=True)
result = self._query(url, method='POST', data=content)
return result
def open_ticket(self, title: str, message: str, tags: List[str] = None, users: List[str] = None):
"""Open a ticket."""
content = {}
content['title'] = title
content['message'] = message
content['tags'] = tags
content['users'] = users
url = TICKETS_URL
result = self._query(url, method='OPTIONS', raw=True)
result = self._query(url, method='POST', data=content)
return result
def close_ticket(self, ticket_id: int):
"""Close a ticket."""
url = TICKET_URL.format(task_id=ticket_id)
result = self._query(url, method='DELETE')
return result
def get_tickets(self):
"""Fetch the list of tickets."""
url = TICKETS_URL
result = self._query(url)
return result
def get_ticket(self, ticket_id: int):
"""Fetch the ticket matching `ticket_id`."""
url = TICKET_URL.format(task_id=ticket_id)
result = self._query(url)
return result
__all__ = ("EtnaWrapper",)
| massard-t/etnawrapper | etnawrapper/etna.py | Python | mit | 9,890 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from lxml import etree
class Command(BaseCommand):
help = 'Validates given XML file with RELAX NG schema'
def add_arguments(self, parser):
parser.add_argument('xml_file', help='path to XML file')
parser.add_argument('rng_file', help='path to RELAX NG file')
def handle(self, *args, **options):
xml_file_etree = etree.parse(options['xml_file'])
rng_file_etree = etree.parse(options['rng_file'])
relaxng = etree.RelaxNG(rng_file_etree)
try:
relaxng.assertValid(xml_file_etree)
self.stdout.write('Document is valid')
except etree.DocumentInvalid as ex:
self.stdout.write('Document is not valid: ' + str(ex))
| lev-veshnyakov/django-import-data | import_data/management/commands/validate_xml.py | Python | mit | 938 |
from rcat import RCAT
from scipy import stats, odr
import numpy as np
import matplotlib.pyplot as plt
import math
def plot_kcat_rmaxn_correlation(x, y, fig, ax, color='#9E7E5E', edge='none',
yerr='none', labels={}, scatter_size=30, hide_overlap=True,
fit=False, zorder=3):
logx = np.log10(x)
logy = np.log10(y)
ax.scatter(x, y,s=scatter_size, c=color, marker='o', edgecolor=edge, zorder=zorder)
if yerr != 'none':
ax.errorbar(x, y,
yerr=yerr, barsabove=False,
fmt=None, ecolor='k', alpha=0.4)
ax.plot([1e-4, 1e4], [1e-4,1e4], '#333676', ls='-', lw=2, zorder=5)
#Define function for scipy.odr
fit_func = lambda B,x: B[0]*x + B[1]
#Fit the data using scipy.odr
Model = odr.Model(fit_func)
Data = odr.RealData(logx, logy)
Odr = odr.ODR(Data, Model, beta0=[1,1])
output = Odr.run()
#output.pprint()
beta = output.beta
if fit:
edge = np.array([-4, 4])
ax.plot([1e-4, 1e4], 10**fit_func(beta, edge), color='#699A33', ls=':', lw=3, zorder=1)
ax.set_xscale('log', nonposx='clip')
ax.set_yscale('log', nonposy='clip')
if labels!={}:
add_labels(x, y, labels, ax, fig, hide_overlap)
return output
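# Note: scipy.odr fits the log-log relation by orthogonal distance
# regression, minimizing perpendicular rather than vertical residuals,
# which treats scatter in the in vitro and in vivo axes symmetrically.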
def add_labels(x, y, labels, ax, fig, hide_overlap=True):
ann = []
for r, name in labels.iteritems():
if x[r]>y[r]:
ann.append(ax.text(x[r], y[r]/1.1, name,
ha='center', va='top', zorder=5, size=13))
if x[r]<y[r]:
ann.append(ax.text(x[r], y[r]*1.1, name,
ha='center', va='bottom', zorder=5, size=13))
mask = np.zeros(fig.canvas.get_width_height(), bool)
fig.canvas.draw()
for i, a in enumerate(ann):
bbox = a.get_window_extent()
x0 = int(bbox.x0)
x1 = int(math.ceil(bbox.x1))
y0 = int(bbox.y0)
y1 = int(math.ceil(bbox.y1))
s = np.s_[x0:x1, y0:y1]
if hide_overlap:
if np.any(mask[s]):
a.set_visible(False)
else:
mask[s] = True
else:
mask[s] = True
if __name__ == "__main__":
R = RCAT()
fontsize = 20
fig = plt.figure(figsize=(8,8))
ax = plt.axes()
rcat = R.rcat
kcat = R.kcat['kcat [s^-1]']
rmaxn = R.rmaxn['rmax [s^-1]']
index = kcat.index & rmaxn.index
x = kcat[index]
y = rmaxn[index]
res = np.abs(np.log10(x) - np.log10(y))
labels = res[res>=1] # at least 10 fold difference
labels = {k:v for k,v in R.map_reactions_to_gene_names().iteritems() if k in labels}
report = plot_kcat_rmaxn_correlation(x, y,
fig, ax,
labels=labels,
fit=True)
rmse = np.sqrt( report.sum_square / len(x) )
r, pval = stats.pearsonr(np.log10(x), np.log10(y))
labels = {k:v for k,v in R.map_reactions_to_gene_names().iteritems() if k in index and k not in labels}
add_labels(x, y, labels, ax, fig) # specific labels to add
ax.set_ylabel(r'in vivo $r_{\mathrm{max}}\,\left[s^{-1}\right]$',
size=fontsize, style='italic')
ax.set_xlabel(r'in vitro $k_{\mathrm{cat}}\,\left[s^{-1}\right]$',
size=fontsize, style='italic')
ax.tick_params(axis='both', which='both', top='off', right='off')
[tick.label.set_fontsize(fontsize) for tick in ax.xaxis.get_major_ticks()]
[tick.label.set_fontsize(fontsize) for tick in ax.yaxis.get_major_ticks()]
ax.set_xlim(1e-3/4,4*1e3)
ax.set_ylim(1e-3/4,4*1e3)
plt.tight_layout()
plt.savefig('%s/svg/kcat_rmax_correlation.svg'%R.path) | dandanvidi/catalytic-rates | scripts/figure_correlation_plot.py | Python | mit | 4,051 |
# -*- encoding: utf-8 -*-
'''
Given a binary tree and a sum, determine if the tree has a root-to-leaf path such that adding up all the values along the path equals the given sum.
For example:
Given the below binary tree and sum = 22,
5
/ \
4 8
/ / \
11 13 4
/ \ \
7 2 1
return true, as there exist a root-to-leaf path 5->4->11->2 which sum is 22.
'''
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def hasPathSum(self, root, sum):
"""
:type root: TreeNode
:type sum: int
:rtype: bool
"""
        if root is None:
            return False
        return self.traverse(root, sum, 0)
    def traverse(self, node, sum, path):
        if node.left is None and node.right is None:
            # leaf node
            return path + node.val == sum
        if node.left is not None:
            if self.traverse(node.left, sum, path + node.val):
                return True
        if node.right is not None:
            return self.traverse(node.right, sum, path + node.val)
return False
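# Trace on the example tree with sum = 22: traverse() walks 5 -> 4 -> 11,
# rejects leaf 7 (20 + 7 != 22), then reaches leaf 2 with path == 20 and
# 20 + 2 == 22, so hasPathSum returns True.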
| weixsong/algorithm | leetcode/112.py | Python | mit | 1,385 |
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'das_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f', '-a']):
print('Error while fetching translations', file=sys.stderr)
exit(1)
def find_format_specifiers(s):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
try:
specifiers.append(s[percent+1])
        except IndexError:
print('Failed to get specifier')
pos = percent+2
return specifiers
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
return set(numeric),other
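# Worked example: find_format_specifiers('%1 of %2 (%s)') returns
# ['1', '2', 's'], which split_format_specifiers partitions into the Qt
# set {'1', '2'} and the strprintf list ['s'].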
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors, numerus):
source_f = split_format_specifiers(find_format_specifiers(source))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
#assert(not(source_f[0] and source_f[1]))
try:
translation_f = split_format_specifiers(find_format_specifiers(translation))
except IndexError:
errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
else:
if source_f != translation_f:
if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
# Allow numerus translations to omit %n specifier (usually when it only has one possible value)
return True
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", ''')
text = text.replace('"', '"')
return text
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
with open(filepath + '.orig', 'rb') as f:
data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors, numerus)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# check if document is (virtually) empty, and remove it if so
num_messages = 0
for context in root.findall('context'):
for message in context.findall('message'):
num_messages += 1
if num_messages < MIN_NUM_MESSAGES:
print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
continue
# write fixed-up tree
# if diff reduction requested, replace some XML to 'sanitize' to qt formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
# fetch_all_translations()
postprocess_translations()
| mainconceptx/DAS | contrib/devtools/update-translations.py | Python | mit | 7,657 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
import argparse
import signal
from rtrlib import RTRManager
def pfx_callback(pfx_record, added):
if added:
c = '+'
else:
c = '-'
print("{sign} {prefix:40} {max:3} - {min:3} {asn:10}".format(
sign=c,
prefix=pfx_record.prefix,
max=pfx_record.max_len,
min=pfx_record.min_len,
asn=pfx_record.asn
)
)
def spki_callback(spki_record, added):
if added:
c = '+'
else:
c = '-'
print("{sign} {asn}".format(sign=c, asn=spki_record.asn))
def main():
parser = argparse.ArgumentParser()
parser.add_argument("protocol", choices=('tcp', ))
parser.add_argument("hostname")
parser.add_argument("port", type=int)
parser.add_argument(
"-k",
default=False,
action="store_true",
help="Print information about SPKI updates"
)
parser.add_argument(
"-p",
default=False,
action="store_true",
help="Print information about PFX updates"
)
args = parser.parse_args()
if args.p:
pfx_fp = pfx_callback
else:
pfx_fp = None
if args.k:
spki_fp = spki_callback
else:
spki_fp = None
print("{:40} {:3} {:4}".format("Prefix", "Prefix Length", "ASN"))
with RTRManager(args.hostname, args.port, pfx_update_callback=pfx_fp, spki_update_callback=spki_fp) as mgr:
try:
signal.pause()
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
| rtrlib/python-binding | tools/rtrclient.py | Python | mit | 1,810 |
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
from __future__ import absolute_import
import copy
import json
import re
from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.fetcher.http import HTTP
from svtplay_dl.service import OpenGraphThumbMixin
from svtplay_dl.service import Service
class Bigbrother(Service, OpenGraphThumbMixin):
supported_domains = ["bigbrother.se"]
def get(self):
data = self.get_urldata()
match = re.search(r'id="(bcPl[^"]+)"', data)
if not match:
yield ServiceError("Can't find flash id.")
return
flashid = match.group(1)
match = re.search(r'playerID" value="([^"]+)"', self.get_urldata())
if not match:
yield ServiceError("Can't find playerID")
return
playerid = match.group(1)
match = re.search(r'playerKey" value="([^"]+)"', self.get_urldata())
if not match:
yield ServiceError("Can't find playerKey")
return
playerkey = match.group(1)
match = re.search(r'videoPlayer" value="([^"]+)"', self.get_urldata())
if not match:
yield ServiceError("Can't find videoPlayer info")
return
videoplayer = match.group(1)
dataurl = (
"http://c.brightcove.com/services/viewer/htmlFederated?flashID={}&playerID={}&playerKey={}"
"&isVid=true&isUI=true&dynamicStreaming=true&@videoPlayer={}".format(flashid, playerid, playerkey, videoplayer)
)
        data = self.http.request("get", dataurl).text  # .text: the regex and json.loads below need str, not bytes
match = re.search(r"experienceJSON = ({.*});", data)
if not match:
yield ServiceError("Can't find json data")
return
jsondata = json.loads(match.group(1))
renditions = jsondata["data"]["programmedContent"]["videoPlayer"]["mediaDTO"]["renditions"]
if jsondata["data"]["publisherType"] == "PREMIUM":
yield ServiceError("Premium content")
return
for i in renditions:
if i["defaultURL"].endswith("f4m"):
streams = hdsparse(
copy.copy(self.config), self.http.request("get", i["defaultURL"], params={"hdcore": "3.7.0"}), i["defaultURL"], output=self.output
)
for n in list(streams.keys()):
yield streams[n]
if i["defaultURL"].endswith("m3u8"):
streams = hlsparse(self.config, self.http.request("get", i["defaultURL"]), i["defaultURL"], output=self.output)
for n in list(streams.keys()):
yield streams[n]
if i["defaultURL"].endswith("mp4"):
yield HTTP(copy.copy(self.config), i["defaultURL"], i["encodingRate"] / 1024, output=self.output)
| olof/svtplay-dl | lib/svtplay_dl/service/bigbrother.py | Python | mit | 2,929 |
from django import forms
import models
class SearchForm(forms.ModelForm):
class Meta:
model = models.Search
# Display searches, allow edits
class warningForm(forms.Form):
message = forms.CharField()
action = forms.CharField()
searchid = forms.CharField()
redirect = forms.CharField()
class displaySearchForm(forms.Form):
title = forms.CharField()
include = forms.CharField()
    exclude = forms.CharField(required=False)
class displaySubSearches(forms.Form):
title = forms.CharField()
include = forms.CharField()
    exclude = forms.CharField(required=False)
class editCriteria(forms.Form):
title = forms.CharField()
include = forms.CharField()
    exclude = forms.CharField(required=False)
| pbarton666/buzz_bot | djangoproj/djangoapp/forms.py | Python | mit | 733 |
# This algorithm finds one nontrivial factor of a given composite number
# (Pollard's rho method with Floyd cycle detection)
from gcd import *
def f(x,n):
return (pow(x,2,n)+1)%n
def Rho(n):
factors = {}
i = 1
x = 1
d = 1
while d == 1:
#Calculate x[i]
factors[i] = f(x,n)
#Calculate x[i+1]
factors[i+1] = f(factors[i],n)
#find the gcd
d = gcd(abs(factors[(i+1)/2] - factors[i+1]),n)
#if not relatively prime return d
if d > 1:
return d
#continue iteration
else:
x = factors[i+1]
i+=2
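# A minimal usage sketch (illustrative modulus; assumes gcd() from the local
# gcd module behaves like the standard Euclidean gcd):
if __name__ == '__main__':
    # 8051 = 83 * 97, so Rho(8051) should print a nontrivial factor (97 here).
    print(Rho(8051))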
| Bedrock02/General-Coding | Math/pollardRho.py | Python | mit | 452 |
#!/usr/bin/env python
from PyQt4 import QtGui, QtCore
from PyQt4.QtGui import QMenu, QCursor
from PyQt4.QtCore import QPoint, QString, Qt
from contextMenu import ContextMenu
class DraggableNode(object):
def __init__(self, parent, name, node_num = None):
self.parent = parent
self.frame = self.parent.parent
self.name = name
self.node_num = node_num
self.press = None
self.collection = None
self.connect()
def connect(self):
self.cidpress = self.parent.fig.canvas.mpl_connect('button_press_event', self.on_press)
self.cidmotion = self.parent.fig.canvas.mpl_connect('motion_notify_event', self.on_motion)
self.cidrelease = self.parent.fig.canvas.mpl_connect('button_release_event', self.on_release)
def context_menu(self):
cm = ContextMenu(self.name, self)
cm.popup(QCursor.pos())
def disconnect(self):
self.parent.fig.canvas.mpl_disconnect(self.cidpress)
self.parent.fig.canvas.mpl_disconnect(self.cidmotion)
self.parent.fig.canvas.mpl_disconnect(self.cidrelease)
def on_press(self, event):
app = self.parent.parent.parent.parent # QT application
collection = self.parent.get_artist()
# deselect all, and begin group select operation
if app.keyboardModifiers() == Qt.ShiftModifier:
self.parent.select_node(None)
self.parent.group_select('down', event.xdata, event.ydata)
return
for obj in collection.contains(event):
if obj != True and obj != False: #obj is a dictionary
if len(obj['ind']) > 0:
canvas_click = False
if str(obj['ind'][0]) == str(self.node_num):
if event.button == 3:
self.context_menu() #popup menu on right mouseclick
elif event.button == 2:
self.press = event.xdata, event.ydata #save click coords for node movement
self.parent.save_selected_positions()
elif event.button == 1:
if app.keyboardModifiers() == Qt.ControlModifier: #add to selection
self.parent.select_node(self.name, add=True)
else:
self.parent.select_node(self.name)
def on_motion(self, event):
collection = self.parent.get_artist()
for obj in collection.contains(event):
if obj != True and obj != False: #obj is a dictionary
if len(obj['ind']) > 0: #at least one node activated
if str(obj['ind'][0]) == str(self.node_num): #found self in event list
self.parent.status_bar.showMessage('File Name: "'+str(self.name)+'"', 2500)
# If self.press is set, it means
# 1) self.press gets set in on_press()
# 2) self.press can only be set in one instance of DraggableNode at a time
# 3) therefore, only the instance with self.press set will move
if self.press != None:
xpress,ypress = self.press
# print event.xdata, event.ydata
self.parent.move_node(self.name, xpress, ypress, event.xdata, event.ydata)
self.press = xpress, ypress
self.parent.redraw()
def on_release(self, event):
self.press = None
    def set_node_num(self, node_num):
        self.node_num = node_num
| fdesjardins/nara-stepbrowser | src/draggableNode.py | Python | mit | 3,714 |
# -*- coding: utf-8 -*-
__all__ = ['aside', 'footer', 'header'] | owlsn/h_crawl | display/display/ui_modules/__init__.py | Python | mit | 56 |
# Copyright (c) 2014 Universidade Federal Fluminense (UFF)
# Copyright (c) 2014 Polytechnic Institute of New York University.
# This file is part of noWorkflow.
# Please, consult the license terms in the LICENSE file.
from __future__ import (absolute_import, print_function,
division, unicode_literals)
import os
import argparse
import json
from .. import utils
from ..persistence import persistence
from ..models.trial import Trial
from ..models.trial_prolog import TrialProlog
from .command import Command
def int_or_type(string):
try:
return int(string)
except ValueError:
if "diff" in string:
splitted = string.split(':')
if len(splitted) < 3:
raise argparse.ArgumentTypeError("you must diff two trials")
return splitted
return string
def nbconvert(code):
cells = []
for cell in code.split('\n# <codecell>\n'):
cells.append({
'cell_type': 'code',
'execution_count': None,
'metadata': {
'collapsed': True,
},
'outputs': [],
'source': [cell]
})
result = {
'cells': cells,
'nbformat': 4,
'nbformat_minor': 0,
'metadata': {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.6"
}
}
}
return result
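# A small illustration (hypothetical input): nbconvert("a = 1\n# <codecell>\nprint(a)")
# returns an nbformat-4 dict whose 'cells' list holds two code cells with
# sources ['a = 1'] and ['print(a)'].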
class Export(Command):
def add_arguments(self):
add_arg = self.add_argument
add_arg('-r', '--rules', action='store_true',
help='also exports inference rules')
add_arg('-i', '--ipython', action='store_true',
help='export ipython notebook file')
add_arg('trial', type=int_or_type, nargs='?',
            help='trial id or none for last trial. If you are generating '
            'ipython notebook files, it is also possible to use "history" '
            'or "diff:<trial_id_1>:<trial_id_2>"')
add_arg('--dir', type=str,
help='set project path where is the database. Default to '
'current directory')
def execute(self, args):
persistence.connect_existing(args.dir or os.getcwd())
if not args.ipython:
trial = Trial(trial_id=args.trial, exit=True)
trial_prolog = TrialProlog(trial)
print(trial_prolog.export_text_facts())
if args.rules:
print('\n'.join(trial_prolog.export_rules()))
else:
if args.trial == "history":
nb = nbconvert(("%load_ext noworkflow\n"
"import noworkflow.now.ipython as nip\n"
"# <codecell>\n"
"history = nip.History()\n"
"# history.graph.width = 700\n"
"# history.graph.height = 300\n"
"# history.script = '*'\n"
"# history.execution = '*'\n"
"# <codecell>\n"
"history"))
with open('History.ipynb', 'w') as ipynb:
json.dump(nb, ipynb)
elif isinstance(args.trial, list):
nb = nbconvert(("%load_ext noworkflow\n"
"import noworkflow.now.ipython as nip\n"
"# <codecell>\n"
"diff = nip.Diff({1}, {2})\n"
"# diff.graph.view = 0\n"
"# diff.graph.mode = 3\n"
"# diff.graph.width = 500\n"
"# diff.graph.height = 500\n"
"# <codecell>\n"
"diff").format(*args.trial))
with open('Diff-{1}-{2}.ipynb'.format(*args.trial), 'w') as ipynb:
json.dump(nb, ipynb)
else:
nb = nbconvert(("%load_ext noworkflow\n"
"import noworkflow.now.ipython as nip\n"
"# <codecell>\n"
"trial = nip.Trial({})\n"
"# trial.graph.mode = 3\n"
"# trial.graph.width = 500\n"
"# trial.graph.height = 500\n"
"# <codecell>\n"
"trial").format(args.trial))
with open('Trial-{}.ipynb'.format(args.trial), 'w') as ipynb:
json.dump(nb, ipynb)
| paopao74cn/noworkflow | capture/noworkflow/now/cmd/cmd_export.py | Python | mit | 5,168 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class StartTaskInformation(Model):
"""Information about a start task running on a compute node.
:param state: The state of the start task on the compute node. Possible
values include: 'running', 'completed'
:type state: str or ~azure.batch.models.StartTaskState
:param start_time: The time at which the start task started running. This
value is reset every time the task is restarted or retried (that is, this
is the most recent time at which the start task started running).
:type start_time: datetime
:param end_time: The time at which the start task stopped running. This is
the end time of the most recent run of the start task, if that run has
completed (even if that run failed and a retry is pending). This element
is not present if the start task is currently running.
:type end_time: datetime
:param exit_code: The exit code of the program specified on the start task
command line. This property is set only if the start task is in the
completed state. In general, the exit code for a process reflects the
specific convention implemented by the application developer for that
process. If you use the exit code value to make decisions in your code, be
sure that you know the exit code convention used by the application
process. However, if the Batch service terminates the start task (due to
timeout, or user termination via the API) you may see an operating
system-defined exit code.
:type exit_code: int
:param container_info: Information about the container under which the
task is executing. This property is set only if the task runs in a
container context.
:type container_info:
~azure.batch.models.TaskContainerExecutionInformation
:param failure_info: Information describing the task failure, if any. This
property is set only if the task is in the completed state and encountered
a failure.
:type failure_info: ~azure.batch.models.TaskFailureInformation
:param retry_count: The number of times the task has been retried by the
Batch service. Task application failures (non-zero exit code) are retried,
pre-processing errors (the task could not be run) and file upload errors
are not retried. The Batch service will retry the task up to the limit
specified by the constraints.
:type retry_count: int
:param last_retry_time: The most recent time at which a retry of the task
started running. This element is present only if the task was retried
(i.e. retryCount is nonzero). If present, this is typically the same as
startTime, but may be different if the task has been restarted for reasons
other than retry; for example, if the compute node was rebooted during a
retry, then the startTime is updated but the lastRetryTime is not.
:type last_retry_time: datetime
:param result: The result of the task execution. If the value is 'failed',
then the details of the failure can be found in the failureInfo property.
Possible values include: 'success', 'failure'
:type result: str or ~azure.batch.models.TaskExecutionResult
"""
_validation = {
'state': {'required': True},
'start_time': {'required': True},
'retry_count': {'required': True},
}
_attribute_map = {
'state': {'key': 'state', 'type': 'StartTaskState'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'exit_code': {'key': 'exitCode', 'type': 'int'},
'container_info': {'key': 'containerInfo', 'type': 'TaskContainerExecutionInformation'},
'failure_info': {'key': 'failureInfo', 'type': 'TaskFailureInformation'},
'retry_count': {'key': 'retryCount', 'type': 'int'},
'last_retry_time': {'key': 'lastRetryTime', 'type': 'iso-8601'},
'result': {'key': 'result', 'type': 'TaskExecutionResult'},
}
def __init__(self, state, start_time, retry_count, end_time=None, exit_code=None, container_info=None, failure_info=None, last_retry_time=None, result=None):
super(StartTaskInformation, self).__init__()
self.state = state
self.start_time = start_time
self.end_time = end_time
self.exit_code = exit_code
self.container_info = container_info
self.failure_info = failure_info
self.retry_count = retry_count
self.last_retry_time = last_retry_time
self.result = result
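# A minimal instantiation sketch (illustrative values; per _validation the
# required parameters are state, start_time and retry_count):
#
#   import datetime
#   info = StartTaskInformation(state='running',
#                               start_time=datetime.datetime.utcnow(),
#                               retry_count=0)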
| lmazuel/azure-sdk-for-python | azure-batch/azure/batch/models/start_task_information.py | Python | mit | 5,037 |
#!/usr/bin/env python
"""
This is a small script to fetch as much metadata about team members as one can, and then update the team.yml data file
"""
import yaml
team_file = open('_data/team.yml')
team = yaml.safe_load(team_file)  # safe_load avoids executing arbitrary YAML tags
print team
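# A possible write-back step (a sketch; assumes the desired edits have been
# applied to `team` in place before dumping):
#
# with open('_data/team.yml', 'w') as out:
#     yaml.safe_dump(team, out, default_flow_style=False)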
| AAROC/hackfest-site | assets/update-team.py | Python | mit | 231 |
################################################################################
# Copyright (c) 2006-2017 Franz Inc.
# All rights reserved. This program and the accompanying materials are
# made available under the terms of the MIT License which accompanies
# this distribution, and is available at http://opensource.org/licenses/MIT
################################################################################
from __future__ import print_function, unicode_literals
import sys
# JSON abstraction layer
try:
import simplejson as json
except ImportError:
import json
class JsonDecodeError(Exception):
pass
def encode_json(value):
return json.dumps(value)
def decode_json(text):
# JSON on Py3 insists on getting Unicode strings
if sys.version_info[0] > 2 and isinstance(text, bytes):
text = text.decode('utf-8')
try:
return json.loads(text)
except ValueError:
raise JsonDecodeError
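# For illustration: encode_json({'a': 1}) returns '{"a": 1}', and
# decode_json('{"a": 1}') accepts str or UTF-8 bytes and returns {'a': 1}.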
| franzinc/agraph-python | src/franz/miniclient/agjson.py | Python | mit | 953 |
import logging
from gym_mupen64plus.envs.MarioKart64.mario_kart_env import MarioKartEnv
logger = logging.getLogger(__name__)
| emomicrowave/gym-mupen64plus | gym_mupen64plus/__init__.py | Python | mit | 126 |
from __future__ import print_function, division
import util
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
class Autoencoder:
def __init__(self, D, M):
# training data
self.X = tf.placeholder(tf.float32, shape=(None, D))
# input -> hidden
self.W = tf.Variable(tf.random_normal(shape = (D, M)) * np.sqrt(2.0 / M))
self.b = tf.Variable(np.zeros(M).astype(np.float32))
# hidden -> output
self.V = tf.Variable(tf.random_normal(shape=(M, D)) * np.sqrt(2.0 / D))
self.c = tf.Variable(np.zeros(D).astype(np.float32))
# construct the reconstruction
self.Z = tf.nn.relu(tf.matmul(self.X, self.W) + self.b)
logits = tf.matmul(self.Z, self.V) + self.c
self.X_hat = tf.nn.sigmoid(logits)
        # compute cost
self.cost = tf.reduce_sum(
tf.nn.sigmoid_cross_entropy_with_logits(
labels = self.X,
logits = logits
)
)
self.train_op = tf.train.RMSPropOptimizer(learning_rate = 0.001).minimize(self.cost)
self.init_op = tf.global_variables_initializer()
self.sess = tf.InteractiveSession()
self.sess.run(self.init_op)
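    # Shape notes (illustrative): with D=784 and M=300, W is (784, 300),
    # Z is (batch, 300), and X_hat is (batch, 784) with values in (0, 1).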
def fit(self, X, epochs = 30, batch_sz = 64):
costs = []
n_batches = len(X) // batch_sz
print("N batches")
for i in range(epochs):
print("epoch: ", i)
np.random.shuffle(X)
for j in range(n_batches):
batch = X[j*batch_sz:(j+1)*batch_sz]
_, c = self.sess.run((self.train_op, self.cost), feed_dict= {self.X : batch})
c /= batch_sz
costs.append(c)
if j % 100 == 0:
print("iter %d cost %.3f" % (j, c))
plt.plot(costs)
plt.show()
def predict(self, X):
return self.sess.run(self.X_hat, feed_dict = {self.X : X})
def main():
X, Y = util.get_mnist()
model = Autoencoder(784, 300)
model.fit(X)
done = False
while not done:
i = np.random.choice(len(X))
x = X[i]
im = model.predict([x]).reshape(28, 28)
plt.subplot(1,2,1)
plt.imshow(x.reshape(28, 28), cmap = 'gray')
plt.title("Original")
plt.subplot(1,2,2)
plt.imshow(im, cmap = 'gray')
plt.title("reconstruction")
plt.show()
ans = input("another one?: ")
        if ans and ans[0] in ('n', 'N'):
done = True
if __name__ == '__main__':
main()
| mohsaad/Algorithms | machine_learning/autoencoders/autoencoder_tf.py | Python | mit | 2,559 |
# Generated by Django 3.1.7 on 2021-03-28 20:23
import api_v3.models.profile
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_bleach.models
class Migration(migrations.Migration):
dependencies = [
('api_v3', '0013_added_expenses'),
]
operations = [
migrations.AlterField(
model_name='ticket',
name='business_activities',
field=django_bleach.models.BleachField(blank=True, null=True),
),
migrations.AlterField(
model_name='ticket',
name='connections',
field=django_bleach.models.BleachField(blank=True, null=True),
),
migrations.AlterField(
model_name='ticket',
name='initial_information',
field=django_bleach.models.BleachField(blank=True, null=True),
),
migrations.AlterField(
model_name='ticket',
name='sources',
field=django_bleach.models.BleachField(blank=True, null=True),
),
]
| occrp/id-backend | api_v3/migrations/0014_change_ticket_column_limits.py | Python | mit | 1,107 |
# -*- coding: utf-8 -*-
"""
webapp2
=======
Taking Google App Engine's webapp to the next level!
:copyright: 2010 by tipfy.org.
    :license: Apache Software License, see LICENSE for details.
"""
import logging
import re
import urllib
import urlparse
from google.appengine.ext.webapp import Request
from google.appengine.ext.webapp.util import run_bare_wsgi_app, run_wsgi_app
import webob
import webob.exc
#: Base HTTP exception, set here as public interface.
HTTPException = webob.exc.HTTPException
#: Allowed request methods.
ALLOWED_METHODS = frozenset(['GET', 'POST', 'HEAD', 'OPTIONS', 'PUT',
'DELETE', 'TRACE'])
#: Value used for required arguments.
REQUIRED_VALUE = object()
#: Regex for URL definitions.
_ROUTE_REGEX = re.compile(r'''
\< # The exact character "<"
(\w*) # The optional variable name (restricted to a-z, 0-9, _)
(?::([^>]*))? # The optional :regex part
\> # The exact character ">"
''', re.VERBOSE)
class Response(webob.Response):
"""Abstraction for an HTTP response.
Implements all of ``webapp.Response`` interface, except ``wsgi_write()``
as the response itself is returned by the WSGI application.
"""
def __init__(self, *args, **kwargs):
super(Response, self).__init__(*args, **kwargs)
# webapp uses self.response.out.write()
self.out = self.body_file
def set_status(self, code, message=None):
"""Sets the HTTP status code of this response.
        :param code:
            The HTTP status code to set.
        :param message:
            A status string. If none is given, uses the default from the
            HTTP/1.1 specification.
"""
if message:
self.status = '%d %s' % (code, message)
else:
self.status = code
def clear(self):
"""Clears all data written to the output stream so that it is empty."""
self.app_iter = []
@staticmethod
def http_status_message(code):
"""Returns the default HTTP status message for the given code.
:param code:
The HTTP code for which we want a message.
"""
message = webob.statusreasons.status_reasons.get(code)
if not message:
raise KeyError('Invalid HTTP status code: %d' % code)
return message
class RequestHandler(object):
"""Base HTTP request handler. Clients should subclass this class.
Subclasses should override get(), post(), head(), options(), etc to handle
different HTTP methods.
Implements most of ``webapp.RequestHandler`` interface.
"""
def __init__(self, app, request, response):
"""Initializes this request handler with the given WSGI application,
Request and Response.
:param app:
A :class:`WSGIApplication` instance.
:param request:
A ``webapp.Request`` instance.
:param response:
A :class:`Response` instance.
"""
self.app = app
self.request = request
self.response = response
def initialize(self, request, response):
"""Initializes this request handler with the given Request and
Response.
.. warning::
This is deprecated. It is here for compatibility with webapp only.
Use __init__() instead.
:param request:
A ``webapp.Request`` instance.
:param response:
A :class:`Response` instance.
"""
logging.warning('RequestHandler.initialize() is deprecated. '
'Use __init__() instead.')
self.app = WSGIApplication.active_instance
self.request = request
self.response = response
def __call__(self, _method, *args, **kwargs):
"""Dispatches the requested method.
:param _method:
The method to be dispatched: the request method in lower case
(e.g., 'get', 'post', 'head', 'put' etc).
:param args:
Positional arguments to be passed to the method, coming from the
matched :class:`Route`.
:param kwargs:
Keyword arguments to be passed to the method, coming from the
matched :class:`Route`.
:returns:
None.
"""
method = getattr(self, _method, None)
if method is None:
# 405 Method Not Allowed.
# The response MUST include an Allow header containing a
# list of valid methods for the requested resource.
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.6
valid = ', '.join(get_valid_methods(self))
self.abort(405, headers=[('Allow', valid)])
# Execute the method.
method(*args, **kwargs)
def error(self, code):
"""Clears the response output stream and sets the given HTTP error
code. This doesn't stop code execution; the response is still
available to be filled.
:param code:
HTTP status error code (e.g., 501).
"""
self.response.set_status(code)
self.response.clear()
def abort(self, code, *args, **kwargs):
"""Raises an :class:`HTTPException`. This stops code execution,
leaving the HTTP exception to be handled by an exception handler.
:param code:
HTTP status error code (e.g., 404).
:param args:
Positional arguments to be passed to the exception class.
:param kwargs:
Keyword arguments to be passed to the exception class.
"""
abort(code, *args, **kwargs)
def redirect(self, uri, permanent=False, abort=False):
"""Issues an HTTP redirect to the given relative URL. This won't stop
code execution unless **abort** is True. A common practice is to
return when calling the function::
return self.redirect('/some-path')
:param uri:
A relative or absolute URI (e.g., '../flowers.html').
:param permanent:
If True, uses a 301 redirect instead of a 302 redirect.
:param abort:
If True, raises an exception to perform the redirect.
.. seealso:: :meth:`redirect_to`.
"""
absolute_url = str(urlparse.urljoin(self.request.uri, uri))
if permanent:
code = 301
else:
code = 302
if abort:
self.abort(code, headers=[('Location', absolute_url)])
self.response.headers['Location'] = absolute_url
self.response.set_status(code)
self.response.clear()
def redirect_to(self, _name, _permanent=False, _abort=False, *args,
**kwargs):
"""Convenience method mixing :meth:`redirect` and :meth:`url_for`:
Issues an HTTP redirect to a named URL built using :meth:`url_for`.
:param _name:
The route name to redirect to.
:param _permanent:
If True, uses a 301 redirect instead of a 302 redirect.
:param _abort:
If True, raises an exception to perform the redirect.
:param args:
Positional arguments to build the URL.
:param kwargs:
Keyword arguments to build the URL.
.. seealso:: :meth:`redirect` and :meth:`url_for`.
"""
url = self.url_for(_name, *args, **kwargs)
self.redirect(url, permanent=_permanent, abort=_abort)
def url_for(self, _name, *args, **kwargs):
"""Builds and returns a URL for a named :class:`Route`.
For example, if you have these routes defined for the application::
app = WSGIApplication([
Route(r'/', 'handlers.HomeHandler', 'home'),
Route(r'/wiki', WikiHandler, 'wiki'),
Route(r'/wiki/<page>', WikiHandler, 'wiki-page'),
])
Here are some examples of how to generate URLs inside a handler::
# /
url = self.url_for('home')
# http://localhost:8080/
url = self.url_for('home', _full=True)
# /wiki
url = self.url_for('wiki')
# http://localhost:8080/wiki
url = self.url_for('wiki', _full=True)
# http://localhost:8080/wiki#my-heading
url = self.url_for('wiki', _full=True, _anchor='my-heading')
# /wiki/my-first-page
url = self.url_for('wiki-page', page='my-first-page')
# /wiki/my-first-page?format=atom
url = self.url_for('wiki-page', page='my-first-page', format='atom')
:param _name:
The route name.
:param args:
Positional arguments to build the URL. All positional variables
defined in the route must be passed and must conform to the
format set in the route. Extra arguments are ignored.
:param kwargs:
Keyword arguments to build the URL. All variables not set in the
route default values must be passed and must conform to the format
set in the route. Extra keywords are appended as URL arguments.
A few keywords have special meaning:
- **_full**: If True, builds an absolute URL.
- **_scheme**: URL scheme, e.g., `http` or `https`. If defined,
an absolute URL is always returned.
- **_netloc**: Network location, e.g., `www.google.com`. If
defined, an absolute URL is always returned.
- **_anchor**: If set, appends an anchor to generated URL.
:returns:
An absolute or relative URL.
.. seealso:: :meth:`Router.build`.
"""
return self.app.router.build(_name, self.request, args, kwargs)
def get_config(self, module, key=None, default=REQUIRED_VALUE):
"""Returns a configuration value for a module.
.. seealso:: :meth:`Config.load_and_get`.
"""
return self.app.config.load_and_get(module, key=key, default=default)
def handle_exception(self, exception, debug_mode):
"""Called if this handler throws an exception during execution.
The default behavior is to raise the exception to be handled by
:meth:`WSGIApplication.handle_exception`.
:param exception:
The exception that was thrown.
:param debug_mode:
True if the web application is running in debug mode.
"""
raise
class RedirectHandler(RequestHandler):
"""Redirects to the given URL for all GET requests. This is meant to be
used when defining URL routes. You must provide at least the keyword
argument *url* in the route default values. Example::
def get_redirect_url(handler, *args, **kwargs):
return handler.url_for('new-route-name')
app = WSGIApplication([
Route(r'/old-url', RedirectHandler, defaults={'url': '/new-url'}),
Route(r'/other-old-url', RedirectHandler, defaults={'url': get_redirect_url}),
])
Based on idea from `Tornado`_.
"""
def get(self, *args, **kwargs):
"""Performs the redirect. Two keyword arguments can be passed through
the URL route:
- **url**: A URL string or a callable that returns a URL. The callable
is called passing ``(handler, *args, **kwargs)`` as arguments.
        - **permanent**: If True, uses a 301 redirect instead of a 302
          redirect. Default is True.
"""
url = kwargs.pop('url', '/')
permanent = kwargs.pop('permanent', True)
if callable(url):
url = url(self, *args, **kwargs)
self.redirect(url, permanent=permanent)
class Config(dict):
"""A simple configuration dictionary keyed by module name. This is a
dictionary of dictionaries. It requires all values to be dictionaries
and applies updates and default values to the inner dictionaries instead
of the first level one.
The configuration object is available as a ``config`` attribute of the
:class:`WSGIApplication`. If is instantiated and populated when the app is
built::
config = {}
config['my.module'] = {
'foo': 'bar',
}
app = WSGIApplication([('/', MyHandler)], config=config)
Then to read configuration values, use :meth:`RequestHandler.get_config`::
class MyHandler(RequestHandler):
def get(self):
foo = self.get_config('my.module', 'foo')
# ...
"""
#: Loaded module configurations.
loaded = None
def __init__(self, value=None, default=None, loaded=None):
"""Initializes the configuration object.
:param value:
A dictionary of configuration dictionaries for modules.
:param default:
A dictionary of configuration dictionaries for default values.
:param loaded:
A list of modules to be marked as loaded.
"""
self.loaded = loaded or []
if value is not None:
assert isinstance(value, dict)
for module in value.keys():
self.update(module, value[module])
if default is not None:
assert isinstance(default, dict)
for module in default.keys():
self.setdefault(module, default[module])
def __setitem__(self, module, value):
"""Sets a configuration for a module, requiring it to be a dictionary.
:param module:
A module name for the configuration, e.g.: 'webapp2.plugins.i18n'.
:param value:
A dictionary of configurations for the module.
"""
assert isinstance(value, dict)
super(Config, self).__setitem__(module, value)
def update(self, module, value):
"""Updates the configuration dictionary for a module.
        >>> cfg = Config({'webapp2.plugins.i18n': {'locale': 'pt_BR'}})
>>> cfg.get('webapp2.plugins.i18n', 'locale')
pt_BR
>>> cfg.get('webapp2.plugins.i18n', 'foo')
None
>>> cfg.update('webapp2.plugins.i18n', {'locale': 'en_US', 'foo': 'bar'})
>>> cfg.get('webapp2.plugins.i18n', 'locale')
en_US
>>> cfg.get('webapp2.plugins.i18n', 'foo')
bar
:param module:
The module to update the configuration, e.g.:
'webapp2.plugins.i18n'.
:param value:
A dictionary of configurations for the module.
:returns:
None.
"""
assert isinstance(value, dict)
if module not in self:
self[module] = {}
self[module].update(value)
def setdefault(self, module, value):
"""Sets a default configuration dictionary for a module.
        >>> cfg = Config({'webapp2.plugins.i18n': {'locale': 'pt_BR'}})
>>> cfg.get('webapp2.plugins.i18n', 'locale')
pt_BR
>>> cfg.get('webapp2.plugins.i18n', 'foo')
None
>>> cfg.setdefault('webapp2.plugins.i18n', {'locale': 'en_US', 'foo': 'bar'})
>>> cfg.get('webapp2.plugins.i18n', 'locale')
pt_BR
>>> cfg.get('webapp2.plugins.i18n', 'foo')
bar
:param module:
The module to set default configuration, e.g.:
'webapp2.plugins.i18n'.
:param value:
A dictionary of configurations for the module.
:returns:
None.
"""
assert isinstance(value, dict)
if module not in self:
self[module] = {}
for key in value.keys():
self[module].setdefault(key, value[key])
def get(self, module, key=None, default=None):
"""Returns a configuration value for given key in a given module.
        >>> cfg = Config({'webapp2.plugins.i18n': {'locale': 'pt_BR'}})
>>> cfg.get('webapp2.plugins.i18n')
{'locale': 'pt_BR'}
>>> cfg.get('webapp2.plugins.i18n', 'locale')
pt_BR
>>> cfg.get('webapp2.plugins.i18n', 'invalid-key')
None
>>> cfg.get('webapp2.plugins.i18n', 'invalid-key', 'default-value')
default-value
:param module:
The module to get a configuration from, e.g.:
'webapp2.plugins.i18n'.
:param key:
The key from the module configuration.
:param default:
A default value to return in case the configuration for
the module/key is not set.
:returns:
The configuration value.
"""
if module not in self:
return default
if key is None:
return self[module]
elif key not in self[module]:
return default
return self[module][key]
def load_and_get(self, module, key=None, default=REQUIRED_VALUE):
"""Returns a configuration value for a module. If it is not already
set, loads a ``default_config`` variable from the given module,
updates the app configuration with those default values and returns
the value for the given key. If the key is still not available,
returns the provided default value or raises an exception if no
default was provided.
Every module that allows some kind of configuration sets a
``default_config`` global variable that is loaded by this function,
cached and used in case the requested configuration was not defined
by the user.
:param module:
The configured module.
:param key:
The config key.
:param default:
A default value to return in case the configuration for
the module/key is not set.
:returns:
A configuration value.
"""
if module not in self.loaded:
# Load default configuration and update config.
values = import_string(module + '.default_config', silent=True)
if values:
self.setdefault(module, values)
self.loaded.append(module)
value = self.get(module, key, default)
if value is not REQUIRED_VALUE:
return value
if key is None:
raise KeyError('Module %s is not configured.' % module)
else:
raise KeyError('Module %s requires the config key "%s" to be '
'set.' % (module, key))
class BaseRoute(object):
"""Interface for URL routes. Custom routes must implement some or all
methods and attributes from this class.
"""
#: Route name, used to build URLs.
name = None
#: True if this route is only used for URL generation and never matches.
build_only = False
def match(self, request):
"""Matches this route against the current request.
:param request:
A ``webapp.Request`` instance.
:returns:
A tuple ``(handler, args, kwargs)`` if the route matches, or None.
"""
raise NotImplementedError()
def build(self, request, args, kwargs):
"""Builds and returns a URL for this route.
:param request:
The current ``Request`` object.
:param args:
Tuple of positional arguments to build the URL.
:param kwargs:
Dictionary of keyword arguments to build the URL.
:returns:
An absolute or relative URL.
"""
raise NotImplementedError()
def get_routes(self):
"""Generator to get all routes from a route.
:yields:
This route or all nested routes that it contains.
"""
yield self
def get_match_routes(self):
"""Generator to get all routes that can be matched from a route.
:yields:
This route or all nested routes that can be matched.
"""
if not self.build_only:
yield self
elif not self.name:
raise ValueError("Route %r is build_only but doesn't have a "
"name" % self)
def get_build_routes(self):
"""Generator to get all routes that can be built from a route.
:yields:
This route or all nested routes that can be built.
"""
if self.name is not None:
yield self
class SimpleRoute(BaseRoute):
"""A route that is compatible with webapp's routing. URL building is not
implemented as webapp has rudimentar support for it, and this is the most
unknown webapp feature anyway.
"""
def __init__(self, template, handler):
"""Initializes a URL route.
:param template:
A regex to be matched.
:param handler:
A :class:`RequestHandler` class or dotted name for a class to be
lazily imported, e.g., ``my.module.MyHandler``.
"""
self.template = template
self.handler = handler
# Lazy property.
self._regex = None
@property
def regex(self):
if self._regex is None:
if not self.template.startswith('^'):
self.template = '^' + self.template
if not self.template.endswith('$'):
self.template += '$'
self._regex = re.compile(self.template)
return self._regex
def match(self, request):
"""Matches this route against the current request.
.. seealso:: :meth:`BaseRoute.match`.
"""
match = self.regex.match(request.path)
if match:
return self.handler, match.groups(), {}
def __repr__(self):
return '<SimpleRoute(%r, %r)>' % (self.template, self.handler)
__str__ = __repr__
class Route(BaseRoute):
"""A URL route definition. A route template contains parts enclosed by
``<>`` and is used to match requested URLs. Here are some examples::
route = Route(r'/article/<id:[\d]+>', ArticleHandler)
route = Route(r'/wiki/<page_name:\w+>', WikiPageHandler)
route = Route(r'/blog/<year:\d{4}>/<month:\d{2}>/<day:\d{2}>/<slug:\w+>', BlogItemHandler)
Based on `Another Do-It-Yourself Framework`_, by Ian Bicking. We added
URL building, non-keyword variables and other improvements.
"""
def __init__(self, template, handler=None, name=None, defaults=None,
build_only=False):
"""Initializes a URL route.
:param template:
A route template to be matched, containing parts enclosed by ``<>``
that can have only a name, only a regular expression or both:
============================= ==================================
Format Example
============================= ==================================
``<name>`` ``r'/<year>/<month>'``
``<:regular expression>`` ``r'/<:\d{4}>/<:\d{2}>'``
``<name:regular expression>`` ``r'/<year:\d{4}>/<month:\d{2}>'``
============================= ==================================
If the name is set, the value of the matched regular expression
is passed as keyword argument to the :class:`RequestHandler`.
Otherwise it is passed as positional argument.
The same template can mix parts with name, regular expression or
both.
:param handler:
A :class:`RequestHandler` class or dotted name for a class to be
lazily imported, e.g., ``my.module.MyHandler``.
:param name:
The name of this route, used to build URLs based on it.
:param defaults:
Default or extra keywords to be returned by this route. Values
also present in the route variables are used to build the URL
when they are missing.
:param build_only:
If True, this route never matches and is used only to build URLs.
"""
self.template = template
self.handler = handler
self.name = name
self.defaults = defaults or {}
self.build_only = build_only
# Lazy properties.
self._regex = None
self._variables = None
self._reverse_template = None
def _parse_template(self):
self._variables = {}
last = count = 0
regex = template = ''
for match in _ROUTE_REGEX.finditer(self.template):
part = self.template[last:match.start()]
name = match.group(1)
expr = match.group(2) or '[^/]+'
last = match.end()
if not name:
name = '__%d__' % count
count += 1
template += '%s%%(%s)s' % (part, name)
regex += '%s(?P<%s>%s)' % (re.escape(part), name, expr)
self._variables[name] = re.compile('^%s$' % expr)
regex = '^%s%s$' % (regex, re.escape(self.template[last:]))
self._regex = re.compile(regex)
self._reverse_template = template + self.template[last:]
self.has_positional_variables = count > 0
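    # For illustration (a sketch, not exercised at runtime): the template
    # r'/<year:\d{4}>/<month:\d{2}>' parses to roughly the regex
    # ^/(?P<year>\d{4})/(?P<month>\d{2})$ and the reverse template
    # '/%(year)s/%(month)s', with no positional (unnamed) variables.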
@property
def regex(self):
if self._regex is None:
self._parse_template()
return self._regex
@property
def variables(self):
if self._variables is None:
self._parse_template()
return self._variables
@property
def reverse_template(self):
if self._reverse_template is None:
self._parse_template()
return self._reverse_template
def match(self, request):
"""Matches this route against the current request.
.. seealso:: :meth:`BaseRoute.match`.
"""
match = self.regex.match(request.path)
if match:
kwargs = self.defaults.copy()
kwargs.update(match.groupdict())
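            # Positional variables were captured under synthetic group names
            # '__0__', '__1__', ...; pull them out of kwargs in index order to
            # rebuild the positional args tuple.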
if kwargs and self.has_positional_variables:
args = tuple(value[1] for value in sorted((int(key[2:-2]), \
kwargs.pop(key)) for key in \
kwargs.keys() if key.startswith('__')))
else:
args = ()
return self.handler, args, kwargs
def build(self, request, args, kwargs):
"""Builds a URL for this route.
.. seealso:: :meth:`Router.build`.
"""
full = kwargs.pop('_full', False)
scheme = kwargs.pop('_scheme', None)
netloc = kwargs.pop('_netloc', None)
anchor = kwargs.pop('_anchor', None)
if full or scheme or netloc:
if not netloc:
netloc = request.host
if not scheme:
scheme = 'http'
path, query = self._build(args, kwargs)
return urlunsplit(scheme, netloc, path, query, anchor)
def _build(self, args, kwargs):
"""Builds the path for this route.
:returns:
A tuple ``(path, kwargs)`` with the built URL path and extra
keywords to be used as URL query arguments.
"""
variables = self.variables
if self.has_positional_variables:
for index, value in enumerate(args):
key = '__%d__' % index
if key in variables:
kwargs[key] = value
values = {}
for name, regex in variables.iteritems():
value = kwargs.pop(name, self.defaults.get(name))
if not value:
raise KeyError('Missing argument "%s" to build URL.' % \
name.strip('_'))
if not isinstance(value, basestring):
value = str(value)
if not regex.match(value):
                raise ValueError('URL building error: Value "%s" is not '
                    'supported for argument "%s".' % (value, name.strip('_')))
values[name] = value
return (self.reverse_template % values, kwargs)
def __repr__(self):
return '<Route(%r, %r, name=%r, defaults=%r, build_only=%r)>' % \
(self.template, self.handler, self.name, self.defaults,
self.build_only)
__str__ = __repr__
class Router(object):
"""A simple URL router used to match the current URL, dispatch the handler
and build URLs for other resources.
"""
#: Class used when the route is a tuple. Default is compatible with webapp.
route_class = SimpleRoute
def __init__(self, routes=None):
"""Initializes the router.
:param routes:
A list of :class:`Route` instances to initialize the router.
"""
# Handler classes imported lazily.
self._handlers = {}
# All routes that can be matched.
self.match_routes = []
# All routes that can be built.
self.build_routes = {}
if routes:
for route in routes:
self.add(route)
def add(self, route):
"""Adds a route to this router.
:param route:
A :class:`Route` instance.
"""
if isinstance(route, tuple):
# Simple route, compatible with webapp.
route = self.route_class(*route)
for r in route.get_match_routes():
self.match_routes.append(r)
for r in route.get_build_routes():
self.build_routes[r.name] = r
def match(self, request):
"""Matches all routes against the current request. The first one that
matches is returned.
:param request:
A ``webapp.Request`` instance.
:returns:
A tuple ``(route, args, kwargs)`` if a route matched, or None.
"""
for route in self.match_routes:
match = route.match(request)
if match:
return match
def dispatch(self, app, request, response, match):
"""Dispatches a request. This calls the :class:`RequestHandler` from
the matched :class:`Route`.
:param app:
A :class:`WSGIApplication` instance.
:param request:
A ``webapp.Request`` instance.
:param response:
A :class:`Response` instance.
:param match:
A tuple ``(handler, args, kwargs)``, resulted from the matched
route.
"""
handler_class, args, kwargs = match
if isinstance(handler_class, basestring):
if handler_class not in self._handlers:
self._handlers[handler_class] = import_string(handler_class)
handler_class = self._handlers[handler_class]
try:
handler = handler_class(app, request, response)
        except TypeError:
# Support webapp's initialize().
handler = handler_class()
handler.initialize(request, response)
try:
handler(request.method.lower(), *args, **kwargs)
except Exception, e:
# If the handler implements exception handling,
# let it handle it.
handler.handle_exception(e, app.debug)
def build(self, name, request, args, kwargs):
"""Builds and returns a URL for a named :class:`Route`.
:param name:
The route name.
:param request:
The current ``Request`` object.
:param args:
Tuple of positional arguments to build the URL.
:param kwargs:
Dictionary of keyword arguments to build the URL.
:returns:
An absolute or relative URL.
.. seealso:: :meth:`RequestHandler.url_for`.
"""
route = self.build_routes.get(name)
if not route:
raise KeyError('Route "%s" is not defined.' % name)
return route.build(request, args, kwargs)
def __repr__(self):
routes = self.match_routes + [v for k, v in \
self.build_routes.iteritems() if v not in self.match_routes]
return '<Router(%r)>' % routes
__str__ = __repr__
class WSGIApplication(object):
"""Wraps a set of webapp RequestHandlers in a WSGI-compatible application.
To use this class, pass a list of tuples ``(regex, RequestHandler class)``
or :class:`Route` instances to the constructor, and pass the class instance
to a WSGI handler. Example::
from webapp2 import RequestHandler, WSGIApplication
class HelloWorldHandler(RequestHandler):
def get(self):
self.response.out.write('Hello, World!')
app = WSGIApplication([
(r'/', HelloWorldHandler),
])
def main():
app.run()
if __name__ == '__main__':
main()
The URL mapping is first-match based on the list ordering. Items in the
list can also be an object that implements the method ``match(request)``.
The provided class :class:`Route` is a route implementation that allows
reversible URLs and keyword arguments passed to the handler. Example::
app = WSGIApplication([
Route(r'/articles', ArticlesHandler, 'articles'),
Route(r'/articles/<id:[\d]+>', ArticleHandler, 'article'),
])
.. seealso:: :class:`Route`.
"""
#: Default class used for the request object.
request_class = Request
#: Default class used for the response object.
response_class = Response
#: Default class used for the router object.
router_class = Router
#: Default class used for the config object.
config_class = Config
#: A dictionary mapping HTTP error codes to :class:`RequestHandler`
#: classes used to handle them. The handler set for status 500 is used
#: as default if others are not set.
error_handlers = {}
def __init__(self, routes=None, debug=False, config=None):
"""Initializes the WSGI application.
:param routes:
List of URL definitions as tuples ``(route, RequestHandler class)``.
:param debug:
True if this is debug mode, False otherwise.
:param config:
A configuration dictionary for the application.
"""
self.debug = debug
self.router = self.router_class(routes)
self.config = self.config_class(config)
# For compatibility with webapp only. Don't use it!
WSGIApplication.active_instance = self
def __call__(self, environ, start_response):
"""Called by WSGI when a request comes in. Calls :meth:`wsgi_app`."""
return self.wsgi_app(environ, start_response)
def wsgi_app(self, environ, start_response):
"""This is the actual WSGI application. This is not implemented in
:meth:`__call__` so that middlewares can be applied without losing a
reference to the class. So instead of doing this::
app = MyMiddleware(app)
It's a better idea to do this instead::
app.wsgi_app = MyMiddleware(app.wsgi_app)
Then you still have the original application object around and
can continue to call methods on it.
This idea comes from `Flask`_.
:param environ:
A WSGI environment.
:param start_response:
A callable accepting a status code, a list of headers and an
optional exception context to start the response.
"""
try:
# For compatibility with webapp only. Don't use it!
WSGIApplication.active_instance = self
self.request = request = self.request_class(environ)
response = self.response_class()
if request.method not in ALLOWED_METHODS:
# 501 Not Implemented.
raise webob.exc.HTTPNotImplemented()
# match is (route, args, kwargs)
match = self.router.match(request)
if match:
self.router.dispatch(self, request, response, match)
else:
# 404 Not Found.
raise webob.exc.HTTPNotFound()
except Exception, e:
try:
self.handle_exception(request, response, e)
except webob.exc.WSGIHTTPException, e:
# Use the exception as response.
response = e
except Exception, e:
# Our last chance to handle the error.
if self.debug:
raise
# 500 Internal Server Error: nothing else to do.
response = webob.exc.HTTPInternalServerError()
finally:
self.request = None
return response(environ, start_response)
def handle_exception(self, request, response, e):
"""Handles an exception. Searches :attr:`error_handlers` for a handler
with the error code, if it is a :class:`HTTPException`, or the 500
status code as fall back. Dispatches the handler if found, or re-raises
the exception to be caught by :class:`WSGIApplication`.
:param request:
A ``webapp.Request`` instance.
:param response:
A :class:`Response` instance.
:param e:
The raised exception.
"""
logging.exception(e)
if self.debug:
raise
if isinstance(e, HTTPException):
code = e.code
else:
code = 500
handler = self.error_handlers.get(code) or self.error_handlers.get(500)
if handler:
# Handle the exception using a custom handler.
handler(self, request, response)('get', exception=e)
else:
# No exception handler. Catch it in the WSGI app.
raise
def url_for(self, _name, *args, **kwargs):
"""Builds and returns a URL for a named :class:`Route`.
.. seealso:: :meth:`RequestHandler.url_for` and :meth:`Router.build`.
"""
return self.router.build(_name, self.request, args, kwargs)
def get_config(self, module, key=None, default=REQUIRED_VALUE):
"""Returns a configuration value for a module.
.. seealso:: :meth:`Config.load_and_get`.
"""
return self.config.load_and_get(module, key=key, default=default)
def run(self, bare=False):
"""Runs the app using ``google.appengine.ext.webapp.util.run_wsgi_app``.
This is generally called inside a ``main()`` function of the file
mapped in *app.yaml* to run the application::
# ...
app = WSGIApplication([
Route(r'/', HelloWorldHandler),
])
def main():
app.run()
if __name__ == '__main__':
main()
:param bare:
If True, uses ``run_bare_wsgi_app`` instead of ``run_wsgi_app``,
which doesn't add WSGI middleware.
"""
if bare:
run_bare_wsgi_app(self)
else:
run_wsgi_app(self)
def abort(code, *args, **kwargs):
"""Raises an ``HTTPException``. The exception is instantiated passing
*args* and *kwargs*.
:param code:
A valid HTTP error code from ``webob.exc.status_map``, a dictionary
mapping status codes to subclasses of ``HTTPException``.
:param args:
Arguments to be used to instantiate the exception.
:param kwargs:
Keyword arguments to be used to instantiate the exception.
"""
cls = webob.exc.status_map.get(code)
if not cls:
raise KeyError('No exception is defined for code %r.' % code)
raise cls(*args, **kwargs)
def get_valid_methods(handler):
"""Returns a list of HTTP methods supported by a handler.
:param handler:
A :class:`RequestHandler` instance.
:returns:
A list of HTTP methods supported by the handler.
"""
return [m for m in ALLOWED_METHODS if getattr(handler, m.lower(), None)]
def import_string(import_name, silent=False):
"""Imports an object based on a string. If *silent* is True the return
value will be None if the import fails.
Simplified version of the function with same name from `Werkzeug`_.
:param import_name:
The dotted name for the object to import.
:param silent:
If True, import errors are ignored and None is returned instead.
:returns:
The imported object.
"""
import_name = to_utf8(import_name)
try:
if '.' in import_name:
module, obj = import_name.rsplit('.', 1)
return getattr(__import__(module, None, None, [obj]), obj)
else:
return __import__(import_name)
except (ImportError, AttributeError):
if not silent:
raise
def url_escape(value):
"""Returns a valid URL-encoded version of the given value.
This function comes from `Tornado`_.
:param value:
A URL to be encoded.
:returns:
The encoded URL.
"""
return urllib.quote_plus(to_utf8(value))
def url_unescape(value):
"""Decodes the given value from a URL.
This function comes from `Tornado`_.
:param value:
A URL to be decoded.
:returns:
The decoded URL.
"""
return to_unicode(urllib.unquote_plus(value))
def to_utf8(value):
"""Returns a string encoded using UTF-8.
This function comes from `Tornado`_.
:param value:
A unicode or string to be encoded.
:returns:
The encoded string.
"""
if isinstance(value, unicode):
return value.encode('utf-8')
assert isinstance(value, str)
return value
def to_unicode(value):
"""Returns a unicode string from a string, using UTF-8 to decode if needed.
This function comes from `Tornado`_.
:param value:
A unicode or string to be decoded.
:returns:
The decoded string.
"""
if isinstance(value, str):
return value.decode('utf-8')
assert isinstance(value, unicode)
return value
def urlunsplit(scheme=None, netloc=None, path=None, query=None, fragment=None):
"""Similar to ``urlparse.urlunsplit``, but will escape values and
urlencode and sort query arguments.
:param scheme:
URL scheme, e.g., `http` or `https`.
:param netloc:
Network location, e.g., `localhost:8080` or `www.google.com`.
:param path:
URL path.
:param query:
URL query as an escaped string, or a dictionary or list of key-values
tuples to build a query.
:param fragment:
Fragment identifier, also known as "anchor".
:returns:
An assembled absolute or relative URL.
"""
if not scheme or not netloc:
scheme = None
netloc = None
if path:
path = urllib.quote_plus(to_utf8(path), '/')
if query and not isinstance(query, basestring):
if isinstance(query, dict):
query = query.items()
query_args = []
for key, values in query:
if isinstance(values, basestring):
values = (values,)
for value in values:
query_args.append((to_utf8(key), to_utf8(value)))
# Sorting should be optional? Sorted args are commonly needed to build
# URL signatures for services.
query_args.sort()
query = urllib.urlencode(query_args)
if fragment:
fragment = url_escape(fragment)
return urlparse.urlunsplit((scheme, netloc, path, query, fragment))
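# For illustration (hypothetical values): urlunsplit(scheme='http',
# netloc='localhost:8080', path='/wiki', query={'q': 'foo'}, fragment='top')
# returns roughly 'http://localhost:8080/wiki?q=foo#top'.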
| melmothx/jsonbot | jsb/upload/webapp2/__init__.py | Python | mit | 43,235 |
from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
| BowdoinOrient/bongo | bongo/apps/frontend/tests/templatetags_tests.py | Python | mit | 1,164 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Entry.description'
db.add_column('challenge_entry', 'description',
self.gf('django.db.models.fields.TextField')(default='NO DESCRIPTION'),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Entry.description'
db.delete_column('challenge_entry', 'description')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Permission']", 'symmetrical': 'False'})
},
'auth.permission': {
'Meta': {'object_name': 'Permission', 'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)"},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'blank': 'True', 'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Group']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'user_set'", 'blank': 'True', 'to': "orm['auth.Permission']", 'symmetrical': 'False'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'challenge.challenge': {
'Meta': {'object_name': 'Challenge'},
'duration': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'null': 'True', 'max_length': '256'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coder.Coder']"})
},
'challenge.challengecomment': {
'Meta': {'object_name': 'ChallengeComment'},
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge.Challenge']"}),
'coder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coder.Coder']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 3, 13, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'challenge.entry': {
'Meta': {'object_name': 'Entry'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 3, 13, 0, 0)'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge.Participant']"}),
'thefile': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
},
'challenge.entrycomment': {
'Meta': {'object_name': 'EntryComment'},
'coder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coder.Coder']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 3, 13, 0, 0)'}),
'entry': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge.Entry']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'challenge.entryscreenshot': {
'Meta': {'object_name': 'EntryScreenshot'},
'entry': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge.Entry']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pic': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'})
},
'challenge.participant': {
'Meta': {'object_name': 'Participant', 'unique_together': "(('coder', 'challenge'),)"},
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge.Challenge']"}),
'coder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['coder.Coder']"}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2014, 3, 13, 0, 0)'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_owner': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'challenge.rule': {
'Meta': {'object_name': 'Rule'},
'challenge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['challenge.Challenge']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'coder.coder': {
'Meta': {'object_name': 'Coder'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '256'}),
'tagline': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '1024'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'xp': ('django.db.models.fields.BigIntegerField', [], {'default': '0'})
},
'contenttypes.contenttype': {
'Meta': {'db_table': "'django_content_type'", 'object_name': 'ContentType', 'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['challenge'] | mavroskardia/codechallenge | cc/apps/challenge/migrations/0017_auto__add_field_entry_description.py | Python | mit | 8,193 |
"""Author: Michal Zmuda
Copyright (C) 2015 ACK CYFRONET AGH
This software is released under the MIT license cited in 'LICENSE.txt'
Brings up a set of cluster-worker nodes. They can create separate clusters.
"""
import os
from . import docker, common, worker, gui, panel, location_service_bootstrap
def up(image, bindir, dns_server, uid, config_path, logdir=None,
dnsconfig_path=None, storages_dockers=None, luma_config=None):
if dnsconfig_path is None:
config = common.parse_json_config_file(config_path)
input_dir = config['dirs_config']['oz_worker']['input_dir']
dnsconfig_path = os.path.join(os.path.abspath(bindir), input_dir,
'data', 'dns.config')
return worker.up(image, bindir, dns_server, uid, config_path,
OZWorkerConfigurator(dnsconfig_path), logdir)
class OZWorkerConfigurator:
def __init__(self, dnsconfig_path):
self.dnsconfig_path = dnsconfig_path
def tweak_config(self, cfg, uid, instance):
sys_config = cfg['nodes']['node']['sys.config'][self.app_name()]
sys_config['external_ip'] = {'string': 'IP_PLACEHOLDER'}
if 'location_service_bootstrap_nodes' in sys_config:
sys_config['location_service_bootstrap_nodes'] = map(lambda name:
location_service_bootstrap.format_if_test_node(name, uid),
sys_config['location_service_bootstrap_nodes'])
if 'http_domain' in sys_config:
domain = worker.cluster_domain(instance, uid)
sys_config['http_domain'] = {'string': domain}
if 'onepanel_rest_url' in sys_config:
rest_url = sys_config['onepanel_rest_url']
port = rest_url['port']
protocol = rest_url['protocol']
node_name, _sep, instance = rest_url['domain'].partition('.')
panel_hostname = panel.panel_hostname(node_name, instance, uid)
sys_config["onepanel_rest_url"] = {
'string': "{0}://{1}:{2}".format(protocol, panel_hostname, port)
}
return cfg
# Called BEFORE the instance (cluster of workers) is started,
# once for every instance
def pre_configure_instance(self, instance, instance_domain, config):
this_config = config[self.domains_attribute()][instance]
if 'gui_override' in this_config and isinstance(
this_config['gui_override'], dict):
# Preconfigure GUI override
gui_config = this_config['gui_override']
gui.override_gui(gui_config, instance_domain)
# Called AFTER the instance (cluster of workers) has been started
def post_configure_instance(self, bindir, instance, config, container_ids,
output, storages_dockers=None,
luma_config=None):
this_config = config[self.domains_attribute()][instance]
# Check if gui livereload is enabled in env and turn it on
if 'gui_override' in this_config and isinstance(
this_config['gui_override'], dict):
gui_config = this_config['gui_override']
livereload_flag = gui_config['livereload']
if livereload_flag:
for container_id in container_ids:
livereload_dir = gui_config['mount_path']
gui.run_livereload(container_id, livereload_dir)
def pre_start_commands(self, domain):
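        # Shell run in the container before start: substitute the container's
        # eth0 address for the IP placeholder, generate the release config and
        # point dns.config at this cluster's domain.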
return '''
sed -i.bak s/\"IP_PLACEHOLDER\"/\"`ip addr show eth0 | grep "inet\\b" | awk '{{print $2}}' | cut -d/ -f1`\"/g /tmp/gen_dev_args.json
escript bamboos/gen_dev/gen_dev.escript /tmp/gen_dev_args.json
mkdir -p /root/bin/node/data/
touch /root/bin/node/data/dns.config
sed -i.bak s/onedata.org/{domain}/g /root/bin/node/data/dns.config
'''.format(domain=domain)
def extra_volumes(self, config, bindir, instance_domain, storages_dockers):
extra_volumes = []
# Check if gui override is enabled in env and add required volumes
if 'gui_override' in config and isinstance(config['gui_override'],
dict):
gui_config = config['gui_override']
extra_volumes.extend(gui.extra_volumes(gui_config, instance_domain))
return extra_volumes
def couchbase_ramsize(self):
return 1024
def couchbase_buckets(self):
return {"default": 512, "location_service": 100}
def app_name(self):
return "oz_worker"
def domains_attribute(self):
return "zone_domains"
def domain_env_name(self):
return "zone_domain"
def nodes_list_attribute(self):
return "oz_worker_nodes"
| onedata/web-client | bamboos/docker/environment/zone_worker.py | Python | mit | 4,712 |
# Bryan Barrows
# CSC 110 - Winter 17
# suess.py
# As I was exploring the chapter on graphics, there was an excellent example of "Red Fish, Blue Fish",
# which I took the liberty of playing with and exploring.
# I found it really cool how by defining so many variables within our drawFish method, it is possible
# to simply call the drawFish method with a number of parameters.
# A fun, simple demonstration of the power of modularity.
from graphics import *
def main():
win = GraphWin('Seuss')
drawFish(win, 25, 25, 85, 55, label="One Fish")
drawFish(win, 35, 110, 75, 140)
drawFish(win, 50, 150, 75, 170, label="Two Fish" )
drawFish(win, 145, 35, 170, 55, 'red', "Red Fish" )
drawFish(win, 125, 110, 175, 145, 'blue', "Blue Fish")
def drawFish(win,p1x, p1y, p2x, p2y,color=None, label=None):
p1 = Point(p1x, p1y)
p2 = Point(p2x, p2y)
fish = Oval(p1,p2)
diffX = p2x-p1x
diffY = p2y-p1y
fishEye = Point((p1x + diffX/6),(p1y +(diffY/2) - diffY/6))
fishTail = Polygon(Point(p2x, p1y+diffY/2), Point(p2x+diffX/6, p1y), Point(p2x+diffX/6, p1y+diffY))
fish.setFill(color)
fishTail.setFill(color)
caption = Text(Point(p1x + 20, p1y + diffY + 10), label)
fish.draw(win)
fishEye.draw(win)
fishTail.draw(win)
caption.draw(win)
main()
| bbarrows89/CSC110_Projects | suess.py | Python | mit | 1,314 |
import pygame, random
from pygame.locals import *
CONST_WIDTH = 640 #* 2
CONST_HEIGHT = 480 #* 2
CONST_BOX_WIDTH = 8
CONST_BOX_HEIGHT = 8
#Colors
BackgroundBarva = (0,0,0,0)
BARVE = []
Bela = (255,255,255,255)
BARVE.append(Bela)
Crna = (0,0,0,255)
BARVE.append(Crna)
Rdeca = (255,0,0,255)
BARVE.append(Rdeca)
Modra = (0,0,255,255)
BARVE.append(Modra)
Rumena = (255,255,0,255)
BARVE.append(Rumena)
Oranzna = (255,128,0,255)
BARVE.append(Oranzna)
Zelena = (0,255,0,255)
BARVE.append(Zelena)
Roza=(255,0,255, 255)
BARVE.append(Roza)
Vijolicna= (127,0,255,255)
BARVE.append(Vijolicna)
class Labirint:
def __init__(self, labPodlaga, resitevPodlaga):
self.state = 'create'
self.labArr = []
self.lPodlaga = labPodlaga
self.lPodlaga.fill(BackgroundBarva) #fill with black
self.resPodlaga = resitevPodlaga # surface
self.resPodlaga.fill(BackgroundBarva)
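        # labArr packs each cell's state into a 16-bit field:
        #   bits 0-3   (0x000F) walls carved open from this cell (W, S, E, N)
        #   bits 4-7   (0x00F0) cell already belongs to the maze (Prim's)
        #   bits 8-11  (0x0F00) direction taken on the current solution path
        #   bits 12-15 (0xF000) direction the cell was entered from while solving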
for i in range(CONST_HEIGHT/CONST_BOX_HEIGHT):
pygame.draw.line(self.lPodlaga, Crna, (0, i * CONST_BOX_HEIGHT), (CONST_WIDTH, i * CONST_BOX_HEIGHT))
for j in range(CONST_WIDTH/CONST_BOX_WIDTH):
self.labArr.append(0x0000)
if (i == 0):
pygame.draw.line(self.lPodlaga, Crna, (j * CONST_BOX_WIDTH, 0), (j * CONST_BOX_WIDTH, CONST_HEIGHT))
pygame.draw.rect(self.resPodlaga, Modra, Rect(0,0,CONST_BOX_WIDTH, CONST_BOX_HEIGHT))
pygame.draw.rect(self.resPodlaga, Modra, Rect((CONST_WIDTH-CONST_BOX_WIDTH),(CONST_HEIGHT - CONST_BOX_HEIGHT),CONST_BOX_WIDTH, CONST_BOX_HEIGHT))
self.vseCelice = (CONST_HEIGHT/CONST_BOX_HEIGHT) * (CONST_WIDTH/ CONST_BOX_WIDTH)
self.stackCelic = []
self.trenutnaCelica = random.randint(0, self.vseCelice-1)
self.obiskaneCelice = 1
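        # Neighbor offsets in (dx, dy): West, South, East, North;
        # index i corresponds to direction bit (1 << i) in labArr.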
self.smeri = [(-1,0), (0,1), (1,0), (0,-1)]
        # Prim's algorithm
self.zidovi = []
self.labArr[self.trenutnaCelica] |= 0x00F0 #part of the maze
self.zidovi.append((self.trenutnaCelica,0))
self.zidovi.append((self.trenutnaCelica,1))
self.zidovi.append((self.trenutnaCelica,2))
self.zidovi.append((self.trenutnaCelica,3))
def update(self):
if self.state == 'idle':
print "IDLE"
elif self.state == 'create':
#while loop
if self.obiskaneCelice >= self.vseCelice:
self.trenutnaCelica = 0 #pos top-left
self.stackCelic = []
self.state = 'solve'
return
moved = False
            while (self.obiskaneCelice < self.vseCelice): # use "while moved == False:" instead to watch the maze generate step by step
                x = self.trenutnaCelica % (CONST_WIDTH/CONST_BOX_WIDTH)
                y = self.trenutnaCelica / (CONST_WIDTH/CONST_BOX_WIDTH)
#find all neighbors with walls
sosedje = []
for i in range(len(self.smeri)):
nx = x + self.smeri[i][0]
ny = y + self.smeri[i][1]
#Check the borders
if ((nx >= 0) and (ny >= 0) and (nx < CONST_WIDTH/CONST_BOX_WIDTH) and (ny < CONST_HEIGHT/CONST_BOX_HEIGHT)):
if (self.labArr[(ny * CONST_WIDTH/CONST_BOX_WIDTH + nx )] & 0x000F) == 0: #visited -> checked in binary
nidx = ny * CONST_WIDTH/CONST_BOX_WIDTH + nx
sosedje.append((nidx, 1<<i))
if len(sosedje) > 0:
                    # choose a random neighbor
idx = random.randint(0, len(sosedje)-1)
nidx, direction = sosedje[idx]
#knock down the wall
dx = x * CONST_BOX_WIDTH
dy = y * CONST_BOX_HEIGHT
b = CONST_BOX_HEIGHT
if direction & 1: # if direction is West
self.labArr[nidx] |= (4) # if direction is East
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx, dy + b/8), (dx, dy +(b*7/8))) #BackgroundBarva to match Background
elif direction & 2: # if direction is South
self.labArr[nidx] |= (8) # if direction is North
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx+b/8, dy+b), (dx+(b*7/8), dy+b))
elif direction & 4: # if direction is East
self.labArr[nidx] |= (1) # if direction is West
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx+b, dy+b/8), (dx+b, dy+(b*7/8)))
elif direction & 8: # if direction is North
self.labArr[nidx] |= (2) # if direction is South
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx+b/8, dy), (dx+(b*7/8), dy))
self.labArr[self.trenutnaCelica] |= direction
self.stackCelic.append(self.trenutnaCelica)
self.trenutnaCelica = nidx
self.obiskaneCelice = self.obiskaneCelice + 1
moved = True
else:
self.trenutnaCelica = self.stackCelic.pop()
elif self.state == 'solve':
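            # Solve with a depth-first search from the top-left cell:
            # colored squares trace the current path, red squares mark dead ends.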
if self.trenutnaCelica == (self.vseCelice-1):
self.state = 'reset'
return
moved = False
            while (moved == False): # use "while self.trenutnaCelica < (self.vseCelice-1):" instead to solve instantly
                x = self.trenutnaCelica % (CONST_WIDTH/CONST_BOX_WIDTH)
                y = self.trenutnaCelica / (CONST_WIDTH/CONST_BOX_WIDTH)
sosedje = []
directions = self.labArr[self.trenutnaCelica] & 0xF
for i in range(len(self.smeri)):
if (directions & (1 << i)) > 0:
nx = x + self.smeri[i][0]
ny = y + self.smeri[i][1]
#Check the borders
if ((nx >= 0) and (ny >= 0) and (nx < CONST_WIDTH/CONST_BOX_WIDTH) and (ny < CONST_HEIGHT/CONST_BOX_HEIGHT)):
nidx = ny * CONST_WIDTH/CONST_BOX_WIDTH + nx
if ((self.labArr[nidx] & 0xFF00) == 0): #check there's no bactrack or solution
sosedje.append((nidx,1 << i))
if (len(sosedje) > 0):
idx = random.randint(0, len(sosedje)-1)
nidx, direction = sosedje[idx]
dx = x * CONST_BOX_WIDTH
dy = y * CONST_BOX_HEIGHT
#set the opposite wall of the neighbor
if direction & 1:
self.labArr[nidx] |= (4 << 12)
elif direction & 2:
self.labArr[nidx] |= (8 << 12)
elif direction & 4:
self.labArr[nidx] |= (1 << 12)
elif direction & 8:
self.labArr[nidx] |= (2 << 12)
#Draw a green square
                    RadnBarva = BARVE[random.randint(1,len(BARVE)-1)] # from 1 up because you don't want white walls
pygame.draw.rect(self.resPodlaga, RadnBarva, Rect(dx,dy, CONST_BOX_WIDTH, CONST_BOX_HEIGHT))
self.labArr[self.trenutnaCelica] |= direction << 8
self.stackCelic.append(self.trenutnaCelica)
self.trenutnaCelica = nidx
moved = True
else:
#Draw red square
pygame.draw.rect(self.resPodlaga, Rdeca, Rect((x*CONST_BOX_WIDTH), (y*CONST_BOX_HEIGHT), CONST_BOX_WIDTH, CONST_BOX_HEIGHT))
# Not a solution, so AND the bit to take away the solution bit
self.labArr[self.trenutnaCelica] &= 0xF0FF
self.trenutnaCelica = self.stackCelic.pop()
elif self.state == 'prim':
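            # Randomized Prim's: pick a random wall off the frontier list and
            # knock it down whenever the far cell is not yet part of the maze.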
if len(self.zidovi) <= 0:
self.trenutnaCelica = 0
self.stackCelic = []
self.state = 'solve'
return
moved = False
            while (len(self.zidovi) > 0): # use "while moved == False:" instead to watch the maze generate step by step
zid = random.randint(0, len(self.zidovi)-1)
self.trenutnaCelica = self.zidovi[zid][0]
                x = self.trenutnaCelica % (CONST_WIDTH/CONST_BOX_WIDTH)
                y = self.trenutnaCelica / (CONST_WIDTH/CONST_BOX_WIDTH)
smer = self.zidovi[zid][1]
nx = x + self.smeri[smer][0]
ny = y + self.smeri[smer][1]
nidx = ny*CONST_WIDTH/CONST_BOX_WIDTH + nx
dx = x * CONST_BOX_WIDTH
dy = y * CONST_BOX_HEIGHT
direction = 1 << smer
b = CONST_BOX_HEIGHT
if ((nx>=0) and (ny>=0) and (nx< CONST_WIDTH/CONST_BOX_WIDTH) and (ny < CONST_HEIGHT/CONST_BOX_HEIGHT)):
if (self.labArr[nidx] & 0x00F0) == 0:
if direction & 1: # if direction is West
self.labArr[nidx] |= (4) # if direction is East
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx, dy + b/8), (dx, dy +(b*7/8)))
elif direction & 2: # if direction is South
self.labArr[nidx] |= (8) # if direction is North
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx+b/8, dy+b), (dx+(b*7/8), dy+b))
elif direction & 4: # if direction is East
self.labArr[nidx] |= (1) # if direction is West
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx+b, dy+b/8), (dx+b, dy+(b*7/8)))
elif direction & 8: # if direction is North
self.labArr[nidx] |= (2) # if direction is South
pygame.draw.line(self.lPodlaga, BackgroundBarva, (dx+b/8, dy), (dx+(b*7/8), dy))
self.labArr[self.trenutnaCelica] |= direction
                        self.labArr[nidx] |= 0x00F0 # mark as a path of the maze (keep carved-direction bits)
#add walls
self.zidovi.append((nidx, 0))
self.zidovi.append((nidx, 1))
self.zidovi.append((nidx, 2))
self.zidovi.append((nidx, 3))
moved = True
self.zidovi.remove(self.zidovi[zid])
elif self.state == 'reset':
self.__init__(self.lPodlaga,self.resPodlaga)
def narisi(self, screen):
screen.blit(self.resPodlaga, (0,0))
screen.blit(self.lPodlaga, (0,0))
#Main function
def main():
pygame.init() #initialization
#Create display and set dimensions on 640X480
screen = pygame.display.set_mode((CONST_WIDTH,CONST_HEIGHT))
pygame.display.set_caption('Labirint')
pygame.mouse.set_visible(0)
#Background
bacground = pygame.Surface(screen.get_size()) #get size of screen
bacground = bacground.convert()
bacground.fill((255,255,255))
resitevPodlaga = pygame.Surface(screen.get_size())
resitevPodlaga = resitevPodlaga.convert_alpha()
resitevPodlaga.fill((0,0,0,0))
labPodlaga = pygame.Surface(screen.get_size())
labPodlaga = labPodlaga.convert_alpha() #give some alpha values
labPodlaga.fill((0,0,0,0))
lab = Labirint(labPodlaga, resitevPodlaga)
screen.blit(bacground, (0,0))
pygame.display.flip()
clock = pygame.time.Clock()
while True:
clock.tick(60) #60 fps
for event in pygame.event.get(): #goes thru events
#quits if escape is clicked
if event.type == QUIT:
return
elif event.type == KEYDOWN:
if event.key == K_ESCAPE:
return
lab.update()
screen.blit(bacground, (0,0))
lab.narisi(screen)
pygame.display.flip()
#return
if __name__ == '__main__': main() # when python starts, run the main function
###########################################################################################
# Author: Josh Joseph [email protected]
# 4/29/16
# This is the main server file for PCR hero....
from bottle import route, run, template, get, post, request, response, redirect, static_file
import m3
import os
pcrDB = m3.get_db("pcrhero")
HASHWORD = 'applesauce'
HOSTIP = 'http://www.pcrhero.org:8000'
HOMEDIR = '/home/ubuntu/pythonproject/'
###########################################################################################
### File get path functions -- This section can be cleaned up if all file requests are listed
### with their appropriate file path after the root directory... #TODO
############################################################################################
@get('/static/<filename:path>')
def static(filename):
return static_file(filename, root='/home/ubuntu/pythonproject/static/')
##This is a filepath to static addresses on the site. You will need to use an appropriate
##address (or a system link for security purposes) when using on a different host
@get('/badges/<filename:path>')
def badge(filename):
return static_file(filename, root='/home/ubuntu/pythonproject/badges/')
##This is a filepath to static addresses on the site. You will need to use an appropriate
##address (or a system link for security purposes) when using on a different host
@get('/issuers/<filename:path>')
def issuer(filename):
return static_file(filename, root='/home/ubuntu/pythonproject/issuers/')
##This is a filepath to static addresses on the site. You will need to use an appropriate
##address (or a system link for security purposes) when using on a different host
@get('/users/<filename:path>')
def issuer(filename):
return static_file(filename, root='/home/ubuntu/pythonproject/users/')
##This is a filepath to static addresses on the site. You will need to use an appropriate
##address (or a system link for security purposes) when using on a different host
@get('/images/<filename:path>')
def image(filename):
return static_file(filename, root='/home/ubuntu/pythonproject/images/')
##This is a filepath to static addresses on the site. You will need to use an appropriate
##address (or a system link for security purposes) when using on a different host
@get('/criteria/<filename:path>')
def criteria(filename):
return static_file(filename, root='/home/ubuntu/pythonproject/criteria/')
##This is a filepath to static addresses on the site. You will need to use an appropriate
##address (or a system link for security purposes) when using on a different host
@get('/awardedbadges/<filename:path>')
def awardedbadge(filename):
return static_file(filename, root='/home/ubuntu/pythonproject/awardedbadges/')
##This is a filepath to static addresses on the site. You will need to use an appropriate
##address (or a system link for security purposes) when using on a different host
##########################################################################################
#### MAIN ROUTING FUNCTIONS
##########################################################################################
@route('/')
def home():
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + '''\
<h1>PCR Hero - your journey to achievement begins here!</h1>
</body>
'''
####################### TO DO - put remainder of register logic into a tpl file rather than expanding here
@get('/register')
def show_registration():
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + '''\
<h1>Thanks for registering with PCR Hero - your journey to achievement begins here!</h1>
<form action="" method="POST">
<p>
<label for="name">What is your name?</label>
<input type="text" name="name"/> </p>
<p>
<label for="email">What is your email?</label>
<input type="email" name="email"/> </p>
<p>
<label for="password">Enter a strong password:</label>
<input type="password" name="password"/> </p>
<p>
<label for="password">Reenter that strong password:</label>
<input type="password" name="passwordcheck"/> </p>
<input type="submit"/>
</form>
</body>
'''
@post('/register')
def show_name():
name = request.params.name
email = request.params.email
password = request.params.password
passwordcheck = request.params.passwordcheck
if(password != passwordcheck):
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + '''\
<h1>Thanks for registering with PCR Hero - your journey to achievement begins here!</h1>
<form action="" method="POST">
<p>
<label for="name">What is your name?</label>
<input type="text" name="name" required/> </p>
<p>
<label for="email">What is your email?</label>
<input type="email" name="email" required/> </p>
<p>
<label for="password">Enter a strong password:</label>
<input type="password" name="password" required/> </p>
<p>
<label for="password">Reenter that strong password:
<input type="password" name="passwordcheck" required/>
<div style = "color: red; display: inline;"> Passwords need to match! </div> </label></p>
<input type="submit"/>
</form>
</body>
'''
elif(m3.get_person(pcrDB, email) != None):
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + '''\
<h1>Thanks for registering with PCR Hero - your journey to achievement begins here!</h1>
<form action="" method="POST">
<p>
<label for="name">What is your name?
<input type="text" name="name"/>
</label></p>
<p>
<label for="email">What is your email?</label>
<input type="email" name="email" required/>
<div style = "color: red; display: inline;"> That email is taken! </div></p>
<p>
<label for="password">Enter a strong password:</label>
<input type="password" name="password" required/> </p>
<p>
<label for="password">Reenter that strong password:</label>
<input type="password" name="passwordcheck" required/>
</p>
<input type="submit"/>
</form>
</body>
'''
else:
## It worked!
## Hash the password
hashword = m3.shaHash(password, "deadsea")
## create the new user object
newUser = m3.PCRUser(email, name, hashword)
m3.add_person(pcrDB, newUser)
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + '''\
<h2>Hello, {}!</h2><p>Thanks for registering.</p>
</body>
</html>
'''.format(request.POST.name)
########## END TODO (reminder, putting this in a tpl will save like ~70 lines of code :)
@get('/myprofile')
def profile():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
userbadges = m3.get_users_badges(pcrDB, useremail)
userapps = m3.get_users_apps(pcrDB, useremail)
applist = {}
for appname in userapps:
applist[appname] = (m3.get_app(pcrDB, appname))
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero - {}</h1>
'''.format(useremail) + template('profile.tpl', badges=userbadges, apps=applist) + "</body>"
else:
redirect("/login")
@get('/login')
def show_registration():
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + '''\
<h1>Welcome to PCR Hero - please login here!</h1>
<form action="" method="POST">
<p>
<label for="email">Email:</label>
<input type="email" name="email" required/> </p>
<p>
<label for="password">Password:</label>
<input type="password" name="password" required/> </p>
<p>
<input type="submit"/>
</form>
</body>
'''
@post('/login')
def show_name():
email = request.params.email
password = request.params.password
hashword = m3.shaHash(password, "deadsea")
### need to begin with checking for username (email) - otherwise we'll get a keyerror
if(m3.get_person(pcrDB, email) == None):
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + "Sorry - this username is not registered!"
else:
### need to load up the user's hashword for comparison purposes
loginHashword = m3.get_user_hashword(pcrDB, email)
if(hashword != loginHashword):
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + "Sorry - your password is incorrect!"
elif(hashword == loginHashword):
response.set_cookie('loggedin', email, max_age= 600, secret='applesauce', path='/')
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce')) + "<h2>Hello, {}!<p>Welcome back!</p></h2>".format(request.POST.email)
else:
return template('base.tpl', title='PCR Hero', email=request.get_cookie('loggedin', secret='applesauce'))+ "Sorry, something went wrong!"
@get('/admin-badge')
def badge_menu():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
userbadges = m3.get_users_badges(pcrDB, useremail)
issuers = m3.get_issuers(pcrDB)
image_path = "/home/ubuntu/pythonproject/images"
available_images = os.listdir(image_path)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
'''.format(useremail) + template('admin-badge.tpl', badges=userbadges, issuers=issuers, images=available_images) + "</body>"
else:
redirect("/login")
@post('/admin-badge')
def badge_submit():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
userbadges = m3.get_users_badges(pcrDB, useremail)
issuers = m3.get_issuers(pcrDB)
image_path = "/home/ubuntu/pythonproject/images"
available_images = os.listdir(image_path)
## return args
name = request.params.name
if(m3.find_badge(pcrDB, name) != None):
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
<h2 style="color:red">A badge with that name already exists!</h2>
'''.format(useremail) + template('admin-badge.tpl', badges=userbadges, issuers=issuers, images=available_images) + "</body>"
else:
description = request.params.description
image = request.params.image
criteria = request.params.criteria
tags = request.params.tags
issuer = request.params.issuer
newBadge = m3.OpenBadge(name, description, image, criteria, tags, issuer)
newBadge.establish_here()
newBadge.add_badge(pcrDB)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
<h2 style="color:blue">Your badge was successfully created!</h2>
'''.format(useremail) + template('admin-badge.tpl', badges=userbadges, issuers=issuers, images=available_images) + "</body>"
else:
redirect("/login")
@get('/admin-issuer')
def issuer_create_menu():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
userbadges = m3.get_users_badges(pcrDB, useremail)
issuers = m3.get_issuers(pcrDB)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
'''.format(useremail) + template('admin-issuer.tpl', badges=userbadges, issuers=issuers) + "</body>"
else:
redirect("/login")
@post('/admin-issuer')
def issuer_create_submit():
name = request.params.name
description = request.params.description
url = request.params.url
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
userbadges = m3.get_users_badges(pcrDB, useremail)
issuers = m3.get_issuers(pcrDB)
if(m3.find_issuer(pcrDB, name) != None):
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1> <p style="color:red;">Sorry, that issuer is taken!</p>
'''.format(useremail) + template('admin-issuer.tpl', badges=userbadges, issuers=issuers) + "</body>"
else:
newIssuer = m3.PCRIssuer(name, description, url)
m3.add_issuer(pcrDB, newIssuer)
newIssuer.establish_here()
issuers = m3.get_issuers(pcrDB)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1> <p style="color:blue;">Your issuer has been created!</p>
'''.format(useremail) + template('admin-issuer.tpl', badges=userbadges, issuers=issuers) + "</body>"
else:
redirect("/login")
@get('/admin-awards')
def badge_award_menu():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
badge_list = m3.get_badges(pcrDB)
user_list = m3.get_users(pcrDB)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
'''.format(useremail) + template('admin-award.tpl', badges=badge_list, users=user_list) + "</body>"
else:
redirect("/login")
@post('/admin-awards')
def badge_award_submit():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
badge_list = m3.get_badges(pcrDB) # list of all badges
user_list = m3.get_users(pcrDB) # list of all users
current_user = request.params.user
current_user_badges = m3.get_users_badges(pcrDB, current_user)
current_badge = request.params.badge
## check that the user doesn't already have the badge
# if so, send back to the menu
if(current_badge in current_user_badges):
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
<h2 style="color:red;">That user already has that badge!</h2>
'''.format(useremail) + template('admin-award.tpl', badges=badge_list, users=user_list) + "</body>"
# if not, award the badge
## awarding badge magic
else:
m3.award_badge_to_user(pcrDB, current_badge, current_user)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
<h2 style="color:blue;">Badge successfully awarded!<h2>
'''.format(useremail) + template('admin-award.tpl', badges=badge_list, users=user_list) + "</body>"
else:
redirect("/login")
@get('/admin-images')
def images_menu():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
userbadges = m3.get_users_badges(pcrDB, useremail)
image_path = "/home/ubuntu/pythonproject/images"
available_images = os.listdir(image_path)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
'''.format(useremail) + template('admin-images.tpl', badges=userbadges, images=available_images, image_path=image_path) + "</body>"
else:
redirect("/login")
@post('/admin-images')
def upload_image():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
userbadges = m3.get_users_badges(pcrDB, useremail)
image_path = "/home/ubuntu/pythonproject/images"
available_images = os.listdir(image_path)
upload = request.files.image
name, ext = os.path.splitext(upload.filename)
if ext not in ('.png'):
return "File extension not allowed."
save_path = "/home/ubuntu/pythonproject/images"
file_path = "{path}/{file}".format(path=save_path, file=upload.filename)
upload.save(file_path)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
<h2 style="color:blue">Image successfully uploaded!</h2>
'''.format(useremail) + template('admin-images.tpl', badges=userbadges, images=available_images, image_path=image_path) + "</body>"
else:
redirect("/login")
@get('/admin-tasks')
def tasks_menu():
if(request.get_cookie('loggedin')):
useremail = request.get_cookie('loggedin', secret='applesauce')
badge_list = m3.get_badges(pcrDB)
user_list = m3.get_users(pcrDB)
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
'''.format(useremail) + template('admin-tasks.tpl', badges=badge_list, users=user_list, typeselection = 0) + "</body>"
else:
redirect("/login")
@post('/admin-tasks')
def tasks_menu_post():
if(request.get_cookie('loggedin')):
submitted = request.params.flag
typeselection = request.params.typeselection
badge_list = m3.get_badges(pcrDB)
user_list = m3.get_users(pcrDB)
app_list = m3.get_all_apps(pcrDB)
useremail = request.get_cookie('loggedin', secret='applesauce')
if(submitted == "False"):
if(typeselection != 0):
app = request.params.app
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
'''.format(useremail) + template('admin-tasks.tpl', badges=badge_list, users=user_list, app_list=app_list, typeselection = typeselection, app = app) + "</body>"
else:
user = request.params.user
badge = request.params.badge
app = request.params.app
print("typeselection = %s " % typeselection)
### type handling for task assignment:
if(typeselection == "percent"):
circuit = request.params.circuit
score = float(request.params.score)
percent = int(request.params.percent)
NewTask = m3.PercentTask(user, badge, app, circuit, score, percent)
elif(typeselection == "repeat"):
circuit = request.params.circuit
repeat = int(request.params.repeat)
NewTask = m3.RepeatTask(user, badge, app, circuit, repeat)
elif(typeselection == "unique"):
unique = request.params.unique
NewTask = m3.UniqueTask(user, badge, app, unique)
elif(typeselection == "timetrial"):
days = int(request.params.days)
hours = int(request.params.hours)
minutes = int(request.params.minutes)
circuit = request.params.circuit
tasknum = int(request.params.tasknum)
NewTask = m3.TimeTrialTask(user, badge, app, days, hours, minutes, circuit, tasknum)
else: #performance
circuit = request.params.circuit
targetyield = int(request.params.targetyield)
cost = int(request.params.cost)
NewTask = m3.PerformanceTask(user, badge, app, circuit, targetyield, cost)
### task is assigned, now time to see if it's unique...
print(NewTask.output())
result = NewTask.assign(pcrDB)
if(result):
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
<h2 style="color:blue;">Task successfully started...</h2>
'''.format(useremail) + template('admin-tasks.tpl', badges=badge_list, users=user_list, typeselection = 0) + "</body>"
else:
return template('base.tpl', title='PCR Hero', email= useremail) + '''\
<h1>Welcome to PCR Hero's Admin Menu - {}</h1>
<h2 style="color:red;">Task already assigned to user...</h2>
'''.format(useremail) + template('admin-tasks.tpl', badges=badge_list, users=user_list, typeselection = 0) + "</body>"
else:
redirect("/login")
@post('/submit')
def submit():
username = request.params.user
appname = request.params.app
submittedcircuit = request.params.circuit
tasks = m3.get_users_tasks_for_app(pcrDB, username, appname)
taskarray = []
for task in tasks:
taskarray.append(task)
print('TaskList----')
for task in taskarray:
print(task)
print('\n')
# Step 1 - evaluate for tasks that have expired and remove them (time trials)
print('Check for timetrials...')
for task in taskarray:
if(task['type'] == 'timetrial'):
if(m3.check_task_datetime(pcrDB, task)):
## check_task_datetime returns True if time's up
print("%s's time is up!" % task['badge'])
m3.remove_task_by_id(pcrDB, task['_id']) ## delete task now that badge has been awarded
taskarray.remove(task) ## remove from taskarray
print("Task removed...")
# # Step 2 - evaluate badges and award them if completed
# ### Step 3 - evaluate for tasks that need unique submissions or multiple tasks (unique, repeat, timetrial)
for task in taskarray:
if(task['type'] == 'unique'):
pass ## This is the one circuit type that is going to require a little more work
## What is needed is for a mongodb call to $find the {circuit: circuit name} in the
elif(task['type'] == 'repeat'):
if(task['circuit'] == submittedcircuit):
m3.increment_task_by_id(pcrDB, task['_id'], "count")
## check if criteria met...
if(task['count'] >= task['repeatTarget']):
m3.award_badge_to_user(pcrDB, task['badge'], task['user'])
print("A new badge was awarded to %s!" % task['user'])
m3.remove_task_by_id(pcrDB, task['_id']) ## delete task now that badge has been awarded
taskarray.remove(task) ## remove from taskarray
print("Task removed...")
elif(task['type'] == 'timetrial'):
if(task['circuit'] == submittedcircuit):
m3.increment_task_by_id(pcrDB, task['_id'], "tasksDone")
## check if criteria met...
if(task['tasksDone'] >= task['tasknumGoal']):
m3.award_badge_to_user(pcrDB, task['badge'], task['user'])
print("A new badge was awarded to %s!" % task['user'])
m3.remove_task_by_id(pcrDB, task['_id']) ## delete task now that badge has been awarded
taskarray.remove(task) ## remove from taskarray
print("Task removed...")
### Step 4 - compare percentage scores
elif(task['type'] == 'percent'):
if(task['circuit'] == submittedcircuit):
                newScore = request.params.score
## check if criteria met...
if(newScore >= task['goalScore']):
m3.award_badge_to_user(pcrDB, task['badge'], task['user'])
print("A new badge was awarded to %s!" % task['user'])
m3.remove_task_by_id(pcrDB, task['_id']) ## delete task now that badge has been awarded
taskarray.remove(task) ## remove from taskarray
print("Task removed...")
## else, check if this is an improvement - this will be useful once the tasks badge is implemented
if(newScore >= task['score']):
m3.update_task_by_id(pcrDB, task['_id'], "score", newScore)
print("Score improved! Getting closer!")
### Step 5 - check cost/performance scores
elif(task['type'] == 'performance'):
if(task['circuit'] == submittedcircuit):
                newScore = request.params.score
newCost = request.params.cost
## check if criteria met...
if(newScore >= task['targetyield']):
if(newCost <= task['cost']):
m3.award_badge_to_user(pcrDB, task['badge'], task['user'])
print("A new badge was awarded to %s!" % task['user'])
m3.remove_task_by_id(pcrDB, task['_id']) ## delete task now that badge has been awarded
taskarray.remove(task) ## remove from taskarray
print("Task removed...")
else:
pass ## can always add new task types
@get('/logout')
def logout():
response.set_cookie('loggedin', '', path='/')
redirect("/")
run(host='172.31.57.1', port=8000, debug=True)
| jwjoseph/PCRHero | hello4.py | Python | mit | 25,735 |
"""
A drop-in replacement for the Adafruit_DotStar module.
It allows me to visualize what the LED strip may look like, without actually having one.
"""
from __future__ import print_function
import graphics
class Adafruit_DotStar(object):
"A mock implementation of the Adafruit_DotStart that simulates LEDs in the UI"
_LED_SIZE = 30
_WINDOW_HEIGHT = 300
_WINDOW_WIDTH = 800
def __init__(self, numPixels, a=None, order=None):
print("Inside initializer")
self._numPixels = numPixels
self._pixels = [0] * numPixels
self._win = None
self._leds = []
print(self._pixels)
def getPixel(self, index):
"""Returns the color value of the given pixel."""
        return self._pixels[index]
def begin(self):
"""Opens the Mock_DotStar window."""
print("Starting Mock_DotStar")
self._win = graphics.GraphWin("midi-light-py", self._WINDOW_WIDTH, self._WINDOW_HEIGHT)
self._win.setBackground("black")
leds_per_row = self._WINDOW_WIDTH // (self._LED_SIZE)
x = 0
y = 0
for i, pixel in enumerate(self._pixels):
x = (i % leds_per_row) * self._LED_SIZE
y = (i // leds_per_row) * self._LED_SIZE
block = graphics.Rectangle(graphics.Point(x, y), graphics.Point(x + self._LED_SIZE - 1, y + self._LED_SIZE - 1))
r, g, b = ((pixel >> 16) & 255, (pixel >> 8) & 255, pixel & 255)
block.setFill(graphics.color_rgb(r, g, b))
block.draw(self._win)
t = graphics.Text(graphics.Point(x + self._LED_SIZE // 2, y + self._LED_SIZE // 2), str(i))
t.setSize(10)
t.setTextColor("white")
t.draw(self._win)
self._leds.append(block)
def setBrightness(self, brightness):
"""Sets the brightness for the whole strip. Not implemented."""
print("setBrightness called in Mock_DotStar. Not implemented.")
def setPixelColor(self, index, color_or_r, g=None, b=None):
"""Sets the given LED color value. To be compatible with DotStart library, you can either pass just the color value or r,g,b values."""
if g is None:
self._pixels[index] = color_or_r
else:
self._pixels[index] = b + (g << 8) + (color_or_r << 16)
def clear(self):
"""Reset all LED values to 0 (black/off)."""
print("Clearing strip data")
# Set strip data to 'off' (just clears buffer, does not write to strip)
self._pixels = [0] * self._numPixels
def close(self):
"""Closes the LED window. """
if not self._win.closed:
self._win.close()
def show(self):
"""Renders the current state of the LEDs to screen."""
for led, pixel in zip(self._leds, self._pixels):
r, g, b = ((pixel >> 16) & 255, (pixel >> 8) & 255, pixel & 255)
led.setFill(graphics.color_rgb(r, g, b))
| dodgyrabbit/midi-light-py | Mock_DotStar.py | Python | mit | 2,955 |
"Docstring parsing example"
from gpkit import Model, parse_variables
class Cube(Model):
"""Demonstration of nomenclature syntax
Lines that end in "Variables" will be parsed as a scalar variable table
until the next blank line.
Variables
---------
A [m^2] surface area
V 100 [L] minimum volume
Lines that end in "Variables of length $N" will be parsed as vector
variables of length $N until the next blank line.
Variables of length 3
---------------------
s [m] side length
Let's introduce more variables: (any line ending in "Variables" is parsed)
Zoning Variables
----------------
h 1 [m] minimum height
Upper Unbounded
---------------
A
The ordering of these blocks doesn't affect anything; order them in the
way that makes the most sense to someone else reading your model.
"""
def setup(self):
exec parse_variables(Cube.__doc__)
return [A >= 2*(s[0]*s[1] + s[1]*s[2] + s[2]*s[0]),
s.prod() >= V,
s[2] >= h]
print parse_variables(Cube.__doc__)
c = Cube()
c.cost = c.A
print c.solve(verbosity=0).table()
| convexopt/gpkit | docs/source/examples/docstringparsing.py | Python | mit | 1,187 |
# -*- coding: utf-8 -*-
"""
This is part of WebScout software
Docs EN: http://hack4sec.pro/wiki/index.php/WebScout_en
Docs RU: http://hack4sec.pro/wiki/index.php/WebScout
License: MIT
Copyright (c) Anton Kuzmin <http://anton-kuzmin.ru> (ru) <http://anton-kuzmin.pro> (en)
Job class for Fuzzer urls module
"""
from classes.jobs.GeneratorJob import GeneratorJob
class FuzzerUrlsJob(GeneratorJob):
""" Job class for Fuzzer urls module """
pass
| hack4sec/ws-cli | classes/jobs/FuzzerUrlsJob.py | Python | mit | 454 |
#!/usr/bin/env
"""
wave_sig.py
References:
-----------
See "http://paos.colorado.edu/research/wavelets/"
Written January 1998 by C. Torrence
Translated to Python 2014
Using Anaconda packaged Python
from wave_matlab
"""
import numpy as np
def wave_signif(Y,dt,scale1,lag1=0.0,sigtest=0, dof=0, siglvl=0.95):
""" Inputs:
-------
Y = timeseries (or variance?)
dt = sampling time
scale1 = vector of scale indices
"""
n1 = len(Y)
J1 = len(scale1)
scale = scale1
s0 = scale.min()
dj = np.log(scale[1]/scale[0])/np.log(2.)
## for morlet 6 only
#lag1 = 0.72 - from Torrence 1998 for rednoise NinoSST
param = 6.
k0 = param
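    # Fourier factor converting scale to Fourier period for the Morlet wavelet [Torrence & Compo 1998, Table 1]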
fourier_factor = (4. * np.pi) / (k0 + np.sqrt(2. + k0**2))
empir = np.array([2.,-1,-1,-1])
if (k0 == 6.):
empir[1]=0.776
empir[2]=2.32
empir[3]=0.60
if (np.size(Y) == 1):
variance = Y
else:
variance = np.var(Y)
period = scale * fourier_factor
dofmin = empir[0] # Degrees of freedom with no smoothing
Cdelta = empir[1] # reconstruction factor
gamma_fac = empir[2] # time-decorrelation factor
dj0 = empir[3] # scale-decorrelation factor
freq = dt / period; # normalized frequency
fft_theor = (1. - lag1**2.) / (1. - 2. * lag1 * np.cos(freq * 2. * np.pi) + lag1**2.) # [Eqn(16)]
fft_theor = variance * fft_theor # include time-series variance
signif = fft_theor
if (sigtest==0):
dof = dofmin
chisquare = chisquare_inv(siglvl,dof, scipy=True) / dof
signif = fft_theor * chisquare # [Eqn(18)]
else:
print "No options for sigtest != 0"
raise
return(signif,fft_theor)
def global_wave_signif(Y,dt,scale1,lag1=0.0,sigtest=1, dof=0, siglvl=0.95):
"""Time averaged significance for global wavelet averages"""
n1 = len(Y)
J1 = len(scale1)
scale = scale1
s0 = scale.min()
dj = np.log(scale[1]/scale[0])/np.log(2.)
## for morlet 6 only
#lag1 = 0.72 - from Torrence 1998 for rednoise NinoSST
param = 6.
k0 = param
fourier_factor = (4. * np.pi) / (k0 + np.sqrt(2. + k0**2))
empir = np.array([2.,-1,-1,-1])
if (k0 == 6.):
empir[1]=0.776
empir[2]=2.32
empir[3]=0.60
if (np.size(Y) == 1):
variance = Y
else:
variance = np.var(Y)
period = scale * fourier_factor
dofmin = empir[0] # Degrees of freedom with no smoothing
Cdelta = empir[1] # reconstruction factor
gamma_fac = empir[2] # time-decorrelation factor
dj0 = empir[3] # scale-decorrelation factor
freq = dt / period; # normalized frequency
fft_theor = (1. - lag1**2.) / (1. - 2. * lag1 * np.cos(freq * 2. * np.pi) + lag1**2.) # [Eqn(16)]
fft_theor = variance * fft_theor # include time-series variance
signif = fft_theor
if (sigtest==1):
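        # Time-averaged significance: inflate the degrees of freedom with the
        # number of independent time points, dof = dofmin*sqrt(1 + (dof*dt/(gamma_fac*scale))^2)  [Eqn(23)]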
dof = dofmin * np.sqrt(1. + (dof * dt / gamma_fac / scale1)**2. )
dof = [dofmin if trunc < dofmin else trunc for trunc in dof ]
chisquare = chisquare_inv(siglvl,dof, scipy=True) / dof
signif = fft_theor * chisquare
else:
print "No options for sigtest != 1"
raise
return(signif,fft_theor)
def chisquare_inv(P,V, scipy=True):
""" Translated from chisquare_inv.m
Originally coded by C. Torrence January 1998
By passing flag scipy = True : use scipy stats.chi2.inverse function
"""
if ((1-P) < 1e-4):
print "Must use a P <0.9999"
exit()
elif ((P==0.95) & (V==2)): # from lookup tables
X = 5.9915
return (X)
elif ((P==0.90) & (V==2)): # from lookup tables
X = 4.605
if scipy:
        from scipy import stats
        X = stats.chi2.isf((1-P), V)
return (X)
| jeepsterboy/waveletanalysis | wave_sig.py | Python | mit | 3,974 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('libraries', '0003_libraryimport'),
('books', '0004_auto_20150811_0425'),
]
operations = [
migrations.CreateModel(
name='BookOnShelf',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', serialize=False, auto_created=True)),
('created', model_utils.fields.AutoCreatedField(verbose_name='created', default=django.utils.timezone.now, editable=False)),
('modified', model_utils.fields.AutoLastModifiedField(verbose_name='modified', default=django.utils.timezone.now, editable=False)),
('book', models.ForeignKey(to='books.Book')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Shelf',
fields=[
('id', models.AutoField(primary_key=True, verbose_name='ID', serialize=False, auto_created=True)),
('created', model_utils.fields.AutoCreatedField(verbose_name='created', default=django.utils.timezone.now, editable=False)),
('modified', model_utils.fields.AutoLastModifiedField(verbose_name='modified', default=django.utils.timezone.now, editable=False)),
('name', models.CharField(max_length=255)),
('meta', jsonfield.fields.JSONField(blank=True)),
('library', models.ForeignKey(to='libraries.Library')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='bookonshelf',
name='shelf',
field=models.ForeignKey(to='books.Shelf'),
),
]
| phildini/bockus | books/migrations/0005_auto_20150817_2146.py | Python | mit | 1,958 |
'''
https://pythonprogramming.net/svm-in-python-machine-learning-tutorial/?completed=/svm-constraint-optimization-machine-learning-tutorial/
SVM Training - Practical Machine Learning Tutorial with Python p.26
https://youtu.be/QAs2olt7pJ4?list=PLQVvvaa0QuDfKTOs3Keq_kaG2P55YRn5v
'''
import matplotlib.pyplot as plt
from matplotlib import style
import numpy as np
style.use('ggplot')
class Support_Vector_Machine:
def __init__(self, visualization=True):
self.visualization = visualization
self.colors = {1:'r',-1:'b'}
if self.visualization:
self.fig = plt.figure()
self.ax = self.fig.add_subplot(1,1,1)
    # train (scikit-learn uses a fit method to train the model)
def fit(self, data):
self.data = data
#{ ||w||: [w,b] }
        opt_dict = {}
        transforms = [[1,1], [-1,1], [-1,-1], [1,-1]]
all_data = []
for yi in self.data:
for featureset in self.data[yi]:
for feature in featureset:
all_data.append(feature)
self.max_feature_value = max(all_data)
self.min_feature_value = min(all_data)
all_data = None
step_sizes = [self.max_feature_value * 0.1, self.max_feature_value * 0.01, self.max_feature_value * 0.001]
#NB: smaller steps become more expensive
#can these steps be multithreaded?
b_range_multiple = 5
#this ^ is expensive.
b_multiple = 5
latest_optimum = self.max_feature_value * 10
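        # Start the search for w well above the optimum; the convex objective
        # lets us step the magnitude down safely from here.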
for step in step_sizes:
w = np.array([latest_optimum, latest_optimum])
#we can set optimized because convex method.
optimized = False
while not optimized:
pass
pass
def predict(self,features):
# sign( x.w+b ) {identify if +ive or -ive}
classification = np.sign(np.dot(np.array(features),self.w)+self.b)
return classification
data_dict = {-1:np.array([[1,7], [2,8], [3,8],]),
1:np.array([[5,1], [6,-1], [7,3],])}
"""
https://docs.scipy.org/doc/numpy/reference/generated/numpy.dot.html
numpy.dot(a, b, out=None) = Dot product of two arrays.
http://scikit-learn.org/stable/modules/svm.html
http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html
textbook, lecture notes and MOOC course
https://web.stanford.edu/~boyd/cvxbook/
https://lagunita.stanford.edu/courses/Engineering/CVX101/Winter2014/about
""" | aspiringguru/sentexTuts | PracMachLrng/sentex_ML_demo18_SVM Training_p26.py | Python | mit | 2,461 |
from blocks.initialization import Constant
from cuboid.bricks.inception import ConcatBricks
from cuboid.bricks import Convolutional
from numpy.testing import assert_allclose
import theano
import theano.tensor as T
import numpy as np
def test_concat_bricks_conv():
i1 = Convolutional(input_dim=(4, 16, 16), num_filters=10,
filter_size=(1, 1), pad=(0, 0))
i2 = Convolutional(input_dim=(4, 16, 16), num_filters=12,
filter_size=(1, 1), pad=(0, 0))
brick = ConcatBricks([i1, i2], input_dim=(4, 16, 16))
brick.weights_init = Constant(0.0)
brick.biases_init = Constant(0.0)
brick.initialize()
x = T.tensor4('input')
y = brick.apply(x)
func_ = theano.function([x], [y])
x_val = np.ones((1, 4, 16, 16), dtype=theano.config.floatX)
res = func_(x_val)[0]
assert_allclose(res.shape, (1, 22, 16, 16))
assert_allclose(brick.get_dim("output"), (22, 16, 16))
| lukemetz/cuboid | tests/bricks/test_inception.py | Python | mit | 945 |
#!/usr/bin/env python
"""
Copyright (C) 2016 Anthony Briggs <[email protected]>
This file is part of Chat-thing.
Chat-thing is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
Chat-thing is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with Chat-thing. If not, see
<http://www.gnu.org/licenses/agpl.txt>.
"""
"""
Simple webchat thingo. Well, maybe not so simple now that I've added users,
registration, login, password change, blah blah blah
http://blog.pythonisito.com/2012/07/realtime-web-chat-with-socketio-and.html
http://docs.peewee-orm.com/en/latest/peewee/querying.html#sorting-records
http://bottlepy.org/docs/dev/async.html
http://bottlepy.org/docs/dev/index.html
http://bottlepy.org/docs/dev/stpl.html
Installing Python 3.5 from scratch:
https://www.raspberrypi.org/forums/viewtopic.php?f=32&t=134828
(and http://www.extellisys.com/articles/python-on-debian-wheezy)
"""
import json
import os.path
import random
import bottle
from bottle import run, request, response, redirect, abort
from bottle import Bottle, template, SimpleTemplate, static_file
from peewee import SqliteDatabase
from flash import message_plugin
from models import db, Message, User
from models import get_user, check_login, set_user_password
import config
app = Bottle()
app.install(message_plugin)
def set_user_token(user):
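    # Issue a fresh random 64-character hex session token, store it in a
    # cookie and persist it on the user record.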
token = ''.join([random.choice('0123456789abcdef') for i in range(64)])
response.set_cookie('token', token)
user.token = token
user.save()
@app.route('/')
def index():
user = get_user(request)
the_html = open('the.html').read()
last_msgs = Message.select().order_by(-Message.id)[:10]
return template(the_html, messages=reversed(last_msgs),
user=user, request=request, config=config)
@app.route('/static/<filename>')
def server_static(filename):
if filename.endswith('.wav'):
response.headers['Content-type'] = "audio/wav;"
return static_file(filename, root=config.static_path)
@app.route('/register')
def register():
"""Register a new account"""
# This is done through user.txt for the moment...
pass
@app.route('/confirm')
def confirm():
"""Confirm an account (via clicking a link in email)."""
token = request.query.token
user = get_user(request, token)
if user:
set_user_token(user)
redirect('/set_password')
return "<p>No user with that token</p>"
@app.get('/set_password')
def set_password():
"""Display password reset form"""
user = get_user(request)
the_html = open('password_change.html').read()
return template(the_html, user=user, request=request)
@app.post('/set_password')
def post_set_password():
user = get_user(request)
if user:
password = request.forms.get('password')
password_check = request.forms.get('password_check')
if password != password_check:
the_html = open('password_change.html').read()
response.flash("Those passwords don't match!")
return template(the_html, user=user, request=request)
set_user_password(user, password)
response.flash("Your password has been changed")
redirect('/')
# TODO: /forgot_password
@app.get('/edit_profile')
def edit_profile():
"""Display edit profile form"""
user = get_user(request)
the_html = open('edit_profile.html').read()
return template(the_html, user=user, request=request)
@app.post('/edit_profile')
def post_edit_profile():
user = get_user(request)
if user:
# Username shouldn't already exist
username = request.forms.get('username')
try:
existing_user = User.get(username=username)
if existing_user and existing_user != user:
the_html = open('edit_profile.html').read()
response.flash("That username is taken!")
return template(the_html, user=user, request=request)
except User.DoesNotExist:
pass
# Email should look vaguely legitimate
# TODO: security thing - should we enforce confirmation
# w/ the old email address?
email = request.forms.get('email')
        if '@' not in email or '.' not in email.split('@')[-1]:
the_html = open('edit_profile.html').read()
response.flash("That email is invalid!")
return template(the_html, user=user, request=request)
user.username = request.forms.get('username')
user.first_name = request.forms.get('first_name')
user.last_name = request.forms.get('last_name')
user.email = request.forms.get('email')
user.save()
response.flash("Your profile has been updated")
redirect('/')
@app.post('/login')
def login():
username = request.forms.get('username')
password = request.forms.get('password')
user = check_login(username, password)
if user:
set_user_token(user)
response.flash("Welcome, " + user.first_name)
else:
response.flash("No user found with that login")
redirect('/')
@app.route('/logout')
def logout():
response.delete_cookie('token')
response.flash("You have been logged out!")
redirect('/')
run(app, host="0.0.0.0", port=config.web_port)
| AnthonyBriggs/Python-101 | chat2/chat.py | Python | mit | 5,770 |
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
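# Monkey-patch User.__str__ so user objects display as "username first_name".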
def user_return(self):
return self.username+" "+self.first_name
User.add_to_class("__str__", user_return)
class Department(models.Model):
dept = models.CharField(max_length=100)
def __str__(self):
return self.dept
class EmployeeDepartment(models.Model):
dept_employee = models.ForeignKey(User, on_delete=models.CASCADE)
dept_department = models.ForeignKey(Department, on_delete=models.CASCADE)
def __str__(self):
return self.dept_employee.username+" "+self.dept_employee.first_name+" "+self.dept_department.dept
class EmployeeType(models.Model):
type_employee = models.OneToOneField(User, on_delete=models.CASCADE)
type_type = models.CharField(max_length=10, choices=(("Staff","Staff"),("Faculty","Faculty")))
def __str__(self):
return self.type_employee.username+" "+self.type_employee.first_name
class AllPost(models.Model):
post = models.CharField(max_length=200)
def __str__(self):
return self.post
class LeaveauthorityPost(models.Model):
leaveauthority_post = models.ForeignKey(AllPost, on_delete=models.CASCADE)
def __str__(self):
return self.leaveauthority_post.post
class LeaveseekingPost(models.Model):
leaveseeking_post = models.ForeignKey(AllPost, on_delete=models.CASCADE)
leaveforwarding_post = models.ForeignKey(LeaveauthorityPost, on_delete=models.CASCADE, related_name="forwarding")
leavesanctioning_post = models.ForeignKey(LeaveauthorityPost, on_delete=models.CASCADE, related_name="sanctioning")
def __str__(self):
return self.leaveseeking_post.post
class EmployeeLeaveseeking(models.Model):
seeking_employee = models.OneToOneField(User, on_delete=models.CASCADE)
seeking_post = models.ForeignKey(LeaveseekingPost, on_delete=models.CASCADE)
tempseeking_post = models.ForeignKey(LeaveseekingPost, on_delete=models.CASCADE, related_name='temp_post')
def __str__(self):
return self.seeking_employee.username+" "+self.seeking_employee.first_name
class EmployeeLeaveauthority(models.Model):
authority_employee = models.ForeignKey(User, on_delete=models.CASCADE)
authority_post = models.OneToOneField(LeaveauthorityPost, on_delete=models.CASCADE)
def __str__(self):
return self.authority_post.leaveauthority_post.post
class EmployeeLeavestatus(models.Model):
leave_employee = models.OneToOneField(User, on_delete=models.CASCADE)
leave_status = models.BooleanField(default=False)
def __str__(self):
return self.leave_employee.username+" "+self.leave_employee.first_name
class EmployeeAllpost(models.Model):
post_employee = models.ForeignKey(User, on_delete=models.CASCADE)
post_post = models.ForeignKey(AllPost, on_delete=models.CASCADE)
def __str__(self):
return self.post_employee.username+" "+self.post_employee.first_name+" "+self.post_post.post
class ReplacingEmployee(models.Model):
    replacing_employee = models.OneToOneField(User, on_delete=models.CASCADE)
replacing_academic = models.ForeignKey(User, on_delete=models.CASCADE, related_name="academic", null=True)
replacing_administrative = models.ForeignKey(User, on_delete=models.CASCADE, related_name="administrative")
def __str__(self):
return self.replacing_employee.username+" "+self.replacing_employee.first_name
| avinash795k/leaveProject | userpanel/models.py | Python | mit | 3,453 |
# -*- encoding: utf-8 -*-
"""
Complex numbers
"""
#Complex numbers with a nonzero real component are written as "(real+imagj)",
#or can be created with the "complex(real, imag)" function.
print 1j * 1J
print 1j * complex(0,1)
print 3+1j*3
print (3+1j)*3
print (1+2j)/(1+1j)
| h4/fuit-webdev | examples/lesson2/1.2/1.2.3.py | Python | mit | 299 |
"""Common settings and globals."""
from os.path import abspath, basename, dirname, join, normpath
from sys import path
########## PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
########## END PATH CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
('Your Name', '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
########## END MANAGER CONFIGURATION
########## DATABASE CONFIGURATION
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
#SITE_ID = 1
# see: https://docs.djangoproject.com/en/1.7/topics/i18n/
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
########## END GENERAL CONFIGURATION
########## MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
########## END MEDIA CONFIGURATION
########## STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = normpath(join(SITE_ROOT, 'assets'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
normpath(join(SITE_ROOT, 'static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
########## END STATIC FILE CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# NOTE: this key should only be used for development and testing.
SECRET_KEY = r"{{ secret_key }}"
#SECRET_KEY = 'y3s*z3x_r*^u*2rdkdapcain-ys1rb255pvtfpbi3#0o0l91=k'
########## END SECRET CONFIGURATION
########## SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
########## END SITE CONFIGURATION
########## TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
normpath(join(SITE_ROOT, 'templates')),
)
########## END TEMPLATE CONFIGURATION
########## MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE_CLASSES = (
# Default Django middleware.
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
########## END MIDDLEWARE CONFIGURATION
########## URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
########## END URL CONFIGURATION
########## APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin panel and documentation:
'grappelli',
'django.contrib.admin',
# 'django.contrib.admindocs',
)
# Apps specific for this project go here.
LOCAL_APPS = (
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + LOCAL_APPS
########## END APP CONFIGURATION
########## LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
########## END LOGGING CONFIGURATION
########## WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = '%s.wsgi.application' % SITE_NAME
########## END WSGI CONFIGURATION
########## SOUTH CONFIGURATION
# See: http://south.readthedocs.org/en/latest/installation.html#configuring-your-django-installation
INSTALLED_APPS += (
# Database migration helpers:
#'south',
)
# Don't need to use South when setting up a test database.
# SOUTH_TESTS_MIGRATE = False
########## END SOUTH CONFIGURATION
| abelthf/layout_template | project_name/project_name/settings/base.py | Python | mit | 6,616 |
from __future__ import print_function
from stdplus import *
def test_isIp_InvalidSamples():
assert(not isIp( "Obviously not an ip" )) # obvious
assert(not isIp( "16" )) # obvious
assert(not isIp( "1234.123.123.123" )) # first octet is too long
assert(not isIp( "123.1234.123.123" )) # second octet is too long
assert(not isIp( "123.123.1234.123" )) # third octet is too long
assert(not isIp( "123.123.123.1234" )) # fourth octet is too long
assert(not isIp( "12a.123.123.123" )) # first octet contains alpha
assert(not isIp( "123.12a.123.123" )) # second octet contains alpha
assert(not isIp( "123.123.12a.123" )) # third octet contains alpha
assert(not isIp( "123.123.123.12a" )) # fourth octet contains alpha
assert(not isIp( "192.168.1.1.32" )) # too many octets
assert(not isIp( "foo.192.168.1.1" )) # too many octets, leading octet not even a number
def test_isIp_validSamples():
assert(isIp( "8.8.8.8" )) # all octets single digits
assert(isIp( "18.18.18.18" )) # all octets two digits
assert(isIp( "192.168.100.200" )) # all octets 3 digits
| earlye/python-stdplus | tests/stdplus/testIsIp.py | Python | mit | 1,253 |
from unittest import TestCase
from iota import Address
from iota.crypto.types import Digest
from iota.multisig.crypto.addresses import MultisigAddressBuilder
from iota.multisig.types import MultisigAddress
class MultisigAddressBuilderTestCase(TestCase):
"""
Generating values for this test case using the JS lib:
.. code-block:: javascript
// Define digests to use to create the multisig addy.
var digests = ['...', ...];
...
var Multisig = require('./lib/multisig/address.js');
var addy = new Multisig(digests);
console.log(addy.finalize());
"""
def setUp(self):
super(MultisigAddressBuilderTestCase, self).setUp()
# Define some tryte sequences that we can reuse between tests.
self.digest_1 =\
Digest(
trytes =
b'FWNEPVJNGUKTSHSBDO9AORBCVWWLVXC9KAMKYYNKPYNJDKSAUURI9ELKOEEYPKVTYP'
b'CKOCJQESYFEMINIFKX9PDDGRBEEHYYXCJW9LHGWFZGHKCPVDBGMGQKIPCNKNITGMZT'
b'DIWVUB9PCHCOPHMIWKSUKRHZOJPMAY',
key_index = 0,
)
self.digest_2 =\
Digest(
trytes =
b'PAIRLDJQY9XAUSKIGCTHRJHZVARBEY9NNHYJ9UI9HWWZXFSDWEZEGDCWNVVYSYDV9O'
b'HTR9NGGZURISWTNECFTCMEWQQFJ9VKLFPDTYJYXC99OLGRH9OSFJLMEOGHFDHZYEAF'
b'IMIZTJRBQUVCR9U9ZWTMUXTUEOUBLC',
key_index = 0,
)
self.digest_3 =\
Digest(
trytes =
b'KBNYOFY9HJSPBDBFSTIEMYJAAMNOXLVXBDUKJRBUGAPIIZNDARXEWDZRBCIYFQCBID'
b'HXIQFIDFPNGIFN9DDXQUGYZGDML9ZIELDSVICFUOPWEPCUWEDUFKXKSOZKTSHIMEIR'
b'HOXKPJFRWWCNYPXR9RI9SMBFSDQFWM',
key_index = 0,
)
def test_success_multiple_digests(self):
"""
Generating a multisig address from multiple digests.
"""
builder = MultisigAddressBuilder()
builder.add_digest(self.digest_1)
builder.add_digest(self.digest_2)
addy = builder.get_address()
self.assertIsInstance(addy, MultisigAddress)
self.assertEqual(
addy,
Address(
b'ZYKDKGXTMGINTQLUMVNBBI9XCEI9BWYF9YOPCBFT'
b'UUJZWM9YIWHNYZEWOPEVRVLKZCPRKLCQD9BR9FVLC',
),
)
# The multisig address also keeps track of the digests used to
# create it (mostly for troubleshooting purposes).
self.assertListEqual(addy.digests, [self.digest_1, self.digest_2])
def test_success_single_digest(self):
"""
Generating a "multisig" address from a single digest.
This does the same thing as generating a regular address from the
corresponding key.
"""
builder = MultisigAddressBuilder()
builder.add_digest(self.digest_1)
addy = builder.get_address()
self.assertIsInstance(addy, MultisigAddress)
self.assertEqual(
addy,
Address(
b'TBOLOKTNJ9MFGBSJBIWDZBHWJRLMKAEGUZFJFNGS'
b'VODKPPULLGJVHTCENCD9OOCNYPRLV9XGBGLDZNHPZ',
),
)
# The address is still designated multisig, so we keep track of the
# digest used to generate it.
self.assertListEqual(addy.digests, [self.digest_1])
def test_error_no_digests(self):
"""
Attempting to generate a multisig addresses without providing any
digests.
I mean, why even bother, right?
"""
builder = MultisigAddressBuilder()
with self.assertRaises(ValueError):
builder.get_address()
def test_success_duplicate_digest(self):
"""
Using the same digest multiple times in the same multisig address?
It's unconventional, admittedly, but the maths work out, so..
"""
builder = MultisigAddressBuilder()
builder.add_digest(self.digest_1)
builder.add_digest(self.digest_2)
# I have no idea why you'd want to do this, but that's why it's not
# my job to make those kinds of decisions.
builder.add_digest(self.digest_1)
addy = builder.get_address()
self.assertIsInstance(addy, MultisigAddress)
self.assertEqual(
addy,
Address(
b'JXJLZDJENNRODT9VEIRPVDX9YRLMDYDEXCQUYFIU'
b'XFKFJOYOGTJPEIBEKDNEFRFVVVSQFBGMNZRBGFARD',
),
)
# Note that ``digest_1`` appears twice, because we added it twice.
self.assertListEqual(
addy.digests,
[self.digest_1, self.digest_2, self.digest_1],
)
def test_success_extract_multiple(self):
"""
You can extract the address multiple times from the same builder
(it's the same instance every time).
"""
builder = MultisigAddressBuilder()
builder.add_digest(self.digest_1)
builder.add_digest(self.digest_2)
addy_1 = builder.get_address()
addy_2 = builder.get_address()
# Same instance is returned every time.
self.assertIs(addy_1, addy_2)
def test_error_already_finalized(self):
"""
Once an address is extracted from the builder, no more digests can
be added.
"""
builder = MultisigAddressBuilder()
builder.add_digest(self.digest_1)
builder.add_digest(self.digest_2)
builder.get_address()
with self.assertRaises(ValueError):
builder.add_digest(self.digest_3)
| iotaledger/iota.lib.py | test/multisig/crypto/addresses_test.py | Python | mit | 4,973 |
# local imports
from .client import ClientError
from .client import DryRun
from .client import MarathonClient
from .deployment import DeploymentFailed
from .deployment import DeploymentNotFound
from .deployment import MarathonDeployment
__all__ = [
'ClientError',
'DeploymentFailed',
'DeploymentNotFound',
'DryRun',
'MarathonClient',
'MarathonDeployment',
]
| shopkeep/shpkpr | shpkpr/marathon/__init__.py | Python | mit | 384 |
# -*- coding:utf-8 -*-
from gensim import corpora
import nltk
import myLogger
import numpy as np
__author__ = "JOHNKYON"
def raw_initializer(dataset):
"""
Read input and output information from json object
:param dataset:
:return:
"""
logger = myLogger.myLogger("initializer")
logger.info("Starting raw initializing")
input_raw = []
output_raw = []
for call in dataset:
input_row = []
output_row = []
# if call.log["session-id"] == "voip-f32f2cfdae-130328_192703":
for turn, label in call:
input_row.append(turn["output"]["transcript"].lower())
output_row.append(turn["output"]["dialog-acts"])
input_row.append(label["transcription"].lower())
output_row.append(label["semantics"]["json"])
input_raw.append(input_row)
output_raw.append(output_row)
logger.info("Finish raw initializing")
print(len(input_raw))
return {"input": input_raw, "output": output_raw}
def token_initializer(data):
"""
Translate text from input into token
:param data:
:return:
"""
logger = myLogger.myLogger("Token initializer")
logger.info("Starting tokenizing")
token = map(lambda element: map(lambda x: nltk.word_tokenize(x.lower()), element), data)
logger.info("Tokenizing finished")
return token
def dictionary_initializer(token):
"""
Build dictionary with token
:param token:
:return:
"""
logger = myLogger.myLogger("Dictionary initializer")
logger.info("Starting building dictionary")
raw = map(lambda element: reduce(lambda x, y: x + y, element), token)
dictionary = corpora.Dictionary(raw)
logger.info("Finish building dictionary")
return dictionary
def label_dict(output):
"""
Use output to create label dictionary and output vector
:param output:
:return:
"""
act_dict = {}
slot_dict = {}
label = []
act_count = 0
slot_count = 0
for session in output:
act_session = []
slot_session = []
for sentence in session:
act_sentence = []
slot_sentence = []
for dic in sentence:
if act_dict.has_key(dic["act"]):
act_sentence.append(act_dict[dic["act"]])
else:
act_sentence.append(act_count)
act_dict[dic["act"]] = act_count
act_count += 1
for slot in dic["slots"]:
if slot_dict.has_key(tuple(slot)):
slot_sentence.append(slot_dict[tuple(slot)])
else:
slot_sentence.append(slot_count)
slot_dict[tuple(slot)] = slot_count
slot_count += 1
act_session.append(act_sentence)
            slot_session.append(slot_sentence)
label.append({"act": act_session, "slot": slot_session})
return label, act_dict, slot_dict
class Set:
def __init__(self, token, dictionary, output, mode):
logger = myLogger.myLogger("Input layer initializer")
logger.info("Initializing input raw")
self.input = map(lambda call: map(lambda sentence: map(lambda x: dictionary.token2id[x], sentence), call), token)
self.output, self.act_dict, self.slot_dict = label_dict(output)
self.sentence_length = 0
self.sentence_count = 0
for session in self.input:
self.sentence_count = max(self.sentence_count, len(session))
for sentence in session:
self.sentence_length = max(self.sentence_length, len(sentence))
        # Initialize zero-padded ndarrays sized to the longest session / sentence
self.input_mtr = np.zeros((len(self.input), self.sentence_count, self.sentence_length))
self.output_mtr = np.zeros((len(self.input), self.sentence_count, len(self.act_dict)))
for session_index in range(0, len(self.input)):
for sentence_index in range(0, len(self.input[session_index])):
                # here only the act labels are recorded
for n in range(0, len(self.input[session_index][sentence_index])):
self.input_mtr[session_index][sentence_index][n] = self.input[session_index][sentence_index][n]
if mode == 1:
for n in self.output[session_index]["act"][sentence_index]:
self.output_mtr[session_index][sentence_index][n] = 1
elif mode == 2:
for n in self.output[session_index]["slot"][sentence_index]:
self.output_mtr[session_index][sentence_index][n] = 1
def __iter__(self):
for session_index in range(0, len(self.input)):
for sentence_index in range(0, len(self.input[session_index])):
vector = {"input": self.input_mtr[session_index, sentence_index], "output": self.output_mtr[session_index, sentence_index]}
yield vector | JOHNKYON/DSTC | DSTC2/traindev/scripts/initializer.py | Python | mit | 4,974 |
import re
import argparse
class Link(object):
def __init__(self, src, dst, label):
self._src = 'bb%i' % (src,)
self._dst = dst if isinstance(dst, str) else 'bb%i' % (dst,)
self._label = label
def main():
#cratename,pat = 'rustc_lint','fn .*expr_refers_to_this_method.*'
    #cratename,pat = 'std','fn resize.*HashMap'
#cratename,pat = 'rustc', 'fn tables.*::"rustc"::ty::context::TyCtxt'
argp = argparse.ArgumentParser()
argp_src = argp.add_mutually_exclusive_group(required=True)
argp_src.add_argument("--file", type=str)
argp_src.add_argument("--crate", type=str)
argp.add_argument("--fn-name", type=str, required=True)
args = argp.parse_args()
pat = 'fn '+args.fn_name
infile = args.file or ('output/'+args.crate+'.hir_3_mir.rs')
fp = open(infile)
start_pat = re.compile(pat)
def_line = None
for line in fp:
line = line.strip()
if start_pat.match(line) != None:
print("# ",line)
def_line = line
break
if def_line is None:
return
for line in fp:
if line.strip() == "bb0: {":
break
bbs = []
cur_bb_lines = []
level = 2
for line in fp:
line = line.strip()
if line == "}":
level -= 1
if level == 0:
break
else:
bbs.append( cur_bb_lines )
cur_bb_lines = []
continue
if "bb" in line and ": {" in line:
level += 1
continue
outstr = ""
comment_level = 0
i = 0
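        # strip nested /* ... */ block comments emitted by the MIR dump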
while i < len(line):
if comment_level > 0:
if line[i:i+2] == '*/':
comment_level -= 1
i += 2
continue
if line[i:i+2] == '/*':
comment_level += 1
i += 2
continue
if comment_level == 0:
outstr += line[i]
i += 1
print("#",len(bbs),outstr)
cur_bb_lines.append(outstr)
    goto_regex = re.compile(r'goto bb(\d+);$')
    call_regex = re.compile(r'.*goto bb(\d+) else bb(\d+)$')
    if_regex = re.compile(r'.*goto bb(\d+); } else { goto bb(\d+); }$')
    switch_regex = re.compile(r'(\d+) => bb(\d+),')
links = []
for idx,bb in enumerate(bbs):
if bb[-1] == 'return;':
links.append( Link(idx, 'return', "return") )
continue
if bb[-1] == 'diverge;':
#links.append( Link(idx, 'panic', "diverge") )
continue
m = goto_regex.match(bb[-1])
if m != None:
links.append( Link(idx, int(m.group(1)), "") )
continue
m = call_regex.match(bb[-1])
if m != None:
links.append( Link(idx, int(m.group(1)), "ret") )
#links.append( Link(idx, int(m.group(2)), "panic") )
continue
m = if_regex.match(bb[-1])
if m != None:
links.append( Link(idx, int(m.group(1)), "true") )
links.append( Link(idx, int(m.group(2)), "false") )
continue
# Rewrite `switch` terminators to de-duplicate the most common arm
if bb[-1].startswith("switch "):
arms = []
counts = {}
for m in switch_regex.finditer(bb[-1]):
tgt = int(m.group(2))
arms.append(( tgt, "var%s" % (m.group(1),) ))
if not tgt in counts:
counts[tgt] = 0
counts[tgt] += 1
max_arm = max([ (v,k) for k,v in counts.items() ])
# If one arm is used more times than there are arms, dedup it
if max_arm[0] > len(counts):
default_tgt = max_arm[1]
p = bb[-1].find('{')
bb[-1] = bb[-1][:p+1]
for tgt,var in arms:
if tgt != default_tgt:
links.append( Link(idx, tgt, var ) )
bb[-1] += "{} => bb{}, ".format(var, tgt)
bb[-1] += "* => bb{} }}".format(default_tgt)
links.append(Link(idx, default_tgt, "*" ))
else:
for tgt,var in arms:
links.append( Link(idx, tgt, var ) )
print("digraph {")
print("node [shape=box, labeljust=l; fontname=\"mono\"];")
for l in links:
print('"{}" -> "{}" [label="{}"];'.format(l._src, l._dst, l._label))
print("")
for idx,bb in enumerate(bbs):
print('"bb{0}" [label="BB{0}:'.format(idx), end="")
for stmt in bb:
print('\\l',stmt.replace('"', '\\"'), end="")
print('"];')
print("}")
if __name__ == '__main__':
    main()
| thepowersgang/mrustc | scripts/mir_to_dot.py | Python | mit | 4,736 |
"""
Amazon Web Services version 4 authentication for the Python `Requests`_
library.
.. _Requests: https://github.com/kennethreitz/requests
Features
--------
* Requests authentication for all AWS services that support AWS auth v4
* Independent signing key objects
* Automatic regeneration of keys when scope date boundary is passed
* Support for STS temporary credentials
Implements header-based authentication; GET URL parameter and POST parameter
authentication are not supported.
Supported Services
------------------
This package has been tested as working against:
AppStream, Auto-Scaling, CloudFormation, CloudFront, CloudHSM, CloudSearch,
CloudTrail, CloudWatch Monitoring, CloudWatch Logs, CodeDeploy, Cognito
Identity, Cognito Sync, Config, DataPipeline, Direct Connect, DynamoDB, Elastic
Beanstalk, ElastiCache, EC2, EC2 Container Service, Elastic Load Balancing,
Elastic MapReduce, ElasticSearch, Elastic Transcoder, Glacier, Identity and
Access Management (IAM), Key Management Service (KMS), Kinesis, Lambda,
Opsworks, Redshift, Relational Database Service (RDS), Route 53, Simple Storage
Service (S3), Simple Notification Service (SNS), Simple Queue Service (SQS),
Storage Gateway, Security Token Service (STS)
The following services do not support AWS auth version 4 and are not usable
with this package:
Simple Email Service (SES), Simple Workflow Service (SWF), Import/Export,
SimpleDB, DevPay, Mechanical Turk
The AWS Support API has not been tested as it requires a premium subscription.
Installation
------------
Install via pip:
.. code-block:: bash
$ pip install requests-aws4auth
requests-aws4auth requires the `Requests`_ library by Kenneth Reitz.
requests-aws4auth supports Python 3.3 and up. Python 2.7 may work but is not supported after the version 1.0.x tree.
Basic usage
-----------
.. code-block:: python
>>> import requests
>>> from requests_aws4auth import AWS4Auth
>>> endpoint = 'http://s3-eu-west-1.amazonaws.com'
>>> auth = AWS4Auth('<ACCESS ID>', '<ACCESS KEY>', 'eu-west-1', 's3')
>>> response = requests.get(endpoint, auth=auth)
>>> response.text
<?xml version="1.0" encoding="UTF-8"?>
<ListAllMyBucketsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01">
<Owner>
<ID>bcaf1ffd86f461ca5fb16fd081034f</ID>
<DisplayName>webfile</DisplayName>
...
This example would list your buckets in the ``eu-west-1`` region of the Amazon
S3 service.
STS Temporary Credentials
-------------------------
.. code-block:: python
>>> from requests_aws4auth import AWS4Auth
>>> auth = AWS4Auth('<ACCESS ID>', '<ACCESS KEY>', 'eu-west-1', 's3',
session_token='<SESSION TOKEN>')
...
This example shows how to construct an AWS4Auth object for use with STS
temporary credentials. The ``x-amz-security-token`` header is added with
the session token. Temporary credential timeouts are not managed -- in
case the temporary credentials expire, they need to be re-generated and
the AWS4Auth object re-constructed with the new credentials.
Date handling
-------------
If an HTTP request to be authenticated contains a ``Date`` or ``X-Amz-Date``
header, AWS will only accept the authorised request if the date in the header
matches the scope date of the signing key (see the `AWS REST API date docs`_).
.. _AWS REST API date docs: http://docs.aws.amazon.com/general/latest/gr/sigv4-date-handling.html).
From version 0.8 of requests-aws4auth, if the header date does not match the
scope date, an ``AWS4Auth`` instance will automatically regenerate its signing
key, using the same scope parameters as the previous key except for the date,
which will be changed to match the request date. If a request does not include
a date, the current date is added to the request in an ``X-Amz-Date`` header,
and the signing key is regenerated if this differs from the scope date.
This means that ``AWS4Auth`` now extracts and parses dates from the values of
``X-Amz-Date`` and ``Date`` headers. Supported date formats are:
* RFC 7231 (e.g. Mon, 09 Sep 2011 23:36:00 GMT)
* RFC 850 (e.g. Sunday, 06-Nov-94 08:49:37 GMT)
* C time (e.g. Wed Dec 4 00:00:00 2002)
* Amz-Date format (e.g. 20090325T010101Z)
* ISO 8601 / RFC 3339 (e.g. 2009-03-25T10:11:12.13-01:00)
If either header is present but ``AWS4Auth`` cannot extract a date because all
present date headers are in an unrecognisable format, ``AWS4Auth`` will delete
any ``X-Amz-Date`` and ``Date`` headers present and replace with a single
``X-Amz-Date`` header containing the current date. This behaviour can be
modified using the ``raise_invalid_date`` keyword argument of the ``AWS4Auth``
constructor.
Automatic key regeneration
--------------------------
If you do not want the signing key to be automatically regenerated when a
mismatch between the request date and the scope date is encountered, use the
alternative ``StrictAWS4Auth`` class, which is identical to ``AWS4Auth`` except
that upon encountering a date mismatch it just raises a ``DateMismatchError``.
You can also use the ``PassiveAWS4Auth`` class, which mimics the ``AWS4Auth``
behaviour prior to version 0.8 and just signs and sends the request, whether
the date matches or not. In this case it is up to the calling code to handle an
authentication failure response from AWS caused by the date mismatch.
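A minimal sketch of handling the mismatch yourself (this assumes
``StrictAWS4Auth`` accepts the same constructor arguments as ``AWS4Auth``;
the retry comment is purely illustrative):
.. code-block:: python
    >>> from requests_aws4auth import StrictAWS4Auth, DateMismatchError
    >>> auth = StrictAWS4Auth('<ACCESS ID>', '<ACCESS KEY>', 'eu-west-1', 's3')
    >>> try:
    ...     response = requests.get(endpoint, auth=auth)
    ... except DateMismatchError:
    ...     pass  # rebuild the auth object for the current date, then retry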
Secret key storage
------------------
To allow automatic key regeneration, the secret key is stored in the
``AWS4Auth`` instance, in the signing key object. If you do not want this to
occur, instantiate the instance using an ``AWS4Signing`` key which was created
with the store_secret_key parameter set to False:
.. code-block:: python
>>> sig_key = AWS4SigningKey(secret_key, region, service, date, False)
>>> auth = StrictAWS4Auth(access_id, sig_key)
The ``AWS4Auth`` class will then raise a ``NoSecretKeyError`` when it attempts
to regenerate its key. A slightly more conceptually elegant way to handle this
is to use the alternative ``StrictAWS4Auth`` class, again instantiating it with
an ``AWS4SigningKey`` instance created with ``store_secret_key = False``.
Multithreading
--------------
If you share ``AWS4Auth`` (or even ``StrictAWS4Auth``) instances between
threads you are likely to encounter problems. Because ``AWS4Auth`` instances
may unpredictably regenerate their signing key as part of signing a request,
threads using the same instance may find the key changed by another thread
halfway through the signing process, which may result in undefined behaviour.
It may be possible to rig up a workable instance sharing mechanism using
locking primitives and the ``StrictAWS4Auth`` class, however this poor author
can't think of a scenario which works safely yet doesn't suffer from at some
point blocking all threads for at least the duration of an HTTP request, which
could be several seconds. If several requests come in in close succession which
all require key regenerations then the system could be forced into serial
operation for quite a length of time.
In short, it's probably best to create a thread-local instance of ``AWS4Auth``
for each thread that needs to do authentication.
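One way to do that is with the standard library's ``threading.local()``; a
minimal sketch:
.. code-block:: python
    >>> import threading
    >>> _local = threading.local()
    >>> def get_auth():
    ...     if not hasattr(_local, 'auth'):
    ...         _local.auth = AWS4Auth('<ACCESS ID>', '<ACCESS KEY>',
    ...                                'eu-west-1', 's3')
    ...     return _local.auth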
API reference
-------------
See the doctrings in ``aws4auth.py`` and ``aws4signingkey.py``.
Testing
-------
A test suite is included in the test folder.
The package passes all tests in the AWS auth v4 `test_suite`_, and contains
tests against the supported live services. See docstrings in
``test/requests_aws4auth_test.py`` for details about running the tests.
Connection parameters are included in the tests for the AWS Support API, should
you have access and want to try it. The documentation says it supports auth v4
so it should work if you have a subscription. Do pass on your results!
.. _test_suite: http://docs.aws.amazon.com/general/latest/gr/signature-v4-test-suite.html
Unsupported AWS features / todo
-------------------------------
* Currently does not support Amazon S3 chunked uploads
* Tests for new AWS services
* Requires Requests library to be present even if only using
* Coherent documentation
"""
# Licensed under the MIT License:
# http://opensource.org/licenses/MIT
from .aws4auth import AWS4Auth, StrictAWS4Auth, PassiveAWS4Auth
from .aws4signingkey import AWS4SigningKey
from .exceptions import RequestsAws4AuthException, DateMismatchError, NoSecretKeyError
del aws4auth
del aws4signingkey
del exceptions
__version__ = '1.1.1'
| sam-washington/requests-aws4auth | requests_aws4auth/__init__.py | Python | mit | 8,452 |
"""LCM type definitions
This file automatically generated by lcm.
DO NOT MODIFY BY HAND!!!!
"""
import cStringIO as StringIO
import struct
import kinect_joint_t
class kinect_bodyframe_update_t(object):
__slots__ = ["bodyjoints"]
SpineBase = 0
SpineMid = 1
Neck = 2
Head = 3
ShoulderLeft = 4
ElbowLeft = 5
WristLeft = 6
HandLeft = 7
ShoulderRight = 8
ElbowRight = 9
WristRight = 10
HandRight = 11
HipLeft = 12
KneeLeft = 13
AnkleLeft = 14
FootLeft = 15
HipRight = 16
KneeRight = 17
AnkleRight = 18
FootRight = 19
SpineShoulder = 20
HandTipLeft = 21
ThumbLeft = 22
HandTipRight = 23
ThumbRight = 24
def __init__(self):
self.bodyjoints = [ None for dim0 in range(25) ]
def encode(self):
buf = StringIO.StringIO()
buf.write(kinect_bodyframe_update_t._get_packed_fingerprint())
self._encode_one(buf)
return buf.getvalue()
def _encode_one(self, buf):
for i0 in range(25):
assert self.bodyjoints[i0]._get_packed_fingerprint() == kinect_joint_t.kinect_joint_t._get_packed_fingerprint()
self.bodyjoints[i0]._encode_one(buf)
def decode(data):
if hasattr(data, 'read'):
buf = data
else:
buf = StringIO.StringIO(data)
if buf.read(8) != kinect_bodyframe_update_t._get_packed_fingerprint():
raise ValueError("Decode error")
return kinect_bodyframe_update_t._decode_one(buf)
decode = staticmethod(decode)
def _decode_one(buf):
self = kinect_bodyframe_update_t()
self.bodyjoints = []
for i0 in range(25):
self.bodyjoints.append(kinect_joint_t.kinect_joint_t._decode_one(buf))
return self
_decode_one = staticmethod(_decode_one)
_hash = None
def _get_hash_recursive(parents):
if kinect_bodyframe_update_t in parents: return 0
newparents = parents + [kinect_bodyframe_update_t]
tmphash = (0x833951cac4a4f9e8+ kinect_joint_t.kinect_joint_t._get_hash_recursive(newparents)) & 0xffffffffffffffff
tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
return tmphash
_get_hash_recursive = staticmethod(_get_hash_recursive)
_packed_fingerprint = None
def _get_packed_fingerprint():
if kinect_bodyframe_update_t._packed_fingerprint is None:
kinect_bodyframe_update_t._packed_fingerprint = struct.pack(">Q", kinect_bodyframe_update_t._get_hash_recursive([]))
return kinect_bodyframe_update_t._packed_fingerprint
_get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
| GearsAD/semisorted_arnerve | lcm_types/user_types/semisorted_arnerve/kinect_bodyframe_update_t.py | Python | mit | 2,701 |
#Andrew Tan, 4/12, Section 010, Part 3
import random
#Define possible cards and associated values
cards = ['10 of Hearts', '9 of Hearts', '8 of Hearts', '7 of Hearts', '6 of Hearts', '5 of Hearts', '4 of Hearts', '3 of Hearts', '2 of Hearts', 'Ace of Hearts', 'King of Hearts', 'Queen of Hearts', 'Jack of Hearts', '10 of Diamonds', '9 of Diamonds', '8 of Diamonds', '7 of Diamonds', '6 of Diamonds', '5 of Diamonds', '4 of Diamonds', '3 of Diamonds', '2 of Diamonds', 'Ace of Diamonds', 'King of Diamonds', 'Queen of Diamonds', 'Jack of Diamonds', '10 of Clubs', '9 of Clubs', '8 of Clubs', '7 of Clubs', '6 of Clubs', '5 of Clubs', '4 of Clubs', '3 of Clubs', '2 of Clubs', 'Ace of Clubs', 'King of Clubs', 'Queen of Clubs', 'Jack of Clubs', '10 of Spades', '9 of Spades', '8 of Spades', '7 of Spades', '6 of Spades', '5 of Spades', '4 of Spades', '3 of Spades', '2 of Spades', 'Ace of Spades', 'King of Spades', 'Queen of Spades', 'Jack of Spades']
values = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 10, 10, 10]
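#Note: draws below are with replacement; dealt cards are never removed from the deck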
#First deal
player_hand = []
player_points = 0
for i in range(2):
    index = random.randrange(len(cards))
player_hand.append(cards[index])
player_points += values[index]
print("Player hand: {} is worth {}".format(player_hand, player_points))
#Subsequent deals
while player_points < 21:
option = str.lower(input("(h)it or (s)tand? "))
if option == "h":
        index = random.randrange(len(cards))
player_hand.append(cards[index])
player_points += values[index]
print("You drew {}".format(cards[index]))
print("Player hand: {} is worth {}".format(player_hand, player_points))
continue
if option == "s":
print()
break
if player_points == 21:
print("Player got 21! Blackjack!")
winner = "Player"
elif player_points > 21:
print("Bust!")
winner = "Computer"
#Computer deals
elif player_points < 21:
computer_hand = []
computer_points = 0
for i in range(2):
        index = random.randrange(len(cards))
computer_hand.append(cards[index])
computer_points += values[index]
print("Computer hand: {} is worth {}".format(computer_hand, computer_points))
while computer_points < 21 and computer_points < player_points:
        index = random.randrange(len(cards))
computer_hand.append(cards[index])
computer_points += values[index]
print("Computer drew {}".format(cards[index]))
print("Computer hand: {} is worth {}".format(computer_hand, computer_points))
if computer_points == 21:
print("Computer got 21! Blackjack!")
winner = "Computer"
elif computer_points > 21:
print("Bust!")
winner = "Player"
else:
winner = "Computer"
#Display winner
print("{} wins!".format(winner))
| sojournexx/python | Assignments/TanAndrew_assign8_part3.py | Python | mit | 2,988 |
#!/usr/bin/env python3
import os
from setuptools import setup, find_packages
from homeassistant.const import __version__
PACKAGE_NAME = 'homeassistant'
HERE = os.path.abspath(os.path.dirname(__file__))
DOWNLOAD_URL = ('https://github.com/home-assistant/home-assistant/archive/'
'{}.zip'.format(__version__))
PACKAGES = find_packages(exclude=['tests', 'tests.*'])
REQUIRES = [
'requests>=2,<3',
'pyyaml>=3.11,<4',
'pytz>=2016.6.1',
'pip>=7.0.0',
'jinja2>=2.8',
'voluptuous==0.9.1',
'typing>=3,<4',
'sqlalchemy==1.0.14',
]
setup(
name=PACKAGE_NAME,
version=__version__,
license='MIT License',
url='https://home-assistant.io/',
download_url=DOWNLOAD_URL,
author='Paulus Schoutsen',
author_email='[email protected]',
description='Open-source home automation platform running on Python 3.',
packages=PACKAGES,
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=REQUIRES,
test_suite='tests',
keywords=['home', 'automation'],
entry_points={
'console_scripts': [
'hass = homeassistant.__main__:main'
]
},
classifiers=[
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Topic :: Home Automation'
],
)
| devdelay/home-assistant | setup.py | Python | mit | 1,479 |
# Return a Collatz chain for a given integer input
# Output returned as a list
# Created by Joel Pozin on March 7, 2017
import sys
def collatz(num):
"""
if n is even, n/2
if n is odd, 3n+1
"""
newlist = [num]
while num != 1:
if num%2 == 0:
num //= 2
elif num%2 == 1:
num = 3*num + 1
newlist.append(num)
return newlist
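# Example: collatz(6) walks 6 -> 3 -> 10 -> 5 -> 16 -> 8 -> 4 -> 2 -> 1
# and returns [6, 3, 10, 5, 16, 8, 4, 2, 1]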
if __name__ == "__main__":
try:
        print(collatz(int(sys.argv[1])))
except ValueError:
print("Incompatible input, please try again")
| jpozin/Math-Projects | CollatzChain.py | Python | mit | 579 |
import amity
import socket
import logging
import tornado.ioloop
if __name__ == "__main__":
import sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG,
format='%(asctime)s %(process)d %(filename)s %(lineno)d %(levelname)s #| %(message)s',
datefmt='%H:%M:%S')
ioloop = tornado.ioloop.IOLoop.instance()
events = True
testingserver1 = amity.Client(host='testserver1', username='monast', secret='tsanom', events=events)
#testingserver2 = amity.Client(host='testserver2', username='admin', secret='amp111', events=events)
#tornado.ioloop.PeriodicCallback(testingserver1.ListCommands, 10000).start()
ioloop.start()
| whardier/AMIty | test/poll.py | Python | mit | 674 |
# -*- coding: utf-8 -*-
"""
Display current conditions from openweathermap.org.
As of 2015-10-09, you need to signup for a free API key via
http://openweathermap.org/register
Once you signup, use the API key generated at signup to either:
1. set the `apikey` parameter directly
2. place the API key (and nothing else) as a single line in
~/.config/i3status/openweathermap-apikey
3. same as 2), but at any file location configured via the `apikey_file` parameter
Configuration parameters:
- apikey : openweathermap.org api key (default: empty)
- apikey_file : path to file containing api key (default: ~/.config/i3status/openweathermap-apikey)
- cache_timeout : seconds between requests for weather updates (default: 1800)
- direction_precision : wind direction precision (default: 2)
- 1 : N E S W
- 2 : N NE E SE S etc
- 3 : N NNE NE ENE E etc
- format : display format (default: '{city} {temp}°F {icon} {sky} {humidity}%rh {pressure}inHg {direction} {wind}mph')
- corresponding metric format would be '{city} {temp}°C {icon} {sky} {humidity}%rh {pressure}hPa {direction} {wind}m/s'
- city : city name (eg 'Seattle')
- temp : temperature (eg '35')
- icon : weather icon (eg '☀')
- sky : weather conditions (eg 'Clear' or 'Rain' or 'Haze' etc)
- humidity : relative humidity (eg '50')
- pressure : barometric pressure (eg '29.58')
- direction : wind direction (eg 'NW')
- wind : wind speed (eg '11')
- location : city,country of location for which to show weather (default: 'Seattle,US')
- see http://openweathermap.org/city
- for US, a location like 'Springfield IL' will also work
- request_timeout : seconds after which to abort request (default: 10)
- timezone : timezone of location (default: 'America/Los_Angeles')
- used to determine if it's currently day or night at the location
- units : imperial or metric units (default: 'imperial')
- imperial :
- temperature : fahrenheit
- pressure : inches of mercury
- wind : miles per hour
- metric :
- temperature : celsius
- pressure : hectopascal
- wind : meters per second
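Example configuration (a sketch; this assumes the module file is loaded by
py3status under the name 'j3_weather'):
    order += "j3_weather"
    j3_weather {
        location = 'Seattle,US'
        units = 'imperial'
    }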
"""
from datetime import datetime
from dateutil import tz
from os.path import expanduser
from time import time
import json
import requests
DIRECTIONS = {
1: 'N E S W'.split(),
2: 'N NE E SE S SW W NW'.split(),
3: 'N NNE NE ENE E ESE SE SSE S SSW SW WSW W WNW NW NNW'.split(),
}
class Py3status:
# available configuration parameters
# literal api key
apikey = ''
# or path to file containing api key
apikey_file = '~/.config/i3status/openweathermap-apikey'
# check for updates every 1800 seconds (30 minutes)
cache_timeout = 1800
# at most 2 direction chars (ie NW)
direction_precision = 2
# format as Seattle 50°F ☽ Clear 62%rh 30.25inHg N 5mph
format = '{city} {temp}°F {icon} {sky} {humidity}%rh {pressure}inHg {direction} {wind}mph'
#format = '{city} {temp}°C {icon} {sky} {humidity}%rh {pressure}hPa {direction} {wind}m/s'
# icons
icon_sun = '☀'
icon_moon = '☽'
icon_clouds = '☁'
icon_rain = '☔'
icon_fog = '▒'
icon_mist = '░'
icon_haze = '☼' # ◌|☷
icon_snow = '❄'
icon_thunderstorm = '⚡'
icon_unknown = '?'
# get weather for Seattle,US
location = 'Seattle,US'
# abort request after 10 seconds
request_timeout = 10
# use Pacific Time for calculating day/night
timezone = 'America/Los_Angeles'
#timezone = tz.tzlocal()
# use imperial units instead of metric
units = 'imperial'
test_data = ''#'/home/justin/able/weather.json'
def _load_apikey(self):
with open(expanduser(self.apikey_file)) as f:
return f.readline().rstrip()
def _get_weather(self):
if self.test_data != '':
with open(self.test_data) as f:
return json.load(f)
apikey = self.apikey or self._load_apikey()
url = (
'http://api.openweathermap.org/data/2.5/weather' +
'?q={location}&units={units}&APPID={apikey}'
).format(location=self.location, units=self.units, apikey=apikey)
response = requests.get(url, timeout=self.request_timeout)
if response.status_code != 200:
raise Exception('{status} error getting weather for {location}'.format(
status=response.status_code, location=self.location))
return response.json()
def _get_hour_of_day(self, timestamp):
dt = datetime.utcfromtimestamp(timestamp).replace(tzinfo=tz.tzutc())
return dt.astimezone(tz.gettz(self.timezone)).hour
def _get_icon(self, weather):
sky = weather['weather'][0]['main']
if sky == 'Clear':
# after midnight utc, openweathermap.org will report
# sunrise/sunset for tomorrow
# so convert to hour of day in order to compare to now
now = self._get_hour_of_day(int(weather['dt']))
sunrise = self._get_hour_of_day(int(weather['sys']['sunrise']))
sunset = self._get_hour_of_day(int(weather['sys']['sunset']))
if now < sunrise or now > sunset:
return self.icon_moon
else:
return self.icon_sun
elif sky == 'Clouds': return self.icon_clouds
elif sky == 'Rain': return self.icon_rain
elif sky == 'Fog': return self.icon_fog
elif sky == 'Mist': return self.icon_mist
elif sky == 'Haze': return self.icon_haze
elif sky == 'Snow': return self.icon_snow
elif sky == 'Thunderstorm': return self.icon_thunderstorm
        return self.icon_unknown
def _get_temp(self, weather):
temp = float(weather['main']['temp'])
return '{:.0f}'.format(temp)
def _get_pressure(self, weather):
pressure = float(weather['main']['pressure'])
if self.units == 'imperial':
return '{:.2f}'.format(pressure*0.0295) # convert from hPa to inHg
return '{:.0f}'.format(pressure)
def _get_wind(self, weather):
wind = float(weather['wind']['speed'])
return '{:.0f}'.format(wind)
def _get_direction(self, weather):
azimuth = float(weather['wind']['deg'])
directions = DIRECTIONS[self.direction_precision]
slices = len(directions)
slice = 360 / slices
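        # offset the azimuth by half a slice so each direction owns a centered
        # arc, then wrap with modulo so headings near 360 map back to N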
return directions[int((azimuth+(slice/2))/slice) % slices]
def j3_weather(self, i3s_output_list, i3s_config):
weather = self._get_weather()
text = self.py3.safe_format(self.format, {
'city': weather['name'],
'icon': self._get_icon(weather),
'sky': weather['weather'][0]['main'],
'temp': self._get_temp(weather),
'humidity': weather['main']['humidity'],
'pressure': self._get_pressure(weather),
'wind': self._get_wind(weather),
'direction': self._get_direction(weather),
})
return {
'cached_until': time() + self.cache_timeout,
'full_text': text,
}
if __name__ == "__main__":
"""
Test this module by calling it directly.
"""
from time import sleep
x = Py3status()
config = {
'color_good': '#00FF00',
'color_degraded': '#FFFF00',
'color_bad': '#FF0000',
}
print(x.j3_weather([], config)['full_text'])
| justinludwig/j3status | j3_weather.py | Python | mit | 7,547 |
#!/usr/bin/env python
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (assert_raises, run_module_suite,
assert_equal, assert_allclose)
import pywt
def test_available_modes():
modes = ['zpd', 'cpd', 'sym', 'ppd', 'sp1', 'per']
assert_equal(pywt.MODES.modes, modes)
assert_equal(pywt.MODES.from_object('cpd'), 2)
def test_invalid_modes():
x = np.arange(4)
assert_raises(ValueError, pywt.dwt, x, 'db2', 'unknown')
assert_raises(TypeError, pywt.dwt, x, 'db2', -1)
assert_raises(TypeError, pywt.dwt, x, 'db2', 7)
assert_raises(TypeError, pywt.dwt, x, 'db2', None)
assert_raises(ValueError, pywt.MODES.from_object, 'unknown')
assert_raises(ValueError, pywt.MODES.from_object, -1)
assert_raises(ValueError, pywt.MODES.from_object, 7)
assert_raises(TypeError, pywt.MODES.from_object, None)
def test_dwt_idwt_allmodes():
# Test that :func:`dwt` and :func:`idwt` can be performed using every mode
x = [1, 2, 1, 5, -1, 8, 4, 6]
dwt_result_modes = {
'zpd': ([-0.03467518, 1.73309178, 3.40612438, 6.32928585, 6.95094948],
[-0.12940952, -2.15599552, -5.95034847, -1.21545369,
-1.8625013]),
'cpd': ([1.28480404, 1.73309178, 3.40612438, 6.32928585, 7.51935555],
[-0.48296291, -2.15599552, -5.95034847, -1.21545369,
0.25881905]),
'sym': ([1.76776695, 1.73309178, 3.40612438, 6.32928585, 7.77817459],
[-0.61237244, -2.15599552, -5.95034847, -1.21545369,
1.22474487]),
'ppd': ([6.9162743, 1.73309178, 3.40612438, 6.32928585, 6.9162743],
[-1.99191082, -2.15599552, -5.95034847, -1.21545369,
-1.99191082]),
'sp1': ([-0.51763809, 1.73309178, 3.40612438, 6.32928585, 7.45000519],
[0, -2.15599552, -5.95034847, -1.21545369, 0]),
'per': ([4.053172, 3.05257099, 2.85381112, 8.42522221],
[0.18946869, 4.18258152, 4.33737503, 2.60428326])
}
for mode in pywt.MODES.modes:
cA, cD = pywt.dwt(x, 'db2', mode)
assert_allclose(cA, dwt_result_modes[mode][0], rtol=1e-7, atol=1e-8)
assert_allclose(cD, dwt_result_modes[mode][1], rtol=1e-7, atol=1e-8)
assert_allclose(pywt.idwt(cA, cD, 'db2', mode), x, rtol=1e-10)
def test_default_mode():
# The default mode should be 'sym'
x = [1, 2, 1, 5, -1, 8, 4, 6]
cA, cD = pywt.dwt(x, 'db2')
cA2, cD2 = pywt.dwt(x, 'db2', mode='sym')
assert_allclose(cA, cA2)
assert_allclose(cD, cD2)
assert_allclose(pywt.idwt(cA, cD, 'db2'), x)
if __name__ == '__main__':
run_module_suite()
| ThomasA/pywt | pywt/tests/test_modes.py | Python | mit | 2,725 |
import hypergan as hg
import tensorflow as tf
from hypergan.gan_component import ValidationException
from hypergan.inputs.image_loader import ImageLoader
import os
def fixture_path(subpath=""):
return os.path.dirname(os.path.realpath(__file__)) + '/fixtures/' + subpath
class TestImageLoader(tf.test.TestCase):
def test_constructor(self):
with self.test_session():
loader = ImageLoader(32)
self.assertEqual(loader.batch_size, 32)
def test_load_non_existent_path(self):
with self.assertRaises(ValidationException):
loader = ImageLoader(32)
loader.create("/tmp/nonexistentpath", format='png')
def test_load_fixture(self):
with self.test_session():
loader = ImageLoader(32)
x, y = loader.create(fixture_path(), width=4, height=4, format='png')
self.assertEqual(y.get_shape(), [])
self.assertEqual(int(x.get_shape()[1]), 4)
self.assertEqual(int(x.get_shape()[2]), 4)
    def test_load_fixture_small(self):
with self.test_session():
loader = ImageLoader(32) #TODO crop=true?
loader.create(fixture_path(), width=2, height=2, format='png')
self.assertEqual(int(loader.x.get_shape()[1]), 2)
self.assertEqual(int(loader.x.get_shape()[2]), 2)
def test_load_fixture_resize(self):
with self.test_session():
loader = ImageLoader(32) #TODO crop=true?
loader.create(fixture_path(), width=8, height=8, resize=True, format='png')
self.assertEqual(int(loader.x.get_shape()[1]), 8)
self.assertEqual(int(loader.x.get_shape()[2]), 8)
def test_load_fixture_single(self):
with self.test_session():
loader = ImageLoader(32) #TODO crop=true? why is this working
loader.create(fixture_path('images'), width=4, height=4, format='png')
self.assertEqual(int(loader.x.get_shape()[1]), 4)
self.assertEqual(int(loader.x.get_shape()[2]), 4)
    def test_load_fixture_file_count(self):
with self.test_session():
loader = ImageLoader(32) #TODO crop=true?
loader.create(fixture_path(), width=4, height=4, format='png')
self.assertEqual(loader.file_count, 2)
def test_load_fixture_single_count(self):
with self.test_session():
loader = ImageLoader(32) #TODO crop=true?
loader.create(fixture_path('white'), width=4, height=4, format='png')
self.assertEqual(loader.file_count, 1)
if __name__ == "__main__":
tf.test.main()
| 255BITS/HyperGAN | tests/inputs/test_image_loader.py | Python | mit | 2,599 |
##
## File: macros.py
##
## Author: Schuyler Martin <[email protected]>
##
## Description: Python file that contains C-like macros for the project
##
#### GLOBALS ####
# Enables extended debug printing
DEBUG_MACRO = False
# Debug flag for database debugging; uses a different SQLite file
DEBUG_DB = True
# Host name of the server
SERVER_HOST = "localhost"
# Name of the default queue
SERVER_QUEUE = "Default Queue"
UID_BOOTSTRAP_QUEUE = "UID Queue"
# UID prefixes that identifies what kind of user we have
UID_PREFIX_STU = "stu_"
UID_PREFIX_TUT = "tut_"
# RIT email extension
RIT_EMAIL_EXT = "@rit.edu"
# Various semi-official tutor titles
TUTOR_TA = "TA"
TUTOR_SLI = "SLI"
TUTOR_TUT = "Tutor"
# TODO Message commands/interface with the app
# User enters/leaves the Mentoring Center
MSG_STU_ENTER = "stu_enters"
MSG_TUT_ENTER = "tut_enters"
MSG_USER_ENTER = "user_enters"
MSG_USER_LEAVE = "user_leaves"
# Generic "User is getting helped or giving help"
MSG_USER_HELPED = "user_help"
# Student proposes question/gets question answered
MSG_STU_QUEST = "stu_question"
MSG_STU_ANS = "stu_answer"
# Tutor is assigned a question/finishes with a student
MSG_TUT_HELP = "tut_help"
MSG_TUT_DONE = "tut_done"
# Error messages
MSG_ERR_USER_LOGIN = "err_user_login"
# TODO Message JSON parameters
MSG_PARAM_METHOD = "get_method"
MSG_PARAM_USER_NAME = "user_name"
MSG_PARAM_USER_PASSWD = "user_passwd"
MSG_PARAM_USER_F_NAME = "user_f_name"
MSG_PARAM_USER_L_NAME = "user_l_name"
MSG_PARAM_USER_TITLE = "user_title"
# identify users in messaging
MSG_PARAM_USER_UID = "user_uid"
MSG_PARAM_STU_UID = "student_uid"
MSG_PARAM_TUT_UID = "tutor_uid"
# SQLite database file naming
SQL_DB_PATH = "./"
SQL_DB_FILE = "mmcga.db"
SQL_DB = SQL_DB_PATH + SQL_DB_FILE
# debug version of the database for testing purposes
SQL_DB_FILE_DEBUG = "debug_test_mmcga.db"
SQL_DB_DEBUG = SQL_DB_PATH + SQL_DB_FILE_DEBUG
# TODO Database tables
DB_USER_TBL = "Users"
# alternative indices in the table ("secondary keys")
DB_UNAME_IDX = "user_name_idx"
# fields for DB tables
DB_FIELD_UID = "uid"
DB_FIELD_UNAME = "user_name"
DB_FIELD_JSON = "json_str"
# field types for DB tables
DB_F_TYPE_INT = "INTEGER"
DB_F_TYPE_TXT = "TEXT"
| RIT-CS-Mentoring-Center-Queueing/mmcga_project | server/utils/macros.py | Python | mit | 2,356 |
"""
This file taken from https://github.com/icecreammatt/flask-empty
and modified for this project.
"""
import os
_basedir = os.path.abspath(os.path.dirname(__file__))
DEBUG = True
TESTING = True
ADMINS = frozenset(['[email protected]'])
SECRET_KEY = 'WillWorkForJob'
THREADS_PER_PAGE = 8
CSRF_ENABLED = True
CSRF_SESSION_KEY = "supercalifragilistic98765"
| scottdillon/rti_exercise | rti_app/config.py | Python | mit | 366 |