from beamit.resources.base import Resource
class PasswordChangeRequest(Resource):
MEDIA_TYPE = 'application/vnd.beamit.password.change.request+json'
def __init__(self, email, password, new_password):
self.email = email
self.password = password
self.new_password = new_password
def __repr__(self):
return "<PasswordChangeRequest email: {}, password: {}, new_password: {}>".format(
self.email,
self.password,
self.new_password,
)
def to_dict(self):
return dict(email=self.email, password=self.password, new_password=self.new_password)
@classmethod
def from_dict(cls, dct):
return cls(
email=dct.get("email"),
password=dct.get("password"),
new_password=dct.get("new_password"),
)
class PasswordChangeResponse(Resource):
MEDIA_TYPE = 'application/vnd.beamit.password.change.response+json'
def __init__(self, user_id):
self.user_id = user_id
def __repr__(self):
return "<PasswordChangeResponse user_id: {}>".format(self.user_id)
def to_dict(self):
return dict(user_id=self.user_id)
@classmethod
def from_dict(cls, dct):
return cls(user_id=dct.get("user_id"))
| {
"content_hash": "805ecd92fc785f196cec7c2ce6c76d2b",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 93,
"avg_line_length": 27.319148936170212,
"alnum_prop": 0.6137071651090342,
"repo_name": "ksweta/BeamIt-Server",
"id": "038a4e2425643785cb2f571d803b9a54b8153fdf",
"size": "1284",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "beamit/resources/password.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "148"
},
{
"name": "Python",
"bytes": "32100"
}
],
"symlink_target": ""
} |
"""ovirt-host-setup websocket_proxy plugin."""
from otopi import util
from . import config
from . import pki
@util.export
def createPlugins(context):
config.Plugin(context=context)
pki.Plugin(context=context)
# vim: expandtab tabstop=4 shiftwidth=4
| {
"content_hash": "114801fcbb74f8aa9e48a91a90e0d35a",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 46,
"avg_line_length": 15.588235294117647,
"alnum_prop": 0.7358490566037735,
"repo_name": "halober/ovirt-engine",
"id": "6e59d3656b27a0a252a16d1dee5aa6c98ef6ebcf",
"size": "895",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packaging/setup/plugins/ovirt-engine-setup/websocket_proxy/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "251848"
},
{
"name": "Java",
"bytes": "26541598"
},
{
"name": "JavaScript",
"bytes": "890"
},
{
"name": "Python",
"bytes": "698283"
},
{
"name": "Shell",
"bytes": "105362"
},
{
"name": "XSLT",
"bytes": "54683"
}
],
"symlink_target": ""
} |
#!/usr/bin/env python
from datetime import date
import os
import re
SRC_DIR = 'src/main/java/'
ABSTRACT = re.compile(r'public abstract class Abstract')
TYPE = re.compile(r'class [A-Za-z0-9]+(<[^>]+?(?: extends ([A-Za-z0-9_]+))?>)?')
TARGET = re.compile(r'\s@Nullable (([A-Z][A-Za-z0-9_]+).*?)(<.+?>)? actual\) {')
IMPORT = re.compile(r'import (?:static )?((?:com\.google\.)?android(x?)\..*?);')
ASSERTIONS = 'Assertions.java'
projects = []
for candidate in filter(os.path.isdir, os.listdir('.')):
if candidate.startswith('truth-android'):
projects.append(candidate)
print('Projects: %s\n' % projects)
def _find_assertions(path):
for root, dirs, files in os.walk(path):
if ASSERTIONS in files:
return os.path.join(root, ASSERTIONS)
raise Exception('Could not locate Assertions.java in %s.' % path)
for project in projects:
src_dir = os.path.join(project, SRC_DIR)
assertions_file = _find_assertions(src_dir)
assertions_dir = os.path.dirname(assertions_file)
classes_package = assertions_dir[len(src_dir):].replace(os.sep, '.')
print('\n' * 3)
print(project)
print('')
print('src_dir = %s' % src_dir)
print('assertions_file = %s' % assertions_file)
print('assertions_dir = %s' % assertions_dir)
print('classes_package = %s' % classes_package)
print('')
assertions = []
for root, dirs, files in os.walk(assertions_dir):
for f in files:
if not f.endswith('Subject.java'):
continue
print('-'*80)
local_package = root[len(src_dir):].replace(os.sep, '.')
package = '%s.%s' % (local_package, f[:-5])
print('package : %s' % package)
with open(os.path.join(root, f)) as j:
java = j.read()
if ABSTRACT.search(java) is not None:
print('SKIP (abstract)')
continue # Abstract class.
target_match = TARGET.search(java)
import_type = target_match.group(2)
target_type = target_match.group(1)
generics = target_match.group(3)
print('import type: %s' % import_type)
print('target type: %s' % target_type)
print('generics : %s' % generics)
for match in IMPORT.finditer(java):
if match.group(1).endswith(import_type):
import_package = match.group(1)
break
else:
raise Exception('Could not find target package for %s' % import_type)
type_match = TYPE.search(java)
bounds_type = type_match.group(1)
bounds_ext = type_match.group(2)
if generics:
print('bounds type: %s' % bounds_type)
print('bounds ext : %s' % bounds_ext)
if bounds_ext:
for match in IMPORT.finditer(java):
if match.group(1).endswith(bounds_ext):
bounds_type = bounds_type.replace(bounds_ext, match.group(1))
break
else:
raise Exception('Could not find target package for %s' % bounds_ext)
print('bounds fqcn: %s' % bounds_type)
target_package = import_package.replace(import_type, target_type)
print('import pkg : %s' % import_package)
print('target pkg : %s' % target_package)
assertions.append(
(package, target_package, bounds_type or '', generics or '')
)
print('-'*80)
with open(assertions_file, 'w') as out:
out.write('// Copyright %s PKWARE, Inc.\n' % date.today().year)
out.write('//\n')
out.write('// This class is generated. Do not modify directly!\n')
out.write('package %s;\n\n' % classes_package)
out.write('import javax.annotation.Nonnull;\n')
out.write('import javax.annotation.Nullable;\n\n')
out.write('import static com.google.common.truth.Truth.assertAbout;\n\n')
out.write('/** Assertions for testing Android classes. */\n')
out.write('@SuppressWarnings("deprecation")\n')
out.write('public final class Assertions {')
for package, target_package, bounds_type, generic_keys in sorted(assertions, key=lambda x: x[0]):
out.write('\n')
out.write(' @Nonnull\n')
out.write(' public static %s%s%s assertThat(\n' % (bounds_type + ' ' if bounds_type else '', package, generic_keys))
out.write(' @Nullable %s%s target) {\n' % (target_package, generic_keys))
out.write(' return assertAbout(%s%s::new).that(target);\n' % (package, generic_keys))
out.write(' }\n')
out.write('\n')
out.write(' private Assertions() {\n')
out.write(' throw new AssertionError("No instances.");\n')
out.write(' }\n')
out.write('}\n')
print('\nNew Assertions.java files written!\n') | {
"content_hash": "14cbd0a5b02985aab6004e5456dae51e",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 123,
"avg_line_length": 35.71653543307087,
"alnum_prop": 0.6166225749559083,
"repo_name": "pkware/truth-android",
"id": "e31059a54c21c5d37af06b22b8c33ca49e909b70",
"size": "4536",
"binary": false,
"copies": "1",
"ref": "refs/heads/mv/gpg",
"path": "generate-assertions-java.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1341"
},
{
"name": "HTML",
"bytes": "13148"
},
{
"name": "Java",
"bytes": "950659"
},
{
"name": "Python",
"bytes": "4536"
},
{
"name": "Shell",
"bytes": "2484"
}
],
"symlink_target": ""
} |
'''
Write code to reverse a C-Style String. (C-String means that “abcd” is represented as
five characters, including the null character.)
'''
def reverse_cstyle_string(string_):
i = 2
reverse_string = ""
for _ in range(len(string_) - 1):
reverse_string += string_[-i]
i += 1
return reverse_string
if __name__ == '__main__':
str_ = input()
# Make it C Style
str_ += '\0'
print(reverse_cstyle_string(str_))
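# A quick sanity check, with an assumed interactive input of "abcd" (not part of
# the original file): the padded C-style string is "abcd\0" (five characters) and
# reverse_cstyle_string("abcd\0") returns "dcba".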
| {
"content_hash": "5806f98ca6be32a60492e78588a167a7",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 85,
"avg_line_length": 21.761904761904763,
"alnum_prop": 0.5864332603938731,
"repo_name": "roommen/CtCI",
"id": "4f50c51f0b9221bb8a5cb1b744eb45c0cae631ad",
"size": "461",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fourth_edition/ch1_arrays_and_strings/python/1.2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12073"
}
],
"symlink_target": ""
} |
import json
from urllib.request import urlopen
from datetime import datetime
from i3pystatus import IntervalModule
from i3pystatus.core.util import internet, require
class SpaceAPI(IntervalModule):
"""
Show if a hackerspace is open
.. rubric:: Available formatters
* {state}
* {message}
* {lastchange}
"""
data = {}
format = "S: {state}"
color_open = "#00FF00"
color_closed = "#FF0000"
interval = 10
settings = (
("url", "spaceapi endpoint"),
("format", "format string used for output."),
("color_open", "color if hackerspace is opened"),
("color_closed", "color if hackerspace is closed"),
("interval", "update interval")
)
required = ('url', )
url = None
@require(internet)
def run(self):
res = urlopen(self.url)
api = json.loads(res.read())
self.data['color'] = self.color_open if api['state']['open'] else self.color_closed
self.data['state'] = 'open' if api['state']['open'] else 'closed'
self.data['message'] = api['state'].get('message', '')
self.data['lastchange'] = datetime.fromtimestamp(int(api['state']['lastchange']))
self.output = {
"full_text": self.format.format(**self.data),
"color": self.data['color']
}
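# A minimal usage sketch; the URL, format string and status-bar wiring below are
# assumptions for illustration, not part of this module:
#
#   from i3pystatus import Status
#   status = Status()
#   status.register("spaceapi",
#                   url="https://example.org/spaceapi.json",
#                   format="{state} ({message})")
#   status.run()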
| {
"content_hash": "a6d42942d6be437bec83acc95ae0ceb9",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 91,
"avg_line_length": 26.176470588235293,
"alnum_prop": 0.5865168539325842,
"repo_name": "Arvedui/i3pystatus",
"id": "167e6806bbea5803689aaf04e08a65fad485d807",
"size": "1335",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "i3pystatus/spaceapi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "607826"
},
{
"name": "Shell",
"bytes": "757"
}
],
"symlink_target": ""
} |
import itertools
import unittest
from oslo_log import log as logging
import mock
import oslotest.base
import testtools
from designate import exceptions
from designate import objects
LOG = logging.getLogger(__name__)
def debug(*a, **kw):
for v in a:
LOG.debug(repr(v))
for k in sorted(kw):
LOG.debug("%s: %s", k, repr(kw[k]))
class TestRecordSet(objects.RecordSet):
FIELDS = {
'id': {},
'name': {},
'records': {
'relation': True,
'relation_cls': 'RecordList',
},
}
def create_test_recordset():
rs = objects.RecordSet(
name='www.example.org.',
type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1'),
objects.Record(data='192.0.2.2'),
])
)
return rs
class RecordSetTest(oslotest.base.BaseTestCase):
def test_init(self):
rs = create_test_recordset()
self.assertEqual(rs.name, 'www.example.org.')
self.assertEqual(rs.type, 'A')
def test_not_managed(self):
rs = create_test_recordset()
self.assertFalse(rs.managed)
def test_managed(self):
rs = objects.RecordSet(
name='www.example.org.',
type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1', managed=True),
objects.Record(data='192.0.2.2'),
])
)
self.assertTrue(rs.managed)
def test_action(self):
action = 'CREATE'
rs = objects.RecordSet(
name='www.example.org.',
type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1', action=action),
])
)
self.assertEqual(rs.action, action)
def test_action_create(self):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1', action='CREATE'),
])
)
self.assertEqual(rs.action, 'CREATE')
def test_action_create_plus_update(self):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1', action='CREATE'),
objects.Record(data='192.0.2.2', action='UPDATE'),
])
)
self.assertEqual(rs.action, 'UPDATE')
def test_action_delete_plus_update(self):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1', action='DELETE'),
objects.Record(data='192.0.2.2', action='UPDATE'),
])
)
self.assertEqual(rs.action, 'UPDATE')
def test_action_delete_only(self):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1', action='DELETE'),
objects.Record(data='192.0.2.2', action='DELETE'),
])
)
self.assertEqual(rs.action, 'DELETE')
@unittest.expectedFailure # bug
def test_status_error(self):
statuses = ('ERROR', 'PENDING', 'ACTIVE')
failed = False
for s1, s2, s3 in itertools.permutations(statuses):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.1', status=s1),
objects.Record(data='192.0.2.2', status=s2),
objects.Record(data='192.0.2.3', status=s3),
])
)
if rs.status != 'ERROR':
failed = True
print("test_status_error failed for %s %s %s: %s" % (
s1, s2, s3, rs.status))
self.assertFalse(failed)
def test_status_pending(self):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.2', status='PENDING'),
objects.Record(data='192.0.2.3', status='ACTIVE'),
])
)
self.assertEqual(rs.status, 'PENDING')
def test_status_pending2(self):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.3', status='ACTIVE'),
objects.Record(data='192.0.2.2', status='PENDING'),
])
)
self.assertEqual(rs.status, 'PENDING')
def test_status_active(self):
rs = objects.RecordSet(
name='www.example.org.', type='A',
records=objects.RecordList(objects=[
objects.Record(data='192.0.2.3', status='ACTIVE'),
])
)
self.assertEqual(rs.status, 'ACTIVE')
def test_validate(self):
rs = create_test_recordset()
rs.validate()
def test_validate_handle_exception(self):
rs = create_test_recordset()
fn_name = 'designate.objects.DesignateObject.obj_cls_from_name'
with mock.patch(fn_name) as patched:
patched.side_effect = KeyError
with testtools.ExpectedException(exceptions.InvalidObject):
# TODO(Federico): check the attributes of the exception
rs.validate()
| {
"content_hash": "c1ea252b4e5fefe8c93e82ffaaa42df4",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 71,
"avg_line_length": 31.43820224719101,
"alnum_prop": 0.5393137955682631,
"repo_name": "tonyli71/designate",
"id": "58ba537acb8149f078b15fe0e1ae50a0e6bfcaea",
"size": "6258",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "designate/tests/unit/test_objects/test_recordset.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2200317"
},
{
"name": "Ruby",
"bytes": "4560"
},
{
"name": "Shell",
"bytes": "12933"
}
],
"symlink_target": ""
} |
import asyncore
import os
import socket
import sys
import time
import traceback
from .. import protocol
from .. import util
BLOCK_SIZE = 8192
class Connection(asyncore.dispatcher):
'''A connection to a remote peer node.
    Handles buffering input and output into messages and calls the
    corresponding command_* handler on the node.'''
SERVICES = protocol.SERVICE_NODE_NETWORK
def __init__(self, node, address, sock = None):
# this is also available as self._map from dispatcher
self._node = node
# send and receive buffers
self._send_buffer = ""
self._recv_buffer = ""
# total byte count we have sent and received
self._tx_bytes = 0
self._rx_bytes = 0
# last time we sent and receieved data
self._last_tx_time = 0
self._last_ping_time = 0
self._last_rx_time = 0
# remote node details
self._address = address
self._external_ip_address = None
self._services = None
self._start_height = None
self._user_agent = None
self._version = None
self._relay = None
self._banscore = 0
# have we got a version acknowledgement from the remote node?
self._verack = False
# if we get a socket, we started because of an accept
if sock:
asyncore.dispatcher.__init__(self, sock = sock, map = node)
self._incoming = True
# otherwise, we get an address to connect to
else:
asyncore.dispatcher.__init__(self, map = node)
try:
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect(address)
except Exception, e:
self.handle_close()
raise e
self._incoming = False
# we bootstrap communication with the node by broadcasting our version
now = time.time()
message = protocol.Version(
version = node.coin.protocol_version,
services = self.SERVICES,
timestamp = now,
addr_recv = protocol.NetworkAddress(now, self.SERVICES, address[0], address[1]),
addr_from = protocol.NetworkAddress(now, self.SERVICES, node.external_ip_address, node.port),
nonce = os.urandom(8),
user_agent = node.user_agent,
start_height = node.blockchain_height,
relay = False
)
self.send_message(message)
# remote node details
address = property(lambda s: s._address)
ip_address = property(lambda s: s._address[0])
port = property(lambda s: s._address[1])
incoming = property(lambda s: s._incoming)
services = property(lambda s: s._services)
start_height = property(lambda s: s._start_height)
user_agent = property(lambda s: s._user_agent)
version = property(lambda s: s._version)
relay = property(lambda s: s._relay)
external_ip_address = property(lambda s: s._external_ip_address)
# connection details
verack = property(lambda s: s._verack)
rx_bytes = property(lambda s: s._rx_bytes)
tx_bytes = property(lambda s: s._tx_bytes)
node = property(lambda s: s._node)
banscore = property(lambda s: s._banscore)
# last time we heard from the remote node
timestamp = property(lambda s: (time.time() - s._last_rx_time))
def add_banscore(self, penalty = 1):
self._banscore += penalty
def reduce_banscore(self, penalty = 1):
if (self._banscore - penalty) < 0:
self._banscore = 0
else:
self._banscore -= penalty
def readable(self):
now = time.time()
rx_ago = now - self._last_rx_time
tx_ago = now - self._last_tx_time
ping_ago = now - self._last_ping_time
# haven't sent anything for 30 minutes, send a ping every 5 minutes
if self._last_tx_time and tx_ago > (30 * 60) and ping_ago > (5 * 60):
self.send_message(protocol.Ping(os.urandom(8)))
self._last_ping_time = time.time()
# it's been over 3 hours... disconnect
if self._last_rx_time and rx_ago > (3 * 60 * 60):
self.handle_close()
return False
return True
def handle_read(self):
# read some data and add it to our incoming buffer
try:
chunk = self.recv(BLOCK_SIZE)
except Exception, e:
chunk = ''
# remote connection closed
if not chunk:
self.handle_close()
return
self._recv_buffer += chunk
self._rx_bytes += len(chunk)
self.node._rx_bytes += len(chunk)
self._last_rx_time = time.time()
# process as many messages as we have the complete bytes for
while True:
# how long is the next message, and do we have it all?
length = protocol.Message.first_message_length(self._recv_buffer)
            if length is None or length > len(self._recv_buffer):
break
# parse the message and handle it
payload = self._recv_buffer[:length]
try:
message = protocol.Message.parse(payload, self.node.coin.magic)
self.handle_message(message)
except protocol.UnknownMessageException, e:
self.node.invalid_command(self, self._recv_buffer[:length], e)
except protocol.MessageFormatException, e:
self.node.invalid_command(self, self._recv_buffer[:length], e)
# remove the message bytes from the buffer
self._recv_buffer = self._recv_buffer[length:]
def writable(self):
return len(self._send_buffer) > 0
def handle_write(self):
try:
sent = self.send(self._send_buffer)
self._tx_bytes += sent
self.node._tx_bytes += sent
self._last_tx_time = time.time()
except Exception, e:
self.handle_close()
return
self._send_buffer = self._send_buffer[sent:]
def handle_error(self):
t, v, tb = sys.exc_info()
if t == socket.error:
self.node.log('--- connection refused', peer = self, level = self.node.LOG_LEVEL_INFO)
else:
self.node.log(traceback.format_exc(), peer = self, level = self.node.LOG_LEVEL_ERROR)
del tb
self.handle_close()
def handle_close(self):
try:
self.close()
except Exception, e:
pass
self.node.disconnected(self)
def handle_message(self, message):
self.node.log('<<< ' + str(message), peer = self, level = self.node.LOG_LEVEL_PROTOCOL)
self.node.log('<<< ' + message._debug(), peer = self, level = self.node.LOG_LEVEL_DEBUG)
kwargs = dict((k, getattr(message, k)) for (k, t) in message.properties)
if message.command == protocol.Version.command:
self._services = message.services
self._start_height = message.start_height
self._user_agent = message.user_agent
self._version = message.version
self._relay = message.relay
self._external_ip_address = message.addr_recv.address
self.node.connected(self)
elif message.command == protocol.VersionAck.command:
self._verack = True
elif message.command == protocol.Alert.command:
# @TODO: check expiration, etc.
if message.verify(self.node.coin.alert_public_key):
kwargs = dict((k, getattr(message, k)) for (k, t) in message.payload_properties)
elif message.verify(self.node.alert_public_key):
kwargs = dict((k, getattr(message, k)) for (k, t) in message.payload_properties)
else:
self.node.invalid_alert(self, message)
message = None
if message:
getattr(self.node, 'command_' + message.name)(self, **kwargs)
def send_message(self, message):
msg = str(message)
self.node.log('>>> ' + str(message), peer = self, level = self.node.LOG_LEVEL_PROTOCOL)
self.node.log('>>> ' + message._debug(), peer = self, level = self.node.LOG_LEVEL_DEBUG)
self._send_buffer += message.binary(self.node.coin.magic)
def __hash__(self):
return hash(self.address)
def __eq__(self, other):
        return isinstance(other, Connection) and self.address == other.address
def __str__(self):
return '<Connection(%s) %s:%d>' % (self._fileno, self.ip_address, self.port)
| {
"content_hash": "4f70d8dbc3213053306e4603fdf86228",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 115,
"avg_line_length": 31.53818181818182,
"alnum_prop": 0.5715438717860025,
"repo_name": "ricmoo/pycoind",
"id": "edf68602bd60de61dcbd0c58e63a84fbe7bf0e00",
"size": "9793",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pycoind/node/connection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "631386"
}
],
"symlink_target": ""
} |
from principal.models import Alumno, Imparteasignatura, Asignatura
from principal.services import DepartmentService
from django.db.models import Q
def get_student_subjects(student_id):
student = Alumno.objects.get(id=student_id)
return student.asignaturas.all().order_by('nombre')
def get_lecturer_subjects(lecturer_id):
return Asignatura.objects.filter(
id__in=[a.asignatura.id for a in Imparteasignatura.objects.filter(profesor_id=lecturer_id)]).order_by('nombre')
def create(form):
subject = Asignatura()
subject.nombre = form.cleaned_data['name']
subject.curso = form.cleaned_data['course']
subject.codigo = form.cleaned_data['code']
subject.cuatrimestre = form.cleaned_data['quarter']
subject.creditos = form.cleaned_data['credits']
subject.web = form.cleaned_data['web']
subject.duracion = form.cleaned_data['duration']
subject.tipo_asignatura = form.cleaned_data['type']
subject.departamento = form.cleaned_data['departament']
return subject
def save(subject):
subject.save()
def find_by_code(code):
try:
subject = Asignatura.objects.get(codigo=code)
except Asignatura.DoesNotExist:
subject = None
return subject
def find_one(subject_id):
try:
subject = Asignatura.objects.get(id=subject_id)
except Asignatura.DoesNotExist:
subject = None
return subject
def edit(form):
subject = find_one(form.cleaned_data['id'])
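    # Synchronise the subject's certifications with the submitted set: keep the
    # common ones, attach the newly selected ones and detach the deselected ones.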
certifications_new = set(form.cleaned_data['certifications'])
certification_subject = set(subject.titulacion_set.all())
common = certifications_new.intersection(certification_subject)
remove = certification_subject.difference(common)
insert = certifications_new.difference(common)
[subject.titulacion_set.add(certification) for certification in list(insert)]
[subject.titulacion_set.remove(certification) for certification in list(remove)]
return subject
def find_all():
return Asignatura.objects.all()
# Returns the students subscribed to the logged-in lecturer's subjects
def lecturer_students(lecturer_id):
students = [subject.alumno_set.all() for subject in get_lecturer_subjects(lecturer_id)]
return list(set([item for sublist in students for item in sublist])) # Merge lists inside and remove duplicates
def rollback(subject_create, subject_link, certifications):
for subject in subject_link:
[subject.titulacion_set.remove(certification) for certification in list(certifications)]
for subject in subject_create:
subject.delete()
def subject_students(subject, lecturer_id):
return subject.alumno_set.all() if subject else get_lecturer_subjects(lecturer_id)
def get_form_data_xml(subject):
data = {}
if subject:
data = {
'name': subject['nombre'],
'course': subject['curso'],
'code': subject['codigo'],
'quarter': subject['cuatrimestre'],
'credits': subject['creditos'],
'duration': subject['duracion'],
'type': subject['tipo'],
}
try:
data['web'] = subject['web']
except KeyError:
pass
department = DepartmentService.find_by_code(subject['departamento'])
data['departament'] = department.id
return data
def get_form_data_csv(subject):
data = {}
if subject:
data = {
'name': subject[1],
'course': subject[2],
'code': subject[0],
'quarter': subject[3],
'credits': subject[4],
'duration': subject[5],
'type': subject[6],
}
try:
data['web'] = subject[8]
except IndexError:
pass
department = DepartmentService.find_by_code(subject[7])
data['departament'] = department.id
return data
def search(search_text):
return Asignatura.objects.filter(
Q(nombre__icontains=search_text) |
Q(codigo__icontains=search_text)).order_by('nombre')
# TODO: NOT YET TESTED (Carlos: please review)
def get_student_subjects_search(student_id, search_text):
student = Alumno.objects.get(id=student_id)
return student.asignaturas.all().filter(
Q(nombre__icontains=search_text) | Q(codigo__icontains=search_text)).order_by('nombre')
# TODO: NOT YET TESTED (Carlos: please review)
def get_lecturer_subjects_search(lecturer_id, search_text):
    return Asignatura.objects.filter(
        id__in=[a.asignatura.id for a in Imparteasignatura.objects.filter(
            Q(profesor_id=lecturer_id),
            Q(asignatura__nombre__icontains=search_text) |
            Q(asignatura__codigo__icontains=search_text))]).order_by('nombre')
| {
"content_hash": "5c7047bc491f6acd509a1392c7ed50d9",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 119,
"avg_line_length": 30.314102564102566,
"alnum_prop": 0.6614506238105308,
"repo_name": "carborgar/gestionalumnostfg",
"id": "f16c2b5a2c0642a834f356930917412c24c52ba4",
"size": "4729",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "principal/services/SubjectService.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "163328"
},
{
"name": "HTML",
"bytes": "233644"
},
{
"name": "JavaScript",
"bytes": "58644"
},
{
"name": "Python",
"bytes": "228702"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.db import models, transaction
from django.utils.crypto import get_random_string
from django.utils.timezone import now
class Key(models.Model):
key = models.CharField(max_length=254, unique=True, db_index=True)
created = models.DateTimeField(default=now)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
def save(self, *args, **kwargs):
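        # Keys are write-once: an already-persisted Key is never re-saved, and a
        # new Key retries random values until one is free of collisions.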
if self.pk:
return
with transaction.atomic():
self.key = get_random_string(254)
while Key.objects.filter(key=self.key).exists():
self.key = get_random_string(254)
super(Key, self).save(*args, **kwargs)
| {
"content_hash": "edea14ee8a9898e4adfffc575f362f08",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 70,
"avg_line_length": 34.25,
"alnum_prop": 0.6613138686131387,
"repo_name": "MarkusH/django-sshlogin",
"id": "ec67ea17a38daec3e6fe88314cc53459ea0a8b40",
"size": "685",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keymgmt/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "8537"
}
],
"symlink_target": ""
} |
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse, reverse_lazy
from django.shortcuts import render
from django.template import Context
from django.views.generic import FormView, CreateView
from django.views.generic.edit import ModelFormMixin
from rest_framework import viewsets, generics
from videos.forms import VideoForm
from videos.tables import SimpleTable
from .models import Video
from .serializer import VideoSerializer
class VideoViewSet(viewsets.ModelViewSet):
"""
API Endpoint for videos.
"""
queryset = Video.objects.all()
serializer_class = VideoSerializer
class VideoCreate(CreateView):
model = Video
form_class = VideoForm
success_url = reverse_lazy('videos:success')
def form_valid(self, form):
self.object = form.save(commit=False)
for f in self.request.FILES.getlist('file'):
self.object.pk = None
self.object.file = f
self.object.save()
return super(VideoCreate, self).form_valid(form)
@login_required
def overview(request):
mod_queue = Video.objects.filter(approved=False)
t = 'videos/overview.html'
c = {
'moderation_queue_count': len(mod_queue),
'total_videos': Video.objects.count(),
'video_table': SimpleTable(Video.objects.all()),
'awaiting_moderation': mod_queue,
}
return render(request, t, c)
@login_required
def moderate(request):
video_list = Video.objects.all()
t = 'videos/moderate.html'
c = {'video_list': video_list}
return render(request, t, c)
| {
"content_hash": "a98f073f27a9a0e5fdc8233878895734",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 58,
"avg_line_length": 26.032258064516128,
"alnum_prop": 0.6951672862453532,
"repo_name": "hub-ology/video_village",
"id": "301a08d6087d463646ea879037ffb92d512091c9",
"size": "1614",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "videos/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13218"
},
{
"name": "HTML",
"bytes": "35391"
},
{
"name": "JavaScript",
"bytes": "528"
},
{
"name": "Python",
"bytes": "94455"
}
],
"symlink_target": ""
} |
"""Test the NEW_NAME config flow."""
from unittest.mock import patch
from homeassistant import config_entries, setup
from homeassistant.components.NEW_DOMAIN.config_flow import CannotConnect, InvalidAuth
from homeassistant.components.NEW_DOMAIN.const import DOMAIN
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.NEW_DOMAIN.config_flow.PlaceholderHub.authenticate",
return_value=True,
), patch(
"homeassistant.components.NEW_DOMAIN.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.NEW_DOMAIN.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "Name of the device"
assert result2["data"] == {
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.NEW_DOMAIN.config_flow.PlaceholderHub.authenticate",
side_effect=InvalidAuth,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.NEW_DOMAIN.config_flow.PlaceholderHub.authenticate",
side_effect=CannotConnect,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"host": "1.1.1.1",
"username": "test-username",
"password": "test-password",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"}
| {
"content_hash": "bab97d4d0252483b3c889058d26af69f",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 86,
"avg_line_length": 33.24175824175824,
"alnum_prop": 0.5980165289256199,
"repo_name": "partofthething/home-assistant",
"id": "04eab6e683c6c41605245ff39a656b3f671c134d",
"size": "3025",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "script/scaffold/templates/config_flow/tests/test_config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
import errno
import os
import re
from string import Template
PYTHONQT_WRAPPER_WITH_PARENT = Template("""
//-----------------------------------------------------------------------------
class PythonQtWrapper_${className} : public QObject
{
Q_OBJECT
public:
public Q_SLOTS:
${className}* new_${className}(${parentClassName}* parent = 0)
{
return new ${className}(parent);
}
void delete_${className}(${className}* obj) { delete obj; }
};
""")
PYTHONQT_WRAPPER_WITHOUT_PARENT = Template("""
//-----------------------------------------------------------------------------
class PythonQtWrapper_${className} : public QObject
{
Q_OBJECT
public:
public Q_SLOTS:
${className}* new_${className}()
{
return new ${className}();
}
void delete_${className}(${className}* obj) { delete obj; }
};
""")
def _mkdir_p(path):
"""See """
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else: raise
def ctk_wrap_pythonqt(target, namespace, output_dir, input_files, extra_verbose):
if extra_verbose:
print("target: %s" % target)
print("namespace: %s" % namespace)
print("output_dir: %s" % output_dir)
print("input_files: %s" % input_files)
_mkdir_p(output_dir)
includes = []
pythonqtWrappers = []
registerclasses = []
namespace = namespace.replace('.', '_')
for input_file in input_files:
filename = os.path.basename(input_file)
if extra_verbose:
print("Wrapping %s" % filename)
# what is the filename without the extension
filename_we = os.path.splitext(filename)[0]
# Extract classname - NOTE: We assume the filename matches the associated class
className = filename_we
if extra_verbose:
print("\tclassName:%s" % className)
# Extract parent classname
parentClassName = None
# Read input files
with open(input_file) as f:
content = f.read()
# Skip wrapping if file do NOT contain Q_OBJECT
if 'Q_OBJECT' not in content:
if extra_verbose:
print("\tskipping - No Q_OBJECT macro")
continue
# Skip wrapping if constructor doesn't match:
# my_class()
# my_class(QObject* newParent ...)
# my_class(QWidget* newParent ...)
# Constructor with either QWidget or QObject as first parameter
regex = r"[^~]%s[\s\n]*\([\s\n]*((QObject|QWidget)[\s\n]*\*[\s\n]*\w+[\s\n]*(\=[\s\n]*(0|NULL)|,.*\=.*\)|\)|\)))" % className
res = re.search(regex, content, re.MULTILINE)
if res is None:
if extra_verbose:
print("\tskipping - Missing expected constructor signature")
continue
# Skip wrapping if object has a virtual pure method
# "x3b" is the unicode for semicolon
regex = r"virtual[\w\n\s\*\(\)]+\=[\s\n]*(0|NULL)[\s\n]*\x3b"
res = re.search(regex, content, re.MULTILINE)
if res is not None:
if extra_verbose:
print("skipping - Contains a virtual pure method")
continue
if parentClassName is None:
            # Is the constructor signature of the form: myclass()
regex = r"[^~]%s[\s\n]*\([\s\n]*\)" % className
res = re.search(regex, content, re.MULTILINE)
if res is not None:
parentClassName = ""
if extra_verbose:
print("\tconstructor of the form: %s()" % className)
if parentClassName is None:
            # Is the constructor signature of the form: myclass(QObject * parent ...)
regex = r"%s[\s\n]*\([\s\n]*QObject[\s\n]*\*[\s\n]*\w+[\s\n]*(\=[\s\n]*(0|NULL)|,.*\=.*\)|\))" % className
res = re.search(regex, content, re.MULTILINE)
if res is not None:
parentClassName = "QObject"
if extra_verbose:
print("\tconstructor of the form: %s(QObject * parent ... )" % className)
if parentClassName is None:
            # Is the constructor signature of the form: myclass(QWidget * parent ...)
regex = r"%s[\s\n]*\([\s\n]*QWidget[\s\n]*\*[\s\n]*\w+[\s\n]*(\=[\s\n]*(0|NULL)|,.*\=.*\)|\))" % className
res = re.search(regex, content, re.MULTILINE)
if res is not None:
parentClassName = "QWidget"
if extra_verbose:
print("\tconstructor of the form: %s(QWidget * parent ... )" % className)
if parentClassName is not None:
includes.append('#include "%s.h"' % filename_we)
# Generate PythonQtWrapper class
if parentClassName == "QObject" or parentClassName == "QWidget":
pythonqtWrappers.append(
PYTHONQT_WRAPPER_WITH_PARENT.substitute(className = className, parentClassName = parentClassName))
elif parentClassName == "":
pythonqtWrappers.append(PYTHONQT_WRAPPER_WITHOUT_PARENT.substitute(className = className))
else: # Case parentClassName is None
raise Exception("Problem wrapping %s" % input_file)
# Generate code allowing to register the class metaobject and its associated "light" wrapper
registerclasses.append(
Template("""
PythonQt::self()->registerClass(
&${className}::staticMetaObject, "${target}",
PythonQtCreateObject<PythonQtWrapper_${className}>);
""").substitute(className = className, target = target))
output_header = output_dir + "/" + namespace + "_" + target + ".h"
if extra_verbose:
print("output_header: %s" % output_header)
# Write master include file
with open(output_header, "w") as f:
f.write(Template(
"""
//
// File auto-generated by ctkWrapPythonQt.py
//
#ifndef __${namespace}_${target}_h
#define __${namespace}_${target}_h
#include <QObject>
${includes}
${pythonqtWrappers}
#endif
""").substitute(namespace = namespace, target = target, includes = '\n'.join(includes), pythonqtWrappers = '\n'.join(pythonqtWrappers)))
output_cpp = output_dir + "/" + namespace + "_" + target + "_init.cpp"
if extra_verbose:
print("output_cpp: %s" % output_cpp)
with open(output_cpp , "w") as f:
# Write wrapper header
f.write(Template(
"""
//
// File auto-generated by ctkWrapPythonQt.py
//
#include <PythonQt.h>
// XXX Avoid warning: "HAVE_STAT" redefined
#undef HAVE_STAT
#include "${namespace}_${target}.h"
void PythonQt_init_${namespace}_${target}(PyObject* module)
{
Q_UNUSED(module);
${registerclasses}
}
""").substitute(namespace = namespace, target = target, registerclasses = '\n'.join(registerclasses)))
if __name__ == '__main__':
from optparse import OptionParser
usage = "usage: %prog [options] <output_file> <input_file> [<input_file1> [...]]"
parser = OptionParser(usage=usage)
parser.add_option("-t", "--target",
dest="target", action="store", type="string",
help="Name of the associated library")
parser.add_option("-n", "--namespace",
dest="namespace", action="store", type="string",
help="Wrapping namespace")
parser.add_option("--output-dir",
dest="output_dir", action="store", type="string",
help="Output directory")
parser.add_option("-v", "--verbose",
dest="verbose", action="store_true",
help="Print verbose information")
parser.add_option("--extra-verbose",
dest="extra_verbose", action="store_true",
help="Print extra verbose information")
(options, args) = parser.parse_args()
#if len(args) < 2:
# parser.error("arguments '%s' are required !" % '<output_file> <input_file>')
if options.extra_verbose:
options.verbose = True
ctk_wrap_pythonqt(options.target, options.namespace, options.output_dir, args, options.extra_verbose)
if options.verbose:
print("Wrapped %d files" % len(args))
| {
"content_hash": "56077cf7d90b6cc46b6f8cc4990661a8",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 137,
"avg_line_length": 36.708695652173915,
"alnum_prop": 0.5527656046428995,
"repo_name": "rkhlebnikov/CTK",
"id": "ef7d895bece757f8a4f9d5f957bd31d33d2c14ed",
"size": "8444",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "CMake/ctkWrapPythonQt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "24360"
},
{
"name": "C++",
"bytes": "8085072"
},
{
"name": "CMake",
"bytes": "505821"
},
{
"name": "HTML",
"bytes": "2448"
},
{
"name": "PLpgSQL",
"bytes": "599228"
},
{
"name": "Python",
"bytes": "12804"
},
{
"name": "XSLT",
"bytes": "34850"
}
],
"symlink_target": ""
} |
from TProtocol import *
from struct import pack, unpack
__all__ = ['TCompactProtocol', 'TCompactProtocolFactory']
CLEAR = 0
FIELD_WRITE = 1
VALUE_WRITE = 2
CONTAINER_WRITE = 3
BOOL_WRITE = 4
FIELD_READ = 5
CONTAINER_READ = 6
VALUE_READ = 7
BOOL_READ = 8
def make_helper(v_from, container):
def helper(func):
def nested(self, *args, **kwargs):
assert self.state in (v_from, container), (self.state, v_from, container)
return func(self, *args, **kwargs)
return nested
return helper
writer = make_helper(VALUE_WRITE, CONTAINER_WRITE)
reader = make_helper(VALUE_READ, CONTAINER_READ)
def makeZigZag(n, bits):
return (n << 1) ^ (n >> (bits - 1))
def fromZigZag(n):
return (n >> 1) ^ -(n & 1)
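# Worked example: zig-zag encoding keeps small magnitudes small, so makeZigZag
# maps 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3 (for any bit width) and fromZigZag
# inverts the mapping.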
def writeVarint(trans, n):
out = []
while True:
if n & ~0x7f == 0:
out.append(n)
break
else:
out.append((n & 0xff) | 0x80)
n = n >> 7
trans.write(''.join(map(chr, out)))
def readVarint(trans):
result = 0
shift = 0
while True:
x = trans.readAll(1)
byte = ord(x)
result |= (byte & 0x7f) << shift
if byte >> 7 == 0:
return result
shift += 7
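# e.g. writeVarint(trans, 300) emits the two bytes 0xAC 0x02 (base-128 groups,
# least-significant first, high bit as continuation flag); readVarint decodes
# them back to 300.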
class CompactType:
STOP = 0x00
TRUE = 0x01
FALSE = 0x02
BYTE = 0x03
I16 = 0x04
I32 = 0x05
I64 = 0x06
DOUBLE = 0x07
BINARY = 0x08
LIST = 0x09
SET = 0x0A
MAP = 0x0B
STRUCT = 0x0C
CTYPES = {TType.STOP: CompactType.STOP,
TType.BOOL: CompactType.TRUE, # used for collection
TType.BYTE: CompactType.BYTE,
TType.I16: CompactType.I16,
TType.I32: CompactType.I32,
TType.I64: CompactType.I64,
TType.DOUBLE: CompactType.DOUBLE,
TType.STRING: CompactType.BINARY,
TType.STRUCT: CompactType.STRUCT,
TType.LIST: CompactType.LIST,
TType.SET: CompactType.SET,
TType.MAP: CompactType.MAP
}
TTYPES = {}
for k, v in CTYPES.items():
TTYPES[v] = k
TTYPES[CompactType.FALSE] = TType.BOOL
del k
del v
class TCompactProtocol(TProtocolBase):
"Compact implementation of the Thrift protocol driver."
PROTOCOL_ID = 0x82
VERSION = 1
VERSION_MASK = 0x1f
TYPE_MASK = 0xe0
TYPE_SHIFT_AMOUNT = 5
def __init__(self, trans):
TProtocolBase.__init__(self, trans)
self.state = CLEAR
self.__last_fid = 0
self.__bool_fid = None
self.__bool_value = None
self.__structs = []
self.__containers = []
def __writeVarint(self, n):
writeVarint(self.trans, n)
def writeMessageBegin(self, name, type, seqid):
assert self.state == CLEAR
self.__writeUByte(self.PROTOCOL_ID)
self.__writeUByte(self.VERSION | (type << self.TYPE_SHIFT_AMOUNT))
self.__writeVarint(seqid)
self.__writeString(name)
self.state = VALUE_WRITE
def writeMessageEnd(self):
assert self.state == VALUE_WRITE
self.state = CLEAR
def writeStructBegin(self, name):
assert self.state in (CLEAR, CONTAINER_WRITE, VALUE_WRITE), self.state
self.__structs.append((self.state, self.__last_fid))
self.state = FIELD_WRITE
self.__last_fid = 0
def writeStructEnd(self):
assert self.state == FIELD_WRITE
self.state, self.__last_fid = self.__structs.pop()
def writeFieldStop(self):
self.__writeByte(0)
def __writeFieldHeader(self, type, fid):
delta = fid - self.__last_fid
if 0 < delta <= 15:
self.__writeUByte(delta << 4 | type)
else:
self.__writeByte(type)
self.__writeI16(fid)
self.__last_fid = fid
def writeFieldBegin(self, name, type, fid):
assert self.state == FIELD_WRITE, self.state
if type == TType.BOOL:
self.state = BOOL_WRITE
self.__bool_fid = fid
else:
self.state = VALUE_WRITE
self.__writeFieldHeader(CTYPES[type], fid)
def writeFieldEnd(self):
assert self.state in (VALUE_WRITE, BOOL_WRITE), self.state
self.state = FIELD_WRITE
def __writeUByte(self, byte):
self.trans.write(pack('!B', byte))
def __writeByte(self, byte):
self.trans.write(pack('!b', byte))
def __writeI16(self, i16):
self.__writeVarint(makeZigZag(i16, 16))
def __writeSize(self, i32):
self.__writeVarint(i32)
def writeCollectionBegin(self, etype, size):
assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state
if size <= 14:
self.__writeUByte(size << 4 | CTYPES[etype])
else:
self.__writeUByte(0xf0 | CTYPES[etype])
self.__writeSize(size)
self.__containers.append(self.state)
self.state = CONTAINER_WRITE
writeSetBegin = writeCollectionBegin
writeListBegin = writeCollectionBegin
def writeMapBegin(self, ktype, vtype, size):
assert self.state in (VALUE_WRITE, CONTAINER_WRITE), self.state
if size == 0:
self.__writeByte(0)
else:
self.__writeSize(size)
self.__writeUByte(CTYPES[ktype] << 4 | CTYPES[vtype])
self.__containers.append(self.state)
self.state = CONTAINER_WRITE
def writeCollectionEnd(self):
assert self.state == CONTAINER_WRITE, self.state
self.state = self.__containers.pop()
writeMapEnd = writeCollectionEnd
writeSetEnd = writeCollectionEnd
writeListEnd = writeCollectionEnd
def writeBool(self, bool):
if self.state == BOOL_WRITE:
if bool:
ctype = CompactType.TRUE
else:
ctype = CompactType.FALSE
self.__writeFieldHeader(ctype, self.__bool_fid)
elif self.state == CONTAINER_WRITE:
if bool:
self.__writeByte(CompactType.TRUE)
else:
self.__writeByte(CompactType.FALSE)
else:
raise AssertionError, "Invalid state in compact protocol"
writeByte = writer(__writeByte)
writeI16 = writer(__writeI16)
@writer
def writeI32(self, i32):
self.__writeVarint(makeZigZag(i32, 32))
@writer
def writeI64(self, i64):
self.__writeVarint(makeZigZag(i64, 64))
@writer
def writeDouble(self, dub):
self.trans.write(pack('!d', dub))
def __writeString(self, s):
self.__writeSize(len(s))
self.trans.write(s)
writeString = writer(__writeString)
def readFieldBegin(self):
assert self.state == FIELD_READ, self.state
type = self.__readUByte()
if type & 0x0f == TType.STOP:
return (None, 0, 0)
delta = type >> 4
if delta == 0:
fid = self.__readI16()
else:
fid = self.__last_fid + delta
self.__last_fid = fid
type = type & 0x0f
if type == CompactType.TRUE:
self.state = BOOL_READ
self.__bool_value = True
elif type == CompactType.FALSE:
self.state = BOOL_READ
self.__bool_value = False
else:
self.state = VALUE_READ
return (None, self.__getTType(type), fid)
def readFieldEnd(self):
assert self.state in (VALUE_READ, BOOL_READ), self.state
self.state = FIELD_READ
def __readUByte(self):
result, = unpack('!B', self.trans.readAll(1))
return result
def __readByte(self):
result, = unpack('!b', self.trans.readAll(1))
return result
def __readVarint(self):
return readVarint(self.trans)
def __readZigZag(self):
return fromZigZag(self.__readVarint())
def __readSize(self):
result = self.__readVarint()
if result < 0:
raise TException("Length < 0")
return result
def readMessageBegin(self):
assert self.state == CLEAR
proto_id = self.__readUByte()
if proto_id != self.PROTOCOL_ID:
raise TProtocolException(TProtocolException.BAD_VERSION,
'Bad protocol id in the message: %d' % proto_id)
ver_type = self.__readUByte()
type = (ver_type & self.TYPE_MASK) >> self.TYPE_SHIFT_AMOUNT
version = ver_type & self.VERSION_MASK
if version != self.VERSION:
raise TProtocolException(TProtocolException.BAD_VERSION,
'Bad version: %d (expect %d)' % (version, self.VERSION))
seqid = self.__readVarint()
name = self.__readString()
return (name, type, seqid)
def readMessageEnd(self):
assert self.state == CLEAR
assert len(self.__structs) == 0
def readStructBegin(self):
assert self.state in (CLEAR, CONTAINER_READ, VALUE_READ), self.state
self.__structs.append((self.state, self.__last_fid))
self.state = FIELD_READ
self.__last_fid = 0
def readStructEnd(self):
assert self.state == FIELD_READ
self.state, self.__last_fid = self.__structs.pop()
def readCollectionBegin(self):
assert self.state in (VALUE_READ, CONTAINER_READ), self.state
size_type = self.__readUByte()
size = size_type >> 4
type = self.__getTType(size_type)
if size == 15:
size = self.__readSize()
self.__containers.append(self.state)
self.state = CONTAINER_READ
return type, size
readSetBegin = readCollectionBegin
readListBegin = readCollectionBegin
def readMapBegin(self):
assert self.state in (VALUE_READ, CONTAINER_READ), self.state
size = self.__readSize()
types = 0
if size > 0:
types = self.__readUByte()
vtype = self.__getTType(types)
ktype = self.__getTType(types >> 4)
self.__containers.append(self.state)
self.state = CONTAINER_READ
return (ktype, vtype, size)
def readCollectionEnd(self):
assert self.state == CONTAINER_READ, self.state
self.state = self.__containers.pop()
readSetEnd = readCollectionEnd
readListEnd = readCollectionEnd
readMapEnd = readCollectionEnd
def readBool(self):
if self.state == BOOL_READ:
return self.__bool_value == CompactType.TRUE
elif self.state == CONTAINER_READ:
return self.__readByte() == CompactType.TRUE
else:
raise AssertionError, "Invalid state in compact protocol: %d" % self.state
readByte = reader(__readByte)
__readI16 = __readZigZag
readI16 = reader(__readZigZag)
readI32 = reader(__readZigZag)
readI64 = reader(__readZigZag)
@reader
def readDouble(self):
buff = self.trans.readAll(8)
val, = unpack('!d', buff)
return val
def __readString(self):
len = self.__readSize()
return self.trans.readAll(len)
readString = reader(__readString)
def __getTType(self, byte):
return TTYPES[byte & 0x0f]
class TCompactProtocolFactory:
def __init__(self):
pass
def getProtocol(self, trans):
return TCompactProtocol(trans)
| {
"content_hash": "baab88e314d0f2f1712ea8ace3827dea",
"timestamp": "",
"source": "github",
"line_count": 376,
"max_line_length": 80,
"avg_line_length": 26.96808510638298,
"alnum_prop": 0.6412228796844182,
"repo_name": "CodEnFisH/palantir",
"id": "6d57aeba9a82b32310e7420761488435eae88366",
"size": "10140",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "floodlight/thrift/lib/py/build/lib.linux-x86_64-2.7/thrift/protocol/TCompactProtocol.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "65846"
},
{
"name": "Awk",
"bytes": "2682"
},
{
"name": "C",
"bytes": "874293"
},
{
"name": "C#",
"bytes": "159724"
},
{
"name": "C++",
"bytes": "2998428"
},
{
"name": "CSS",
"bytes": "174552"
},
{
"name": "Emacs Lisp",
"bytes": "5154"
},
{
"name": "Erlang",
"bytes": "116132"
},
{
"name": "Go",
"bytes": "361326"
},
{
"name": "Haskell",
"bytes": "53155"
},
{
"name": "Java",
"bytes": "17572204"
},
{
"name": "JavaScript",
"bytes": "201380"
},
{
"name": "Logos",
"bytes": "636620"
},
{
"name": "OCaml",
"bytes": "33971"
},
{
"name": "Objective-C",
"bytes": "185870"
},
{
"name": "PHP",
"bytes": "353658"
},
{
"name": "Perl",
"bytes": "220586"
},
{
"name": "Python",
"bytes": "1840956"
},
{
"name": "Ruby",
"bytes": "356879"
},
{
"name": "Shell",
"bytes": "1759783"
},
{
"name": "Smalltalk",
"bytes": "79506"
},
{
"name": "VimL",
"bytes": "2837"
},
{
"name": "XSLT",
"bytes": "218042"
}
],
"symlink_target": ""
} |
__all__ = ['spilloverpolicy_stats'] | {
"content_hash": "ef4b164675cadc8c3e5b2054b6dd4a12",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 35,
"avg_line_length": 35,
"alnum_prop": 0.6571428571428571,
"repo_name": "benfinke/ns_python",
"id": "3c5b6922c24e2b7b962e394baeb9f7c62d31917f",
"size": "35",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "build/lib/nssrc/com/citrix/netscaler/nitro/resource/stat/spillover/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "21836782"
},
{
"name": "Shell",
"bytes": "513"
}
],
"symlink_target": ""
} |
import traceback
def shell():
from config import get_version, set_win32_requests_ca_bundle_path
set_win32_requests_ca_bundle_path()
print """
Datadog Agent v%s - Python Shell
""" % (get_version())
while True:
cmd = raw_input('>>> ')
try:
exec(cmd)
except Exception, e:
print traceback.format_exc(e)
if __name__ == "__main__":
shell()
| {
"content_hash": "8f2ebcd04886177fc81cce7b743e7465",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 69,
"avg_line_length": 21.57894736842105,
"alnum_prop": 0.5609756097560976,
"repo_name": "tebriel/dd-agent",
"id": "516e77c38027288a2c54946badb91bbc47c98555",
"size": "517",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "win32/shell.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2717"
},
{
"name": "Go",
"bytes": "2389"
},
{
"name": "HTML",
"bytes": "9060"
},
{
"name": "Nginx",
"bytes": "3908"
},
{
"name": "PowerShell",
"bytes": "2665"
},
{
"name": "Python",
"bytes": "2175373"
},
{
"name": "Ruby",
"bytes": "102404"
},
{
"name": "Shell",
"bytes": "58131"
},
{
"name": "XSLT",
"bytes": "2222"
}
],
"symlink_target": ""
} |
import re
from logging import CRITICAL
import etree_loader
"""
CONSTANTS
=============================================================================
"""
"""
Constants you might want to modify
-----------------------------------------------------------------------------
"""
BLOCK_LEVEL_ELEMENTS = re.compile("^(p|div|h[1-6]|blockquote|pre|table|dl|ol|ul"
"|script|noscript|form|fieldset|iframe|math"
"|hr|hr/|style|li|dt|dd|thead|tbody"
"|tr|th|td|section|footer|header|group|figure"
"|figcaption|aside|article|canvas|output"
"|progress|video)$", re.IGNORECASE)
# Placeholders
STX = u'\u0002' # Use STX ("Start of text") for start-of-placeholder
ETX = u'\u0003' # Use ETX ("End of text") for end-of-placeholder
INLINE_PLACEHOLDER_PREFIX = STX+"klzzwxh:"
INLINE_PLACEHOLDER = INLINE_PLACEHOLDER_PREFIX + "%s" + ETX
INLINE_PLACEHOLDER_RE = re.compile(INLINE_PLACEHOLDER % r'([0-9]{4})')
AMP_SUBSTITUTE = STX+"amp"+ETX
"""
Constants you probably do not need to change
-----------------------------------------------------------------------------
"""
RTL_BIDI_RANGES = ( (u'\u0590', u'\u07FF'),
# Hebrew (0590-05FF), Arabic (0600-06FF),
# Syriac (0700-074F), Arabic supplement (0750-077F),
# Thaana (0780-07BF), Nko (07C0-07FF).
(u'\u2D30', u'\u2D7F'), # Tifinagh
)
# Extensions should use "markdown.util.etree" instead of "etree" (or do `from
# markdown.util import etree`). Do not import it by yourself.
etree = etree_loader.importETree()
"""
AUXILIARY GLOBAL FUNCTIONS
=============================================================================
"""
def isBlockLevel(tag):
"""Check if the tag is a block level HTML tag."""
if isinstance(tag, basestring):
return BLOCK_LEVEL_ELEMENTS.match(tag)
# Some ElementTree tags are not strings, so return False.
return False
"""
MISC AUXILIARY CLASSES
=============================================================================
"""
class AtomicString(unicode):
"""A string which should not be further processed."""
pass
class Processor:
def __init__(self, markdown_instance=None):
if markdown_instance:
self.markdown = markdown_instance
class HtmlStash:
"""
This class is used for stashing HTML objects that we extract
in the beginning and replace with place-holders.
"""
def __init__ (self):
""" Create a HtmlStash. """
self.html_counter = 0 # for counting inline html segments
self.rawHtmlBlocks=[]
def store(self, html, safe=False):
"""
Saves an HTML segment for later reinsertion. Returns a
placeholder string that needs to be inserted into the
document.
Keyword arguments:
* html: an html segment
* safe: label an html segment as safe for safemode
Returns : a placeholder string
"""
self.rawHtmlBlocks.append((html, safe))
placeholder = self.get_placeholder(self.html_counter)
self.html_counter += 1
return placeholder
def reset(self):
self.html_counter = 0
self.rawHtmlBlocks = []
def get_placeholder(self, key):
return "%swzxhzdk:%d%s" % (STX, key, ETX)
| {
"content_hash": "5db2ff8857f56649ee7201d3fe32e076",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 80,
"avg_line_length": 30.52212389380531,
"alnum_prop": 0.5326181501884604,
"repo_name": "stupidnetizen/miniblog",
"id": "db45a5ebc3cdebb84fa28ade8e6f54f69ca28a72",
"size": "3473",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "lib/markdown2/util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
Created on Tue Apr 07 15:11:49 2014
@author: Maurizio Napolitano <[email protected]>
The MIT License (MIT)
Copyright (c) 2016 Fondazione Bruno Kessler http://fbk.eu
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from optparse import OptionParser
from arcrestsplite.arcrestapi import ArcGIS
version = "0.1"
def main():
usage = "%prog arcgis_restapi_layer namefile\n"
usage += "eg:\n %prog https://geoservices.provincia.tn.it/agol/rest/services/geologico/GEOTERM3_LIMAMM_LAG_GHI/MapServer/5 dbfile.sqlite"
parser = OptionParser(usage)
# parser.add_option("-v","--version",action="store_false",help="show version")
(options,args) = parser.parse_args()
    if len(args) < 2:
parser.print_help()
else:
source = args[0]
output = args[1]
arcgis = ArcGIS(source)
arcgis.discover()
for layer in arcgis.layers:
if layer['querable']:
                url = layer['url']
                name = layer['name']
                arcgis.download(url, output, name)
if __name__ == "__main__":
main()
| {
"content_hash": "2a12f107b78f5314d73b08113158676f",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 143,
"avg_line_length": 45.46666666666667,
"alnum_prop": 0.7155425219941349,
"repo_name": "napo/arcgisrest2spatialite",
"id": "cf6dbf2efcc41bc6e64f2ed150955e30fe84b0c4",
"size": "2088",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "arcgis2splite.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25338"
}
],
"symlink_target": ""
} |
from pyxl import html
print(<html><body>Hello World!</body></html>)
| {
"content_hash": "315db4501fc788729a2cfbdf1b3e7908",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 45,
"avg_line_length": 23,
"alnum_prop": 0.7101449275362319,
"repo_name": "lez/pyxl3",
"id": "2b2ffee16fd344066ddfc4d66d3cfab74701af29",
"size": "85",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyxl/examples/hello_world.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Emacs Lisp",
"bytes": "4107"
},
{
"name": "Python",
"bytes": "109136"
},
{
"name": "VimL",
"bytes": "38879"
}
],
"symlink_target": ""
} |
"""unit tests for sparse utility functions"""
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (TestCase, run_module_suite, assert_equal,
assert_raises)
from scipy.sparse import sputils
class TestSparseUtils(TestCase):
def test_upcast(self):
assert_equal(sputils.upcast('intc'), np.intc)
assert_equal(sputils.upcast('int32', 'float32'), np.float64)
assert_equal(sputils.upcast('bool', complex, float), np.complex128)
assert_equal(sputils.upcast('i', 'd'), np.float64)
def test_getdtype(self):
A = np.array([1], dtype='int8')
assert_equal(sputils.getdtype(None, default=float), float)
assert_equal(sputils.getdtype(None, a=A), np.int8)
def test_isscalarlike(self):
assert_equal(sputils.isscalarlike(3.0), True)
assert_equal(sputils.isscalarlike(-4), True)
assert_equal(sputils.isscalarlike(2.5), True)
assert_equal(sputils.isscalarlike(1 + 3j), True)
assert_equal(sputils.isscalarlike(np.array(3)), True)
assert_equal(sputils.isscalarlike("16"), True)
assert_equal(sputils.isscalarlike(np.array([3])), False)
assert_equal(sputils.isscalarlike([[3]]), False)
assert_equal(sputils.isscalarlike((1,)), False)
assert_equal(sputils.isscalarlike((1, 2)), False)
def test_isintlike(self):
assert_equal(sputils.isintlike(3.0), True)
assert_equal(sputils.isintlike(-4), True)
assert_equal(sputils.isintlike(np.array(3)), True)
assert_equal(sputils.isintlike(np.array([3])), False)
assert_equal(sputils.isintlike(2.5), False)
assert_equal(sputils.isintlike(1 + 3j), False)
assert_equal(sputils.isintlike((1,)), False)
assert_equal(sputils.isintlike((1, 2)), False)
def test_isshape(self):
assert_equal(sputils.isshape((1, 2)), True)
assert_equal(sputils.isshape((5, 2)), True)
assert_equal(sputils.isshape((1.5, 2)), False)
assert_equal(sputils.isshape((2, 2, 2)), False)
assert_equal(sputils.isshape(([2], 2)), False)
def test_issequence(self):
assert_equal(sputils.issequence((1,)), True)
assert_equal(sputils.issequence((1, 2, 3)), True)
assert_equal(sputils.issequence([1]), True)
assert_equal(sputils.issequence([1, 2, 3]), True)
assert_equal(sputils.issequence(np.array([1, 2, 3])), True)
assert_equal(sputils.issequence(np.array([[1], [2], [3]])), False)
assert_equal(sputils.issequence(3), False)
def test_ismatrix(self):
assert_equal(sputils.ismatrix(((),)), True)
assert_equal(sputils.ismatrix([[1], [2]]), True)
assert_equal(sputils.ismatrix(np.arange(3)[None]), True)
assert_equal(sputils.ismatrix([1, 2]), False)
assert_equal(sputils.ismatrix(np.arange(3)), False)
assert_equal(sputils.ismatrix([[[1]]]), False)
assert_equal(sputils.ismatrix(3), False)
def test_isdense(self):
assert_equal(sputils.isdense(np.array([1])), True)
assert_equal(sputils.isdense(np.matrix([1])), True)
def test_validateaxis(self):
func = sputils.validateaxis
assert_raises(TypeError, func, (0, 1))
assert_raises(TypeError, func, 1.5)
assert_raises(ValueError, func, 3)
# These function calls should not raise errors
for axis in (-2, -1, 0, 1, None):
func(axis)
if __name__ == "__main__":
run_module_suite()
| {
"content_hash": "fb45b09b57c0c7ddc13c234ce947ad28",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 75,
"avg_line_length": 38.333333333333336,
"alnum_prop": 0.6302945301542777,
"repo_name": "DailyActie/Surrogate-Model",
"id": "5c18a38c05d5ab2ce8e75220d146a8681aa4f9f1",
"size": "3565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "01-codes/scipy-master/scipy/sparse/tests/test_sputils.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
} |
from layer import *
class SoftmaxLayer(Layer):
def __init__(self, *args, **kwargs):
super(SoftmaxLayer, self).__init__(*args, **kwargs)
@classmethod
def IsLayerType(cls, proto):
return proto.hyperparams.activation == deepnet_pb2.Hyperparams.SOFTMAX
def ApplyActivation(self):
state = self.state
temp = self.batchsize_temp
state.max(axis=0, target=temp)
state.add_row_mult(temp, -1)
cm.exp(state)
state.sum(axis=0, target=temp)
state.div_by_row(temp)
def Sample(self):
self.state.perturb_prob_for_softmax_sampling(target=self.sample)
self.sample.choose_max(axis=0)
def ComputeDeriv(self):
"""Compute derivative w.r.t input given derivative w.r.t output."""
raise Exception('Back prop through softmax not implemented.')
def AllocateMemory(self, batchsize):
super(SoftmaxLayer, self).AllocateMemory(batchsize)
self.expansion_matrix = cm.CUDAMatrix(np.eye(self.numlabels))
def AllocateBatchsizeDependentMemory(self, batchsize):
super(SoftmaxLayer, self).AllocateBatchsizeDependentMemory(batchsize)
dimensions = self.dimensions
numlabels = self.numlabels
self.data = cm.CUDAMatrix(np.zeros((dimensions, batchsize)))
self.deriv = cm.CUDAMatrix(np.zeros((numlabels*dimensions, batchsize)))
self.batchsize_temp = cm.CUDAMatrix(np.zeros((dimensions, batchsize)))
if self.loss_function == deepnet_pb2.Layer.CROSS_ENTROPY:
self.temp2 = cm.CUDAMatrix(np.zeros((dimensions, batchsize)))
self.indices = cm.CUDAMatrix(np.zeros((1, dimensions * batchsize)))
self.rowshift = cm.CUDAMatrix(
numlabels*np.arange(dimensions * batchsize).reshape(1, -1))
elif self.loss_function == deepnet_pb2.Layer.SQUARED_LOSS:
self.expanded_batch = cm.CUDAMatrix(np.zeros((numlabels * dimensions, batchsize)))
def GetData(self):
self.expansion_matrix.select_columns(self.data, target=self.state)
def GetLoss(self, get_deriv=False):
"""Compute loss and also deriv w.r.t to it if asked for.
Compute the loss function. Targets should be in self.data, predictions
should be in self.state.
Args:
get_deriv: If True, compute the derivative w.r.t the loss function and put
it in self.deriv.
"""
perf = deepnet_pb2.Metrics()
perf.MergeFrom(self.proto.performance_stats)
perf.count = self.batchsize
tiny = self.tiny
if self.loss_function == deepnet_pb2.Layer.CROSS_ENTROPY:
temp2 = self.temp2
temp = self.batchsize_temp
batchsize = self.batchsize
dimensions = self.dimensions
numlabels = self.numlabels
state = self.state
data = self.data
unitcell = self.unitcell
indices = self.indices
# Optimized for space to handle large number of labels in a softmax.
data.reshape((1, batchsize * dimensions))
data.add(self.rowshift, target=indices)
state.reshape((numlabels, dimensions * batchsize))
state.max(axis=0, target=temp2)
state.reshape((1, batchsize * numlabels * dimensions))
state.select_columns(indices, temp)
temp2.subtract(temp)
temp2.sign(target=temp2)
temp2.sum(axis=1, target=unitcell)
correct_preds = batchsize - unitcell.euclid_norm()
if get_deriv:
temp.subtract(1, target=temp2)
state.set_selected_columns(indices, temp2)
state.reshape((numlabels * dimensions, batchsize))
self.deriv.assign(self.state)
state.reshape((numlabels * dimensions, batchsize))
temp.add(tiny)
cm.log(temp)
temp.sum(axis=1, target=unitcell)
cross_entropy = unitcell.euclid_norm()
perf.cross_entropy = cross_entropy
perf.correct_preds = correct_preds
elif self.loss_function == deepnet_pb2.Layer.SQUARED_LOSS:
self.expansion_matrix.select_columns(self.data, target=self.expanded_batch)
self.state.subtract(self.expanded_batch, target=self.deriv)
error = self.deriv.euclid_norm()**2
perf.error = error
else:
raise Exception('Unknown loss function for Softmax units.')
return perf
def GetSparsityDivisor(self):
raise Exception('Sparsity not implemented for replicated softmax units.')
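# For reference, ApplyActivation above computes a numerically stable,
# column-wise softmax on the GPU via cudamat. A minimal NumPy sketch of the
# same computation (illustrative only, not part of the original module):
#
#     def softmax_columns(state):
#         shifted = state - state.max(axis=0)   # subtract per-column max
#         e = np.exp(shifted)
#         return e / e.sum(axis=0)              # normalize each column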
| {
"content_hash": "66e60218840155a3ff5fcd2fa1144550",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 88,
"avg_line_length": 38.41284403669725,
"alnum_prop": 0.6923811798423692,
"repo_name": "kashif/deepnet",
"id": "2da61ab7a12a4c62a76815c5d026311c1296c0dd",
"size": "4187",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deepnet/softmax_layer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
class LoopControl(Base):
_loop_var = FieldAttribute(isa='str')
_label = FieldAttribute(isa='str')
_pause = FieldAttribute(isa='int')
def __init__(self):
super(LoopControl, self).__init__()
@staticmethod
def load(data, variable_manager=None, loader=None):
t = LoopControl()
return t.load_data(data, variable_manager=variable_manager, loader=loader)
| {
"content_hash": "98f17b1e858f2a2051ef6ae006e41ad9",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 82,
"avg_line_length": 29.15,
"alnum_prop": 0.6895368782161235,
"repo_name": "e-gob/plataforma-kioscos-autoatencion",
"id": "b983ee88ed591da52d3ab74f7c509ceae311ef31",
"size": "1328",
"binary": false,
"copies": "64",
"ref": "refs/heads/master",
"path": "scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/playbook/loop_control.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "41110"
},
{
"name": "C++",
"bytes": "3804"
},
{
"name": "CSS",
"bytes": "34823"
},
{
"name": "CoffeeScript",
"bytes": "8521"
},
{
"name": "HTML",
"bytes": "61168"
},
{
"name": "JavaScript",
"bytes": "7206"
},
{
"name": "Makefile",
"bytes": "1347"
},
{
"name": "PowerShell",
"bytes": "584344"
},
{
"name": "Python",
"bytes": "25506593"
},
{
"name": "Ruby",
"bytes": "245726"
},
{
"name": "Shell",
"bytes": "5075"
}
],
"symlink_target": ""
} |
"""Layer serialization/deserialization functions.
"""
# pylint: disable=wildcard-import
# pylint: disable=unused-import
import threading
from tensorflow.python import tf2
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.keras.engine import input_layer
from tensorflow.python.keras.engine import input_spec
from tensorflow.python.keras.layers import advanced_activations
from tensorflow.python.keras.layers import convolutional
from tensorflow.python.keras.layers import convolutional_recurrent
from tensorflow.python.keras.layers import core
from tensorflow.python.keras.layers import cudnn_recurrent
from tensorflow.python.keras.layers import dense_attention
from tensorflow.python.keras.layers import einsum_dense
from tensorflow.python.keras.layers import embeddings
from tensorflow.python.keras.layers import local
from tensorflow.python.keras.layers import merge
from tensorflow.python.keras.layers import multi_head_attention
from tensorflow.python.keras.layers import noise
from tensorflow.python.keras.layers import pooling
from tensorflow.python.keras.layers import recurrent
from tensorflow.python.keras.layers import recurrent_v2
from tensorflow.python.keras.layers import rnn_cell_wrapper_v2
from tensorflow.python.keras.layers import wrappers
from tensorflow.python.keras.layers.normalization import batch_normalization
from tensorflow.python.keras.layers.normalization import batch_normalization_v1
from tensorflow.python.keras.layers.normalization import layer_normalization
from tensorflow.python.keras.layers.preprocessing import category_crossing
from tensorflow.python.keras.layers.preprocessing import category_encoding
from tensorflow.python.keras.layers.preprocessing import discretization
from tensorflow.python.keras.layers.preprocessing import hashing
from tensorflow.python.keras.layers.preprocessing import image_preprocessing
from tensorflow.python.keras.layers.preprocessing import integer_lookup
from tensorflow.python.keras.layers.preprocessing import normalization as preprocessing_normalization
from tensorflow.python.keras.layers.preprocessing import string_lookup
from tensorflow.python.keras.layers.preprocessing import text_vectorization
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_inspect as inspect
from tensorflow.python.util.tf_export import keras_export
ALL_MODULES = (base_layer, input_layer, advanced_activations, convolutional,
convolutional_recurrent, core, cudnn_recurrent, dense_attention,
embeddings, einsum_dense, local, merge, noise,
batch_normalization_v1, layer_normalization,
pooling, image_preprocessing, recurrent, wrappers, hashing,
category_crossing, category_encoding, discretization,
multi_head_attention, integer_lookup,
preprocessing_normalization, string_lookup, text_vectorization)
ALL_V2_MODULES = (rnn_cell_wrapper_v2, batch_normalization, layer_normalization,
recurrent_v2)
# ALL_OBJECTS is meant to be a global mutable. Hence we need to make it
# thread-local to avoid concurrent mutations.
LOCAL = threading.local()
def populate_deserializable_objects():
"""Populates dict ALL_OBJECTS with every built-in layer.
"""
global LOCAL
if not hasattr(LOCAL, 'ALL_OBJECTS'):
LOCAL.ALL_OBJECTS = {}
LOCAL.GENERATED_WITH_V2 = None
if LOCAL.ALL_OBJECTS and LOCAL.GENERATED_WITH_V2 == tf2.enabled():
# Objects dict is already generated for the proper TF version:
# do nothing.
return
LOCAL.ALL_OBJECTS = {}
LOCAL.GENERATED_WITH_V2 = tf2.enabled()
base_cls = base_layer.Layer
generic_utils.populate_dict_with_module_objects(
LOCAL.ALL_OBJECTS,
ALL_MODULES,
obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls))
# Overwrite certain V1 objects with V2 versions
if tf2.enabled():
generic_utils.populate_dict_with_module_objects(
LOCAL.ALL_OBJECTS,
ALL_V2_MODULES,
obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls))
# These deserialization aliases are added for backward compatibility,
# as in TF 1.13, "BatchNormalizationV1" and "BatchNormalizationV2"
# were used as class name for v1 and v2 version of BatchNormalization,
# respectively. Here we explicitly convert them to their canonical names.
LOCAL.ALL_OBJECTS[
'BatchNormalizationV1'] = batch_normalization_v1.BatchNormalization
LOCAL.ALL_OBJECTS[
'BatchNormalizationV2'] = batch_normalization.BatchNormalization
# Prevent circular dependencies.
from tensorflow.python.keras import models # pylint: disable=g-import-not-at-top
from tensorflow.python.keras.premade.linear import LinearModel # pylint: disable=g-import-not-at-top
from tensorflow.python.keras.premade.wide_deep import WideDeepModel # pylint: disable=g-import-not-at-top
from tensorflow.python.keras.feature_column.sequence_feature_column import SequenceFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['Input'] = input_layer.Input
LOCAL.ALL_OBJECTS['InputSpec'] = input_spec.InputSpec
LOCAL.ALL_OBJECTS['Functional'] = models.Functional
LOCAL.ALL_OBJECTS['Model'] = models.Model
LOCAL.ALL_OBJECTS['SequenceFeatures'] = SequenceFeatures
LOCAL.ALL_OBJECTS['Sequential'] = models.Sequential
LOCAL.ALL_OBJECTS['LinearModel'] = LinearModel
LOCAL.ALL_OBJECTS['WideDeepModel'] = WideDeepModel
if tf2.enabled():
from tensorflow.python.keras.feature_column.dense_features_v2 import DenseFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
else:
from tensorflow.python.keras.feature_column.dense_features import DenseFeatures # pylint: disable=g-import-not-at-top
LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
# Merge layers, function versions.
LOCAL.ALL_OBJECTS['add'] = merge.add
LOCAL.ALL_OBJECTS['subtract'] = merge.subtract
LOCAL.ALL_OBJECTS['multiply'] = merge.multiply
LOCAL.ALL_OBJECTS['average'] = merge.average
LOCAL.ALL_OBJECTS['maximum'] = merge.maximum
LOCAL.ALL_OBJECTS['minimum'] = merge.minimum
LOCAL.ALL_OBJECTS['concatenate'] = merge.concatenate
LOCAL.ALL_OBJECTS['dot'] = merge.dot
@keras_export('keras.layers.serialize')
def serialize(layer):
return generic_utils.serialize_keras_object(layer)
@keras_export('keras.layers.deserialize')
def deserialize(config, custom_objects=None):
"""Instantiates a layer from a config dictionary.
Args:
config: dict of the form {'class_name': str, 'config': dict}
custom_objects: dict mapping class names (or function names)
of custom (non-Keras) objects to class/functions
Returns:
Layer instance (may be Model, Sequential, Network, Layer...)
"""
populate_deserializable_objects()
return generic_utils.deserialize_keras_object(
config,
module_objects=LOCAL.ALL_OBJECTS,
custom_objects=custom_objects,
printable_module_name='layer')
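# A minimal usage sketch of the two entry points above (illustrative only;
# the Dense layer and its arguments are just example values):
#
#     from tensorflow.python.keras import layers as keras_layers
#     config = serialize(keras_layers.Dense(4, activation="relu"))
#     # config is a dict of the form {'class_name': 'Dense', 'config': {...}}
#     layer = deserialize(config)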
| {
"content_hash": "ced10e4511c61b3a5d9db1e09804ae55",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 132,
"avg_line_length": 45.92156862745098,
"alnum_prop": 0.7717050953600911,
"repo_name": "sarvex/tensorflow",
"id": "f623084ee7a854be185d72e6b945acf2ce994c3e",
"size": "7715",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tensorflow/python/keras/layers/serialization.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "148184"
},
{
"name": "C++",
"bytes": "6224499"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "650478"
},
{
"name": "Java",
"bytes": "53519"
},
{
"name": "JavaScript",
"bytes": "6659"
},
{
"name": "Jupyter Notebook",
"bytes": "777935"
},
{
"name": "Objective-C",
"bytes": "1288"
},
{
"name": "Protocol Buffer",
"bytes": "61743"
},
{
"name": "Python",
"bytes": "3474762"
},
{
"name": "Shell",
"bytes": "45640"
},
{
"name": "TypeScript",
"bytes": "283668"
}
],
"symlink_target": ""
} |
from fabric import task
@task
def expect_conf_value(c):
assert c.it_came_from == "yml"
@task
def expect_conf_key_filename(c):
expected = ["private.key", "other.key"]
got = c.connect_kwargs.key_filename
assert got == expected, "{!r} != {!r}".format(got, expected)
@task
def expect_cli_key_filename(c):
expected = ["cli.key"]
got = c.connect_kwargs.key_filename
assert got == expected, "{!r} != {!r}".format(got, expected)
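# The tasks above assert against values loaded from a Fabric yml config
# file. A sketch of a fabric.yml that would satisfy expect_conf_value and
# expect_conf_key_filename (hypothetical file, mirroring the assertions):
#
#     it_came_from: yml
#     connect_kwargs:
#       key_filename:
#         - private.key
#         - other.key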
| {
"content_hash": "1e72928aed1f2421c72a607829a2be3e",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 64,
"avg_line_length": 22.75,
"alnum_prop": 0.6329670329670329,
"repo_name": "fabric/fabric",
"id": "650ced7d88202e3327ebef6e50bb9d22322a718a",
"size": "455",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/_support/yml_conf/fabfile.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "295293"
}
],
"symlink_target": ""
} |
"""Test descendant package tracking code."""
from decimal import Decimal
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.messages import COIN
from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
chain_transaction,
)
# default limits
MAX_ANCESTORS = 25
MAX_DESCENDANTS = 25
# custom limits for node1
MAX_ANCESTORS_CUSTOM = 5
MAX_DESCENDANTS_CUSTOM = 10
assert MAX_DESCENDANTS_CUSTOM >= MAX_ANCESTORS_CUSTOM
class MempoolPackagesTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [
[
"-maxorphantx=1000",
"[email protected]", # immediate tx relay
],
[
"-maxorphantx=1000",
"-limitancestorcount={}".format(MAX_ANCESTORS_CUSTOM),
"-limitdescendantcount={}".format(MAX_DESCENDANTS_CUSTOM),
],
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Mine some blocks and have them mature.
peer_inv_store = self.nodes[0].add_p2p_connection(P2PTxInvStore()) # keep track of invs
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
utxo = self.nodes[0].listunspent(10)
txid = utxo[0]['txid']
vout = utxo[0]['vout']
value = utxo[0]['amount']
assert 'ancestorcount' not in utxo[0]
assert 'ancestorsize' not in utxo[0]
assert 'ancestorfees' not in utxo[0]
fee = Decimal("0.0001")
# MAX_ANCESTORS transactions off a confirmed tx should be fine
chain = []
witness_chain = []
ancestor_vsize = 0
ancestor_fees = Decimal(0)
for i in range(MAX_ANCESTORS):
(txid, sent_value) = chain_transaction(self.nodes[0], [txid], [0], value, fee, 1)
value = sent_value
chain.append(txid)
# We need the wtxids to check P2P announcements
witnesstx = self.nodes[0].gettransaction(txid=txid, verbose=True)['decoded']
witness_chain.append(witnesstx['hash'])
# Check that listunspent ancestor{count, size, fees} yield the correct results
wallet_unspent = self.nodes[0].listunspent(minconf=0)
this_unspent = next(utxo_info for utxo_info in wallet_unspent if utxo_info['txid'] == txid)
assert_equal(this_unspent['ancestorcount'], i + 1)
ancestor_vsize += self.nodes[0].getrawtransaction(txid=txid, verbose=True)['vsize']
assert_equal(this_unspent['ancestorsize'], ancestor_vsize)
ancestor_fees -= self.nodes[0].gettransaction(txid=txid)['fee']
assert_equal(this_unspent['ancestorfees'], ancestor_fees * COIN)
# Wait until mempool transactions have passed initial broadcast (sent inv and received getdata)
# Otherwise, getrawmempool may be inconsistent with getmempoolentry if unbroadcast changes in between
peer_inv_store.wait_for_broadcast(witness_chain)
# Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor
# count and fees should look correct
mempool = self.nodes[0].getrawmempool(True)
assert_equal(len(mempool), MAX_ANCESTORS)
descendant_count = 1
descendant_fees = 0
descendant_vsize = 0
assert_equal(ancestor_vsize, sum([mempool[tx]['vsize'] for tx in mempool]))
ancestor_count = MAX_ANCESTORS
assert_equal(ancestor_fees, sum([mempool[tx]['fees']['base'] for tx in mempool]))
descendants = []
ancestors = list(chain)
for x in reversed(chain):
# Check that getmempoolentry is consistent with getrawmempool
entry = self.nodes[0].getmempoolentry(x)
assert_equal(entry, mempool[x])
# Check that the descendant calculations are correct
assert_equal(entry['descendantcount'], descendant_count)
descendant_fees += entry['fees']['base']
assert_equal(entry['fees']['modified'], entry['fees']['base'])
assert_equal(entry['fees']['descendant'], descendant_fees)
descendant_vsize += entry['vsize']
assert_equal(entry['descendantsize'], descendant_vsize)
descendant_count += 1
# Check that ancestor calculations are correct
assert_equal(entry['ancestorcount'], ancestor_count)
assert_equal(entry['fees']['ancestor'], ancestor_fees)
assert_equal(entry['ancestorsize'], ancestor_vsize)
ancestor_vsize -= entry['vsize']
ancestor_fees -= entry['fees']['base']
ancestor_count -= 1
# Check that parent/child list is correct
assert_equal(entry['spentby'], descendants[-1:])
assert_equal(entry['depends'], ancestors[-2:-1])
# Check that getmempooldescendants is correct
assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x)))
# Check getmempooldescendants verbose output is correct
for descendant, dinfo in self.nodes[0].getmempooldescendants(x, True).items():
assert_equal(dinfo['depends'], [chain[chain.index(descendant)-1]])
if dinfo['descendantcount'] > 1:
assert_equal(dinfo['spentby'], [chain[chain.index(descendant)+1]])
else:
assert_equal(dinfo['spentby'], [])
descendants.append(x)
# Check that getmempoolancestors is correct
ancestors.remove(x)
assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x)))
# Check that getmempoolancestors verbose output is correct
for ancestor, ainfo in self.nodes[0].getmempoolancestors(x, True).items():
assert_equal(ainfo['spentby'], [chain[chain.index(ancestor)+1]])
if ainfo['ancestorcount'] > 1:
assert_equal(ainfo['depends'], [chain[chain.index(ancestor)-1]])
else:
assert_equal(ainfo['depends'], [])
# Check that getmempoolancestors/getmempooldescendants correctly handle verbose=true
v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True)
assert_equal(len(v_ancestors), len(chain)-1)
for x in v_ancestors.keys():
assert_equal(mempool[x], v_ancestors[x])
assert chain[-1] not in v_ancestors.keys()
v_descendants = self.nodes[0].getmempooldescendants(chain[0], True)
assert_equal(len(v_descendants), len(chain)-1)
for x in v_descendants.keys():
assert_equal(mempool[x], v_descendants[x])
assert chain[0] not in v_descendants.keys()
# Check that ancestor modified fees includes fee deltas from
# prioritisetransaction
self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=1000)
ancestor_fees = 0
for x in chain:
entry = self.nodes[0].getmempoolentry(x)
ancestor_fees += entry['fees']['base']
assert_equal(entry['fees']['ancestor'], ancestor_fees + Decimal('0.00001'))
# Undo the prioritisetransaction for later tests
self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000)
# Check that descendant modified fees includes fee deltas from
# prioritisetransaction
self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=1000)
descendant_fees = 0
for x in reversed(chain):
entry = self.nodes[0].getmempoolentry(x)
descendant_fees += entry['fees']['base']
assert_equal(entry['fees']['descendant'], descendant_fees + Decimal('0.00001'))
# Adding one more transaction on to the chain should fail.
assert_raises_rpc_error(-26, "too-long-mempool-chain", chain_transaction, self.nodes[0], [txid], [vout], value, fee, 1)
# Check that prioritising a tx before it's added to the mempool works
# First clear the mempool by mining a block.
self.generate(self.nodes[0], 1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
# Prioritise a transaction that has been mined, then add it back to the
# mempool by using invalidateblock.
self.nodes[0].prioritisetransaction(txid=chain[-1], fee_delta=2000)
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Keep node1's tip synced with node0
self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
# Now check that the transaction is in the mempool, with the right modified fee
descendant_fees = 0
for x in reversed(chain):
entry = self.nodes[0].getmempoolentry(x)
descendant_fees += entry['fees']['base']
if (x == chain[-1]):
assert_equal(entry['fees']['modified'], entry['fees']['base'] + Decimal("0.00002"))
assert_equal(entry['fees']['descendant'], descendant_fees + Decimal("0.00002"))
# Check that node1's mempool is as expected (-> custom ancestor limit)
mempool0 = self.nodes[0].getrawmempool(False)
mempool1 = self.nodes[1].getrawmempool(False)
assert_equal(len(mempool1), MAX_ANCESTORS_CUSTOM)
assert set(mempool1).issubset(set(mempool0))
for tx in chain[:MAX_ANCESTORS_CUSTOM]:
assert tx in mempool1
# TODO: more detailed check of node1's mempool (fees etc.)
# check transaction unbroadcast info (should be false if in both mempools)
mempool = self.nodes[0].getrawmempool(True)
for tx in mempool:
assert_equal(mempool[tx]['unbroadcast'], False)
# TODO: test ancestor size limits
# Now test descendant chain limits
txid = utxo[1]['txid']
value = utxo[1]['amount']
vout = utxo[1]['vout']
transaction_package = []
tx_children = []
# First create one parent tx with 10 children
(txid, sent_value) = chain_transaction(self.nodes[0], [txid], [vout], value, fee, 10)
parent_transaction = txid
for i in range(10):
transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})
# Sign and send up to MAX_DESCENDANT transactions chained off the parent tx
chain = [] # save sent txs for the purpose of checking node1's mempool later (see below)
for _ in range(MAX_DESCENDANTS - 1):
utxo = transaction_package.pop(0)
(txid, sent_value) = chain_transaction(self.nodes[0], [utxo['txid']], [utxo['vout']], utxo['amount'], fee, 10)
chain.append(txid)
if utxo['txid'] is parent_transaction:
tx_children.append(txid)
for j in range(10):
transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})
mempool = self.nodes[0].getrawmempool(True)
assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
assert_equal(sorted(mempool[parent_transaction]['spentby']), sorted(tx_children))
for child in tx_children:
assert_equal(mempool[child]['depends'], [parent_transaction])
# Sending one more chained transaction will fail
utxo = transaction_package.pop(0)
assert_raises_rpc_error(-26, "too-long-mempool-chain", chain_transaction, self.nodes[0], [utxo['txid']], [utxo['vout']], utxo['amount'], fee, 10)
# Check that node1's mempool is as expected, containing:
# - txs from previous ancestor test (-> custom ancestor limit)
# - parent tx for descendant test
# - txs chained off parent tx (-> custom descendant limit)
self.wait_until(lambda: len(self.nodes[1].getrawmempool()) ==
MAX_ANCESTORS_CUSTOM + 1 + MAX_DESCENDANTS_CUSTOM, timeout=10)
mempool0 = self.nodes[0].getrawmempool(False)
mempool1 = self.nodes[1].getrawmempool(False)
assert set(mempool1).issubset(set(mempool0))
assert parent_transaction in mempool1
for tx in chain[:MAX_DESCENDANTS_CUSTOM]:
assert tx in mempool1
for tx in chain[MAX_DESCENDANTS_CUSTOM:]:
assert tx not in mempool1
# TODO: more detailed check of node1's mempool (fees etc.)
# TODO: test descendant size limits
# Test reorg handling
# First, the basics:
self.generate(self.nodes[0], 1)
self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())
# Now test the case where node1 has a transaction T in its mempool that
# depends on transactions A and B which are in a mined block, and the
# block containing A and B is disconnected, AND B is not accepted back
# into node1's mempool because its ancestor count is too high.
# Create 8 transactions, like so:
# Tx0 -> Tx1 (vout0)
# \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
#
# Mine them in the next block, then generate a new tx8 that spends
# Tx1 and Tx7, and add to node1's mempool, then disconnect the
# last block.
# Create tx0 with 2 outputs
utxo = self.nodes[0].listunspent()
txid = utxo[0]['txid']
value = utxo[0]['amount']
vout = utxo[0]['vout']
send_value = (value - fee) / 2
inputs = [ {'txid' : txid, 'vout' : vout} ]
outputs = {}
for _ in range(2):
outputs[self.nodes[0].getnewaddress()] = send_value
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
tx0_id = txid
value = send_value
# Create tx1
tx1_id, _ = chain_transaction(self.nodes[0], [tx0_id], [0], value, fee, 1)
# Create tx2-7
vout = 1
txid = tx0_id
for _ in range(6):
(txid, sent_value) = chain_transaction(self.nodes[0], [txid], [vout], value, fee, 1)
vout = 0
value = sent_value
# Mine these in a block
self.generate(self.nodes[0], 1)
# Now generate tx8, with a big fee
inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ]
outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
self.sync_mempools()
# Now try to disconnect the tip on each node...
self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
self.sync_blocks()
if __name__ == '__main__':
MempoolPackagesTest().main()
| {
"content_hash": "2f0e8a637dab76213247ffe18c0fe001",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 153,
"avg_line_length": 45.4089552238806,
"alnum_prop": 0.6153037075992638,
"repo_name": "achow101/bitcoin",
"id": "068fdc0b658d0477bc38b42d5408c13cf46a46cc",
"size": "15426",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "test/functional/mempool_packages.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28178"
},
{
"name": "Batchfile",
"bytes": "13"
},
{
"name": "C",
"bytes": "1225820"
},
{
"name": "C++",
"bytes": "9615124"
},
{
"name": "CMake",
"bytes": "29132"
},
{
"name": "Cap'n Proto",
"bytes": "1256"
},
{
"name": "Dockerfile",
"bytes": "1721"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "541"
},
{
"name": "M4",
"bytes": "237973"
},
{
"name": "Makefile",
"bytes": "141372"
},
{
"name": "Objective-C++",
"bytes": "5497"
},
{
"name": "Python",
"bytes": "2718039"
},
{
"name": "QMake",
"bytes": "438"
},
{
"name": "Sage",
"bytes": "56897"
},
{
"name": "Scheme",
"bytes": "24332"
},
{
"name": "Shell",
"bytes": "207137"
}
],
"symlink_target": ""
} |
from sqlalchemy import *
from migrate import *
from nova import log as logging
from nova import utils
meta = MetaData()
networks = Table('networks', meta,
Column("id", Integer(), primary_key=True, nullable=False))
# Add priority column to networks table
priority = Column('priority', Integer())
def upgrade(migrate_engine):
meta.bind = migrate_engine
try:
networks.create_column(priority)
except Exception:
logging.error(_("priority column not added to networks table"))
raise
def downgrade(migrate_engine):
meta.bind = migrate_engine
networks.drop_column(priority)
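# For reference, the upgrade above is roughly equivalent to issuing
# "ALTER TABLE networks ADD COLUMN priority INTEGER", and the downgrade to
# dropping that column again (illustrative SQL; the exact statements are
# generated by sqlalchemy-migrate for the configured backend).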
| {
"content_hash": "774353c7b72d16e6f0c5a6b09a9e7a82",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 71,
"avg_line_length": 21.517241379310345,
"alnum_prop": 0.7019230769230769,
"repo_name": "nii-cloud/dodai-compute",
"id": "b9b0ea37cec8163e1a64d81c6e91137bd560289b",
"size": "1252",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "nova/db/sqlalchemy/migrate_repo/versions/045_add_network_priority.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7412"
},
{
"name": "Python",
"bytes": "4253758"
},
{
"name": "Shell",
"bytes": "42407"
}
],
"symlink_target": ""
} |
"""Help to generate SQL string usable by the Python DB-API
:author: Logilab
:copyright: 2003-2008 LOGILAB S.A. (Paris, FRANCE)
:contact: http://www.logilab.fr/ -- mailto:[email protected]
"""
__docformat__ = "restructuredtext en"
# SQLGenerator ################################################################
class SQLGenerator :
"""
Helper class to generate SQL strings to use with python's DB-API
"""
def where(self, keys, addon=None) :
"""
keys : list of keys
>>> s = SQLGenerator()
>>> s.where(['nom'])
'nom = %(nom)s'
>>> s.where(['nom','prenom'])
'nom = %(nom)s AND prenom = %(prenom)s'
>>> s.where(['nom','prenom'], 'x.id = y.id')
'x.id = y.id AND nom = %(nom)s AND prenom = %(prenom)s'
"""
restriction = ["%s = %%(%s)s" % (x, x) for x in keys]
if addon:
restriction.insert(0, addon)
return " AND ".join(restriction)
def set(self, keys) :
"""
keys : list of keys
>>> s = SQLGenerator()
>>> s.set(['nom'])
'nom = %(nom)s'
>>> s.set(['nom','prenom'])
'nom = %(nom)s, prenom = %(prenom)s'
"""
return ", ".join(["%s = %%(%s)s" % (x, x) for x in keys])
def insert(self, table, params) :
"""
table : name of the table
        params : dictionary that will be used as in cursor.execute(sql, params)
>>> s = SQLGenerator()
>>> s.insert('test',{'nom':'dupont'})
'INSERT INTO test ( nom ) VALUES ( %(nom)s )'
>>> s.insert('test',{'nom':'dupont','prenom':'jean'})
'INSERT INTO test ( nom, prenom ) VALUES ( %(nom)s, %(prenom)s )'
"""
keys = ', '.join(params.keys())
values = ', '.join(["%%(%s)s" % x for x in params])
sql = 'INSERT INTO %s ( %s ) VALUES ( %s )' % (table, keys, values)
return sql
def select(self, table, params) :
"""
table : name of the table
        params : dictionary that will be used as in cursor.execute(sql, params)
>>> s = SQLGenerator()
>>> s.select('test',{})
'SELECT * FROM test'
>>> s.select('test',{'nom':'dupont'})
'SELECT * FROM test WHERE nom = %(nom)s'
>>> s.select('test',{'nom':'dupont','prenom':'jean'})
'SELECT * FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s'
"""
sql = 'SELECT * FROM %s' % table
where = self.where(params.keys())
if where :
sql = sql + ' WHERE %s' % where
return sql
def adv_select(self, model, tables, params, joins=None) :
"""
model : list of columns to select
tables : list of tables used in from
        params : dictionary that will be used as in cursor.execute(sql, params)
joins : optional list of restriction statements to insert in the where
clause. Usually used to perform joins.
>>> s = SQLGenerator()
>>> s.adv_select(['column'],[('test', 't')], {})
'SELECT column FROM test AS t'
>>> s.adv_select(['column'],[('test', 't')], {'nom':'dupont'})
'SELECT column FROM test AS t WHERE nom = %(nom)s'
"""
table_names = ["%s AS %s" % (k, v) for k, v in tables]
sql = 'SELECT %s FROM %s' % (', '.join(model), ', '.join(table_names))
if joins and type(joins) != type(''):
joins = ' AND '.join(joins)
where = self.where(params.keys(), joins)
if where :
sql = sql + ' WHERE %s' % where
return sql
def delete(self, table, params) :
"""
table : name of the table
        params : dictionary that will be used as in cursor.execute(sql, params)
>>> s = SQLGenerator()
>>> s.delete('test',{'nom':'dupont'})
'DELETE FROM test WHERE nom = %(nom)s'
>>> s.delete('test',{'nom':'dupont','prenom':'jean'})
'DELETE FROM test WHERE nom = %(nom)s AND prenom = %(prenom)s'
"""
where = self.where(params.keys())
sql = 'DELETE FROM %s WHERE %s' % (table, where)
return sql
def update(self, table, params, unique) :
"""
table : name of the table
        params : dictionary that will be used as in cursor.execute(sql, params)
>>> s = SQLGenerator()
>>> s.update('test', {'id':'001','nom':'dupont'}, ['id'])
'UPDATE test SET nom = %(nom)s WHERE id = %(id)s'
>>> s.update('test',{'id':'001','nom':'dupont','prenom':'jean'},['id'])
'UPDATE test SET nom = %(nom)s, prenom = %(prenom)s WHERE id = %(id)s'
"""
where = self.where(unique)
set = self.set([key for key in params if key not in unique])
sql = 'UPDATE %s SET %s WHERE %s' % (table, set, where)
return sql
class BaseTable:
"""
Another helper class to ease SQL table manipulation
"""
# table_name = "default"
# supported types are s/i/d
# table_fields = ( ('first_field','s'), )
# primary_key = 'first_field'
def __init__(self, table_name, table_fields, primary_key=None):
if primary_key is None:
self._primary_key = table_fields[0][0]
else:
self._primary_key = primary_key
self._table_fields = table_fields
self._table_name = table_name
info = {
'key' : self._primary_key,
'table' : self._table_name,
'columns' : ",".join( [ f for f,t in self._table_fields ] ),
'values' : ",".join( [sql_repr(t, "%%(%s)s" % f)
for f,t in self._table_fields] ),
'updates' : ",".join( ["%s=%s" % (f, sql_repr(t, "%%(%s)s" % f))
for f,t in self._table_fields] ),
}
        self._insert_stmt = ("INSERT INTO %(table)s (%(columns)s) "
                             "VALUES (%(values)s)") % info
        self._update_stmt = ("UPDATE %(table)s SET %(updates)s "
                             "WHERE %(key)s=%%(key)s") % info
self._select_stmt = ("SELECT %(columns)s FROM %(table)s "
"WHERE %(key)s=%%(key)s") % info
self._delete_stmt = ("DELETE FROM %(table)s "
"WHERE %(key)s=%%(key)s") % info
for k, t in table_fields:
if hasattr(self, k):
raise ValueError("Cannot use %s as a table field" % k)
            setattr(self, k, None)
def as_dict(self):
d = {}
for k, t in self._table_fields:
d[k] = getattr(self, k)
return d
def select(self, cursor):
        d = { 'key' : getattr(self, self._primary_key) }
cursor.execute(self._select_stmt % d)
rows = cursor.fetchall()
        if len(rows) != 1:
msg = "Select: ambiguous query returned %d rows"
raise ValueError(msg % len(rows))
for (f, t), v in zip(self._table_fields, rows[0]):
setattr(self, f, v)
def update(self, cursor):
d = self.as_dict()
cursor.execute(self._update_stmt % d)
    def delete(self, cursor):
        d = { 'key' : getattr(self, self._primary_key) }
        cursor.execute(self._delete_stmt % d)
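# A minimal usage sketch of BaseTable (hypothetical table, fields and DB-API
# cursor; illustrative only):
#
#     person = BaseTable('person', (('name', 's'), ('age', 'i')),
#                        primary_key='name')
#     person.name = 'dupont'
#     person.select(cursor)      # fills person.age from the matching row
#     print person.as_dict()     # {'name': 'dupont', 'age': ...}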
# Helper functions #############################################################
def name_fields(cursor, records) :
"""
Take a cursor and a list of records fetched with that cursor, then return a
    list of dictionaries (one for each record) whose keys are column names and
values are records' values.
cursor : cursor used to execute the query
records : list returned by fetch*()
"""
result = []
for record in records :
record_dict = {}
for i in range(len(record)) :
record_dict[cursor.description[i][0]] = record[i]
result.append(record_dict)
return result
def sql_repr(type, val):
if type == 's':
return "'%s'" % (val,)
else:
return val
if __name__ == "__main__":
import doctest
from clonedigger.logilab.common import sqlgen
print doctest.testmod(sqlgen)
| {
"content_hash": "b9a6d25fe3c510f9735f2c604e25c066",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 83,
"avg_line_length": 35.64192139737991,
"alnum_prop": 0.5026954177897575,
"repo_name": "h2oloopan/easymerge",
"id": "c19756df8aa8d843acfef35214b07ed438237b07",
"size": "8861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EasyMerge/clonedigger/logilab/common/sqlgen.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "13487"
},
{
"name": "CSS",
"bytes": "416664"
},
{
"name": "D",
"bytes": "2012"
},
{
"name": "Java",
"bytes": "583078"
},
{
"name": "JavaScript",
"bytes": "285692"
},
{
"name": "Python",
"bytes": "4212549"
},
{
"name": "Ruby",
"bytes": "920"
},
{
"name": "Shell",
"bytes": "40508"
},
{
"name": "TeX",
"bytes": "114952"
}
],
"symlink_target": ""
} |
"""Python 2/3 compatibility module"""
import sys
if sys.version_info < (3,):
reload(sys)
sys.setdefaultencoding('utf-8')
| {
"content_hash": "d50e2b093a62c5094595130b0c7ba028",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 37,
"avg_line_length": 18.142857142857142,
"alnum_prop": 0.6850393700787402,
"repo_name": "pcbje/gransk",
"id": "db43d5570a27400be84946f3aff8ee5c1ffc324b",
"size": "170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gransk/core/compat.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10819"
},
{
"name": "HTML",
"bytes": "21524"
},
{
"name": "JavaScript",
"bytes": "88057"
},
{
"name": "PowerShell",
"bytes": "317"
},
{
"name": "Python",
"bytes": "142629"
},
{
"name": "Shell",
"bytes": "4522"
}
],
"symlink_target": ""
} |
from __future__ import print_function
# Form implementation generated from reading ui file './acq4/analysis/modules/MapImager/SpatialCorrelatorCtrlTemplate.ui'
#
# Created: Tue Dec 24 01:49:13 2013
# by: PyQt4 UI code generator 4.10
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(273, 234)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setMargin(3)
self.gridLayout.setSpacing(3)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setSpacing(1)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label = QtGui.QLabel(Form)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout.addWidget(self.label)
self.spontSpin = SpinBox(Form)
self.spontSpin.setSuffix(_fromUtf8(""))
self.spontSpin.setObjectName(_fromUtf8("spontSpin"))
self.horizontalLayout.addWidget(self.spontSpin)
self.gridLayout.addLayout(self.horizontalLayout, 1, 0, 1, 2)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setSpacing(1)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label_2 = QtGui.QLabel(Form)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout_2.addWidget(self.label_2)
self.deltaTSpin = SpinBox(Form)
self.deltaTSpin.setObjectName(_fromUtf8("deltaTSpin"))
self.horizontalLayout_2.addWidget(self.deltaTSpin)
self.gridLayout.addLayout(self.horizontalLayout_2, 2, 0, 1, 2)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setSpacing(1)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.label_3 = QtGui.QLabel(Form)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_3.addWidget(self.label_3)
self.radiusSpin = SpinBox(Form)
self.radiusSpin.setObjectName(_fromUtf8("radiusSpin"))
self.horizontalLayout_3.addWidget(self.radiusSpin)
self.gridLayout.addLayout(self.horizontalLayout_3, 3, 0, 1, 2)
self.disableChk = QtGui.QCheckBox(Form)
self.disableChk.setObjectName(_fromUtf8("disableChk"))
self.gridLayout.addWidget(self.disableChk, 6, 0, 1, 1)
self.processBtn = QtGui.QPushButton(Form)
self.processBtn.setObjectName(_fromUtf8("processBtn"))
self.gridLayout.addWidget(self.processBtn, 6, 1, 1, 1)
self.groupBox = QtGui.QGroupBox(Form)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.gridLayout_2 = QtGui.QGridLayout(self.groupBox)
self.gridLayout_2.setMargin(3)
self.gridLayout_2.setSpacing(3)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.probabilityRadio = QtGui.QRadioButton(self.groupBox)
self.probabilityRadio.setChecked(True)
self.probabilityRadio.setObjectName(_fromUtf8("probabilityRadio"))
self.gridLayout_2.addWidget(self.probabilityRadio, 0, 0, 1, 2)
self.thresholdSpin = SpinBox(self.groupBox)
self.thresholdSpin.setEnabled(True)
self.thresholdSpin.setObjectName(_fromUtf8("thresholdSpin"))
self.gridLayout_2.addWidget(self.thresholdSpin, 2, 1, 1, 1)
self.label_4 = QtGui.QLabel(self.groupBox)
self.label_4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.gridLayout_2.addWidget(self.label_4, 2, 0, 1, 1)
self.thresholdRadio = QtGui.QRadioButton(self.groupBox)
self.thresholdRadio.setObjectName(_fromUtf8("thresholdRadio"))
self.gridLayout_2.addWidget(self.thresholdRadio, 1, 0, 1, 2)
self.gridLayout.addWidget(self.groupBox, 5, 0, 1, 2)
self.eventCombo = ComboBox(Form)
self.eventCombo.setObjectName(_fromUtf8("eventCombo"))
self.gridLayout.addWidget(self.eventCombo, 0, 1, 1, 1)
self.label_5 = QtGui.QLabel(Form)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.gridLayout.addWidget(self.label_5, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_translate("Form", "Form", None))
self.label.setText(_translate("Form", "Spontaneous Event Rate:", None))
self.label_2.setText(_translate("Form", "Post-stimulus time window:", None))
self.label_3.setText(_translate("Form", "Correlation Radius:", None))
self.disableChk.setText(_translate("Form", "Disable", None))
self.processBtn.setText(_translate("Form", "re-Process", None))
self.groupBox.setTitle(_translate("Form", "Output data:", None))
self.probabilityRadio.setText(_translate("Form", "Probability values (float)", None))
self.label_4.setText(_translate("Form", "Threshold:", None))
self.thresholdRadio.setText(_translate("Form", "Spots that cross threshold (boolean)", None))
self.label_5.setText(_translate("Form", "Event Parameter to use:", None))
from acq4.pyqtgraph.widgets.SpinBox import SpinBox
from acq4.pyqtgraph.widgets.ComboBox import ComboBox
| {
"content_hash": "7791a8c158409d272297f4ffb6a200bd",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 121,
"avg_line_length": 50.452991452991455,
"alnum_prop": 0.6915127901067254,
"repo_name": "meganbkratz/acq4",
"id": "7ba2e28cd4c26277a9149230a0d4af472ef1bc30",
"size": "5927",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "acq4/analysis/modules/MapImager/SpatialCorrelatorCtrlTemplate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "3037"
},
{
"name": "Arduino",
"bytes": "18651"
},
{
"name": "Batchfile",
"bytes": "64"
},
{
"name": "C",
"bytes": "705091"
},
{
"name": "C++",
"bytes": "321384"
},
{
"name": "CSS",
"bytes": "716"
},
{
"name": "MATLAB",
"bytes": "1752"
},
{
"name": "Objective-C",
"bytes": "596020"
},
{
"name": "Processing",
"bytes": "13403"
},
{
"name": "Python",
"bytes": "5922488"
}
],
"symlink_target": ""
} |
import sys
import unittest
import numpy as np
from op_test import OpTest
from test_softmax_op import stable_softmax
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard
import paddle
import paddle.nn.functional as F
paddle.enable_static()
CUDA_BLOCK_SIZE = 32
class CTCForward:
def __init__(
self,
softmax,
softmax_lod,
labels,
labels_lod,
num_classes,
batch_size,
blank,
norm_by_times,
):
self.softmax = softmax
self.softmax_lod = softmax_lod
self.labels = labels
self.labels_lod = labels_lod
self.blank = blank
self.norm_by_times = norm_by_times
self.level = 0
self.num_classes = num_classes
self.batch_size = batch_size
self.loss = np.zeros([self.batch_size, 1], dtype=softmax.dtype)
self.gradient = np.zeros(self.softmax.shape, dtype=softmax.dtype)
# float64
self.EXP_MAX = sys.float_info.max
self.EXP_MIN = sys.float_info.min
self.LOG_ZERO = np.log(self.EXP_MIN)
self.LOG_INFINITY = np.log(self.EXP_MAX)
def safe_exp(self, x):
if x <= self.LOG_ZERO:
return 0.0
if x >= self.LOG_INFINITY:
return self.EXP_MAX
return np.exp(x)
def safe_log(self, x):
if x <= self.EXP_MIN:
return self.LOG_ZERO
return np.log(x)
# x = lna and y = lnb are in log scale, ln(a / b) = lna - lnb
def log_div(self, x, y):
res = x - y
if res <= self.LOG_ZERO:
return self.LOG_ZERO
if res >= self.LOG_INFINITY:
return self.LOG_INFINITY
return res
# x = lna and y = lnb are in log scale, ln(a * b) = lna + lnb
def log_mul(self, x, y):
res = x + y
if res <= self.LOG_ZERO:
return self.LOG_ZERO
if res >= self.LOG_INFINITY:
return self.LOG_INFINITY
return res
# x = lna and y = lnb are in log scale,
# ln(a + b) = lna + ln(1 + exp(lnb - lna)), where b > a
def log_add(self, x, y):
if x < y:
t = y
y = x
x = t
return x + self.safe_log(1 + self.safe_exp(y - x))
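    # Worked example (illustrative): for a = 0.5 and b = 0.25,
    # log_add(ln 0.5, ln 0.25) = ln 0.5 + ln(1 + exp(ln 0.25 - ln 0.5))
    #                          = ln 0.5 + ln(1.5) = ln 0.75,
    # i.e. the log of a + b computed without leaving log space.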
def segment_range(self, time, total_times, total_segments):
start = max(0, total_segments - (2 * (total_times - time)))
end = min(total_segments, 2 * (time + 1))
return start, end
def forward_a_sequence(self, softmax_a_sequence, labels_a_sequence):
total_times = softmax_a_sequence.shape[0]
total_segments = labels_a_sequence.shape[0] * 2 + 1
required_times = labels_a_sequence.shape[0]
old_label = -1
for i in range(labels_a_sequence.shape[0]):
            # two contiguous labels with the same value
if labels_a_sequence[i, 0] == old_label:
required_times = required_times + 1
old_label = labels_a_sequence[i, 0]
if total_times < required_times:
return 0
# calculate the forward and backward variables,
# reference Chapter 7.3 of "Alex Grave, Supervised Sequence
# Labelling with Recurrent Neural Networks"
log_acts = np.zeros(
[total_times, self.num_classes], dtype=softmax_a_sequence.dtype
)
for i in range(total_times):
for j in range(self.num_classes):
log_acts[i, j] = self.safe_log(softmax_a_sequence[i, j])
# calculate the forward variables
forward_vars = np.zeros(
[total_times, total_segments], dtype=softmax_a_sequence.dtype
)
for i in range(total_times):
for j in range(total_segments):
forward_vars[i, j] = self.LOG_ZERO
for i in range(total_times):
# dp initialization at t0
if i == 0:
forward_vars[i, 0] = log_acts[0, self.blank]
if total_segments > 1:
forward_vars[i, 1] = log_acts[0, labels_a_sequence[i, 0]]
continue
# dp from t1
start, end = self.segment_range(i, total_times, total_segments)
for k in range(end - start):
j = k + start
if j & 1 == 1:
label_idx = j // 2
label_val = labels_a_sequence[label_idx, 0]
fv = self.log_add(
forward_vars[i - 1, j], forward_vars[i - 1, j - 1]
)
if (
j > 1
and label_val != labels_a_sequence[label_idx - 1, 0]
):
fv = self.log_add(fv, forward_vars[i - 1, j - 2])
fv = self.log_mul(fv, log_acts[i, label_val])
else:
fv = forward_vars[i - 1, j]
if j > 0:
fv = self.log_add(fv, forward_vars[i - 1, j - 1])
fv = self.log_mul(fv, log_acts[i, self.blank])
forward_vars[i, j] = fv
# sum the last two value as log_prob
log_prob = forward_vars[total_times - 1, total_segments - 1]
if total_segments > 1:
log_prob = self.log_add(
log_prob, forward_vars[total_times - 1, total_segments - 2]
)
return -log_prob
def forward(self):
softmax_offset = 0
labels_offset = 0
for i in range(self.batch_size):
if self.labels.shape[1] == 1:
softmax_start_i = softmax_offset
softmax_end_i = softmax_offset + self.softmax_lod[self.level][i]
labels_start_i = labels_offset
labels_end_i = labels_offset + self.labels_lod[self.level][i]
softmax_a_sequence = self.softmax[
softmax_start_i:softmax_end_i, :
]
labels_a_sequence = self.labels[labels_start_i:labels_end_i, :]
self.loss[i] = self.forward_a_sequence(
softmax_a_sequence, labels_a_sequence
)
softmax_offset += self.softmax_lod[self.level][i]
labels_offset += self.labels_lod[self.level][i]
else:
softmax_a_sequence = self.softmax[: self.softmax_lod[i], i, :]
labels_a_sequence = self.labels[: self.labels_lod[i], :]
self.loss[i] = self.forward_a_sequence(
softmax_a_sequence, labels_a_sequence
)
return self.loss
def python_api(
logits,
label,
logits_length=None,
labels_length=None,
blank=0,
norm_by_times=False,
):
return paddle.fluid.layers.warpctc(
logits, label, blank, norm_by_times, logits_length, labels_length
)
class TestWarpCTCOp(OpTest):
def config(self):
self.batch_size = 4
self.num_classes = 12
self.logits_lod = [[4, 1, 3, 3]]
self.labels_lod = [[3, 1, 4, 4]]
self.blank = self.num_classes - 1
self.norm_by_times = False
def setUp(self):
self.op_type = "warpctc"
self.config()
logits = np.random.uniform(
0.1, 1.0, [sum(self.logits_lod[0]), self.num_classes]
).astype("float32")
softmax = np.apply_along_axis(stable_softmax, 1, logits)
# labels should not be blank
labels = np.random.randint(
0, self.num_classes - 1, [sum(self.labels_lod[0]), 1], dtype="int32"
)
ctc = CTCForward(
softmax,
self.logits_lod,
labels,
self.labels_lod,
self.num_classes,
self.batch_size,
self.blank,
self.norm_by_times,
)
loss = ctc.forward()
max_sequence_length = 0
for i in range(self.batch_size):
max_sequence_length = max(
max_sequence_length, self.logits_lod[0][i]
)
self.gradient = np.zeros(
[max_sequence_length, self.batch_size, self.num_classes],
dtype=logits.dtype,
)
self.inputs = {
"Logits": (logits, self.logits_lod),
"Label": (labels, self.labels_lod),
}
self.outputs = {"Loss": loss}
self.attrs = {
"blank": self.blank,
"norm_by_times": self.norm_by_times,
}
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.outputs['WarpCTCGrad'] = self.gradient
if core.is_compiled_with_rocm():
self.check_grad(
["Logits"],
"Loss",
max_relative_error=0.009,
check_dygraph=False,
)
else:
self.check_grad(
["Logits"],
"Loss",
max_relative_error=0.007,
check_dygraph=False,
)
class TestWarpCTCOpCase1(TestWarpCTCOp):
def config(self):
self.batch_size = 4
self.num_classes = CUDA_BLOCK_SIZE + 2
self.logits_lod = [[4, 1, 3, 3]]
self.labels_lod = [[3, 1, 4, 4]]
self.blank = self.num_classes - 1
self.norm_by_times = False
class TestWarpCTCOpWithPadding(OpTest):
def config(self):
self.batch_size = 4
self.num_classes = 8
self.logits_lod = [[4, 1, 3, 3]]
self.labels_lod = [[3, 1, 4, 4]]
self.logits_length = np.array([4, 1, 3, 3], dtype=np.int64)
self.labels_length = np.array([3, 1, 4, 4], dtype=np.int64)
self.blank = self.num_classes - 1
self.norm_by_times = False
def setUp(self):
self.op_type = "warpctc"
self.python_api = python_api
self.python_out_sig = ["Loss"]
self.config()
logits = np.random.uniform(
0.1, 1.0, [sum(self.logits_length), self.num_classes]
).astype("float32")
softmax = np.apply_along_axis(stable_softmax, 1, logits)
# labels should not be blank
labels = np.random.randint(
0, self.num_classes - 1, [sum(self.labels_length), 1], dtype="int32"
)
ctc = CTCForward(
softmax,
self.logits_lod,
labels,
self.labels_lod,
self.num_classes,
self.batch_size,
self.blank,
self.norm_by_times,
)
loss = ctc.forward()
max_sequence_length = 0
for i in range(self.batch_size):
max_sequence_length = max(
max_sequence_length, self.logits_length[i]
)
# reshape logits to T*N*S
new_logits = np.zeros(
[max_sequence_length, self.batch_size, self.num_classes],
dtype=logits.dtype,
)
cur = 0
for batch_id in range(self.batch_size):
for i in range(self.logits_length[batch_id]):
for j in range(self.num_classes):
new_logits[i, batch_id, j] = logits[cur + i, j]
cur = cur + self.logits_length[batch_id]
# reshape labels to N*S
max_target_seq_length = 0
for i in range(self.batch_size):
max_target_seq_length = max(
max_target_seq_length, self.labels_length[i]
)
new_labels = np.zeros(
[self.batch_size, max_target_seq_length], dtype="int32"
)
cur = 0
for batch_id in range(self.batch_size):
for i in range(self.labels_length[batch_id]):
new_labels[batch_id, i] = labels[cur + i]
cur = cur + self.labels_length[batch_id]
self.gradient = np.zeros(
[max_sequence_length, self.batch_size, self.num_classes],
dtype=logits.dtype,
)
self.inputs = {
"Logits": new_logits,
"Label": new_labels,
"LogitsLength": self.logits_length,
"LabelLength": self.labels_length,
}
self.outputs = {"Loss": loss}
self.attrs = {
"blank": self.blank,
"norm_by_times": self.norm_by_times,
}
def test_check_output(self):
self.check_output(check_eager=True)
def test_check_grad(self):
self.outputs['WarpCTCGrad'] = self.gradient
if core.is_compiled_with_rocm():
self.check_grad(
["Logits"],
"Loss",
max_relative_error=0.009,
check_dygraph=False,
)
else:
self.check_grad(
["Logits"],
"Loss",
max_relative_error=0.007,
check_dygraph=False,
)
class TestWarpCTCOpWithPaddingCase1(TestWarpCTCOpWithPadding):
def config(self):
self.batch_size = 4
self.num_classes = CUDA_BLOCK_SIZE + 2
self.logits_lod = [[4, 1, 3, 3]]
self.labels_lod = [[3, 1, 4, 4]]
self.logits_length = np.array([4, 1, 3, 3], dtype=np.int64)
self.labels_length = np.array([3, 1, 4, 4], dtype=np.int64)
self.blank = self.num_classes - 1
self.norm_by_times = False
class TestWarpCTCOpFp64(OpTest):
def config(self):
self.batch_size = 4
self.num_classes = 8
self.logits_lod = [[4, 1, 5, 5]]
self.labels_lod = [[3, 1, 4, 2]]
self.logits_length = np.array([4, 1, 5, 5], dtype=np.int64)
self.labels_length = np.array([3, 1, 4, 2], dtype=np.int64)
self.blank = self.num_classes - 1
self.norm_by_times = False
def setUp(self):
self.op_type = "warpctc"
self.python_api = python_api
self.python_out_sig = ["Loss"]
self.config()
logits = np.random.uniform(
0.1, 1.0, [sum(self.logits_length), self.num_classes]
).astype("float64")
softmax = np.apply_along_axis(stable_softmax, 1, logits)
# labels should not be blank
labels = np.random.randint(
0, self.num_classes - 1, [sum(self.labels_length), 1], dtype="int32"
)
ctc = CTCForward(
softmax,
self.logits_lod,
labels,
self.labels_lod,
self.num_classes,
self.batch_size,
self.blank,
self.norm_by_times,
)
loss = ctc.forward()
max_sequence_length = 0
for i in range(self.batch_size):
max_sequence_length = max(
max_sequence_length, self.logits_length[i]
)
# reshape logits to T*N*S
new_logits = np.zeros(
[max_sequence_length, self.batch_size, self.num_classes],
dtype=logits.dtype,
)
cur = 0
for batch_id in range(self.batch_size):
for i in range(self.logits_length[batch_id]):
for j in range(self.num_classes):
new_logits[i, batch_id, j] = logits[cur + i, j]
cur = cur + self.logits_length[batch_id]
# reshape labels to N*S
max_target_seq_length = 0
for i in range(self.batch_size):
max_target_seq_length = max(
max_target_seq_length, self.labels_length[i]
)
new_labels = np.zeros(
[self.batch_size, max_target_seq_length], dtype="int32"
)
cur = 0
for batch_id in range(self.batch_size):
for i in range(self.labels_length[batch_id]):
new_labels[batch_id, i] = labels[cur + i]
cur = cur + self.labels_length[batch_id]
self.gradient = np.zeros(
[max_sequence_length, self.batch_size, self.num_classes],
dtype=logits.dtype,
)
self.inputs = {
"Logits": new_logits,
"Label": new_labels,
"LogitsLength": self.logits_length,
"LabelLength": self.labels_length,
}
self.outputs = {"Loss": loss}
self.attrs = {
"blank": self.blank,
"norm_by_times": self.norm_by_times,
}
def test_check_output(self):
self.check_output(check_eager=True)
def test_check_grad(self):
self.outputs['WarpCTCGrad'] = self.gradient
self.check_grad(["Logits"], "Loss", check_eager=True)
class TestWarpCTCOpError(unittest.TestCase):
def test_errors(self):
with program_guard(Program(), Program()):
logits = fluid.data(
name='logits', shape=[5, 16, 6], dtype='float32'
)
logits_length = fluid.data(
name='logits_length', shape=[None], dtype='int64'
)
label = fluid.data(name='label', shape=[16, 3], dtype='int32')
label_length = fluid.data(
name='labels_length', shape=[None], dtype='int64'
)
def test_logits_Variable():
logits_data = np.random.rand(5, 16, 6).astype(logits.dtype)
fluid.layers.warpctc(
input=logits_data,
label=label,
input_length=logits_length,
label_length=label_length,
)
self.assertRaises(TypeError, test_logits_Variable)
def test_label_Variable():
label_data = np.random.randint(0, 5, [5, 1]).astype("int32")
fluid.layers.warpctc(
input=logits,
label=label_data,
input_length=logits_length,
label_length=label_length,
)
self.assertRaises(TypeError, test_label_Variable)
def test_logits_len_Variable():
logits_length_data = np.array([5] * 16).astype("int64")
fluid.layers.warpctc(
input=logits,
label=label,
input_length=logits_length_data,
label_length=label_length,
)
self.assertRaises(TypeError, test_logits_len_Variable)
def test_label_len_Variable():
label_length_data = np.array([3] * 16).astype("int64")
fluid.layers.warpctc(
input=logits,
label=label,
input_length=logits_length,
label_length=label_length_data,
)
self.assertRaises(TypeError, test_label_len_Variable)
def test_dygraph_errors(self):
def test_dygraph_with_lod():
logits = np.random.uniform(0.1, 1.0, [20, 15]).astype("float32")
# labels should not be blank
labels = np.random.randint(0, 15 - 1, [15, 1], dtype="int32")
softmax = paddle.to_tensor(logits)
labels = paddle.to_tensor(labels)
fluid.layers.warpctc(input=softmax, label=labels)
paddle.disable_static()
self.assertRaises(ValueError, test_dygraph_with_lod)
paddle.enable_static()
class TestCTCLossAPICase(unittest.TestCase):
def test_functinal_api(self):
self.batch_size = 4
self.num_classes = CUDA_BLOCK_SIZE + 2
self.logits_length = np.array([4, 1, 3, 3], dtype=np.int64)
self.labels_length = np.array([3, 1, 4, 4], dtype=np.int64)
self.blank = self.num_classes - 1
self.norm_by_times = False
logits = np.random.uniform(
0.1,
1.0,
[max(self.logits_length), self.batch_size, self.num_classes],
).astype("float32")
softmax = np.apply_along_axis(stable_softmax, -1, logits)
# labels should not be blank
labels = np.random.randint(
0,
self.num_classes - 1,
[self.batch_size, max(self.labels_length)],
dtype="int32",
)
ctc = CTCForward(
softmax,
self.logits_length,
labels,
self.labels_length,
self.num_classes,
self.batch_size,
self.blank,
self.norm_by_times,
)
loss_np = ctc.forward()
paddle.disable_static()
softmax = paddle.to_tensor(logits)
labels = paddle.to_tensor(labels)
logits_length = paddle.to_tensor(self.logits_length)
labels_length = paddle.to_tensor(self.labels_length)
loss_pd_mean = F.ctc_loss(
softmax,
labels,
logits_length,
labels_length,
blank=self.blank,
reduction='mean',
)
loss_pd_mean = loss_pd_mean.numpy()
loss_pd_sum = F.ctc_loss(
softmax,
labels,
logits_length,
labels_length,
blank=self.blank,
reduction='sum',
)
loss_pd_sum = loss_pd_sum.numpy()
paddle.enable_static()
loss_np = np.squeeze(loss_np, axis=-1)
loss_np_mean = (loss_np / labels_length.numpy()).mean()
loss_np_sum = loss_np.sum()
np.testing.assert_allclose(
loss_pd_mean, loss_np_mean, rtol=1e-05, atol=1
)
np.testing.assert_allclose(loss_pd_sum, loss_np_sum, rtol=1e-05, atol=1)
def test_class_api(self):
self.batch_size = 3
self.num_classes = 15
self.logits_length = np.array([3, 3, 3], dtype=np.int64)
self.labels_length = np.array([0, 1, 2], dtype=np.int64)
self.blank = 0
self.norm_by_times = False
logits = np.random.uniform(
0.1,
1.0,
[max(self.logits_length), self.batch_size, self.num_classes],
).astype("float32")
softmax = np.apply_along_axis(stable_softmax, -1, logits)
# labels should not be blank
labels = np.random.randint(
1,
self.num_classes,
[self.batch_size, max(self.labels_length)],
dtype="int32",
)
ctc = CTCForward(
softmax,
self.logits_length,
labels,
self.labels_length,
self.num_classes,
self.batch_size,
self.blank,
self.norm_by_times,
)
loss_np = ctc.forward()
paddle.disable_static()
softmax = paddle.to_tensor(logits)
labels = paddle.to_tensor(labels)
logits_length = paddle.to_tensor(self.logits_length)
labels_length = paddle.to_tensor(self.labels_length)
loss_pd = paddle.nn.CTCLoss(self.blank, 'none')(
softmax, labels, logits_length, labels_length
)
loss_pd = loss_pd.numpy()
paddle.enable_static()
loss_np = np.squeeze(loss_np, axis=-1)
np.testing.assert_allclose(loss_pd, loss_np, rtol=1e-05, atol=1)
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "05f9ae7363af46396ec4e7fcc9aee579",
"timestamp": "",
"source": "github",
"line_count": 706,
"max_line_length": 80,
"avg_line_length": 32.555240793201136,
"alnum_prop": 0.5170988513748694,
"repo_name": "PaddlePaddle/Paddle",
"id": "b3febb9b40daa1cf75deb90196d0caee90a0779b",
"size": "23597",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_warpctc_op.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36848680"
},
{
"name": "CMake",
"bytes": "902619"
},
{
"name": "Cuda",
"bytes": "5227207"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36203874"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553177"
}
],
"symlink_target": ""
} |
NXT_TST_INTERVAL = 1
NEW_ROUND_INTERVAL = 2
TOGGLE_INTERVAL = 0.5
from rpiledmatrix import switch_on, switch_off, turn_all_off, cleanup, blink_on, blink_off, LED_INDICES
import time
def blink_on_serial():
for led in LED_INDICES:
blink_on(led)
time.sleep(TOGGLE_INTERVAL)
blink_off(led)
def steady_on_serial():
for led in LED_INDICES:
switch_on(led)
time.sleep(TOGGLE_INTERVAL)
switch_off(led)
try:
while True:
steady_on_serial()
time.sleep(NXT_TST_INTERVAL)
blink_on_serial()
time.sleep(NEW_ROUND_INTERVAL)
except KeyboardInterrupt:
turn_all_off()
finally:
cleanup()
| {
"content_hash": "b698fc684d276793e3452be2f74766b6",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 103,
"avg_line_length": 24.344827586206897,
"alnum_prop": 0.6317280453257791,
"repo_name": "san2488/echo-lets-play-tic-tac-toe",
"id": "30bd05e4870e7fd7efc5dc81cda110428fc8b29b",
"size": "728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ledmatdiag.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9367"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from grab.error import GrabError
__all__ = ('SpiderError', 'SpiderMisuseError', 'FatalError',
'SpiderInternalError',
'NoTaskHandler', 'NoDataHandler')
class SpiderError(GrabError):
"""Base class for Spider exceptions"""
class SpiderConfigurationError(SpiderError):
pass
class SpiderMisuseError(SpiderError):
"""Improper usage of Spider framework"""
class FatalError(SpiderError):
"""Fatal error which should stop parsing process"""
class SpiderInternalError(SpiderError):
"""
    Used to indicate an error in some internal spider service,
    such as spider class discovery or a CLI error.
"""
class NoTaskHandler(SpiderError):
"""
    Used when it is not possible to find which
    handler should be used to process the network response.
"""
class NoDataHandler(SpiderError):
"""
    Used when it is not possible to find which
    handler should be used to process the Data object.
"""
| {
"content_hash": "1ead80a47aabdde63aba9b43984c3224",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 60,
"avg_line_length": 22.431818181818183,
"alnum_prop": 0.7011144883485309,
"repo_name": "mawentao007/reading_grab",
"id": "1402ffc1299d1b05770181f649787c3ef4055475",
"size": "1001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grab/spider/error.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "5434"
},
{
"name": "Makefile",
"bytes": "910"
},
{
"name": "PostScript",
"bytes": "2788"
},
{
"name": "Python",
"bytes": "407915"
}
],
"symlink_target": ""
} |
import datetime
import random
import uuid
from oslo.utils import timeutils
import six
from keystone.common import sql
from keystone.contrib.oauth1 import core
from keystone import exception
from keystone.i18n import _
from keystone.openstack.common import jsonutils
class Consumer(sql.ModelBase, sql.DictBase):
__tablename__ = 'consumer'
attributes = ['id', 'description', 'secret']
id = sql.Column(sql.String(64), primary_key=True, nullable=False)
description = sql.Column(sql.String(64), nullable=True)
secret = sql.Column(sql.String(64), nullable=False)
extra = sql.Column(sql.JsonBlob(), nullable=False)
class RequestToken(sql.ModelBase, sql.DictBase):
__tablename__ = 'request_token'
attributes = ['id', 'request_secret',
'verifier', 'authorizing_user_id', 'requested_project_id',
'role_ids', 'consumer_id', 'expires_at']
id = sql.Column(sql.String(64), primary_key=True, nullable=False)
request_secret = sql.Column(sql.String(64), nullable=False)
verifier = sql.Column(sql.String(64), nullable=True)
authorizing_user_id = sql.Column(sql.String(64), nullable=True)
requested_project_id = sql.Column(sql.String(64), nullable=False)
role_ids = sql.Column(sql.Text(), nullable=True)
consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'),
nullable=False, index=True)
expires_at = sql.Column(sql.String(64), nullable=True)
@classmethod
def from_dict(cls, user_dict):
return cls(**user_dict)
def to_dict(self):
return dict(six.iteritems(self))
class AccessToken(sql.ModelBase, sql.DictBase):
__tablename__ = 'access_token'
attributes = ['id', 'access_secret', 'authorizing_user_id',
'project_id', 'role_ids', 'consumer_id',
'expires_at']
id = sql.Column(sql.String(64), primary_key=True, nullable=False)
access_secret = sql.Column(sql.String(64), nullable=False)
authorizing_user_id = sql.Column(sql.String(64), nullable=False,
index=True)
project_id = sql.Column(sql.String(64), nullable=False)
role_ids = sql.Column(sql.Text(), nullable=False)
consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'),
nullable=False)
expires_at = sql.Column(sql.String(64), nullable=True)
@classmethod
def from_dict(cls, user_dict):
return cls(**user_dict)
def to_dict(self):
return dict(six.iteritems(self))
class OAuth1(object):
def _get_consumer(self, session, consumer_id):
consumer_ref = session.query(Consumer).get(consumer_id)
if consumer_ref is None:
raise exception.NotFound(_('Consumer not found'))
return consumer_ref
def get_consumer_with_secret(self, consumer_id):
session = sql.get_session()
consumer_ref = self._get_consumer(session, consumer_id)
return consumer_ref.to_dict()
def get_consumer(self, consumer_id):
return core.filter_consumer(
self.get_consumer_with_secret(consumer_id))
def create_consumer(self, consumer):
consumer['secret'] = uuid.uuid4().hex
if not consumer.get('description'):
consumer['description'] = None
session = sql.get_session()
with session.begin():
consumer_ref = Consumer.from_dict(consumer)
session.add(consumer_ref)
return consumer_ref.to_dict()
def _delete_consumer(self, session, consumer_id):
consumer_ref = self._get_consumer(session, consumer_id)
session.delete(consumer_ref)
def _delete_request_tokens(self, session, consumer_id):
q = session.query(RequestToken)
req_tokens = q.filter_by(consumer_id=consumer_id)
req_tokens_list = set([x.id for x in req_tokens])
for token_id in req_tokens_list:
token_ref = self._get_request_token(session, token_id)
session.delete(token_ref)
def _delete_access_tokens(self, session, consumer_id):
q = session.query(AccessToken)
acc_tokens = q.filter_by(consumer_id=consumer_id)
acc_tokens_list = set([x.id for x in acc_tokens])
for token_id in acc_tokens_list:
token_ref = self._get_access_token(session, token_id)
session.delete(token_ref)
def delete_consumer(self, consumer_id):
session = sql.get_session()
with session.begin():
self._delete_request_tokens(session, consumer_id)
self._delete_access_tokens(session, consumer_id)
self._delete_consumer(session, consumer_id)
def list_consumers(self):
session = sql.get_session()
cons = session.query(Consumer)
return [core.filter_consumer(x.to_dict()) for x in cons]
def update_consumer(self, consumer_id, consumer):
session = sql.get_session()
with session.begin():
consumer_ref = self._get_consumer(session, consumer_id)
old_consumer_dict = consumer_ref.to_dict()
old_consumer_dict.update(consumer)
new_consumer = Consumer.from_dict(old_consumer_dict)
consumer_ref.description = new_consumer.description
consumer_ref.extra = new_consumer.extra
return core.filter_consumer(consumer_ref.to_dict())
def create_request_token(self, consumer_id, project_id, token_duration,
request_token_id=None, request_token_secret=None):
if request_token_id is None:
request_token_id = uuid.uuid4().hex
if request_token_secret is None:
request_token_secret = uuid.uuid4().hex
expiry_date = None
if token_duration:
now = timeutils.utcnow()
future = now + datetime.timedelta(seconds=token_duration)
expiry_date = timeutils.isotime(future, subsecond=True)
ref = {}
ref['id'] = request_token_id
ref['request_secret'] = request_token_secret
ref['verifier'] = None
ref['authorizing_user_id'] = None
ref['requested_project_id'] = project_id
ref['role_ids'] = None
ref['consumer_id'] = consumer_id
ref['expires_at'] = expiry_date
session = sql.get_session()
with session.begin():
token_ref = RequestToken.from_dict(ref)
session.add(token_ref)
return token_ref.to_dict()
def _get_request_token(self, session, request_token_id):
token_ref = session.query(RequestToken).get(request_token_id)
if token_ref is None:
raise exception.NotFound(_('Request token not found'))
return token_ref
def get_request_token(self, request_token_id):
session = sql.get_session()
token_ref = self._get_request_token(session, request_token_id)
return token_ref.to_dict()
def authorize_request_token(self, request_token_id, user_id,
role_ids):
session = sql.get_session()
with session.begin():
token_ref = self._get_request_token(session, request_token_id)
token_dict = token_ref.to_dict()
token_dict['authorizing_user_id'] = user_id
token_dict['verifier'] = ''.join(random.sample(core.VERIFIER_CHARS,
8))
token_dict['role_ids'] = jsonutils.dumps(role_ids)
new_token = RequestToken.from_dict(token_dict)
for attr in RequestToken.attributes:
if (attr == 'authorizing_user_id' or attr == 'verifier'
or attr == 'role_ids'):
setattr(token_ref, attr, getattr(new_token, attr))
return token_ref.to_dict()
def create_access_token(self, request_token_id, token_duration,
access_token_id=None, access_token_secret=None):
if access_token_id is None:
access_token_id = uuid.uuid4().hex
if access_token_secret is None:
access_token_secret = uuid.uuid4().hex
session = sql.get_session()
with session.begin():
req_token_ref = self._get_request_token(session, request_token_id)
token_dict = req_token_ref.to_dict()
expiry_date = None
if token_duration:
now = timeutils.utcnow()
future = now + datetime.timedelta(seconds=token_duration)
expiry_date = timeutils.isotime(future, subsecond=True)
# add Access Token
ref = {}
ref['id'] = access_token_id
ref['access_secret'] = access_token_secret
ref['authorizing_user_id'] = token_dict['authorizing_user_id']
ref['project_id'] = token_dict['requested_project_id']
ref['role_ids'] = token_dict['role_ids']
ref['consumer_id'] = token_dict['consumer_id']
ref['expires_at'] = expiry_date
token_ref = AccessToken.from_dict(ref)
session.add(token_ref)
# remove request token, it's been used
session.delete(req_token_ref)
return token_ref.to_dict()
def _get_access_token(self, session, access_token_id):
token_ref = session.query(AccessToken).get(access_token_id)
if token_ref is None:
raise exception.NotFound(_('Access token not found'))
return token_ref
def get_access_token(self, access_token_id):
session = sql.get_session()
token_ref = self._get_access_token(session, access_token_id)
return token_ref.to_dict()
def list_access_tokens(self, user_id):
session = sql.get_session()
q = session.query(AccessToken)
user_auths = q.filter_by(authorizing_user_id=user_id)
return [core.filter_token(x.to_dict()) for x in user_auths]
def delete_access_token(self, user_id, access_token_id):
session = sql.get_session()
with session.begin():
token_ref = self._get_access_token(session, access_token_id)
token_dict = token_ref.to_dict()
if token_dict['authorizing_user_id'] != user_id:
raise exception.Unauthorized(_('User IDs do not match'))
session.delete(token_ref)
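# Illustrative sketch only (not part of the upstream backend): the intended
# call order for the methods above.  It assumes a configured keystone SQL
# session; ``project_id``, ``user_id`` and ``role_ids`` are caller-supplied
# placeholder values.
def _example_oauth1_flow(project_id, user_id, role_ids):
    backend = OAuth1()
    consumer = backend.create_consumer({'id': uuid.uuid4().hex,
                                        'description': 'example consumer'})
    request_token = backend.create_request_token(
        consumer['id'], project_id, token_duration=3600)
    # The authorizing user approves the request token, which fills in the
    # verifier and the granted role ids.
    backend.authorize_request_token(request_token['id'], user_id, role_ids)
    # Exchanging the request token deletes it and returns an access token.
    access_token = backend.create_access_token(
        request_token['id'], token_duration=3600)
    return access_token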
| {
"content_hash": "9a34036a72220d510fc48bad1e682c7c",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 79,
"avg_line_length": 40.63529411764706,
"alnum_prop": 0.6078942289133372,
"repo_name": "scrapinghub/keystone",
"id": "9ad6cd4d64f841b50e18819c5f24e98ff631a485",
"size": "10948",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "keystone/contrib/oauth1/backends/sql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
import inspect
class StubOutForTesting:
"""Sample Usage:
You want os.path.exists() to always return true during testing.
stubs = StubOutForTesting()
stubs.Set(os.path, 'exists', lambda x: 1)
...
stubs.UnsetAll()
The above changes os.path.exists into a lambda that returns 1. Once
the ... part of the code finishes, the UnsetAll() looks up the old value
of os.path.exists and restores it.
"""
def __init__(self):
self.cache = []
self.stubs = []
def __del__(self):
self.SmartUnsetAll()
self.UnsetAll()
def SmartSet(self, obj, attr_name, new_attr):
"""Replace obj.attr_name with new_attr. This method is smart and works
at the module, class, and instance level while preserving proper
inheritance. It will not stub out C types however unless that has been
explicitly allowed by the type.
This method supports the case where attr_name is a staticmethod or a
classmethod of obj.
Notes:
- If obj is an instance, then it is its class that will actually be
stubbed. Note that the method Set() does not do that: if obj is
an instance, it (and not its class) will be stubbed.
- The stubbing is using the builtin getattr and setattr. So, the __get__
and __set__ will be called when stubbing (TODO: A better idea would
probably be to manipulate obj.__dict__ instead of getattr() and
setattr()).
Raises AttributeError if the attribute cannot be found.
"""
if (inspect.ismodule(obj) or
(not inspect.isclass(obj) and obj.__dict__.has_key(attr_name))):
orig_obj = obj
orig_attr = getattr(obj, attr_name)
else:
if not inspect.isclass(obj):
mro = list(inspect.getmro(obj.__class__))
else:
mro = list(inspect.getmro(obj))
mro.reverse()
orig_attr = None
for cls in mro:
try:
orig_obj = cls
orig_attr = getattr(obj, attr_name)
except AttributeError:
continue
if orig_attr is None:
raise AttributeError("Attribute not found.")
# Calling getattr() on a staticmethod transforms it to a 'normal' function.
# We need to ensure that we put it back as a staticmethod.
old_attribute = obj.__dict__.get(attr_name)
if old_attribute is not None and isinstance(old_attribute, staticmethod):
orig_attr = staticmethod(orig_attr)
self.stubs.append((orig_obj, attr_name, orig_attr))
setattr(orig_obj, attr_name, new_attr)
def SmartUnsetAll(self):
"""Reverses all the SmartSet() calls, restoring things to their original
    definition. It's okay to call SmartUnsetAll() repeatedly, as later calls
have no effect if no SmartSet() calls have been made.
"""
self.stubs.reverse()
for args in self.stubs:
setattr(*args)
self.stubs = []
def Set(self, parent, child_name, new_child):
"""Replace child_name's old definition with new_child, in the context
of the given parent. The parent could be a module when the child is a
function at module scope. Or the parent could be a class when a class'
method is being replaced. The named child is set to new_child, while
the prior definition is saved away for later, when UnsetAll() is called.
This method supports the case where child_name is a staticmethod or a
classmethod of parent.
"""
old_child = getattr(parent, child_name)
old_attribute = parent.__dict__.get(child_name)
if old_attribute is not None and isinstance(old_attribute, staticmethod):
old_child = staticmethod(old_child)
self.cache.append((parent, old_child, child_name))
setattr(parent, child_name, new_child)
def UnsetAll(self):
"""Reverses all the Set() calls, restoring things to their original
    definition. It's okay to call UnsetAll() repeatedly, as later calls have
no effect if no Set() calls have been made.
"""
# Undo calls to Set() in reverse order, in case Set() was called on the
# same arguments repeatedly (want the original call to be last one undone)
self.cache.reverse()
for (parent, old_child, child_name) in self.cache:
setattr(parent, child_name, old_child)
self.cache = []
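# Illustrative sketch only (not part of the upstream module): stubbing
# os.path.exists with SmartSet() and restoring it afterwards.  The assertions
# merely show the intended behaviour.
def _example_smart_set():
  import os.path
  stubs = StubOutForTesting()
  stubs.SmartSet(os.path, 'exists', lambda path: True)
  assert os.path.exists('/definitely/not/a/real/path')
  stubs.SmartUnsetAll()
  assert not os.path.exists('/definitely/not/a/real/path')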
| {
"content_hash": "bb9f3da0bf9c9ce8f72216620b1d1f59",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 79,
"avg_line_length": 34.403225806451616,
"alnum_prop": 0.6601031411157994,
"repo_name": "chromium/chromium",
"id": "ba391045b6d693f10cd0b39b19d1830b7ec3d76d",
"size": "4957",
"binary": false,
"copies": "19",
"ref": "refs/heads/main",
"path": "third_party/protobuf/python/stubout.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
HTTP client.
"""
from __future__ import division, absolute_import
import os
import warnings
try:
from urlparse import urlunparse, urljoin, urldefrag
except ImportError:
from urllib.parse import urljoin, urldefrag
from urllib.parse import urlunparse as _urlunparse
def urlunparse(parts):
result = _urlunparse(tuple([p.decode("charmap") for p in parts]))
return result.encode("charmap")
import zlib
from functools import wraps
from zope.interface import implementer
from twisted.python import log
from twisted.python.compat import _PY3, networkString
from twisted.python.compat import nativeString, intToBytes, unicode, itervalues
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.python.failure import Failure
from twisted.python.versions import Version
from twisted.web.iweb import IPolicyForHTTPS, IAgentEndpointFactory
from twisted.python.deprecate import getDeprecationWarningString
from twisted.web import http
from twisted.internet import defer, protocol, task, reactor
from twisted.internet.interfaces import IProtocol
from twisted.internet.endpoints import TCP4ClientEndpoint, SSL4ClientEndpoint
from twisted.python.util import InsensitiveDict
from twisted.python.components import proxyForInterface
from twisted.web import error
from twisted.web.iweb import UNKNOWN_LENGTH, IAgent, IBodyProducer, IResponse
from twisted.web.http_headers import Headers
class PartialDownloadError(error.Error):
"""
    Page was only partially downloaded; we got disconnected in the middle.
@ivar response: All of the response body which was downloaded.
"""
class HTTPPageGetter(http.HTTPClient):
"""
Gets a resource via HTTP, then quits.
Typically used with L{HTTPClientFactory}. Note that this class does not, by
itself, do anything with the response. If you want to download a resource
into a file, use L{HTTPPageDownloader} instead.
@ivar _completelyDone: A boolean indicating whether any further requests are
necessary after this one completes in order to provide a result to
C{self.factory.deferred}. If it is C{False}, then a redirect is going
to be followed. Otherwise, this protocol's connection is the last one
before firing the result Deferred. This is used to make sure the result
Deferred is only fired after the connection is cleaned up.
"""
quietLoss = 0
followRedirect = True
failed = 0
_completelyDone = True
_specialHeaders = set((b'host', b'user-agent', b'cookie', b'content-length'))
def connectionMade(self):
method = getattr(self.factory, 'method', b'GET')
self.sendCommand(method, self.factory.path)
if self.factory.scheme == b'http' and self.factory.port != 80:
host = self.factory.host + b':' + intToBytes(self.factory.port)
elif self.factory.scheme == b'https' and self.factory.port != 443:
host = self.factory.host + b':' + intToBytes(self.factory.port)
else:
host = self.factory.host
self.sendHeader(b'Host', self.factory.headers.get(b"host", host))
self.sendHeader(b'User-Agent', self.factory.agent)
data = getattr(self.factory, 'postdata', None)
if data is not None:
self.sendHeader(b"Content-Length", intToBytes(len(data)))
cookieData = []
for (key, value) in self.factory.headers.items():
if key.lower() not in self._specialHeaders:
# we calculated it on our own
self.sendHeader(key, value)
if key.lower() == b'cookie':
cookieData.append(value)
for cookie, cookval in self.factory.cookies.items():
cookieData.append(cookie + b'=' + cookval)
if cookieData:
self.sendHeader(b'Cookie', b'; '.join(cookieData))
self.endHeaders()
self.headers = {}
if data is not None:
self.transport.write(data)
def handleHeader(self, key, value):
"""
Called every time a header is received. Stores the header information
as key-value pairs in the C{headers} attribute.
@type key: C{str}
@param key: An HTTP header field name.
@type value: C{str}
@param value: An HTTP header field value.
"""
key = key.lower()
l = self.headers.setdefault(key, [])
l.append(value)
def handleStatus(self, version, status, message):
"""
Handle the HTTP status line.
@param version: The HTTP version.
@type version: L{bytes}
@param status: The HTTP status code, an integer represented as a
bytestring.
@type status: L{bytes}
@param message: The HTTP status message.
@type message: L{bytes}
"""
self.version, self.status, self.message = version, status, message
self.factory.gotStatus(version, status, message)
def handleEndHeaders(self):
self.factory.gotHeaders(self.headers)
m = getattr(self, 'handleStatus_' + nativeString(self.status),
self.handleStatusDefault)
m()
def handleStatus_200(self):
pass
handleStatus_201 = lambda self: self.handleStatus_200()
handleStatus_202 = lambda self: self.handleStatus_200()
def handleStatusDefault(self):
self.failed = 1
def handleStatus_301(self):
l = self.headers.get(b'location')
if not l:
self.handleStatusDefault()
return
url = l[0]
if self.followRedirect:
self.factory._redirectCount += 1
if self.factory._redirectCount >= self.factory.redirectLimit:
err = error.InfiniteRedirection(
self.status,
b'Infinite redirection detected',
location=url)
self.factory.noPage(Failure(err))
self.quietLoss = True
self.transport.loseConnection()
return
self._completelyDone = False
self.factory.setURL(url)
if self.factory.scheme == b'https':
from twisted.internet import ssl
contextFactory = ssl.ClientContextFactory()
reactor.connectSSL(nativeString(self.factory.host),
self.factory.port,
self.factory, contextFactory)
else:
reactor.connectTCP(nativeString(self.factory.host),
self.factory.port,
self.factory)
else:
self.handleStatusDefault()
self.factory.noPage(
Failure(
error.PageRedirect(
self.status, self.message, location = url)))
self.quietLoss = True
self.transport.loseConnection()
def handleStatus_302(self):
if self.afterFoundGet:
self.handleStatus_303()
else:
self.handleStatus_301()
def handleStatus_303(self):
self.factory.method = b'GET'
self.handleStatus_301()
def connectionLost(self, reason):
"""
When the connection used to issue the HTTP request is closed, notify the
factory if we have not already, so it can produce a result.
"""
if not self.quietLoss:
http.HTTPClient.connectionLost(self, reason)
self.factory.noPage(reason)
if self._completelyDone:
# Only if we think we're completely done do we tell the factory that
# we're "disconnected". This way when we're following redirects,
# only the last protocol used will fire the _disconnectedDeferred.
self.factory._disconnectedDeferred.callback(None)
def handleResponse(self, response):
if self.quietLoss:
return
if self.failed:
self.factory.noPage(
Failure(
error.Error(
self.status, self.message, response)))
if self.factory.method == b'HEAD':
# Callback with empty string, since there is never a response
# body for HEAD requests.
self.factory.page(b'')
elif self.length != None and self.length != 0:
self.factory.noPage(Failure(
PartialDownloadError(self.status, self.message, response)))
else:
self.factory.page(response)
# server might be stupid and not close connection. admittedly
# the fact we do only one request per connection is also
# stupid...
self.transport.loseConnection()
def timeout(self):
self.quietLoss = True
self.transport.loseConnection()
self.factory.noPage(defer.TimeoutError("Getting %s took longer than %s seconds." % (self.factory.url, self.factory.timeout)))
class HTTPPageDownloader(HTTPPageGetter):
transmittingPage = 0
def handleStatus_200(self, partialContent=0):
HTTPPageGetter.handleStatus_200(self)
self.transmittingPage = 1
self.factory.pageStart(partialContent)
def handleStatus_206(self):
self.handleStatus_200(partialContent=1)
def handleResponsePart(self, data):
if self.transmittingPage:
self.factory.pagePart(data)
def handleResponseEnd(self):
if self.length:
self.transmittingPage = 0
self.factory.noPage(
Failure(
PartialDownloadError(self.status)))
if self.transmittingPage:
self.factory.pageEnd()
self.transmittingPage = 0
if self.failed:
self.factory.noPage(
Failure(
error.Error(
self.status, self.message, None)))
self.transport.loseConnection()
class HTTPClientFactory(protocol.ClientFactory):
"""Download a given URL.
@type deferred: Deferred
@ivar deferred: A Deferred that will fire when the content has
been retrieved. Once this is fired, the ivars `status', `version',
and `message' will be set.
@type status: bytes
@ivar status: The status of the response.
@type version: bytes
@ivar version: The version of the response.
@type message: bytes
@ivar message: The text message returned with the status.
@type response_headers: dict
@ivar response_headers: The headers that were specified in the
response from the server.
@type method: bytes
@ivar method: The HTTP method to use in the request. This should be one of
OPTIONS, GET, HEAD, POST, PUT, DELETE, TRACE, or CONNECT (case
matters). Other values may be specified if the server being contacted
supports them.
@type redirectLimit: int
@ivar redirectLimit: The maximum number of HTTP redirects that can occur
before it is assumed that the redirection is endless.
@type afterFoundGet: C{bool}
@ivar afterFoundGet: Deviate from the HTTP 1.1 RFC by handling redirects
the same way as most web browsers; if the request method is POST and a
302 status is encountered, the redirect is followed with a GET method
@type _redirectCount: int
@ivar _redirectCount: The current number of HTTP redirects encountered.
@ivar _disconnectedDeferred: A L{Deferred} which only fires after the last
connection associated with the request (redirects may cause multiple
connections to be required) has closed. The result Deferred will only
fire after this Deferred, so that callers can be assured that there are
no more event sources in the reactor once they get the result.
"""
protocol = HTTPPageGetter
url = None
scheme = None
host = b''
port = None
path = None
def __init__(self, url, method=b'GET', postdata=None, headers=None,
agent=b"Twisted PageGetter", timeout=0, cookies=None,
followRedirect=True, redirectLimit=20,
afterFoundGet=False):
self.followRedirect = followRedirect
self.redirectLimit = redirectLimit
self._redirectCount = 0
self.timeout = timeout
self.agent = agent
self.afterFoundGet = afterFoundGet
if cookies is None:
cookies = {}
self.cookies = cookies
if headers is not None:
self.headers = InsensitiveDict(headers)
else:
self.headers = InsensitiveDict()
if postdata is not None:
self.headers.setdefault(b'Content-Length',
intToBytes(len(postdata)))
# just in case a broken http/1.1 decides to keep connection alive
self.headers.setdefault(b"connection", b"close")
self.postdata = postdata
self.method = method
self.setURL(url)
self.waiting = 1
self._disconnectedDeferred = defer.Deferred()
self.deferred = defer.Deferred()
# Make sure the first callback on the result Deferred pauses the
# callback chain until the request connection is closed.
self.deferred.addBoth(self._waitForDisconnect)
self.response_headers = None
def _waitForDisconnect(self, passthrough):
"""
Chain onto the _disconnectedDeferred, preserving C{passthrough}, so that
the result is only available after the associated connection has been
closed.
"""
self._disconnectedDeferred.addCallback(lambda ignored: passthrough)
return self._disconnectedDeferred
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.url)
def setURL(self, url):
self.url = url
uri = URI.fromBytes(url)
if uri.scheme and uri.host:
self.scheme = uri.scheme
self.host = uri.host
self.port = uri.port
self.path = uri.originForm
def buildProtocol(self, addr):
p = protocol.ClientFactory.buildProtocol(self, addr)
p.followRedirect = self.followRedirect
p.afterFoundGet = self.afterFoundGet
if self.timeout:
timeoutCall = reactor.callLater(self.timeout, p.timeout)
self.deferred.addBoth(self._cancelTimeout, timeoutCall)
return p
def _cancelTimeout(self, result, timeoutCall):
if timeoutCall.active():
timeoutCall.cancel()
return result
def gotHeaders(self, headers):
self.response_headers = headers
if b'set-cookie' in headers:
for cookie in headers[b'set-cookie']:
cookparts = cookie.split(b';')
cook = cookparts[0]
                cook = cook.lstrip()
k, v = cook.split(b'=', 1)
self.cookies[k.lstrip()] = v.lstrip()
def gotStatus(self, version, status, message):
"""
Set the status of the request on us.
@param version: The HTTP version.
@type version: L{bytes}
@param status: The HTTP status code, an integer represented as a
bytestring.
@type status: L{bytes}
@param message: The HTTP status message.
@type message: L{bytes}
"""
self.version, self.status, self.message = version, status, message
def page(self, page):
if self.waiting:
self.waiting = 0
self.deferred.callback(page)
def noPage(self, reason):
if self.waiting:
self.waiting = 0
self.deferred.errback(reason)
def clientConnectionFailed(self, _, reason):
"""
When a connection attempt fails, the request cannot be issued. If no
result has yet been provided to the result Deferred, provide the
connection failure reason as an error result.
"""
if self.waiting:
self.waiting = 0
# If the connection attempt failed, there is nothing more to
# disconnect, so just fire that Deferred now.
self._disconnectedDeferred.callback(None)
self.deferred.errback(reason)
class HTTPDownloader(HTTPClientFactory):
"""
Download to a file.
"""
protocol = HTTPPageDownloader
value = None
def __init__(self, url, fileOrName,
method=b'GET', postdata=None, headers=None,
agent=b"Twisted client", supportPartial=False,
timeout=0, cookies=None, followRedirect=True,
redirectLimit=20, afterFoundGet=False):
self.requestedPartial = 0
if isinstance(fileOrName, (str, unicode)):
self.fileName = fileOrName
self.file = None
if supportPartial and os.path.exists(self.fileName):
fileLength = os.path.getsize(self.fileName)
if fileLength:
self.requestedPartial = fileLength
if headers == None:
headers = {}
headers[b"range"] = b"bytes=" + intToBytes(fileLength) + b"-"
else:
self.file = fileOrName
HTTPClientFactory.__init__(
self, url, method=method, postdata=postdata, headers=headers,
agent=agent, timeout=timeout, cookies=cookies,
followRedirect=followRedirect, redirectLimit=redirectLimit,
afterFoundGet=afterFoundGet)
def gotHeaders(self, headers):
HTTPClientFactory.gotHeaders(self, headers)
if self.requestedPartial:
contentRange = headers.get(b"content-range", None)
if not contentRange:
# server doesn't support partial requests, oh well
self.requestedPartial = 0
return
start, end, realLength = http.parseContentRange(contentRange[0])
if start != self.requestedPartial:
# server is acting weirdly
self.requestedPartial = 0
def openFile(self, partialContent):
if partialContent:
file = open(self.fileName, 'rb+')
file.seek(0, 2)
else:
file = open(self.fileName, 'wb')
return file
def pageStart(self, partialContent):
"""Called on page download start.
@param partialContent: tells us if the download is partial download we requested.
"""
if partialContent and not self.requestedPartial:
raise ValueError("we shouldn't get partial content response if we didn't want it!")
if self.waiting:
try:
if not self.file:
self.file = self.openFile(partialContent)
except IOError:
#raise
self.deferred.errback(Failure())
def pagePart(self, data):
if not self.file:
return
try:
self.file.write(data)
except IOError:
#raise
self.file = None
self.deferred.errback(Failure())
def noPage(self, reason):
"""
Close the storage file and errback the waiting L{Deferred} with the
given reason.
"""
if self.waiting:
self.waiting = 0
if self.file:
try:
self.file.close()
except:
log.err(None, "Error closing HTTPDownloader file")
self.deferred.errback(reason)
def pageEnd(self):
self.waiting = 0
if not self.file:
return
try:
self.file.close()
except IOError:
self.deferred.errback(Failure())
return
self.deferred.callback(self.value)
class URI(object):
"""
A URI object.
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-21}
"""
def __init__(self, scheme, netloc, host, port, path, params, query,
fragment):
"""
@type scheme: L{bytes}
@param scheme: URI scheme specifier.
@type netloc: L{bytes}
@param netloc: Network location component.
@type host: L{bytes}
@param host: Host name. For IPv6 address literals the brackets are
stripped.
@type port: L{int}
@param port: Port number.
@type path: L{bytes}
@param path: Hierarchical path.
@type params: L{bytes}
@param params: Parameters for last path segment.
@type query: L{bytes}
@param query: Query string.
@type fragment: L{bytes}
@param fragment: Fragment identifier.
"""
self.scheme = scheme
self.netloc = netloc
self.host = host.strip(b'[]')
self.port = port
self.path = path
self.params = params
self.query = query
self.fragment = fragment
@classmethod
def fromBytes(cls, uri, defaultPort=None):
"""
Parse the given URI into a L{URI}.
@type uri: C{bytes}
@param uri: URI to parse.
@type defaultPort: C{int} or C{None}
@param defaultPort: An alternate value to use as the port if the URI
does not include one.
@rtype: L{URI}
@return: Parsed URI instance.
"""
uri = uri.strip()
scheme, netloc, path, params, query, fragment = http.urlparse(uri)
if defaultPort is None:
if scheme == b'https':
defaultPort = 443
else:
defaultPort = 80
if b':' in netloc:
host, port = netloc.rsplit(b':', 1)
try:
port = int(port)
except ValueError:
host, port = netloc, defaultPort
else:
host, port = netloc, defaultPort
return cls(scheme, netloc, host, port, path, params, query, fragment)
def toBytes(self):
"""
Assemble the individual parts of the I{URI} into a fully formed I{URI}.
@rtype: C{bytes}
@return: A fully formed I{URI}.
"""
return urlunparse(
(self.scheme, self.netloc, self.path, self.params, self.query,
self.fragment))
@property
def originForm(self):
"""
The absolute I{URI} path including I{URI} parameters, query string and
fragment identifier.
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-21#section-5.3}
@return: The absolute path in original form.
@rtype: L{bytes}
"""
# The HTTP bis draft says the origin form should not include the
# fragment.
path = urlunparse(
(b'', b'', self.path, self.params, self.query, b''))
if path == b'':
path = b'/'
return path
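# Illustrative sketch only (not part of the upstream module): parsing a URL
# with URI.fromBytes and reading the pieces the client code above relies on.
def _example_uri_parsing():
    uri = URI.fromBytes(b'http://example.com:8080/path/page?q=1#frag')
    assert uri.scheme == b'http'
    assert uri.host == b'example.com'
    assert uri.port == 8080
    # originForm keeps the path and query but drops the fragment; it is what
    # ends up on the request line.
    assert uri.originForm == b'/path/page?q=1'
    return uri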
def _urljoin(base, url):
"""
Construct a full ("absolute") URL by combining a "base URL" with another
URL. Informally, this uses components of the base URL, in particular the
addressing scheme, the network location and (part of) the path, to provide
missing components in the relative URL.
Additionally, the fragment identifier is preserved according to the HTTP
1.1 bis draft.
@type base: C{bytes}
@param base: Base URL.
@type url: C{bytes}
@param url: URL to combine with C{base}.
@return: An absolute URL resulting from the combination of C{base} and
C{url}.
@see: L{urlparse.urljoin}
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p2-semantics-22#section-7.1.2}
"""
base, baseFrag = urldefrag(base)
url, urlFrag = urldefrag(urljoin(base, url))
return urljoin(url, b'#' + (urlFrag or baseFrag))
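# Illustrative sketch only (not part of the upstream module): _urljoin keeps
# the base URL's fragment when the relative URL has none, per the HTTP 1.1
# bis draft behaviour described in the docstring above.
def _example_urljoin():
    assert _urljoin(b'http://example.com/a#section', b'b') == \
        b'http://example.com/b#section'
    assert _urljoin(b'http://example.com/a#section', b'b#other') == \
        b'http://example.com/b#other'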
def _makeGetterFactory(url, factoryFactory, contextFactory=None,
*args, **kwargs):
"""
Create and connect an HTTP page getting factory.
Any additional positional or keyword arguments are used when calling
C{factoryFactory}.
@param factoryFactory: Factory factory that is called with C{url}, C{args}
and C{kwargs} to produce the getter
@param contextFactory: Context factory to use when creating a secure
connection, defaulting to C{None}
@return: The factory created by C{factoryFactory}
"""
uri = URI.fromBytes(url)
factory = factoryFactory(url, *args, **kwargs)
if uri.scheme == b'https':
from twisted.internet import ssl
if contextFactory is None:
contextFactory = ssl.ClientContextFactory()
reactor.connectSSL(
nativeString(uri.host), uri.port, factory, contextFactory)
else:
reactor.connectTCP(nativeString(uri.host), uri.port, factory)
return factory
def getPage(url, contextFactory=None, *args, **kwargs):
"""
Download a web page as a string.
Download a page. Return a deferred, which will callback with a
page (as a string) or errback with a description of the error.
See L{HTTPClientFactory} to see what extra arguments can be passed.
"""
return _makeGetterFactory(
url,
HTTPClientFactory,
contextFactory=contextFactory,
*args, **kwargs).deferred
def downloadPage(url, file, contextFactory=None, *args, **kwargs):
"""
Download a web page to a file.
@param file: path to file on filesystem, or file-like object.
See HTTPDownloader to see what extra args can be passed.
"""
factoryFactory = lambda url, *a, **kw: HTTPDownloader(url, file, *a, **kw)
return _makeGetterFactory(
url,
factoryFactory,
contextFactory=contextFactory,
*args, **kwargs).deferred
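# Illustrative sketch only (not part of the upstream module): typical use of
# the simple getPage helper defined above.  The URL is a placeholder; the
# returned Deferred fires once the page body (or a failure) is available.
def _example_get_page(url=b'http://example.com/'):
    def printPage(data):
        print('received %d bytes' % (len(data),))
    def printError(failure):
        log.err(failure, "getPage failed")
    d = getPage(url)
    d.addCallbacks(printPage, printError)
    return d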
# The code which follows is based on the new HTTP client implementation. It
# should be significantly better than anything above, though it is not yet
# feature equivalent.
from twisted.web.error import SchemeNotSupported
from twisted.web._newclient import Request, Response, HTTP11ClientProtocol
from twisted.web._newclient import ResponseDone, ResponseFailed
from twisted.web._newclient import RequestNotSent, RequestTransmissionFailed
from twisted.web._newclient import (
ResponseNeverReceived, PotentialDataLoss, _WrapperException)
try:
from OpenSSL import SSL
except ImportError:
SSL = None
else:
from twisted.internet.ssl import (CertificateOptions,
platformTrust,
optionsForClientTLS)
def _requireSSL(decoratee):
"""
The decorated method requires pyOpenSSL to be present, or it raises
L{NotImplementedError}.
@param decoratee: A function which requires pyOpenSSL.
@type decoratee: L{callable}
@return: A function which raises L{NotImplementedError} if pyOpenSSL is not
installed; otherwise, if it is installed, simply return C{decoratee}.
@rtype: L{callable}
"""
if SSL is None:
@wraps(decoratee)
def raiseNotImplemented(*a, **kw):
"""
pyOpenSSL is not available.
@param a: The positional arguments for C{decoratee}.
@param kw: The keyword arguments for C{decoratee}.
@raise NotImplementedError: Always.
"""
raise NotImplementedError("SSL support unavailable")
return raiseNotImplemented
return decoratee
class WebClientContextFactory(object):
"""
This class is deprecated. Please simply use L{Agent} as-is, or if you want
to customize something, use L{BrowserLikePolicyForHTTPS}.
A L{WebClientContextFactory} is an HTTPS policy which totally ignores the
hostname and port. It performs basic certificate verification, however the
lack of validation of service identity (e.g. hostname validation) means it
is still vulnerable to man-in-the-middle attacks. Don't use it any more.
"""
def _getCertificateOptions(self, hostname, port):
"""
Return a L{CertificateOptions}.
@param hostname: ignored
@param port: ignored
@return: A new CertificateOptions instance.
@rtype: L{CertificateOptions}
"""
return CertificateOptions(
method=SSL.SSLv23_METHOD,
trustRoot=platformTrust()
)
@_requireSSL
def getContext(self, hostname, port):
"""
Return an L{OpenSSL.SSL.Context}.
@param hostname: ignored
@param port: ignored
@return: A new SSL context.
@rtype: L{OpenSSL.SSL.Context}
"""
return self._getCertificateOptions(hostname, port).getContext()
@implementer(IPolicyForHTTPS)
class BrowserLikePolicyForHTTPS(object):
"""
SSL connection creator for web clients.
"""
def __init__(self, trustRoot=None):
self._trustRoot = trustRoot
@_requireSSL
def creatorForNetloc(self, hostname, port):
"""
Create a L{client connection creator
<twisted.internet.interfaces.IOpenSSLClientConnectionCreator>} for a
given network location.
@param hostname: The hostname part of the URI.
@type hostname: L{bytes}
@param port: The port part of the URI.
@type port: L{int}
@return: a connection creator with appropriate verification
restrictions set
@rtype: L{client connection creator
<twisted.internet.interfaces.IOpenSSLClientConnectionCreator>}
"""
return optionsForClientTLS(hostname.decode("ascii"),
trustRoot=self._trustRoot)
deprecatedModuleAttribute(Version("Twisted", 14, 0, 0),
getDeprecationWarningString(
WebClientContextFactory,
Version("Twisted", 14, 0, 0),
replacement=BrowserLikePolicyForHTTPS)
.split("; ")[1],
WebClientContextFactory.__module__,
WebClientContextFactory.__name__)
class _ContextFactoryWithContext(object):
"""
A L{_ContextFactoryWithContext} is like a
L{twisted.internet.ssl.ContextFactory} with a pre-created context.
@ivar _context: A Context.
@type _context: L{OpenSSL.SSL.Context}
"""
def __init__(self, context):
"""
Initialize a L{_ContextFactoryWithContext} with a context.
@param context: An SSL context.
@type context: L{OpenSSL.SSL.Context}
"""
self._context = context
def getContext(self):
"""
Return the context created by
L{_DeprecatedToCurrentPolicyForHTTPS._webContextFactory}.
@return: An old-style context factory.
@rtype: object with C{getContext} method, like
L{twisted.internet.ssl.ContextFactory}.
"""
return self._context
@implementer(IPolicyForHTTPS)
class _DeprecatedToCurrentPolicyForHTTPS(object):
"""
Adapt a web context factory to a normal context factory.
@ivar _webContextFactory: An object providing a getContext method with
C{hostname} and C{port} arguments.
@type _webContextFactory: L{WebClientContextFactory} (or object with a
similar C{getContext} method).
"""
def __init__(self, webContextFactory):
"""
Wrap a web context factory in an L{IPolicyForHTTPS}.
@param webContextFactory: An object providing a getContext method with
C{hostname} and C{port} arguments.
@type webContextFactory: L{WebClientContextFactory} (or object with a
similar C{getContext} method).
"""
self._webContextFactory = webContextFactory
def creatorForNetloc(self, hostname, port):
"""
        Call the wrapped web context factory's C{getContext} method with a
hostname and port number and return the resulting context object.
@param hostname: The hostname part of the URI.
@type hostname: L{bytes}
@param port: The port part of the URI.
@type port: L{int}
@return: An old-style context factory.
@rtype: object with C{getContext} method, like
L{twisted.internet.ssl.ContextFactory}.
"""
context = self._webContextFactory.getContext(hostname, port)
return _ContextFactoryWithContext(context)
@implementer(IBodyProducer)
class FileBodyProducer(object):
"""
L{FileBodyProducer} produces bytes from an input file object incrementally
and writes them to a consumer.
Since file-like objects cannot be read from in an event-driven manner,
L{FileBodyProducer} uses a L{Cooperator} instance to schedule reads from
the file. This process is also paused and resumed based on notifications
from the L{IConsumer} provider being written to.
The file is closed after it has been read, or if the producer is stopped
early.
@ivar _inputFile: Any file-like object, bytes read from which will be
written to a consumer.
@ivar _cooperate: A method like L{Cooperator.cooperate} which is used to
schedule all reads.
@ivar _readSize: The number of bytes to read from C{_inputFile} at a time.
"""
def __init__(self, inputFile, cooperator=task, readSize=2 ** 16):
self._inputFile = inputFile
self._cooperate = cooperator.cooperate
self._readSize = readSize
self.length = self._determineLength(inputFile)
def _determineLength(self, fObj):
"""
Determine how many bytes can be read out of C{fObj} (assuming it is not
modified from this point on). If the determination cannot be made,
return C{UNKNOWN_LENGTH}.
"""
try:
seek = fObj.seek
tell = fObj.tell
except AttributeError:
return UNKNOWN_LENGTH
originalPosition = tell()
seek(0, os.SEEK_END)
end = tell()
seek(originalPosition, os.SEEK_SET)
return end - originalPosition
def stopProducing(self):
"""
Permanently stop writing bytes from the file to the consumer by
stopping the underlying L{CooperativeTask}.
"""
self._inputFile.close()
self._task.stop()
def startProducing(self, consumer):
"""
Start a cooperative task which will read bytes from the input file and
write them to C{consumer}. Return a L{Deferred} which fires after all
bytes have been written.
@param consumer: Any L{IConsumer} provider
"""
self._task = self._cooperate(self._writeloop(consumer))
d = self._task.whenDone()
def maybeStopped(reason):
            # IBodyProducer.startProducing's Deferred isn't supposed to fire if
# stopProducing is called.
reason.trap(task.TaskStopped)
return defer.Deferred()
d.addCallbacks(lambda ignored: None, maybeStopped)
return d
def _writeloop(self, consumer):
"""
Return an iterator which reads one chunk of bytes from the input file
and writes them to the consumer for each time it is iterated.
"""
while True:
bytes = self._inputFile.read(self._readSize)
if not bytes:
self._inputFile.close()
break
consumer.write(bytes)
yield None
def pauseProducing(self):
"""
Temporarily suspend copying bytes from the input file to the consumer
by pausing the L{CooperativeTask} which drives that activity.
"""
self._task.pause()
def resumeProducing(self):
"""
Undo the effects of a previous C{pauseProducing} and resume copying
bytes to the consumer by resuming the L{CooperativeTask} which drives
the write activity.
"""
self._task.resume()
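# Illustrative sketch only (not part of the upstream module): uploading a file
# with FileBodyProducer and an Agent.  The URL and file name are placeholders;
# Agent is defined further down in this module.
def _example_file_upload(reactor, filename='upload.bin'):
    producer = FileBodyProducer(open(filename, 'rb'))
    agent = Agent(reactor)
    d = agent.request(
        b'POST', b'http://example.com/upload',
        Headers({b'content-type': [b'application/octet-stream']}),
        producer)
    return d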
class _HTTP11ClientFactory(protocol.Factory):
"""
A factory for L{HTTP11ClientProtocol}, used by L{HTTPConnectionPool}.
@ivar _quiescentCallback: The quiescent callback to be passed to protocol
instances, used to return them to the connection pool.
@since: 11.1
"""
def __init__(self, quiescentCallback):
self._quiescentCallback = quiescentCallback
def buildProtocol(self, addr):
return HTTP11ClientProtocol(self._quiescentCallback)
class _RetryingHTTP11ClientProtocol(object):
"""
A wrapper for L{HTTP11ClientProtocol} that automatically retries requests.
@ivar _clientProtocol: The underlying L{HTTP11ClientProtocol}.
@ivar _newConnection: A callable that creates a new connection for a
retry.
"""
def __init__(self, clientProtocol, newConnection):
self._clientProtocol = clientProtocol
self._newConnection = newConnection
def _shouldRetry(self, method, exception, bodyProducer):
"""
Indicate whether request should be retried.
Only returns C{True} if method is idempotent, no response was
received, the reason for the failed request was not due to
user-requested cancellation, and no body was sent. The latter
        requirement may be relaxed in the future, and PUT may be added to the
        approved method list.
@param method: The method of the request.
@type method: L{bytes}
"""
if method not in (b"GET", b"HEAD", b"OPTIONS", b"DELETE", b"TRACE"):
return False
if not isinstance(exception, (RequestNotSent,
RequestTransmissionFailed,
ResponseNeverReceived)):
return False
if isinstance(exception, _WrapperException):
for aFailure in exception.reasons:
if aFailure.check(defer.CancelledError):
return False
if bodyProducer is not None:
return False
return True
def request(self, request):
"""
Do a request, and retry once (with a new connection) if it fails in
a retryable manner.
@param request: A L{Request} instance that will be requested using the
wrapped protocol.
"""
d = self._clientProtocol.request(request)
def failed(reason):
if self._shouldRetry(request.method, reason.value,
request.bodyProducer):
return self._newConnection().addCallback(
lambda connection: connection.request(request))
else:
return reason
d.addErrback(failed)
return d
class HTTPConnectionPool(object):
"""
A pool of persistent HTTP connections.
Features:
- Cached connections will eventually time out.
- Limits on maximum number of persistent connections.
Connections are stored using keys, which should be chosen such that any
connections stored under a given key can be used interchangeably.
Failed requests done using previously cached connections will be retried
once if they use an idempotent method (e.g. GET), in case the HTTP server
timed them out.
@ivar persistent: Boolean indicating whether connections should be
persistent. Connections are persistent by default.
@ivar maxPersistentPerHost: The maximum number of cached persistent
connections for a C{host:port} destination.
@type maxPersistentPerHost: C{int}
@ivar cachedConnectionTimeout: Number of seconds a cached persistent
connection will stay open before disconnecting.
@ivar retryAutomatically: C{boolean} indicating whether idempotent
requests should be retried once if no response was received.
@ivar _factory: The factory used to connect to the proxy.
@ivar _connections: Map (scheme, host, port) to lists of
L{HTTP11ClientProtocol} instances.
@ivar _timeouts: Map L{HTTP11ClientProtocol} instances to a
C{IDelayedCall} instance of their timeout.
@since: 12.1
"""
_factory = _HTTP11ClientFactory
maxPersistentPerHost = 2
cachedConnectionTimeout = 240
retryAutomatically = True
def __init__(self, reactor, persistent=True):
self._reactor = reactor
self.persistent = persistent
self._connections = {}
self._timeouts = {}
def getConnection(self, key, endpoint):
"""
Supply a connection, newly created or retrieved from the pool, to be
used for one HTTP request.
The connection will remain out of the pool (not available to be
returned from future calls to this method) until one HTTP request has
been completed over it.
Afterwards, if the connection is still open, it will automatically be
added to the pool.
@param key: A unique key identifying connections that can be used
interchangeably.
@param endpoint: An endpoint that can be used to open a new connection
if no cached connection is available.
@return: A C{Deferred} that will fire with a L{HTTP11ClientProtocol}
(or a wrapper) that can be used to send a single HTTP request.
"""
# Try to get cached version:
connections = self._connections.get(key)
while connections:
connection = connections.pop(0)
# Cancel timeout:
self._timeouts[connection].cancel()
del self._timeouts[connection]
if connection.state == "QUIESCENT":
if self.retryAutomatically:
newConnection = lambda: self._newConnection(key, endpoint)
connection = _RetryingHTTP11ClientProtocol(
connection, newConnection)
return defer.succeed(connection)
return self._newConnection(key, endpoint)
def _newConnection(self, key, endpoint):
"""
Create a new connection.
This implements the new connection code path for L{getConnection}.
"""
def quiescentCallback(protocol):
self._putConnection(key, protocol)
factory = self._factory(quiescentCallback)
return endpoint.connect(factory)
def _removeConnection(self, key, connection):
"""
Remove a connection from the cache and disconnect it.
"""
connection.transport.loseConnection()
self._connections[key].remove(connection)
del self._timeouts[connection]
def _putConnection(self, key, connection):
"""
Return a persistent connection to the pool. This will be called by
L{HTTP11ClientProtocol} when the connection becomes quiescent.
"""
if connection.state != "QUIESCENT":
# Log with traceback for debugging purposes:
try:
raise RuntimeError(
"BUG: Non-quiescent protocol added to connection pool.")
except:
log.err()
return
connections = self._connections.setdefault(key, [])
if len(connections) == self.maxPersistentPerHost:
dropped = connections.pop(0)
dropped.transport.loseConnection()
self._timeouts[dropped].cancel()
del self._timeouts[dropped]
connections.append(connection)
cid = self._reactor.callLater(self.cachedConnectionTimeout,
self._removeConnection,
key, connection)
self._timeouts[connection] = cid
def closeCachedConnections(self):
"""
Close all persistent connections and remove them from the pool.
@return: L{defer.Deferred} that fires when all connections have been
closed.
"""
results = []
for protocols in itervalues(self._connections):
for p in protocols:
results.append(p.abort())
self._connections = {}
for dc in itervalues(self._timeouts):
dc.cancel()
self._timeouts = {}
return defer.gatherResults(results).addCallback(lambda ign: None)
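# Illustrative sketch (added; not part of the upstream file): sharing one
# persistent HTTPConnectionPool between requests so connections are reused.
# The reactor and URL are placeholder assumptions.
def _examplePooledRequests(reactor):
    pool = HTTPConnectionPool(reactor, persistent=True)
    pool.maxPersistentPerHost = 4
    agent = Agent(reactor, pool=pool)
    d = agent.request(b"GET", b"http://example.invalid/")
    # At application shutdown the pool should be drained explicitly:
    #     pool.closeCachedConnections()
    return d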
class _AgentBase(object):
"""
Base class offering common facilities for L{Agent}-type classes.
@ivar _reactor: The C{IReactorTime} implementation which will be used by
the pool, and perhaps by subclasses as well.
@ivar _pool: The L{HTTPConnectionPool} used to manage HTTP connections.
"""
def __init__(self, reactor, pool):
if pool is None:
pool = HTTPConnectionPool(reactor, False)
self._reactor = reactor
self._pool = pool
def _computeHostValue(self, scheme, host, port):
"""
Compute the string to use for the value of the I{Host} header, based on
the given scheme, host name, and port number.
"""
if (scheme, port) in ((b'http', 80), (b'https', 443)):
return host
return host + b":" + intToBytes(port)
def _requestWithEndpoint(self, key, endpoint, method, parsedURI,
headers, bodyProducer, requestPath):
"""
Issue a new request, given the endpoint and the path sent as part of
the request.
"""
# Create minimal headers, if necessary:
if headers is None:
headers = Headers()
if not headers.hasHeader(b'host'):
headers = headers.copy()
headers.addRawHeader(
b'host', self._computeHostValue(parsedURI.scheme,
parsedURI.host,
parsedURI.port))
d = self._pool.getConnection(key, endpoint)
def cbConnected(proto):
return proto.request(
Request._construct(method, requestPath, headers, bodyProducer,
persistent=self._pool.persistent,
parsedURI=parsedURI))
d.addCallback(cbConnected)
return d
@implementer(IAgentEndpointFactory)
class _StandardEndpointFactory(object):
"""
Standard HTTP endpoint destinations - TCP for HTTP, TCP+TLS for HTTPS.
@ivar _policyForHTTPS: A web context factory which will be used to create
SSL context objects for any SSL connections the agent needs to make.
@ivar _connectTimeout: If not C{None}, the timeout passed to
L{TCP4ClientEndpoint} or C{SSL4ClientEndpoint} for specifying the
connection timeout.
@ivar _bindAddress: If not C{None}, the address passed to
L{TCP4ClientEndpoint} or C{SSL4ClientEndpoint} for specifying the local
address to bind to.
"""
def __init__(self, reactor, contextFactory, connectTimeout, bindAddress):
"""
@param reactor: A provider of
L{twisted.internet.interfaces.IReactorTCP} and
L{twisted.internet.interfaces.IReactorSSL} for this L{Agent} to
place outgoing connections.
@type reactor: L{twisted.internet.interfaces.IReactorTCP} and
L{twisted.internet.interfaces.IReactorSSL}
@param contextFactory: A factory for TLS contexts, to control the
verification parameters of OpenSSL.
@type contextFactory: L{IPolicyForHTTPS}.
@param connectTimeout: The amount of time that this L{Agent} will wait
for the peer to accept a connection.
@type connectTimeout: L{float} or L{None}
@param bindAddress: The local address for client sockets to bind to.
@type bindAddress: L{bytes} or L{None}
"""
self._reactor = reactor
self._policyForHTTPS = contextFactory
self._connectTimeout = connectTimeout
self._bindAddress = bindAddress
def endpointForURI(self, uri):
"""
Connect directly over TCP for C{b'http'} scheme, and TLS for C{b'https'}.
@param uri: L{URI} to connect to.
@return: Endpoint to connect to.
@rtype: L{TCP4ClientEndpoint} or L{SSL4ClientEndpoint}
"""
kwargs = {}
if self._connectTimeout is not None:
kwargs['timeout'] = self._connectTimeout
kwargs['bindAddress'] = self._bindAddress
try:
host = nativeString(uri.host)
except UnicodeDecodeError:
raise ValueError(("The host of the provided URI ({uri.host!r}) "
"contains non-ASCII octets, it should be ASCII "
"decodable.").format(uri=uri))
if uri.scheme == b'http':
return TCP4ClientEndpoint(self._reactor, host, uri.port, **kwargs)
elif uri.scheme == b'https':
tlsPolicy = self._policyForHTTPS.creatorForNetloc(uri.host,
uri.port)
return SSL4ClientEndpoint(self._reactor, host, uri.port, tlsPolicy,
**kwargs)
else:
raise SchemeNotSupported("Unsupported scheme: %r" % (uri.scheme,))
@implementer(IAgent)
class Agent(_AgentBase):
"""
L{Agent} is a very basic HTTP client. It supports I{HTTP} and I{HTTPS}
scheme URIs.
@ivar _pool: An L{HTTPConnectionPool} instance.
@ivar _endpointFactory: The L{IAgentEndpointFactory} which will
be used to create endpoints for outgoing connections.
@since: 9.0
"""
def __init__(self, reactor,
contextFactory=BrowserLikePolicyForHTTPS(),
connectTimeout=None, bindAddress=None,
pool=None):
"""
Create an L{Agent}.
@param reactor: A provider of
L{twisted.internet.interfaces.IReactorTCP} and
L{twisted.internet.interfaces.IReactorSSL} for this L{Agent} to
place outgoing connections.
@type reactor: L{twisted.internet.interfaces.IReactorTCP} and
L{twisted.internet.interfaces.IReactorSSL}
@param contextFactory: A factory for TLS contexts, to control the
verification parameters of OpenSSL. The default is to use a
L{BrowserLikePolicyForHTTPS}, so unless you have special
requirements you can leave this as-is.
@type contextFactory: L{IPolicyForHTTPS}.
@param connectTimeout: The amount of time that this L{Agent} will wait
for the peer to accept a connection.
@type connectTimeout: L{float}
@param bindAddress: The local address for client sockets to bind to.
@type bindAddress: L{bytes}
@param pool: An L{HTTPConnectionPool} instance, or C{None}, in which
case a non-persistent L{HTTPConnectionPool} instance will be
created.
@type pool: L{HTTPConnectionPool}
"""
if not IPolicyForHTTPS.providedBy(contextFactory):
warnings.warn(
repr(contextFactory) +
" was passed as the HTTPS policy for an Agent, but it does "
"not provide IPolicyForHTTPS. Since Twisted 14.0, you must "
"pass a provider of IPolicyForHTTPS.",
stacklevel=2, category=DeprecationWarning
)
contextFactory = _DeprecatedToCurrentPolicyForHTTPS(contextFactory)
endpointFactory = _StandardEndpointFactory(
reactor, contextFactory, connectTimeout, bindAddress)
self._init(reactor, endpointFactory, pool)
@classmethod
def usingEndpointFactory(cls, reactor, endpointFactory, pool=None):
"""
Create a new L{Agent} that will use the endpoint factory to figure
out how to connect to the server.
@param reactor: A provider of
L{twisted.internet.interfaces.IReactorTime}.
@param endpointFactory: Used to construct endpoints which the
HTTP client will connect with.
@type endpointFactory: an L{IAgentEndpointFactory} provider.
@param pool: An L{HTTPConnectionPool} instance, or C{None}, in which
case a non-persistent L{HTTPConnectionPool} instance will be
created.
@type pool: L{HTTPConnectionPool}
@return: A new L{Agent}.
"""
agent = cls.__new__(cls)
agent._init(reactor, endpointFactory, pool)
return agent
def _init(self, reactor, endpointFactory, pool):
"""
Initialize a new L{Agent}.
@param reactor: A provider of relevant reactor interfaces, at a minimum
L{twisted.internet.interfaces.IReactorTime}.
@param endpointFactory: Used to construct endpoints which the
HTTP client will connect with.
@type endpointFactory: an L{IAgentEndpointFactory} provider.
@param pool: An L{HTTPConnectionPool} instance, or C{None}, in which
case a non-persistent L{HTTPConnectionPool} instance will be
created.
@type pool: L{HTTPConnectionPool}
@return: A new L{Agent}.
"""
_AgentBase.__init__(self, reactor, pool)
self._endpointFactory = endpointFactory
def _getEndpoint(self, uri):
"""
Get an endpoint for the given URI, using C{self._endpointFactory}.
@param uri: The URI of the request.
@type uri: L{URI}
@return: An endpoint which can be used to connect to given address.
"""
return self._endpointFactory.endpointForURI(uri)
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Issue a request to the server indicated by the given C{uri}.
An existing connection from the connection pool may be used or a new
one may be created.
I{HTTP} and I{HTTPS} schemes are supported in C{uri}.
@see: L{twisted.web.iweb.IAgent.request}
"""
parsedURI = URI.fromBytes(uri)
try:
endpoint = self._getEndpoint(parsedURI)
except SchemeNotSupported:
return defer.fail(Failure())
key = (parsedURI.scheme, parsedURI.host, parsedURI.port)
return self._requestWithEndpoint(key, endpoint, method, parsedURI,
headers, bodyProducer,
parsedURI.originForm)
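# Illustrative sketch (added; not part of the upstream file): issuing a GET
# with Agent and collecting the whole body with readBody(), defined later in
# this module.  The reactor and URI are placeholder assumptions.
def _exampleGet(reactor, uri=b"http://example.invalid/resource"):
    agent = Agent(reactor)
    d = agent.request(b"GET", uri)
    d.addCallback(readBody)  # fires with the complete body as bytes
    return d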
@implementer(IAgent)
class ProxyAgent(_AgentBase):
"""
An HTTP agent able to cross HTTP proxies.
@ivar _proxyEndpoint: The endpoint used to connect to the proxy.
@since: 11.1
"""
def __init__(self, endpoint, reactor=None, pool=None):
if reactor is None:
from twisted.internet import reactor
_AgentBase.__init__(self, reactor, pool)
self._proxyEndpoint = endpoint
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Issue a new request via the configured proxy.
"""
# Cache *all* connections under the same key, since we are only
# connecting to a single destination, the proxy:
key = ("http-proxy", self._proxyEndpoint)
# To support proxying HTTPS via CONNECT, we will use key
# ("http-proxy-CONNECT", scheme, host, port), and an endpoint that
# wraps _proxyEndpoint with an additional callback to do the CONNECT.
return self._requestWithEndpoint(key, self._proxyEndpoint, method,
URI.fromBytes(uri), headers,
bodyProducer, uri)
class _FakeUrllib2Request(object):
"""
A fake C{urllib2.Request} object for C{cookielib} to work with.
@see: U{http://docs.python.org/library/urllib2.html#request-objects}
@type uri: native L{str}
@ivar uri: Request URI.
@type headers: L{twisted.web.http_headers.Headers}
@ivar headers: Request headers.
@type type: native L{str}
@ivar type: The scheme of the URI.
@type host: native L{str}
@ivar host: The host[:port] of the URI.
@since: 11.1
"""
def __init__(self, uri):
"""
Create a fake Urllib2 request.
@param uri: Request URI.
@type uri: L{bytes}
"""
self.uri = nativeString(uri)
self.headers = Headers()
_uri = URI.fromBytes(uri)
self.type = nativeString(_uri.scheme)
self.host = nativeString(_uri.host)
if (_uri.scheme, _uri.port) not in ((b'http', 80), (b'https', 443)):
            # If the scheme is not on its default port, add the port.
self.host += ":" + str(_uri.port)
if _PY3:
self.origin_req_host = nativeString(_uri.host)
self.unverifiable = lambda _: False
def has_header(self, header):
return self.headers.hasHeader(networkString(header))
def add_unredirected_header(self, name, value):
self.headers.addRawHeader(networkString(name), networkString(value))
def get_full_url(self):
return self.uri
def get_header(self, name, default=None):
headers = self.headers.getRawHeaders(networkString(name), default)
if headers is not None:
headers = [nativeString(x) for x in headers]
return headers[0]
return None
def get_host(self):
return self.host
def get_type(self):
return self.type
def is_unverifiable(self):
# In theory this shouldn't be hardcoded.
return False
class _FakeUrllib2Response(object):
"""
A fake C{urllib2.Response} object for C{cookielib} to work with.
@type response: C{twisted.web.iweb.IResponse}
@ivar response: Underlying Twisted Web response.
@since: 11.1
"""
def __init__(self, response):
self.response = response
def info(self):
class _Meta(object):
def getheaders(zelf, name):
# PY2
headers = self.response.headers.getRawHeaders(name, [])
return headers
def get_all(zelf, name, default):
# PY3
headers = self.response.headers.getRawHeaders(
networkString(name), default)
h = [nativeString(x) for x in headers]
return h
return _Meta()
@implementer(IAgent)
class CookieAgent(object):
"""
L{CookieAgent} extends the basic L{Agent} to add RFC-compliant
handling of HTTP cookies. Cookies are written to and extracted
from a C{cookielib.CookieJar} instance.
The same cookie jar instance will be used for any requests through this
agent, mutating it whenever a I{Set-Cookie} header appears in a response.
@type _agent: L{twisted.web.client.Agent}
@ivar _agent: Underlying Twisted Web agent to issue requests through.
@type cookieJar: C{cookielib.CookieJar}
@ivar cookieJar: Initialized cookie jar to read cookies from and store
cookies to.
@since: 11.1
"""
def __init__(self, agent, cookieJar):
self._agent = agent
self.cookieJar = cookieJar
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Issue a new request to the wrapped L{Agent}.
Send a I{Cookie} header if a cookie for C{uri} is stored in
L{CookieAgent.cookieJar}. Cookies are automatically extracted and
stored from requests.
If a C{'cookie'} header appears in C{headers} it will override the
automatic cookie header obtained from the cookie jar.
@see: L{Agent.request}
"""
if headers is None:
headers = Headers()
lastRequest = _FakeUrllib2Request(uri)
# Setting a cookie header explicitly will disable automatic request
# cookies.
if not headers.hasHeader(b'cookie'):
self.cookieJar.add_cookie_header(lastRequest)
cookieHeader = lastRequest.get_header('Cookie', None)
if cookieHeader is not None:
headers = headers.copy()
headers.addRawHeader(b'cookie', networkString(cookieHeader))
d = self._agent.request(method, uri, headers, bodyProducer)
d.addCallback(self._extractCookies, lastRequest)
return d
def _extractCookies(self, response, request):
"""
Extract response cookies and store them in the cookie jar.
@type response: L{twisted.web.iweb.IResponse}
@param response: Twisted Web response.
@param request: A urllib2 compatible request object.
"""
resp = _FakeUrllib2Response(response)
self.cookieJar.extract_cookies(resp, request)
return response
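# Illustrative sketch (added; not part of the upstream file): wrapping an
# Agent with CookieAgent so I{Set-Cookie} headers persist across requests.
# The guarded import covers the differing stdlib module name on Python 2
# and 3; the URI is a placeholder assumption.
def _exampleCookieSession(reactor):
    try:
        from http.cookiejar import CookieJar  # Python 3
    except ImportError:
        from cookielib import CookieJar  # Python 2
    agent = CookieAgent(Agent(reactor), CookieJar())
    return agent.request(b"GET", b"http://example.invalid/login")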
class GzipDecoder(proxyForInterface(IResponse)):
"""
A wrapper for a L{Response} instance which handles gzip'ed body.
@ivar original: The original L{Response} object.
@since: 11.1
"""
def __init__(self, response):
self.original = response
self.length = UNKNOWN_LENGTH
def deliverBody(self, protocol):
"""
Override C{deliverBody} to wrap the given C{protocol} with
L{_GzipProtocol}.
"""
self.original.deliverBody(_GzipProtocol(protocol, self.original))
class _GzipProtocol(proxyForInterface(IProtocol)):
"""
A L{Protocol} implementation which wraps another one, transparently
decompressing received data.
@ivar _zlibDecompress: A zlib decompress object used to decompress the data
stream.
@ivar _response: A reference to the original response, in case of errors.
@since: 11.1
"""
def __init__(self, protocol, response):
self.original = protocol
self._response = response
self._zlibDecompress = zlib.decompressobj(16 + zlib.MAX_WBITS)
def dataReceived(self, data):
"""
Decompress C{data} with the zlib decompressor, forwarding the raw data
to the original protocol.
"""
try:
rawData = self._zlibDecompress.decompress(data)
except zlib.error:
raise ResponseFailed([Failure()], self._response)
if rawData:
self.original.dataReceived(rawData)
def connectionLost(self, reason):
"""
Forward the connection lost event, flushing remaining data from the
decompressor if any.
"""
try:
rawData = self._zlibDecompress.flush()
except zlib.error:
raise ResponseFailed([reason, Failure()], self._response)
if rawData:
self.original.dataReceived(rawData)
self.original.connectionLost(reason)
@implementer(IAgent)
class ContentDecoderAgent(object):
"""
An L{Agent} wrapper to handle encoded content.
    It takes care of declaring support for encoded content in the
    I{Accept-Encoding} header, and automatically decompresses the received
    data if it is actually compressed.
@param decoders: A list or tuple of (name, decoder) objects. The name
declares which decoding the decoder supports, and the decoder must
return a response object when called/instantiated. For example,
        C{[(b'gzip', GzipDecoder)]}. The order determines how the decoders are
        going to be advertised to the server.
@since: 11.1
"""
def __init__(self, agent, decoders):
self._agent = agent
self._decoders = dict(decoders)
self._supported = b','.join([decoder[0] for decoder in decoders])
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Send a client request which declares supporting compressed content.
@see: L{Agent.request}.
"""
if headers is None:
headers = Headers()
else:
headers = headers.copy()
headers.addRawHeader(b'accept-encoding', self._supported)
deferred = self._agent.request(method, uri, headers, bodyProducer)
return deferred.addCallback(self._handleResponse)
def _handleResponse(self, response):
"""
Check if the response is encoded, and wrap it to handle decompression.
"""
contentEncodingHeaders = response.headers.getRawHeaders(
b'content-encoding', [])
contentEncodingHeaders = b','.join(contentEncodingHeaders).split(b',')
while contentEncodingHeaders:
name = contentEncodingHeaders.pop().strip()
decoder = self._decoders.get(name)
if decoder is not None:
response = decoder(response)
else:
# Add it back
contentEncodingHeaders.append(name)
break
if contentEncodingHeaders:
response.headers.setRawHeaders(
b'content-encoding', [b','.join(contentEncodingHeaders)])
else:
response.headers.removeHeader(b'content-encoding')
return response
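# Illustrative sketch (added; not part of the upstream file): advertising
# gzip support and transparently decompressing responses by combining
# ContentDecoderAgent with GzipDecoder.  The URI is a placeholder
# assumption.
def _exampleCompressedGet(reactor):
    agent = ContentDecoderAgent(Agent(reactor), [(b'gzip', GzipDecoder)])
    d = agent.request(b"GET", b"http://example.invalid/compressed")
    d.addCallback(readBody)
    return d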
@implementer(IAgent)
class RedirectAgent(object):
"""
An L{Agent} wrapper which handles HTTP redirects.
    The implementation is rather strict: 301 and 302 behave like 307, not
redirecting automatically on methods different from I{GET} and I{HEAD}.
See L{BrowserLikeRedirectAgent} for a redirecting Agent that behaves more
like a web browser.
@param redirectLimit: The maximum number of times the agent is allowed to
follow redirects before failing with a L{error.InfiniteRedirection}.
@cvar _redirectResponses: A L{list} of HTTP status codes to be redirected
for I{GET} and I{HEAD} methods.
@cvar _seeOtherResponses: A L{list} of HTTP status codes to be redirected
for any method and the method altered to I{GET}.
@since: 11.1
"""
_redirectResponses = [http.MOVED_PERMANENTLY, http.FOUND,
http.TEMPORARY_REDIRECT]
_seeOtherResponses = [http.SEE_OTHER]
def __init__(self, agent, redirectLimit=20):
self._agent = agent
self._redirectLimit = redirectLimit
def request(self, method, uri, headers=None, bodyProducer=None):
"""
Send a client request following HTTP redirects.
@see: L{Agent.request}.
"""
deferred = self._agent.request(method, uri, headers, bodyProducer)
return deferred.addCallback(
self._handleResponse, method, uri, headers, 0)
def _resolveLocation(self, requestURI, location):
"""
Resolve the redirect location against the request I{URI}.
@type requestURI: C{bytes}
@param requestURI: The request I{URI}.
@type location: C{bytes}
@param location: The redirect location.
@rtype: C{bytes}
@return: Final resolved I{URI}.
"""
return _urljoin(requestURI, location)
def _handleRedirect(self, response, method, uri, headers, redirectCount):
"""
Handle a redirect response, checking the number of redirects already
followed, and extracting the location header fields.
"""
if redirectCount >= self._redirectLimit:
err = error.InfiniteRedirection(
response.code,
b'Infinite redirection detected',
location=uri)
raise ResponseFailed([Failure(err)], response)
locationHeaders = response.headers.getRawHeaders(b'location', [])
if not locationHeaders:
err = error.RedirectWithNoLocation(
response.code, b'No location header field', uri)
raise ResponseFailed([Failure(err)], response)
location = self._resolveLocation(uri, locationHeaders[0])
deferred = self._agent.request(method, location, headers)
def _chainResponse(newResponse):
newResponse.setPreviousResponse(response)
return newResponse
deferred.addCallback(_chainResponse)
return deferred.addCallback(
self._handleResponse, method, uri, headers, redirectCount + 1)
def _handleResponse(self, response, method, uri, headers, redirectCount):
"""
Handle the response, making another request if it indicates a redirect.
"""
if response.code in self._redirectResponses:
if method not in (b'GET', b'HEAD'):
err = error.PageRedirect(response.code, location=uri)
raise ResponseFailed([Failure(err)], response)
return self._handleRedirect(response, method, uri, headers,
redirectCount)
elif response.code in self._seeOtherResponses:
return self._handleRedirect(response, b'GET', uri, headers,
redirectCount)
return response
class BrowserLikeRedirectAgent(RedirectAgent):
"""
An L{Agent} wrapper which handles HTTP redirects in the same fashion as web
browsers.
Unlike L{RedirectAgent}, the implementation is more relaxed: 301 and 302
behave like 303, redirecting automatically on any method and altering the
redirect request to a I{GET}.
@see: L{RedirectAgent}
@since: 13.1
"""
_redirectResponses = [http.TEMPORARY_REDIRECT]
_seeOtherResponses = [http.MOVED_PERMANENTLY, http.FOUND, http.SEE_OTHER]
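# Illustrative sketch (added; not part of the upstream file): following
# redirects strictly with RedirectAgent, or in the browser-like fashion
# (301/302 retried as GET) with BrowserLikeRedirectAgent.  The URI is a
# placeholder assumption.
def _exampleFollowRedirects(reactor, browserLike=False):
    wrapperClass = BrowserLikeRedirectAgent if browserLike else RedirectAgent
    agent = wrapperClass(Agent(reactor), redirectLimit=10)
    return agent.request(b"GET", b"http://example.invalid/moved")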
class _ReadBodyProtocol(protocol.Protocol):
"""
Protocol that collects data sent to it.
This is a helper for L{IResponse.deliverBody}, which collects the body and
fires a deferred with it.
@ivar deferred: See L{__init__}.
@ivar status: See L{__init__}.
@ivar message: See L{__init__}.
@ivar dataBuffer: list of byte-strings received
@type dataBuffer: L{list} of L{bytes}
"""
def __init__(self, status, message, deferred):
"""
@param status: Status of L{IResponse}
        @type status: L{int}
@param message: Message of L{IResponse}
@type message: L{bytes}
@param deferred: deferred to fire when response is complete
@type deferred: L{Deferred} firing with L{bytes}
"""
self.deferred = deferred
self.status = status
self.message = message
self.dataBuffer = []
def dataReceived(self, data):
"""
Accumulate some more bytes from the response.
"""
self.dataBuffer.append(data)
def connectionLost(self, reason):
"""
Deliver the accumulated response bytes to the waiting L{Deferred}, if
the response body has been completely received without error.
"""
if reason.check(ResponseDone):
self.deferred.callback(b''.join(self.dataBuffer))
elif reason.check(PotentialDataLoss):
self.deferred.errback(
PartialDownloadError(self.status, self.message,
b''.join(self.dataBuffer)))
else:
self.deferred.errback(reason)
def readBody(response):
"""
Get the body of an L{IResponse} and return it as a byte string.
This is a helper function for clients that don't want to incrementally
receive the body of an HTTP response.
@param response: The HTTP response for which the body will be read.
@type response: L{IResponse} provider
@return: A L{Deferred} which will fire with the body of the response.
Cancelling it will close the connection to the server immediately.
"""
def cancel(deferred):
"""
Cancel a L{readBody} call, close the connection to the HTTP server
immediately, if it is still open.
@param deferred: The cancelled L{defer.Deferred}.
"""
abort = getAbort()
if abort is not None:
abort()
d = defer.Deferred(cancel)
protocol = _ReadBodyProtocol(response.code, response.phrase, d)
def getAbort():
return getattr(protocol.transport, 'abortConnection', None)
response.deliverBody(protocol)
if protocol.transport is not None and getAbort() is None:
warnings.warn(
'Using readBody with a transport that does not have an '
'abortConnection method',
category=DeprecationWarning,
stacklevel=2)
return d
__all__ = [
'PartialDownloadError', 'HTTPPageGetter', 'HTTPPageDownloader',
'HTTPClientFactory', 'HTTPDownloader', 'getPage', 'downloadPage',
'ResponseDone', 'Response', 'ResponseFailed', 'Agent', 'CookieAgent',
'ProxyAgent', 'ContentDecoderAgent', 'GzipDecoder', 'RedirectAgent',
'HTTPConnectionPool', 'readBody', 'BrowserLikeRedirectAgent', 'URI']
| {
"content_hash": "193d2cbb30bb0419c5377bee1bbf4445",
"timestamp": "",
"source": "github",
"line_count": 2166,
"max_line_length": 133,
"avg_line_length": 33.5517082179132,
"alnum_prop": 0.6187992789619253,
"repo_name": "ArcherSys/ArcherSys",
"id": "ae6b90460addec3ebb88de51e735b35fe2ae2bc7",
"size": "72832",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Lib/site-packages/twisted/web/client.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
def amalgamate_sql(app, model, extra_fields=None, table_name=None,
view_name=None):
"""This function returns a string, containing SQL to create
a view exposing given model's content_type_id and object_id,
with some extra_fields optionally.
This function does *not* escape or quote its arguments.
>>> print(utils.amalgamate_sql('tests', 'phone', ['foo', 'x as y']))
CREATE VIEW tests_phone_view AS
SELECT
        U0.id::text || '_' || tests_phone.id AS fake_id,
U0.id AS content_type_id,
tests_phone.id AS object_id
, foo, x as y
FROM
tests_phone,
(SELECT
id
FROM
django_content_type
WHERE
app_label = 'tests' AND
model = 'phone'
) U0;
"""
    if extra_fields is None:
extra_fields = []
extra_fields = ", ".join(extra_fields)
if extra_fields:
extra_fields = ", " + extra_fields
if not table_name:
table_name = "%s_%s" % (app, model)
if not view_name:
view_name = "%s_view" % table_name
return """
CREATE VIEW %(view_name)s AS
SELECT
U0.id::text || '_' || %(table_name)s.id AS fake_id,
U0.id AS content_type_id,
%(table_name)s.id AS object_id
%(extra_fields)s
FROM
%(table_name)s,
(SELECT
id
FROM
django_content_type
WHERE
app_label = '%(app)s' AND
model = '%(model)s'
) U0;
""" % dict(view_name=view_name,
app=app,
model=model,
table_name=table_name,
extra_fields=extra_fields)
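# Illustrative sketch (added; not part of the original module): generating
# one view per model with amalgamate_sql() and then combining them with
# union_sql(), defined below.  The app/model/view names are placeholder
# assumptions.
def example_combined_view_sql():
    phone_view_sql = amalgamate_sql('tests', 'phone')
    email_view_sql = amalgamate_sql('tests', 'email')
    combined_sql = union_sql('tests_items_view',
                             'tests_phone_view', 'tests_email_view')
    return phone_view_sql, email_view_sql, combined_sql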
def union_sql(view_name, *tables):
"""This function generates string containing SQL code, that creates
a big VIEW, that consists of many SELECTs.
>>> utils.union_sql('global', 'foo', 'bar', 'baz')
    'CREATE VIEW global AS SELECT * FROM foo UNION SELECT * FROM bar UNION SELECT * FROM baz'
"""
if not tables:
raise Exception("no tables given")
ret = ""
pre = "CREATE VIEW %s AS SELECT * FROM " % view_name
for table in tables:
ret += pre + table
pre = " UNION SELECT * FROM "
return ret | {
"content_hash": "71187386f8950612896ee82a9928c0bb",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 90,
"avg_line_length": 29.952380952380953,
"alnum_prop": 0.48370429252782193,
"repo_name": "mpasternak/djorm-ext-filtered-contenttypes",
"id": "a0cd7be686cf9c605a4a9721bf63b511ebad8cfe",
"size": "2543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "filtered_contenttypes/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "891"
},
{
"name": "Python",
"bytes": "23075"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from ..maths import UnaryMaths
def test_UnaryMaths_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_file=dict(argstr='%s',
mandatory=True,
position=2,
),
internal_datatype=dict(argstr='-dt %s',
position=1,
),
nan2zeros=dict(argstr='-nan',
position=3,
),
operation=dict(argstr='-%s',
mandatory=True,
position=4,
),
out_file=dict(argstr='%s',
genfile=True,
hash_files=False,
position=-2,
),
output_datatype=dict(argstr='-odt %s',
position=-1,
),
output_type=dict(),
terminal_output=dict(deprecated='1.0.0',
nohash=True,
),
)
inputs = UnaryMaths.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_UnaryMaths_outputs():
output_map = dict(out_file=dict(),
)
outputs = UnaryMaths.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| {
"content_hash": "e981056e8b3cd176428b52239eec244e",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 67,
"avg_line_length": 23.87272727272727,
"alnum_prop": 0.6047220106626047,
"repo_name": "mick-d/nipype",
"id": "4132931d57accf6f0cb4769c6629257a6e364401",
"size": "1367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nipype/interfaces/fsl/tests/test_auto_UnaryMaths.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "1854"
},
{
"name": "Matlab",
"bytes": "1999"
},
{
"name": "Python",
"bytes": "4607773"
},
{
"name": "Shell",
"bytes": "380"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import unicodedata
from collections import deque
class InputTranslator(object):
def push(self, evt):
pass
def get(self):
pass
def empty(self):
pass
class KeymapTranslator(InputTranslator):
def __init__(self, keymap, verbose=0,
invalid_cls=None, character_cls=None):
self.verbose = verbose
from pyrepl.keymap import compile_keymap, parse_keys
self.keymap = keymap
self.invalid_cls = invalid_cls
self.character_cls = character_cls
d = {}
for keyspec, command in keymap:
keyseq = tuple(parse_keys(keyspec))
d[keyseq] = command
if self.verbose:
print(d)
self.k = self.ck = compile_keymap(d, ())
self.results = deque()
self.stack = []
def push(self, evt):
if self.verbose:
print("pushed", evt.data, end='')
key = evt.data
d = self.k.get(key)
if isinstance(d, dict):
if self.verbose:
print("transition")
self.stack.append(key)
self.k = d
else:
if d is None:
if self.verbose:
print("invalid")
if self.stack or len(key) > 1 or unicodedata.category(key) == 'C':
self.results.append(
(self.invalid_cls, self.stack + [key]))
else:
# small optimization:
self.k[key] = self.character_cls
self.results.append(
(self.character_cls, [key]))
else:
if self.verbose:
print("matched", d)
self.results.append((d, self.stack + [key]))
self.stack = []
self.k = self.ck
def get(self):
if self.results:
return self.results.popleft()
else:
return None
def empty(self):
return not self.results
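# Illustrative sketch (added; not part of the original module): driving a
# KeymapTranslator with a one-entry keymap.  The event class below is a
# stand-in assumption; pyrepl's real event objects expose ``data`` the same
# way.
def _example_translate():
    class _FakeEvent(object):
        def __init__(self, data):
            self.data = data

    translator = KeymapTranslator(
        [(r'\C-a', 'beginning-of-line')],
        invalid_cls='invalid-key',
        character_cls='self-insert')
    translator.push(_FakeEvent('\x01'))  # control-A
    return translator.get()  # ('beginning-of-line', ['\x01'])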
| {
"content_hash": "66d887c82db0fad3cc78924824652e9a",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 82,
"avg_line_length": 29.34285714285714,
"alnum_prop": 0.49756572541382665,
"repo_name": "timm/timmnix",
"id": "47e22ca01a9d48a7a34a9a76c681567bc551865a",
"size": "3629",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pypy3-v5.5.0-linux64/lib_pypy/pyrepl/input.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "1641"
},
{
"name": "Batchfile",
"bytes": "1234"
},
{
"name": "C",
"bytes": "436685"
},
{
"name": "CSS",
"bytes": "96"
},
{
"name": "Common Lisp",
"bytes": "4"
},
{
"name": "Emacs Lisp",
"bytes": "290698"
},
{
"name": "HTML",
"bytes": "111577"
},
{
"name": "Makefile",
"bytes": "1681"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PowerShell",
"bytes": "1540"
},
{
"name": "Prolog",
"bytes": "14301"
},
{
"name": "Python",
"bytes": "21267592"
},
{
"name": "Roff",
"bytes": "21080"
},
{
"name": "Shell",
"bytes": "27687"
},
{
"name": "TeX",
"bytes": "3052861"
},
{
"name": "VBScript",
"bytes": "481"
}
],
"symlink_target": ""
} |
"""Script for setuptools."""
from setuptools import setup, find_packages
with open('README.md') as readme:
long_description = readme.read()
version = __import__('gooey').__version__
setup(
name='Gooey',
version=version,
url='http://pypi.python.org/pypi/Gooey/',
author='Chris Kiehl',
author_email='[email protected]',
description=('Turn (almost) any command line program into a full GUI '
'application with one line'),
license='MIT',
packages=find_packages(),
include_package_data=True,
dependency_links = ["http://www.wxpython.org/download.php"],
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Desktop Environment',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Widget Sets',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
],
long_description='''
<h1>Gooey (Beta)</h1>
<h3>Turn (almost) any Python Console Program into a GUI application with one line</h3>
<p align="center">
<img src="https://cloud.githubusercontent.com/assets/1408720/7904381/f54f97f6-07c5-11e5-9bcb-c3c102920769.png"/>
</p>
<h2>Quick Start</h2>
<p>Gooey is attached to your code via a simple decorator on your `main` method.</p>
<pre>
from gooey import Gooey
@Gooey <--- all it takes! :)
def main():
# rest of code
</pre>
With the decorator attached, run your program and the GUI will now appear!
<b>Note: PyPi's formatting is ancient, so checkout the full documentation, instructions, and source on <a href="https://github.com/chriskiehl/Gooey">github!</a></b>
<br /><br /><br /><br />'''
)
| {
"content_hash": "d204fadbafb1b8bd1b33ed6b8f8680a9",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 164,
"avg_line_length": 30.266666666666666,
"alnum_prop": 0.6244493392070485,
"repo_name": "alexkolar/Gooey",
"id": "79f6fe9c86f43c700f05aa7a53415e621bf90b15",
"size": "1816",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "164280"
}
],
"symlink_target": ""
} |
import numpy as np
import cv2
import urllib
def setEmpty(event, x, y, flags, param):
global emptyFrame, emptyFrame32
if event == cv2.EVENT_LBUTTONDOWN:
emptyFrame = frame
elif event == cv2.EVENT_LBUTTONDBLCLK:
emptyFrame = np.zeros(np.shape(frame), np.uint8)
emptyFrame32 = np.float32(emptyFrame)
def genBuffMask(bufferFrames):
'create bitwise mask for buffer length'
buffMask = 1
    for i in range(0, bufferFrames-1):
        buffMask = (buffMask << 1) | buffMask
return buffMask
BUFF_LEN = 10
buffMask = genBuffMask(BUFF_LEN)
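# Worked example (added for clarity; not part of the original script): each
# loop pass in genBuffMask ORs in one more shifted bit, so with BUFF_LEN = 10
# the mask has ten set bits.  currBuff equals buffMask only when motion was
# seen on each of the last BUFF_LEN frames.
assert buffMask == 0b1111111111  # == 1023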
currBuff = 0
videoLocation = 0
cap = cv2.VideoCapture(videoLocation)
_, frame = cap.read()
blankFrame = np.zeros(np.shape(frame), np.uint8)
emptyFrame = blankFrame
emptyFrame32 = np.float32(blankFrame)
cv2.namedWindow('frame')
cv2.setMouseCallback('frame', setEmpty)
while(True):
_, frame = cap.read()
frame32 = np.float32(frame)
diff32 = np.absolute(frame32 - emptyFrame32)
norm32 = np.sqrt(diff32[:,:,0]**2 + diff32[:,:,1]**2 + diff32[:,:,2]**2)/np.sqrt(255**2 + 255**2 + 255**2)
diff = np.uint8(norm32*255)
_, thresh = cv2.threshold(diff, 100, 255, 0)
kernel = np.ones((20,20), np.uint8)
blobby = cv2.dilate(thresh, kernel, iterations= 4)
cv2.imshow('blob', blobby)
# buffer
pastBuff = currBuff
currBuff = ( (currBuff << 1) | (np.any(blobby)) ) & buffMask
fra, contours, hierarchy = cv2.findContours(blobby, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
area = 0
largest_contour = -1
    for i in xrange(len(contours)):
        if cv2.contourArea(contours[i]) > area:
            area = cv2.contourArea(contours[i])
            largest_contour = i
frameMod = np.copy(frame)
cv2.drawContours(frameMod, contours, largest_contour, [0, 0, 255], 3)
if currBuff == buffMask:
cv2.imshow('frame', frameMod)
else:
cv2.imshow('frame', frame)
cv2.imshow('fra', fra)
if cv2.waitKey(1) & 0xFF == 27:
break
cap.release()
cv2.destroyAllWindows()
| {
"content_hash": "160f02543ee29f357fb9bf5fdb2f10c3",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 110,
"avg_line_length": 25.294871794871796,
"alnum_prop": 0.644703497212367,
"repo_name": "SSG-DRD-IOT/commercial-iot-security-system",
"id": "3ae63dd622cd3e7a170d59f8664e24e058c1e96c",
"size": "1973",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "opencv/experiments_pretty/staticRoomMotion/motion_detect_contour.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "341"
},
{
"name": "Python",
"bytes": "278625"
}
],
"symlink_target": ""
} |
"""``test_networkd.py``
`NetworkD Unittests`
"""
from unittest.mock import MagicMock
from testlib.linux.networkd import NetworkD
class TestNetworkD(object):
def test_single_mgmt_port(self):
run_command = MagicMock()
n = NetworkD(run_command, ["test"])
n.clear_settings()
assert run_command.call_args_list[0][0][
0] == "find /etc/systemd/network/ -mindepth 1 -not \\( -name 'test.network' -or " \
"-name 'test.netdev' -or -name 'test.link' -or -name 'test.swport' \\) -delete"
def test_multiple_mgmt_port(self):
run_command = MagicMock()
n = NetworkD(run_command, ["test1", "test2"])
n.clear_settings()
assert run_command.call_args_list[0][0][
0] == "find /etc/systemd/network/ -mindepth 1 -not \\( -name 'test1.network' -or " \
"-name 'test1.netdev' -or -name 'test1.link' -or -name 'test1.swport' -or " \
"-name 'test2.network' -or -name 'test2.netdev' -or -name 'test2.link' -or " \
"-name 'test2.swport' \\) -delete"
def test_empty_list(self):
run_command = MagicMock()
n = NetworkD(run_command, [])
n.clear_settings()
assert run_command.call_args_list[0][0][
0] == "find /etc/systemd/network/ -mindepth 1 -not \\( \\) -delete"
def test_extra_excludes_are_appended(self):
run_command = MagicMock()
n = NetworkD(run_command, ["test1", "test2"])
n.clear_settings(exclude_ports=["extra1", "extra2"])
assert run_command.call_args_list[0][0][
0] == "find /etc/systemd/network/ -mindepth 1 -not \\( -name 'test1.network' -or " \
"-name 'test1.netdev' -or -name 'test1.link' -or -name 'test1.swport' -or " \
"-name 'test2.network' -or -name 'test2.netdev' -or -name 'test2.link' -or " \
"-name 'test2.swport' -or -name 'extra1.network' -or -name 'extra1.netdev' -or " \
"-name 'extra1.link' -or -name 'extra1.swport' -or -name 'extra2.network' -or " \
"-name 'extra2.netdev' -or -name 'extra2.link' -or -name 'extra2.swport' \\) -delete"
def test_just_extra_excludes(self):
run_command = MagicMock()
n = NetworkD(run_command, [])
n.clear_settings(exclude_ports=["extra1", "extra2"])
assert run_command.call_args_list[0][0][
0] == "find /etc/systemd/network/ -mindepth 1 -not \\( -name 'extra1.network' -or " \
"-name 'extra1.netdev' -or -name 'extra1.link' -or -name 'extra1.swport' -or " \
"-name 'extra2.network' -or -name 'extra2.netdev' -or -name 'extra2.link' -or " \
"-name 'extra2.swport' \\) -delete"
| {
"content_hash": "f778fd3219fa24b80c264b1116560e36",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 103,
"avg_line_length": 47.793103448275865,
"alnum_prop": 0.5580808080808081,
"repo_name": "taf3/taf",
"id": "b5fdce1f323850c69013285963f10fa3d15bf2b1",
"size": "3388",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "unittests/linux/test_networkd.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6745"
},
{
"name": "Dockerfile",
"bytes": "4185"
},
{
"name": "JavaScript",
"bytes": "1771"
},
{
"name": "Python",
"bytes": "3859799"
},
{
"name": "Shell",
"bytes": "3146"
},
{
"name": "Tcl",
"bytes": "68098"
},
{
"name": "XSLT",
"bytes": "41538"
}
],
"symlink_target": ""
} |
import logging
from datetime import datetime
from sqlalchemy import or_
from aleph.core import db, url_for
from aleph.model.user import User
from aleph.model.forms import ListForm
log = logging.getLogger(__name__)
list_user_table = db.Table('list_user', db.metadata,
db.Column('list_id', db.Integer, db.ForeignKey('list.id')), # noqa
db.Column('user_id', db.Integer, db.ForeignKey('user.id')) # noqa
)
class List(db.Model):
id = db.Column(db.Integer(), primary_key=True)
label = db.Column(db.Unicode)
public = db.Column(db.Boolean, default=False)
creator_id = db.Column(db.Integer(), db.ForeignKey('user.id'),
nullable=True)
creator = db.relationship(User)
created_at = db.Column(db.DateTime, default=datetime.utcnow)
updated_at = db.Column(db.DateTime, default=datetime.utcnow,
onupdate=datetime.utcnow)
users = db.relationship(User, secondary=list_user_table,
backref='lists')
def to_dict(self):
return {
'id': self.id,
'api_url': url_for('lists.view', id=self.id),
'entities_api_url': url_for('entities.index', list=self.id),
'label': self.label,
'public': self.public,
'creator_id': self.creator_id,
'created_at': self.created_at,
'updated_at': self.updated_at
}
@classmethod
def create(cls, data, user):
lst = cls()
lst.update(data, user)
lst.creator = user
db.session.add(lst)
return lst
def update(self, data, user):
data = ListForm().deserialize(data)
self.label = data.get('label')
if data.get('public') is not None:
self.public = data.get('public')
users = set(data.get('users', []))
if user is not None:
users.add(user)
self.users = list(users)
def delete(self):
# for entity in self.entities:
# entity.delete()
db.session.delete(self)
@classmethod
def by_label(cls, label):
q = db.session.query(cls).filter_by(label=label)
return q.first()
@classmethod
def by_id(cls, id):
q = db.session.query(cls).filter_by(id=id)
return q.first()
@classmethod
def user_list_ids(cls, user=None, include_public=True):
logged_in = user is not None and user.is_authenticated()
q = db.session.query(cls.id)
conds = []
if include_public:
conds.append(cls.public == True) # noqa
if logged_in:
conds.append(cls.users.any(User.id == user.id))
if not len(conds):
return []
if not (logged_in and user.is_admin):
q = q.filter(or_(*conds))
return [c.id for c in q.all()]
@classmethod
def all_by_user(cls, user):
q = db.session.query(cls)
q = q.filter(cls.id.in_(cls.user_list_ids(user)))
q = q.order_by(cls.id.desc())
return q
@property
def terms(self):
from aleph.model.entity import Entity
from aleph.model.selector import Selector
q = db.session.query(Selector.normalized)
q = q.join(Entity, Entity.id == Selector.entity_id)
q = q.filter(Entity.list_id == self.id)
q = q.distinct()
return set([r[0] for r in q])
def __repr__(self):
return '<List(%r, %r)>' % (self.id, self.label)
def __unicode__(self):
return self.label
| {
"content_hash": "642eaced5011a310f1f38436154f39f0",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 72,
"avg_line_length": 30.344827586206897,
"alnum_prop": 0.5713068181818182,
"repo_name": "nightsh/aleph",
"id": "5c78797cea59a6a4c2df176fa49935b726e9422e",
"size": "3520",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "aleph/model/list.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "234264"
},
{
"name": "HTML",
"bytes": "34472"
},
{
"name": "JavaScript",
"bytes": "32061"
},
{
"name": "Makefile",
"bytes": "212"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "95149"
}
],
"symlink_target": ""
} |
def __bootstrap__():
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__,'bbi_file.so')
__loader__ = None; del __bootstrap__, __loader__
imp.load_dynamic(__name__,__file__)
__bootstrap__()
| {
"content_hash": "17ee224d9927005f3b6d91f6a63bb8af",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 69,
"avg_line_length": 39.714285714285715,
"alnum_prop": 0.5935251798561151,
"repo_name": "poojavade/Genomics_Docker",
"id": "430aa1c508a7403f58708914a9010fe447713f87",
"size": "278",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/bx_python-0.7.1-py2.7-linux-x86_64.egg/bx/bbi/bbi_file.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10509"
},
{
"name": "C",
"bytes": "1265138"
},
{
"name": "C++",
"bytes": "4734960"
},
{
"name": "CSS",
"bytes": "17332"
},
{
"name": "FORTRAN",
"bytes": "10375"
},
{
"name": "GLSL",
"bytes": "493"
},
{
"name": "Groff",
"bytes": "77173"
},
{
"name": "HTML",
"bytes": "395483"
},
{
"name": "Java",
"bytes": "9223"
},
{
"name": "JavaScript",
"bytes": "783663"
},
{
"name": "Jupyter Notebook",
"bytes": "189877"
},
{
"name": "Lua",
"bytes": "28217"
},
{
"name": "Makefile",
"bytes": "77825"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Perl",
"bytes": "244796"
},
{
"name": "Python",
"bytes": "54562861"
},
{
"name": "R",
"bytes": "2568"
},
{
"name": "Shell",
"bytes": "40620"
},
{
"name": "Smarty",
"bytes": "21035"
},
{
"name": "TeX",
"bytes": "55310"
}
],
"symlink_target": ""
} |
import pytest
import easypost
@pytest.mark.vcr()
def test_parcel_create(basic_parcel):
parcel = easypost.Parcel.create(**basic_parcel)
assert isinstance(parcel, easypost.Parcel)
assert str.startswith(parcel.id, "prcl_")
assert parcel.weight == 15.4
@pytest.mark.vcr()
def test_parcel_retrieve(basic_parcel):
parcel = easypost.Parcel.create(**basic_parcel)
retrieved_parcel = easypost.Parcel.retrieve(parcel.id)
assert isinstance(retrieved_parcel, easypost.Parcel)
assert retrieved_parcel == parcel
| {
"content_hash": "618e6680c00dd25ea708388a57bbac2e",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 58,
"avg_line_length": 24.454545454545453,
"alnum_prop": 0.7304832713754646,
"repo_name": "dsanders11/easypost-python",
"id": "fe21100ac76f1868686d162b2d49a8e3d0a9dd03",
"size": "538",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_parcel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2404"
},
{
"name": "Python",
"bytes": "106139"
}
],
"symlink_target": ""
} |
"""Common utilities for registering LinearOperator methods."""
# Note: only use this method in the commuting case.
def combined_commuting_self_adjoint_hint(operator_a, operator_b):
"""Get combined hint for self-adjoint-ness."""
# The property is preserved under composition when the operators commute.
if operator_a.is_self_adjoint and operator_b.is_self_adjoint:
return True
# The property is not preserved when an operator with the property is composed
# with an operator without the property.
# pylint:disable=g-bool-id-comparison
if ((operator_a.is_self_adjoint is True and
operator_b.is_self_adjoint is False) or
(operator_a.is_self_adjoint is False and
operator_b.is_self_adjoint is True)):
return False
# pylint:enable=g-bool-id-comparison
# The property is not known when operators are not known to have the property
# or both operators don't have the property (the property for the complement
# class is not closed under composition).
return None
def is_square(operator_a, operator_b):
"""Return a hint to whether the composition is square."""
if operator_a.is_square and operator_b.is_square:
return True
if operator_a.is_square is False and operator_b.is_square is False: # pylint:disable=g-bool-id-comparison
# Let A have shape [B, M, N], B have shape [B, N, L].
m = operator_a.range_dimension
l = operator_b.domain_dimension
if m is not None and l is not None:
return m == l
if (operator_a.is_square != operator_b.is_square) and (
operator_a.is_square is not None and operator_b.is_square is not None):
return False
return None
# Note: Positive definiteness is only guaranteed to be preserved
# when the operators commute and are symmetric. Only use this method in
# commuting cases.
def combined_commuting_positive_definite_hint(operator_a, operator_b):
"""Get combined PD hint for compositions."""
# pylint:disable=g-bool-id-comparison
if (operator_a.is_positive_definite is True and
operator_a.is_self_adjoint is True and
operator_b.is_positive_definite is True and
operator_b.is_self_adjoint is True):
return True
# pylint:enable=g-bool-id-comparison
return None
def combined_non_singular_hint(operator_a, operator_b):
"""Get combined hint for when ."""
# If either operator is not-invertible the composition isn't.
# pylint:disable=g-bool-id-comparison
if (operator_a.is_non_singular is False or
operator_b.is_non_singular is False):
return False
# pylint:enable=g-bool-id-comparison
return operator_a.is_non_singular and operator_b.is_non_singular
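# Illustrative sketch (added; not part of the upstream file): the hint
# helpers above only inspect the `is_*` properties and the range/domain
# dimensions, so any object exposing them works.  The _HintCarrier class is a
# stand-in assumption, not a real LinearOperator.
class _HintCarrier(object):

  def __init__(self, **flags):
    self.__dict__.update(flags)


def _example_combined_hints():
  flags = dict(is_self_adjoint=True, is_positive_definite=True,
               is_non_singular=True, is_square=True,
               range_dimension=None, domain_dimension=None)
  a, b = _HintCarrier(**flags), _HintCarrier(**flags)
  return (combined_commuting_self_adjoint_hint(a, b),       # True
          combined_commuting_positive_definite_hint(a, b),  # True
          combined_non_singular_hint(a, b),                 # True
          is_square(a, b))                                  # True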
| {
"content_hash": "342130a36ac03768f4915e35871936e8",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 108,
"avg_line_length": 36.12328767123287,
"alnum_prop": 0.7224118316268487,
"repo_name": "gautam1858/tensorflow",
"id": "b6ae4f40d0db045e0edc357d13d428498e5653fc",
"size": "3326",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/linalg/registrations_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "47492"
},
{
"name": "C",
"bytes": "1129549"
},
{
"name": "C#",
"bytes": "13496"
},
{
"name": "C++",
"bytes": "116904214"
},
{
"name": "CMake",
"bytes": "165809"
},
{
"name": "Cython",
"bytes": "5003"
},
{
"name": "Dockerfile",
"bytes": "341994"
},
{
"name": "Go",
"bytes": "2052513"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "1053827"
},
{
"name": "JavaScript",
"bytes": "5772"
},
{
"name": "Jupyter Notebook",
"bytes": "787371"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "9549263"
},
{
"name": "Makefile",
"bytes": "2760"
},
{
"name": "Objective-C",
"bytes": "180638"
},
{
"name": "Objective-C++",
"bytes": "295149"
},
{
"name": "Pawn",
"bytes": "5336"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "43775271"
},
{
"name": "Roff",
"bytes": "5034"
},
{
"name": "Ruby",
"bytes": "7854"
},
{
"name": "Shell",
"bytes": "566970"
},
{
"name": "Smarty",
"bytes": "89664"
},
{
"name": "SourcePawn",
"bytes": "8509"
},
{
"name": "Starlark",
"bytes": "6897556"
},
{
"name": "Swift",
"bytes": "78435"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
from flask_script import Manager
from flask import Flask
import os
from lxml import etree
from app.models import db, Literature, Author
from app.models import AuthorLiterature, Cite, Citememe, Affiliation
from datetime import datetime
import re
import spacy
basedir = os.path.abspath(os.path.dirname(__file__))
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = \
'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
manager = Manager(app)
article_namespace = "article"
nlp = spacy.load("en")
# local version
def obtaning_data(infile):
nsmap = {}
for event, elem in etree.iterparse(infile, events=('start-ns',)):
ns, url = elem
if ns == "":
ns = article_namespace
nsmap[ns] = url
# print(nsmap)
parser = etree.XMLParser(ns_clean=True)
tree = etree.parse(infile, parser)
root = tree.getroot()
literature = Literature()
# To find the pmc_id
pmc_uid = article_id_find(root, "pmc-uid", nsmap)
# print(pmc_uid)
temp_literature = Literature.query.filter_by(pmc_uid=pmc_uid).first()
if temp_literature:
print("Literature has already been updated from pmc, " +
"and the pmc_uid is {0}, the program would not proceed."
.format(pmc_uid))
# print("Testing, stop return.")
return
# To find the pmid
pmid = article_id_find(root, "pmid", nsmap)
# print(pmid)
# Literature already been added as a reference.
# The task next is to update the info for the original reference.
temp_literature = Literature.query.filter_by(pmid=pmid).first()
if temp_literature:
print(
"Literature already been added as a reference, " +
"and the pmid is {0}, and the literature will be updated by pmc data.".format(
temp_literature.pmid
)
)
temp_literature.delete_by_pmid(pmid)
literature.pmid = pmid
literature.pmc_uid = pmc_uid
# The default updated value for PMC literature is True
literature.updated = True
# The default type value for PMC literature is journal
literature.type = "journal"
# To find the journal id
temp_journal_id_list = root.xpath(
"//{0}:journal-id[@journal-id-type=\'nlm-ta\']".format(
article_namespace),
namespaces=nsmap)
if temp_journal_id_list:
journal_id = temp_journal_id_list[0].text
# print(journal_id)
literature.source_id = journal_id
# To find the journal title
temp_journal_title_list = root.xpath(
"//{0}:journal-title".format(article_namespace), namespaces=nsmap)
if temp_journal_title_list:
journal_title = temp_journal_title_list[0].text
# print(journal_title)
literature.source_title = journal_title
# To find the doi
doi = article_id_find(root, "doi", nsmap)
# print(doi)
literature.doi = doi
# To obtain the title
temp_title_list = root.xpath(
"//{0}:title-group/{0}:article-title".format(article_namespace),
namespaces=nsmap)
if temp_title_list:
temp_title = temp_title_list[0]
title = temp_title.text + ''.join(tag.tail for tag in temp_title)
else:
title = None
# print(title)
literature.title = title
# To obtain the epub_date
temp_epub_date_list = root.xpath(
"//{0}:pub-date[@pub-type=\'epub\']".format(article_namespace),
namespaces=nsmap)
if temp_epub_date_list:
day = 1
month = 1
epub_date_etree = etree.ElementTree(temp_epub_date_list[0])
temp_day_list = epub_date_etree.xpath(
"{0}:day".format(article_namespace), namespaces=nsmap)
if temp_day_list:
day = int(temp_day_list[0].text)
temp_month_list = epub_date_etree.xpath(
"{0}:month".format(article_namespace), namespaces=nsmap)
if temp_month_list:
month = int(temp_month_list[0].text)
year_text = epub_date_etree.xpath(
"{0}:year".format(article_namespace), namespaces=nsmap)[0].text
year = int(year_text)
epub_date = datetime(year, month, day)
# print(epub_date)
else:
epub_date = None
literature.pub_date = epub_date
# To obtain the affiliations
affiliations_pair = {}
corresp_text = "corresp"
temp_affiliations_list = root.xpath(
"//{0}:aff".format(article_namespace), namespaces=nsmap)
for i, temp_affiliation in enumerate(temp_affiliations_list):
temp_aff_name_text = temp_affiliation.text
        if (temp_aff_name_text is not None) and (temp_aff_name_text.strip() != ""):
aff_name = re.sub(r' and\s*$|,\s*$', '', temp_aff_name_text)
affiliation_corresp = Affiliation(name=aff_name)
affiliations_pair[corresp_text] = affiliation_corresp
else:
affiliation_tree = etree.ElementTree(temp_affiliation)
temp_affiliation_elements = affiliation_tree.xpath(
"{0}:*".format(article_namespace), namespaces=nsmap)
for j, affiliation_elem in enumerate(temp_affiliation_elements):
temp_aff_name_text = affiliation_elem.tail
                if (temp_aff_name_text is not None) and \
                        (temp_aff_name_text.strip() != ""):
aff_name = re.sub(r' and\s*$|,\s*$', '',
temp_aff_name_text)
affiliation = Affiliation(name=aff_name)
affiliation_key = temp_affiliation.attrib['id']
affiliations_pair[affiliation_key] = affiliation
else:
temp_aff_name_text = affiliation_elem.text
                    if (temp_aff_name_text is not None) and (temp_aff_name_text.strip() != ""):
aff_name = re.sub(r' and\s*$|,\s*$', '',
temp_aff_name_text)
affiliation = Affiliation(name=aff_name)
affiliation_key = temp_affiliation.attrib['id']
affiliations_pair[affiliation_key] = affiliation
del temp_affiliations_list
# To get the authors' data, including affiliations
target_tag = 'contrib'
obtain_data(target_tag, nsmap, infile, obtain_author, affiliations_pair,
literature)
# To obtain the references
citation_type_list = root.xpath(
"//{0}:back/{0}:ref-list/{0}:ref/{0}:*".format(article_namespace),
namespaces=nsmap
)
for citation_type in citation_type_list:
if citation_type.tag == "{" + nsmap[article_namespace] + "}label":
continue
citation_tag = citation_type.tag
citation_tag = citation_tag.replace("{" + nsmap[article_namespace] + "}", "")
# print(citation_tag)
break
del citation_type_list
# To obtain the reference info
target_tag = 'ref'
obtain_data(target_tag, nsmap, infile, obtain_reference,
citation_tag, literature)
# To obtain the citememes
target_tag = 'p'
obtain_data(target_tag, nsmap, infile, obtain_citememe, literature)
db.session.commit()
def article_id_find(tree, article_id_type, nsmap):
path = "//" + article_namespace + \
":article-id[@pub-id-type=\'" + article_id_type + "\']"
# print(path)
temp_list = tree.xpath(path, namespaces=nsmap)
if temp_list:
article_id = temp_list[0].text
else:
article_id = None
return article_id
# This is the general method to obtain data (e.g. citememe, reference, author)
def obtain_data(target_tag, namespace, infile, obtain_method, *arguments):
tag = '{' + namespace[article_namespace] + '}' + target_tag
# print(tag)
context = etree.iterparse(infile, events=('end',), tag=tag)
obtain_method(context, namespace, *arguments)
# To obtain the citememes
def obtain_citememe(context, nsmap, *arguments):
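    """Extract citation sentences ("citememes").

    Every in-text <xref> to a reference is replaced by a ~sequence~ marker,
    the paragraph is stripped of tags and split into sentences with spaCy,
    and each marker found in a sentence is stored as a Citememe linked to
    the matching Cite row.
    """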
citer = arguments[0]
body_parent_xpath = etree.XPath(
"ancestor::{0}:body".format(article_namespace),
namespaces=nsmap
)
bibr_xpath = etree.XPath(
"{0}:xref[@ref-type=\'bibr\']".format(article_namespace),
namespaces=nsmap
)
for event, elem in context:
# Only within the tag <body>, if not continue
if len(body_parent_xpath(elem)) == 0:
continue
bibr_list = bibr_xpath(elem)
former_bibr = None
former_bibr_seq = ""
for bibr in bibr_list:
# print(bibr.attrib["rid"])
bibr.text = ""
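            # A dash directly after the previous ~seq~ marker means the citation
            # is a range (e.g. "3-7"); expand it into one marker per reference.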
            if (former_bibr is not None) and former_bibr.tail and (re.match("~.*?~[-–]", former_bibr.tail)):
former_bibr.tail = re.sub("[-–]", "", former_bibr.tail)
end_rid = (bibr.attrib["rid"])
# print(end_rid)
end_cite = Cite.query.filter_by(citer=citer, local_reference_id=end_rid).first()
start_rid_sequence = int(former_bibr_seq)
if end_cite is not None:
end_cite_sequence = end_cite.reference_sequence
for i in range(start_rid_sequence + 1, end_cite_sequence + 1):
former_bibr.tail = former_bibr.tail + "~" + str(i) + "~,"
former_bibr = bibr
continue
rids = (bibr.attrib["rid"]).split(" ")
seqs_str = []
for rid in rids:
cite = Cite.query.filter_by(citer=citer, local_reference_id=rid).first()
if cite is not None:
seq_str = str(cite.reference_sequence)
seqs_str.append(seq_str)
if len(seqs_str) == 0:
print("len(seqs) is 0.")
if bibr.tail is None:
bibr.tail = ""
seqs_text = "~{0}~".format("~~".join(seqs_str))
bibr.tail = seqs_text + bibr.tail
# print(bibr.tail)
former_bibr = bibr
former_bibr_seq = seqs_str[-1]
del bibr_list
# the paragraph without outer tag
para_text = etree.tostring(elem, encoding="unicode")
para_text = re.sub("<.*?>", "", para_text)
# eliminate the outer tag
# para_text = re.sub('<p.*?>|</p>\s*$', "", para_text)
para = nlp(para_text)
# To record
former_sent_str = ""
for sent in para.sents:
sent_str = str(sent).strip()
# sent_str do not start with A-Z
if (re.match(r'^[^A-Z]', sent_str)):
sent_str = former_sent_str + sent_str
# print(sent_str)
search_obj = re.findall(r"~.*?~", sent_str)
for so in search_obj:
# print(so.strip("~"))
# sent_str = re.sub("~.*?~", "", sent_str)
reference_sequence = int(so.strip("~"))
# print(local_reference_sequence)
                citence = re.sub(r'\s+', ' ', sent_str)
cite = Cite.query.filter_by(citer=citer, reference_sequence=reference_sequence).first()
citememe = Citememe(
cite=cite,
citence=citence
)
db.session.add(citememe)
del search_obj
former_sent_str = sent_str
del para
elem.clear()
while elem.getprevious() is not None:
del elem.getparent()[0]
del context
db.session.commit()
def obtain_reference(context, nsmap, *arguments):
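    """Create Literature and Cite rows for every <ref> entry in the back
    matter reference list, reusing an existing Literature when the pmid is
    already known."""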
# arguments = citation_tag, citer(literature)
citation_tag, citer = arguments
# print(citation_tag)
citation_type_xpath = etree.XPath(
"{0}:{1}".format(article_namespace, citation_tag),
namespaces=nsmap)
# To find the ref-id, e.g. B1, B2
ref_id_xpath = etree.XPath("@id", namespaces=nsmap)
pmid_xpath = etree.XPath(
"{0}:{1}/{0}:pub-id[@pub-id-type='pmid']".format(
article_namespace, citation_tag
),
namespaces=nsmap
)
article_title_xpath = etree.XPath(
"{0}:{1}/{0}:article-title".format(article_namespace,
citation_tag),
namespaces=nsmap)
source_title_xpath = etree.XPath(
"{0}:{1}/{0}:source".format(article_namespace,
citation_tag),
namespaces=nsmap)
year_xpath = etree.XPath(
"{0}:{1}/{0}:year".format(article_namespace,
citation_tag),
namespaces=nsmap)
volume_xpath = etree.XPath(
"{0}:{1}/{0}:volume".format(article_namespace,
citation_tag),
namespaces=nsmap)
fpage_xpath = etree.XPath(
"{0}:{1}/{0}:fpage".format(article_namespace,
citation_tag),
namespaces=nsmap)
name_xpath = etree.XPath(
"{0}:{1}/{0}:person-group[@person-group-type='author']/{0}:name".format
(article_namespace, citation_tag), namespaces=nsmap
)
surname_xpath = etree.XPath(
"{0}:surname".format(article_namespace), namespaces=nsmap)
given_names_xpath = etree.XPath(
"{0}:given-names".format(article_namespace), namespaces=nsmap)
suffix_xpath = etree.XPath(
"{0}:suffix".format(article_namespace), namespaces=nsmap)
reference_sequence = 0
for event, elem in context:
reference_sequence += 1
reference = Literature()
ref_id_list = ref_id_xpath(elem)
if ref_id_list:
ref_id = ref_id_list[0]
# print(ref_id)
else:
print("ref_id_list is None")
pmid_list = pmid_xpath(elem)
if pmid_list:
pmid = pmid_list[0].text
temp_reference = Literature.query.filter_by(pmid=pmid).first()
if temp_reference:
print(
"Reference already exist. And the reference pmc id is {0}".format(
temp_reference.pmid
)
)
reference = temp_reference
else:
reference.pmid = pmid
reference.updated = False
# print(pmid)
cite = Cite(
citer=citer,
cited=reference,
local_reference_id=ref_id,
reference_sequence=reference_sequence
)
db.session.add(cite)
continue
reference.updated = False
citation_type_list = citation_type_xpath(elem)
if citation_type_list:
citation_type = citation_type_list[0].attrib.values()[0]
reference.type = citation_type
article_titles = article_title_xpath(elem)
if article_titles:
article_title = article_titles[0].text
reference.title = article_title
# print(article_title)
source_titles = source_title_xpath(elem)
if source_titles:
source_title = source_titles[0].text
reference.source_title = source_title
# print(source_title)
years = year_xpath(elem)
if years:
year = years[0].text
reference.pub_year = year
# print(year)
volumes = volume_xpath(elem)
if volumes:
volume = volumes[0].text
reference.volume = volume
# print(volume)
frontpage_list = fpage_xpath(elem)
if frontpage_list:
frontpage = frontpage_list[0].text
reference.fpage = frontpage
# print(frontpage)
name_list = name_xpath(elem)
author_rank = 0
for name in name_list:
author_rank += 1
surname_list = surname_xpath(name)
if surname_list:
surname = surname_list[0].text
# print(surname)
given_names_list = given_names_xpath(name)
if given_names_list:
given_names = given_names_list[0].text
else:
given_names = suffix_xpath(name)[0].text.replace(".", "")
author = Author(surname=surname, given_names=given_names)
author_literature = AuthorLiterature(
author=author, literature=reference, author_rank=author_rank
)
db.session.add(author_literature)
del name_list
cite = Cite(
citer=citer,
cited=reference,
local_reference_id=ref_id,
reference_sequence=reference_sequence
)
db.session.add(cite)
elem.clear()
while elem.getprevious() is not None:
del elem.getparent()[0]
del context
db.session.commit()
# To obtain the authors' info
def obtain_author(context, nsmap, *arguments):
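    """Create Author rows for every <contrib contrib-type="author"> element
    and link them to the literature with rank, corresponding-author flag,
    email and affiliations."""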
affiliations_pair, literature = arguments
    # To confirm whether the contributor type is "author"
contrib_type_xpath = etree.XPath("@contrib-type", namespaces=nsmap)
# To confirm whether the author is the corresponding author
corresp_contrib_attrib_xpath = etree.XPath("@corresp", namespaces=nsmap)
# To find <xref ref-type="corresp"
corresp_xref_xpath = etree.XPath(
"{0}:xref[@ref-type=\'corresp\']".format(article_namespace),
namespaces=nsmap)
# To find <xref ref-type="aff"
aff_xref_xpath = etree.XPath(
"{0}:xref[@ref-type=\'aff\']".format(article_namespace),
namespaces=nsmap)
# The surname
surname_xpath = etree.XPath(
"{0}:name/{0}:surname".format(article_namespace), namespaces=nsmap)
# The given names
given_names_xpath = etree.XPath(
"{0}:name/{0}:given-names".format(article_namespace), namespaces=nsmap)
# The email
email_xpath = etree.XPath(
"{0}:email".format(article_namespace), namespaces=nsmap)
# The email in author_notes
email_in_author_notes_xpath = etree.XPath(
"//{0}:author-notes/{0}:corresp/{0}:email".format(article_namespace),
namespaces=nsmap)
# The children nodes
children_nodes_xpath = etree.XPath(
"./{0}:*".format(article_namespace), namespaces=nsmap)
author_rank = 0
for event, elem in context:
if (contrib_type_xpath(elem))[0] != "author":
continue
author = Author()
author_rank += 1
corresponding = bool(0)
# The email
temp_emails = email_xpath(elem)
if temp_emails:
email = temp_emails[0].text
else:
email = None
# To make sure whether the author is the corresponding author
condition1 = bool(0)
if (corresp_contrib_attrib_xpath(elem) and (corresp_contrib_attrib_xpath(elem)[0] == "yes")):
condition1 = bool(1)
condition2 = corresp_xref_xpath(elem)
if (condition1 or condition2):
corresponding = bool(1)
if (corresp_xref_xpath(elem)) and (email is None):
temp_emails = email_in_author_notes_xpath(elem)
if temp_emails:
email = temp_emails[0].text
else:
email = None
author_with_email = None
if email:
author_with_email = Author.query.filter_by(email=email).first()
else:
corresponding = bool(0)
if author_with_email:
author = author_with_email
author_literature = AuthorLiterature(
author=author,
literature=literature,
author_rank=author_rank,
corresponding=corresponding)
db.session.add(author_literature)
continue
else:
author.email = email
# The surnames
temp_surnames = surname_xpath(elem)
if temp_surnames:
surname = temp_surnames[0].text
else:
surname = None
# print(surname)
author.surname = surname
# The given names
temp_given_names = given_names_xpath(elem)
if temp_given_names:
given_names = temp_given_names[0].text
else:
given_names = None
# print(given_names)
author.given_names = given_names
        # The affiliations
temp_affilitions = aff_xref_xpath(elem)
if temp_affilitions:
for i, temp_affilition in enumerate(temp_affilitions):
rid = temp_affilition.attrib["rid"]
if (
temp_affilition.text is not None
) and (
temp_affilition.text.startswith("\n") is True
):
temp_affilition = children_nodes_xpath(temp_affilition)[0]
# print(affiliations_pair)
affiliation = affiliations_pair[rid]
author_affiliation = AuthorAffiliation(
author=author, affiliation=affiliation
)
db.session.add(author_affiliation)
# author.affiliations.append(affiliations_pair[rid + "##" +
# temp_affilition.text])
del temp_affilitions
if corresp_xref_xpath(elem):
# add the affiliation for the corresponding author
if ("corresp" in affiliations_pair.keys()) is True:
affiliation = affiliations_pair["corresp"]
author_affiliation = AuthorAffiliation(
author=author, affiliation=affiliation)
db.session.add(author_affiliation)
# author.affiliations.append(affiliations_pair["corresp"])
author_literature = AuthorLiterature(
author=author,
literature=literature,
author_rank=author_rank,
corresponding=corresponding)
db.session.add(author_literature)
elem.clear()
while elem.getprevious() is not None:
del elem.getparent()[0]
del context
db.session.commit()
'''
# web version
def obtain_data_webfile(pmc_uid):
    # later use the configuration file to store the prefix and postfix of query
# query = "https://www.ncbi.nlm.nih.gov/pmc/oai/oai.cgi?
# verb=GetRecord&identifier=oai:pubmedcentral.nih.gov:" \
# + str(pmc_uid) \
# + "&metadataPrefix=pmc"
# page = requests.get(query)
query = "https://www.ncbi.nlm.nih.gov/pmc/oai/oai.cgi?
verb=GetRecord&identifier=oai:pubmedcentral.nih.gov:" \
+ str(pmc_uid) \
+ "&metadataPrefix=pmc"
page = requests.get(query)
tree = etree.fromstring(page.content)
# print(tree.xpath("//" + article_namespace + ":title/text()",
# namespaces=ns))
front = tree.xpath("//" + article_namespace + ":article/front",
namespaces=nsmap)
print(len(front))
'''
| {
"content_hash": "53196fa242882b68f45b3b8e60dec388",
"timestamp": "",
"source": "github",
"line_count": 680,
"max_line_length": 103,
"avg_line_length": 33.8,
"alnum_prop": 0.5627392969021928,
"repo_name": "cit563emef2dasdme/jklasjdf12nfasfdkl",
"id": "304d1ee5f3da286f2ea0b217f95d231471b0f7b6",
"size": "22988",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "obtaning_pmc_oai.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "33156"
},
{
"name": "HTML",
"bytes": "29871"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "135191"
}
],
"symlink_target": ""
} |
import base64
from django.contrib.auth import get_user_model
from django.contrib.auth.tokens import default_token_generator
from django.core import mail
from django.core.files.images import get_image_dimensions
from rest_framework.reverse import reverse
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from test_project import settings
from test_project.test_app.tests.factories import UserFactory
from yak.rest_core.test import SchemaTestCase
User = get_user_model()
def encode_string(value):
    """
    Encode a unicode string as base 64 bytes, then decode back to unicode for use as a string
    """
    return base64.encodebytes(bytes(value, 'utf8')).decode()
class UserTests(SchemaTestCase):
def setUp(self):
super(UserTests, self).setUp()
self.user = User.objects.create_user(username='tester1', email='[email protected]', password='password')
def test_autocomplete(self):
"""
Tests that when a string is sent with the user's name or username, we return a filtered list of users
"""
url = reverse("users-list")
bob = UserFactory(username="bob")
frank = UserFactory(username="frank")
UserFactory(username="mindy")
parameters = {"search": "fra"}
response = self.assertSchemaGet(url, parameters, "$userResponse", bob)
self.assertEqual(response.data["count"], 1)
self.assertEqual(response.data["results"][0]["username"], frank.username)
def test_update_user(self):
me = UserFactory()
stranger = UserFactory()
url = reverse("users-detail", args=[me.pk])
data = {
"fullname": "Hodor",
"username": me.username # Don't freak out if unchanged unique data is sent
}
# Unauthenticated user can't update a user's profile
self.assertSchemaPatch(url, "$userRequest", "$userResponse", data, None, unauthorized=True)
self.assertEqual(User.objects.get(pk=me.pk).fullname, None)
# Stranger can't update another user's profile
self.assertSchemaPatch(url, "$userRequest", "$userResponse", data, stranger, unauthorized=True)
self.assertEqual(User.objects.get(pk=me.pk).fullname, None)
# User can update their own profile
self.assertSchemaPatch(url, "$userRequest", "$userResponse", data, me)
self.assertEqual(User.objects.get(pk=me.pk).fullname, "Hodor")
def test_get_logged_in_user(self):
me = UserFactory()
UserFactory()
url = reverse("users-me")
response = self.assertSchemaGet(url, None, "$userResponse", me)
self.assertEqual(response.data["id"], me.pk)
def test_user_can_sign_up(self):
url = reverse("sign_up")
password = encode_string("testtest")
data = {
"fullname": "tester",
"username": "tester",
"password": password
}
self.assertSchemaPost(url, "$signUpRequest", "$signUpResponse", data, None)
user = User.objects.filter(username="tester")
self.assertEqual(user.count(), 1)
# Password gets decoded and hashed
self.assertTrue(user[0].check_password("testtest"))
def test_password_min_length(self):
url = reverse("sign_up")
password = encode_string("test")
data = {
"fullname": "tester2",
"username": "tester2",
"email": "[email protected]",
"password": password
}
response = self.client.post(url, data, format="json")
self.assertEqual(response.status_code, 400)
def test_user_can_log_in(self):
url = reverse("login")
# With the correct username and password, a user can log in with basic auth
auth_string = encode_string("tester1:password")
self.client.credentials(HTTP_AUTHORIZATION='Basic ' + auth_string)
response = self.client.get(url)
self.assertValidJSONResponse(response)
self.check_response_data(response, "$loginResponse")
# Incorrect credentials return unauthorized
auth_string = encode_string("tester1:WRONGPASSWORD")
self.client.credentials(HTTP_AUTHORIZATION='Basic ' + auth_string)
response = self.client.get(url)
self.assertHttpUnauthorized(response)
def test_user_can_sign_in(self):
url = reverse("sign_in")
# With the correct username and password, a user can sign in
good_data = {
"username": "tester1",
"password": "password"
}
self.assertSchemaPost(url, "$signInRequest", "$loginResponse", good_data, None, status_OK=True)
# Incorrect credentials return unauthorized
bad_data = {
"username": "tester1",
"password": "WRONGPASSWORD"
}
response = self.client.post(url, bad_data, format="json")
self.assertHttpUnauthorized(response)
def test_inexact_signup(self):
"""
Email and username are case insensitive
"""
UserFactory(username="used", email="[email protected]")
url = reverse("sign_up")
data = {
'username': 'useD',
'email': '[email protected]',
'password': encode_string("password")
}
response = self.client.post(url, data, format="json")
self.assertHttpBadRequest(response)
data = {
'username': "new_username",
'email': '[email protected]',
'password': encode_string("password")
}
response = self.client.post(url, data, format="json")
self.assertHttpBadRequest(response)
def test_inexact_login(self):
url = reverse("login")
# username is case-insensitive for login
auth_string = encode_string("Tester1:password")
self.client.credentials(HTTP_AUTHORIZATION='Basic ' + auth_string)
response = self.client.get(url)
self.assertValidJSONResponse(response)
self.check_response_data(response, "$loginResponse")
def test_edit_user_to_inexact_match(self):
"""
        You also can't edit a user to an inexact match of someone else's username. This fails correctly at the DB
        level, but we still need to add validation in the API to give better errors.
"""
user1 = UserFactory(username="baylee")
UserFactory(username="winnie")
url = reverse("users-detail", args=[user1.pk])
data = {"username": "Winnie"}
self.add_credentials(user1)
response = self.client.patch(url, data, format="json")
self.assertHttpBadRequest(response)
def test_user_can_get_token(self):
"""
Below is the test I want. But it fails because django-oauth-toolkit will only accept requests with
content-type application/x-www-form-urlencoded. DRF does not appear to support this type.
url = reverse("oauth2_provider:token")
data = {
"client_id": self.user.oauth2_provider_application.first().client_id,
"client_secret": self.user.oauth2_provider_application.first().client_secret,
"grant_type": "client_credentials"
}
self.assertManticomPOSTResponse(url, "$tokenRequest", "$tokenResponse", data, None)
"""
pass
def test_token_authenticates_user(self):
pass
def test_photo_resize(self):
me = UserFactory()
url = reverse("users-detail", args=[me.pk])
data = {
"original_photo": open(settings.PROJECT_ROOT + "/test_app/tests/img/yeti.jpg", 'rb')
}
self.assertSchemaPatch(url, "$userRequest", "$userResponse", data, me, format="multipart")
user = User.objects.get(pk=me.pk)
# Check the original photo is saved
self.assertEqual(
user.original_photo.file.read(),
open(settings.PROJECT_ROOT + "/test_app/tests/img/yeti.jpg", 'rb').read()
)
# Check the photo is correctly resized
for size_field, size in User.SIZES.items():
w, h = get_image_dimensions(getattr(user, size_field).file)
self.assertEqual(size['height'], h)
self.assertEqual(size['width'], w)
class PasswordResetTests(SchemaTestCase):
def test_user_can_change_password(self):
felicia = UserFactory(username='felicia')
felicia.set_password('password')
felicia.save()
url = reverse("password_change")
data = {
"old_password": encode_string("password"),
"password": encode_string("felicia"),
"confirm_password": encode_string("felicia")
}
# Unauthenticated user can't change password
self.assertSchemaPatch(url, "$changePasswordRequest", "$changePasswordResponse", data, None, unauthorized=True)
self.assertFalse(User.objects.get(pk=felicia.pk).check_password("felicia"))
# User can't change password if the old / current password is incorrect
bad_data = {
"old_password": encode_string("wrong_password"),
"password": encode_string("felicia"),
"confirm_password": encode_string("felicia")
}
self.assertSchemaPatch(url, "$changePasswordRequest", "$changePasswordResponse", bad_data, felicia,
unauthorized=True)
self.assertFalse(User.objects.get(pk=felicia.pk).check_password("felicia"))
# User can't change password if the two new passwords don't match
mismatch_password_data = {
"old_password": encode_string("password"),
"password": encode_string("felicia"),
"confirm_password": encode_string("FELICIA")
}
self.add_credentials(felicia)
response = self.client.patch(url, mismatch_password_data, format='json')
self.assertEqual(response.status_code, 400)
self.assertFalse(User.objects.get(pk=felicia.pk).check_password("felicia"))
# User can change their own password
self.assertSchemaPatch(url, "$changePasswordRequest", "$changePasswordResponse", data, felicia)
self.assertTrue(User.objects.get(pk=felicia.pk).check_password("felicia"))
def test_user_can_get_reset_password_email(self):
jeanluc = UserFactory(username="jeanluc")
url = reverse("password_reset")
data = {
"email": jeanluc.email
}
self.assertSchemaPost(url, "$resetPasswordRequest", "$resetPasswordResponse", data, None, status_OK=True)
self.assertEqual(len(mail.outbox), 1)
def test_user_can_reset_password(self):
url = reverse("password_new")
beverly = UserFactory(username="beverly")
beverly.set_password("jack")
beverly.save()
mismatch_password_data = {
"uid": urlsafe_base64_encode(force_bytes(beverly.pk)).decode(),
"token": default_token_generator.make_token(beverly),
"password": encode_string("wesley"),
"confirm_password": encode_string("WESLEY")
}
response = self.client.post(url, mismatch_password_data, format='json')
self.assertEqual(response.status_code, 400)
self.assertFalse(User.objects.get(username='beverly').check_password('wesley'))
bad_uid_data = {
"uid": urlsafe_base64_encode(force_bytes(UserFactory().pk)).decode(),
"token": default_token_generator.make_token(beverly),
"password": encode_string("wesley"),
"confirm_password": encode_string("wesley")
}
response = self.client.post(url, bad_uid_data, format='json')
self.assertEqual(response.status_code, 400)
self.assertFalse(User.objects.get(username='beverly').check_password('wesley'))
good_data = {
"uid": urlsafe_base64_encode(force_bytes(beverly.pk)).decode(),
"token": default_token_generator.make_token(beverly),
"password": encode_string("wesley"),
"confirm_password": encode_string("wesley")
}
self.assertSchemaPost(url, "$setPasswordRequest", "$userResponse", good_data, None, status_OK=True)
self.assertTrue(User.objects.get(username='beverly').check_password('wesley'))
| {
"content_hash": "a10f90f974693fcda62610574662441f",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 120,
"avg_line_length": 40.491749174917494,
"alnum_prop": 0.6266199364251365,
"repo_name": "yeti/YAK-server",
"id": "dfd3761bb28fce125bca056e9552ad4a2238b15e",
"size": "12269",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_project/test_app/tests/test_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2288"
},
{
"name": "Python",
"bytes": "177485"
}
],
"symlink_target": ""
} |
"""Join sensor data files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=missing-docstring
import argparse
import glob
import os
import sys
# Basic model parameters as external flags.
FLAGS = None
def join_sensor_data_file():
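    """Concatenate the sensor_data_* files found under every directory whose
    name starts with join_dir_prefix into files of the same name in out_dir."""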
target_directory_paths = glob.glob(FLAGS.join_dir_prefix + '*')
if len(target_directory_paths) > 0:
        # Create the directory for the joined output if it does not exist yet
if not os.path.exists(FLAGS.out_dir):
print('make out dir: ' + FLAGS.out_dir)
os.makedirs(FLAGS.out_dir)
        # Loop over each directory that is to be joined
for target_directory_path in target_directory_paths:
target_file_paths = glob.glob(target_directory_path + os.sep + 'sensor_data_*')
if len(target_file_paths) > 0:
for target_file_path in target_file_paths:
                    # Create a file with the same name in the output directory and append the lines to it
split_target_file_path = target_file_path.split(os.sep)
target_file_name = split_target_file_path[len(split_target_file_path) - 1]
with open(FLAGS.out_dir + os.sep + target_file_name, 'a') as fout:
for line in open(target_file_path, 'r'):
fout.write(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--join_dir_prefix',
type=str,
default='..' + os.sep + 'data' + os.sep + 'cond_',
help='target join sensor data directory name prefix'
)
parser.add_argument(
'--out_dir',
type=str,
default='..' + os.sep + 'data' + os.sep + 'join_out',
help='output directory for joined files'
)
FLAGS, unparsed = parser.parse_known_args()
join_sensor_data_file()
| {
"content_hash": "cf1cf83777ce8dcb04544ed22b900bae",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 94,
"avg_line_length": 34.660377358490564,
"alnum_prop": 0.5955362003266195,
"repo_name": "eq-inc/eq-tensorflow-learn_uh_sensor_values",
"id": "ece3fbd5d5f595071d845e6cb43d9e4a7b6986ff",
"size": "2023",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/join_data.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34300"
}
],
"symlink_target": ""
} |
with open('inputs/strings.txt') as f:
content = f.read().splitlines()[:-1]
content_for_encode = content
total_original_length = 0
total_string_length = 0
for i in range(0,len(content)):
total_original_length += len(content[i])
#Remove quote marks from two ends of each line
if (content[i][0] == '\"') and (content[i][-1] == '\"'):
content[i] = content[i][1:-1]
#"Parse" the line
line_length = 0
j = 0
while j<len(content[i]):
if (content[i][j] == '\\') and (content[i][j+1] == 'x'): #Assuming legal input provided by the question, otherwise will need to have additional checks for list index
line_length += 1
j += 4
elif (content[i][j] == '\\') and (content[i][j+1] == '\"'):
line_length += 1
j += 2
elif (content[i][j] == '\\') and (content[i][j+1] == '\\'):
line_length += 1
j += 2
else:
line_length += 1
j += 1
total_string_length += line_length
part_one_result = total_original_length - total_string_length
print "Original Length - Total String Length (Part One): ", part_one_result
total_encoded_length = 0
for i in range(0,len(content_for_encode)):
j = 0
line_len = len(content_for_encode[i])
encoded_string = "\"\\\"" #Assuming legal input, with quotes on each side
while j < line_len:
if (content_for_encode[i][j] == '\\') and (content_for_encode[i][j+1] == '"'):
encoded_string += "\\\\\\\""
j += 2
elif (content_for_encode[i][j] == '"'):
encoded_string += "\\\""
j += 1
elif (content_for_encode[i][j] == '\\') and (content_for_encode[i][j+1] == '\\'):
encoded_string += "\\\\\\\\"
j += 2
elif (content_for_encode[i][j] == '\\') and (content_for_encode[i][j+1] == 'x'):
encoded_string += "\\\\"
j += 1
else:
encoded_string += content_for_encode[i][j]
j += 1
encoded_string += "\\\"\""
total_encoded_length += len(encoded_string)
part_two_result = total_encoded_length - total_original_length
print "Total Encoded Length - Original Length (Part Two): ", part_two_result
| {
"content_hash": "883432f9690dab40e90842e4b7273645",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 173,
"avg_line_length": 33.701492537313435,
"alnum_prop": 0.524800708591674,
"repo_name": "icydoge/AdventOfCodeSolutions",
"id": "b2d2f415ad6bb8f1c7fb063396faffdf212de903",
"size": "2331",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "day8.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51597"
}
],
"symlink_target": ""
} |
from random import randint
from Entity import Entity
__author__ = 'George'
class Ball(Entity):
def __init__(self, app):
super().__init__(app.canvas)
self.app = app
self.speed = 6
self.width = 50
self.height = 50
self.color = "#FF00FF"
self.lastNotePos = {
'x': self.x,
'y': self.y
}
self.notePos = 20
self.reset()
def render(self):
ctx = self.ctx
self.check_hitbox()
self.add_delta()
ctx.create_oval(self.x, self.y, self.x + self.width, self.height + self.y, fill=self.color)
if self.notePos <= 0:
self.notePos = 20
self.lastNotePos['x'] = self.x
self.lastNotePos['y'] = self.y
self.notePos -= 1
def check_hitbox(self):
windowWidth = self.app.w
windowHeight = self.app.h
x = self.x
y = self.y
height = self.height
width = self.width
playerPaddle = self.app.playerPaddle
cpuPaddle = self.app.cpuPaddle
if y <= 0 or y + height > windowHeight:
self.bounce_y()
if x <= 0:
self.app.cpuWin()
self.reset()
if x + width >= windowWidth:
self.app.playerWin()
self.reset()
if x + width >= cpuPaddle.x and cpuPaddle.y < y <= cpuPaddle.y + cpuPaddle.height:
self.bounce_x(abs(y - (cpuPaddle.y + (cpuPaddle.height/2)))) # bounce with the offset the ball is from the middle of the paddle
if x <= playerPaddle.x + playerPaddle.width and playerPaddle.y < y <= playerPaddle.y + playerPaddle.height:
self.bounce_x(abs(y - (playerPaddle.y + (playerPaddle.height/2)))) # bounce with the offset the ball is from the middle of the paddle
def reset(self):
self.x = (self.app.w - self.width) / 2
self.y = (self.app.h - self.height) / 2
self.delta_x = randint(-1, 1) * self.speed
self.delta_y = randint(-1, 1) * self.speed
if self.delta_x * self.delta_y == 0:
self.delta_x = self.speed
self.delta_y = -self.speed
def bounce_y(self):
self.delta_y = -self.delta_y
def bounce_x(self, offset):
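        # Reverse the horizontal direction; the rebound speed is adjusted by the
        # distance from the paddle centre (offset) and a small random perturbation.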
if self.delta_x < 0:
self.delta_x = -self.delta_x + offset/10 + (randint(-self.speed, self.speed))/2
else:
self.delta_x = -self.delta_x - offset/10 - (randint(-self.speed, self.speed))/2
| {
"content_hash": "1c05b8dd48edbe8c865019c61e248288",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 145,
"avg_line_length": 28.906976744186046,
"alnum_prop": 0.5442477876106194,
"repo_name": "georgePadolsey/PythonProjects",
"id": "ea58db72f710da3a9b0de5b5d7161e4a31443535",
"size": "2486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PingPong/georgep/pokuit/ping_pong/Ball.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7355"
}
],
"symlink_target": ""
} |
import json
import re
import time
from datetime import datetime, timedelta
from itertools import cycle
from os import path
from django import test
from django.conf import settings
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django.utils import translation
import mock
import requests
import waffle
from cache_nuggets.lib import Token
from jingo.helpers import urlparams
from nose import SkipTest
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from requests.structures import CaseInsensitiveDict
import mkt
import mkt.ratings
import mkt.site.tests
from lib.crypto import packaged
from lib.crypto.tests import mock_sign
from mkt.abuse.models import AbuseReport
from mkt.api.tests.test_oauth import RestOAuth
from mkt.comm.tests.test_views import CommTestMixin
from mkt.comm.utils import create_comm_note
from mkt.constants import MANIFEST_CONTENT_TYPE, comm
from mkt.developers.models import ActivityLog, AppLog
from mkt.files.models import File
from mkt.ratings.models import Review, ReviewFlag
from mkt.reviewers.models import (QUEUE_TARAKO, CannedResponse,
EscalationQueue, RereviewQueue,
ReviewerScore)
from mkt.reviewers.utils import ReviewersQueuesHelper
from mkt.reviewers.views import (_progress, app_review, queue_apps,
route_reviewer)
from mkt.site.fixtures import fixture
from mkt.site.helpers import absolutify, isotime
from mkt.site.storage_utils import private_storage, public_storage
from mkt.site.tests import (check_links, days_ago, formset, initial,
req_factory_factory, user_factory)
from mkt.site.utils import app_factory, make_game, paginate, version_factory
from mkt.submit.tests.test_views import BasePackagedAppTest, SetupFilesMixin
from mkt.users.models import UserProfile
from mkt.versions.models import Version
from mkt.webapps.models import AddonDeviceType, Webapp
from mkt.webapps.tasks import unindex_webapps
from mkt.websites.utils import website_factory
from mkt.zadmin.models import get_config, set_config
TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
TEST_PATH = path.dirname(path.abspath(__file__))
ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm',
'tests', 'attachments'))
class AttachmentManagementMixin(object):
def _attachment_management_form(self, num=1):
"""
Generate and return data for a management form for `num` attachments
"""
return {'attachment-TOTAL_FORMS': max(1, num),
'attachment-INITIAL_FORMS': 0,
'attachment-MAX_NUM_FORMS': 1000}
def _attachments(self, num):
"""Generate and return data for `num` attachments """
data = {}
files = ['bacon.jpg', 'bacon.txt']
descriptions = ['mmm, bacon', '']
if num > 0:
for n in xrange(num):
i = 0 if n % 2 else 1
attachment = open(path.join(ATTACHMENTS_DIR, files[i]), 'r+')
data.update({
'attachment-%d-attachment' % n: attachment,
'attachment-%d-description' % n: descriptions[i]
})
return data
class TestedonManagementMixin(object):
def _testedon_management_form(self, num=0):
"""
Generate and return data for a management form for `num` tested on
platforms.
"""
return {'testedon-TOTAL_FORMS': max(1, num),
'testedon-INITIAL_FORMS': 0,
'testedon-MAX_NUM_FORMS': 1000}
def _platforms(self, num, device_types=[u'\xd0esktop', u'FirefoxOS'],
devices=[u'PC ', u'ZT\xc8 Open'],
versions=[u'34', u'1.3<']):
"""Generate and return data for `num` tested on platforms """
data = {}
if num > 0:
for n in xrange(num):
i = n % len(device_types)
data.update({
'testedon-%d-device_type' % n: device_types[i],
'testedon-%d-device' % n: devices[i],
'testedon-%d-version' % n: versions[i],
})
return data
class AppReviewerTest(mkt.site.tests.TestCase):
def setUp(self):
super(AppReviewerTest, self).setUp()
self.reviewer_user = user_factory(email='editor')
self.grant_permission(self.reviewer_user, 'Apps:Review')
self.snr_reviewer_user = user_factory(email='snrreviewer')
self.grant_permission(self.snr_reviewer_user, 'Apps:Review,Apps:Edit,'
'Apps:ReviewEscalated,Apps:ReviewPrivileged',
name='Senior App Reviewers')
self.admin_user = user_factory(email='admin')
self.grant_permission(self.admin_user, '*:*')
self.regular_user = user_factory(email='regular')
self.contact_user = user_factory(email='contact')
self.login_as_editor()
def login_as_admin(self):
self.login(self.admin_user)
def login_as_editor(self):
self.login(self.reviewer_user)
def login_as_senior_reviewer(self):
self.login(self.snr_reviewer_user)
def check_actions(self, expected, elements):
"""Check the action buttons on the review page.
`expected` is a list of tuples containing action name and action form
value. `elements` is a PyQuery list of input elements.
"""
for idx, item in enumerate(expected):
text, form_value = item
e = elements.eq(idx)
eq_(e.parent().text(), text)
eq_(e.attr('name'), 'action')
eq_(e.val(), form_value)
def uses_es(self):
return waffle.switch_is_active('reviewer-tools-elasticsearch')
class AccessMixin(object):
def test_403_for_non_editor(self, *args, **kwargs):
self.login('[email protected]')
eq_(self.client.head(self.url).status_code, 403)
def test_302_for_anonymous(self, *args, **kwargs):
self.client.logout()
eq_(self.client.head(self.url).status_code, 302)
class SearchMixin(object):
def test_search_query(self):
# Light test to make sure queues can handle search queries.
res = self.client.get(self.url, {'text_query': 'test'})
eq_(res.status_code, 200)
@mock.patch('mkt.webapps.models.Webapp.get_cached_manifest', mock.Mock)
class TestReviewersHome(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestReviewersHome, self).setUp()
self.url = reverse('reviewers.home')
self.apps = [app_factory(name='Antelope',
status=mkt.STATUS_PENDING,
file_kw={'status': mkt.STATUS_PENDING}),
app_factory(name='Bear',
status=mkt.STATUS_PENDING,
file_kw={'status': mkt.STATUS_PENDING}),
app_factory(name='Cougar',
status=mkt.STATUS_PENDING,
file_kw={'status': mkt.STATUS_PENDING})]
self.packaged_app = app_factory(name='Dinosaur',
status=mkt.STATUS_PUBLIC,
is_packaged=True)
version_factory(addon=self.packaged_app,
file_kw={'status': mkt.STATUS_PENDING})
# Add a disabled app for good measure.
app_factory(name='Elephant', disabled_by_user=True,
status=mkt.STATUS_PENDING)
# Escalate one app to make sure it doesn't affect stats.
escalated = app_factory(name='Eyelash Pit Viper',
status=mkt.STATUS_PENDING)
EscalationQueue.objects.create(addon=escalated)
# Add a public app under re-review.
rereviewed = app_factory(name='Finch', status=mkt.STATUS_PUBLIC)
rq = RereviewQueue.objects.create(addon=rereviewed)
rq.update(created=self.days_ago(1))
# Add an app with latest update deleted. It shouldn't affect anything.
app = app_factory(name='Great White Shark',
status=mkt.STATUS_PUBLIC,
version_kw={'version': '1.0'},
is_packaged=True)
v = version_factory(addon=app,
version='2.1',
file_kw={'status': mkt.STATUS_PENDING})
v.update(deleted=True)
def test_route_reviewer(self):
# App reviewers go to apps home.
req = mkt.site.tests.req_factory_factory(
reverse('reviewers'),
user=UserProfile.objects.get(email='[email protected]'))
r = route_reviewer(req)
self.assert3xx(r, reverse('reviewers.home'))
def test_progress_pending(self):
self.apps[0].latest_version.update(nomination=self.days_ago(1))
self.apps[1].latest_version.update(nomination=self.days_ago(8))
self.apps[2].latest_version.update(nomination=self.days_ago(15))
counts, percentages = _progress()
eq_(counts['pending']['week'], 1)
eq_(counts['pending']['new'], 1)
eq_(counts['pending']['old'], 1)
eq_(counts['pending']['med'], 1)
self.assertAlmostEqual(percentages['pending']['new'], 33.333333333333)
self.assertAlmostEqual(percentages['pending']['old'], 33.333333333333)
self.assertAlmostEqual(percentages['pending']['med'], 33.333333333333)
def test_progress_rereview(self):
rq = RereviewQueue.objects.create(addon=self.apps[0])
rq.update(created=self.days_ago(8))
rq = RereviewQueue.objects.create(addon=self.apps[1])
rq.update(created=self.days_ago(15))
counts, percentages = _progress()
eq_(counts['rereview']['week'], 1)
eq_(counts['rereview']['new'], 1)
eq_(counts['rereview']['old'], 1)
eq_(counts['rereview']['med'], 1)
self.assertAlmostEqual(percentages['rereview']['new'], 33.333333333333)
self.assertAlmostEqual(percentages['rereview']['old'], 33.333333333333)
self.assertAlmostEqual(percentages['rereview']['med'], 33.333333333333)
def test_progress_updated(self):
extra_app = app_factory(name='Jackalope',
status=mkt.STATUS_PUBLIC,
is_packaged=True,
created=self.days_ago(35))
version_factory(addon=extra_app,
file_kw={'status': mkt.STATUS_PENDING},
created=self.days_ago(25),
nomination=self.days_ago(8))
extra_app = app_factory(name='Jackrabbit',
status=mkt.STATUS_PUBLIC,
is_packaged=True,
created=self.days_ago(35))
version_factory(addon=extra_app,
file_kw={'status': mkt.STATUS_PENDING},
created=self.days_ago(25),
nomination=self.days_ago(25))
counts, percentages = _progress()
eq_(counts['updates']['week'], 1)
eq_(counts['updates']['new'], 1)
eq_(counts['updates']['old'], 1)
eq_(counts['updates']['med'], 1)
self.assertAlmostEqual(percentages['updates']['new'], 33.333333333333)
self.assertAlmostEqual(percentages['updates']['old'], 33.333333333333)
self.assertAlmostEqual(percentages['updates']['med'], 33.333333333333)
def test_stats_waiting(self):
self.apps[0].latest_version.update(nomination=self.days_ago(1))
self.apps[1].latest_version.update(nomination=self.days_ago(5))
self.apps[2].latest_version.update(nomination=self.days_ago(15))
self.packaged_app.update(created=self.days_ago(1))
doc = pq(self.client.get(self.url).content)
anchors = doc('.editor-stats-title a')
eq_(anchors.eq(0).text(), '3 Pending App Reviews')
eq_(anchors.eq(1).text(), '1 Re-review')
eq_(anchors.eq(2).text(), '1 Update Review')
divs = doc('.editor-stats-table > div')
# Pending review.
eq_(divs.eq(0).text(), '2 unreviewed app submissions this week.')
# Re-reviews.
eq_(divs.eq(2).text(), '1 unreviewed app submission this week.')
# Update review.
eq_(divs.eq(4).text(), '1 unreviewed app submission this week.')
# Maths.
# Pending review.
eq_(doc('.waiting_new').eq(0).attr('title')[-3:], '33%')
eq_(doc('.waiting_med').eq(0).attr('title')[-3:], '33%')
eq_(doc('.waiting_old').eq(0).attr('title')[-3:], '33%')
# Re-reviews.
eq_(doc('.waiting_new').eq(1).attr('title')[-4:], '100%')
eq_(doc('.waiting_med').eq(1).attr('title')[-3:], ' 0%')
eq_(doc('.waiting_old').eq(1).attr('title')[-3:], ' 0%')
# Update review.
eq_(doc('.waiting_new').eq(2).attr('title')[-4:], '100%')
eq_(doc('.waiting_med').eq(2).attr('title')[-3:], ' 0%')
eq_(doc('.waiting_old').eq(2).attr('title')[-3:], ' 0%')
def test_reviewer_leaders(self):
reviewers = UserProfile.objects.all()[:2]
# 1st user reviews 2, 2nd user only 1.
users = cycle(reviewers)
for app in self.apps:
mkt.log(mkt.LOG.APPROVE_VERSION, app, app.latest_version,
user=users.next(), details={'comments': 'hawt'})
doc = pq(self.client.get(self.url).content.decode('utf-8'))
# Top Reviews.
table = doc('#editors-stats .editor-stats-table').eq(0)
eq_(table.find('td').eq(0).text(), reviewers[0].email)
eq_(table.find('td').eq(1).text(), u'2')
eq_(table.find('td').eq(2).text(), reviewers[1].email)
eq_(table.find('td').eq(3).text(), u'1')
# Top Reviews this month.
table = doc('#editors-stats .editor-stats-table').eq(1)
eq_(table.find('td').eq(0).text(), reviewers[0].email)
eq_(table.find('td').eq(1).text(), u'2')
eq_(table.find('td').eq(2).text(), reviewers[1].email)
eq_(table.find('td').eq(3).text(), u'1')
class FlagsMixin(object):
def test_flag_packaged_app(self):
self.apps[0].update(is_packaged=True)
if self.uses_es():
self.reindex(Webapp)
eq_(self.apps[0].is_packaged, True)
res = self.client.get(self.url)
eq_(res.status_code, 200)
td = pq(res.content)('#addon-queue tbody tr td.flags').eq(0)
flag = td('div.sprite-reviewer-packaged-app')
eq_(flag.length, 1)
def test_flag_premium_app(self):
self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
if self.uses_es():
self.reindex(Webapp)
eq_(self.apps[0].is_premium(), True)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tds = pq(res.content)('#addon-queue tbody tr td.flags')
flags = tds('div.sprite-reviewer-premium')
eq_(flags.length, 1)
def test_flag_free_inapp_app(self):
self.apps[0].update(premium_type=mkt.ADDON_FREE_INAPP)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
tds = pq(res.content)('#addon-queue tbody tr td.flags')
eq_(tds('div.sprite-reviewer-premium.inapp.free').length, 1)
def test_flag_premium_inapp_app(self):
self.apps[0].update(premium_type=mkt.ADDON_PREMIUM_INAPP)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
tds = pq(res.content)('#addon-queue tbody tr td.flags')
eq_(tds('div.sprite-reviewer-premium.inapp').length, 1)
def test_flag_info(self):
self.apps[0].latest_version.update(has_info_request=True)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tds = pq(res.content)('#addon-queue tbody tr td.flags')
flags = tds('div.sprite-reviewer-info')
eq_(flags.length, 1)
def test_flag_comment(self):
self.apps[0].latest_version.update(has_editor_comment=True)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tds = pq(res.content)('#addon-queue tbody tr td.flags')
flags = tds('div.sprite-reviewer-editor')
eq_(flags.length, 1)
class XSSMixin(object):
def test_xss_in_queue(self):
a = self.apps[0]
a.name = '<script>alert("xss")</script>'
a.save()
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tbody = pq(res.content)('#addon-queue tbody').html()
        assert '&lt;script&gt;' in tbody
assert '<script>' not in tbody
class TestAppQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
def setUp(self):
super(TestAppQueue, self).setUp()
self.apps = [app_factory(name='XXX',
status=mkt.STATUS_PENDING,
version_kw={'nomination': self.days_ago(2)},
file_kw={'status': mkt.STATUS_PENDING}),
app_factory(name='YYY',
status=mkt.STATUS_PENDING,
version_kw={'nomination': self.days_ago(1)},
file_kw={'status': mkt.STATUS_PENDING}),
app_factory(name='ZZZ')]
self.apps[0].update(created=self.days_ago(12))
self.apps[1].update(created=self.days_ago(11))
RereviewQueue.objects.create(addon=self.apps[2])
self.url = reverse('reviewers.apps.queue_pending')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestAppQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_queue_viewing_ping(self):
eq_(self.client.post(reverse('reviewers.queue_viewing')).status_code,
200)
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
apps = Webapp.objects.filter(
status=mkt.STATUS_PENDING).order_by('created')
expected = [
(unicode(apps[0].name), self.review_url(apps[0])),
(unicode(apps[1].name), self.review_url(apps[1])),
]
check_links(expected, links, verify=False)
def test_action_buttons_pending(self):
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Reject', 'reject'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_rejected(self):
# Check action buttons for a previously rejected app.
self.apps[0].update(status=mkt.STATUS_REJECTED)
self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
@mock.patch('mkt.versions.models.Version.is_privileged', True)
def test_action_buttons_privileged_cantreview(self):
self.apps[0].update(is_packaged=True)
self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
@mock.patch('mkt.versions.models.Version.is_privileged', True)
def test_action_buttons_privileged_canreview(self):
self.login_as_senior_reviewer()
self.apps[0].update(is_packaged=True)
self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_devices(self):
AddonDeviceType.objects.create(addon=self.apps[0], device_type=1)
AddonDeviceType.objects.create(addon=self.apps[0], device_type=2)
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(5)')
eq_(tds('ul li:not(.unavailable)').length, 2)
def test_payments(self):
self.apps[0].update(premium_type=mkt.ADDON_PREMIUM)
self.apps[1].update(premium_type=mkt.ADDON_FREE_INAPP)
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(6)')
eq_(tds.eq(0).text(),
unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_PREMIUM]))
eq_(tds.eq(1).text(),
unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_FREE_INAPP]))
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (2)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (2)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
# self.apps[2] is not pending so doesn't show up either.
eq_([a.app.id for a in res.context['addons']], [self.apps[1].id])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (1)')
eq_(links[1].text, u'Re-reviews (1)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (1)')
def test_incomplete_no_in_queue(self):
[app.update(status=mkt.STATUS_NULL) for app in self.apps]
if self.uses_es():
self.reindex(Webapp)
req = req_factory_factory(
self.url,
user=UserProfile.objects.get(email='[email protected]'))
doc = pq(queue_apps(req).content)
assert not doc('#addon-queue tbody tr').length
def test_waiting_time(self):
"""Check objects show queue objects' created."""
res = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(res.content)('td time')]
expected_waiting_times = [isotime(app.latest_version.nomination)
for app in self.apps[0:2]]
self.assertSetEqual(expected_waiting_times, waiting_times)
class TestAppQueueES(mkt.site.tests.ESTestCase, TestAppQueue):
def setUp(self):
super(TestAppQueueES, self).setUp()
self.create_switch('reviewer-tools-elasticsearch')
self.reindex(Webapp)
class TestRegionQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
def setUp(self):
super(TestRegionQueue, self).setUp()
self.apps = [app_factory(name='WWW',
status=mkt.STATUS_PUBLIC),
app_factory(name='XXX',
status=mkt.STATUS_APPROVED),
app_factory(name='YYY',
status=mkt.STATUS_PUBLIC),
app_factory(name='ZZZ',
status=mkt.STATUS_PENDING)]
# WWW and XXX are the only ones actually requested to be public.
self.apps[0].geodata.update(region_cn_status=mkt.STATUS_PENDING,
region_cn_nominated=self.days_ago(2))
self.apps[1].geodata.update(region_cn_status=mkt.STATUS_PENDING,
region_cn_nominated=self.days_ago(1))
self.apps[2].geodata.update(region_cn_status=mkt.STATUS_PUBLIC)
self.grant_permission(self.reviewer_user, 'Apps:ReviewRegionCN')
self.login_as_editor()
self.url = reverse('reviewers.apps.queue_region',
args=[mkt.regions.CHN.slug])
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('.regional-queue tbody tr td:first-child a')
apps = Webapp.objects.pending_in_region('cn').order_by(
'_geodata__region_cn_nominated')
src = '?src=queue-region-cn'
expected = [
(unicode(apps[0].name), apps[0].get_url_path() + src),
(unicode(apps[1].name), apps[1].get_url_path() + src),
]
check_links(expected, links, verify=False)
def test_escalated_not_in_queue(self):
self.grant_permission(self.snr_reviewer_user, 'Apps:ReviewRegionCN')
self.login_as_senior_reviewer()
self.apps[0].escalationqueue_set.create()
res = self.client.get(self.url)
eq_([a.app for a in res.context['addons']], [self.apps[1]])
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestRereviewQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
def setUp(self):
super(TestRereviewQueue, self).setUp()
self.apps = [app_factory(name='XXX'),
app_factory(name='YYY'),
app_factory(name='ZZZ')]
RereviewQueue.objects.create(addon=self.apps[0]).update(
created=self.days_ago(5))
RereviewQueue.objects.create(addon=self.apps[1]).update(
created=self.days_ago(3))
RereviewQueue.objects.create(addon=self.apps[2]).update(
created=self.days_ago(1))
self.apps[0].update(created=self.days_ago(15))
self.apps[1].update(created=self.days_ago(13))
self.apps[2].update(created=self.days_ago(11))
if self.uses_es():
self.reindex(Webapp)
self.url = reverse('reviewers.apps.queue_rereview')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestRereviewQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
apps = [rq.addon for rq in
RereviewQueue.objects.all().order_by('created')]
expected = [
(unicode(apps[0].name), self.review_url(apps[0])),
(unicode(apps[1].name), self.review_url(apps[1])),
(unicode(apps[2].name), self.review_url(apps[2])),
]
check_links(expected, links, verify=False)
def test_waiting_time(self):
"""Check objects show queue objects' created."""
r = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(r.content)('td time')]
expected_waiting_times = [
isotime(app.rereviewqueue_set.all()[0].created)
for app in self.apps]
self.assertSetEqual(expected_waiting_times, waiting_times)
def test_action_buttons_public_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Clear Re-review', 'clear_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_public(self):
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Reject', 'reject'),
(u'Clear Re-review', 'clear_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_reject(self):
self.apps[0].update(status=mkt.STATUS_REJECTED)
self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Clear Re-review', 'clear_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (3)')
eq_(links[2].text, u'Updates (0)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (3)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
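        # Escalating an app removes it from the re-review queue listing.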
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
if self.uses_es():
self.assertSetEqual([a.id for a in res.context['addons']],
[a.id for a in self.apps[1:]])
else:
self.assertSetEqual([a.app for a in res.context['addons']],
self.apps[1:])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (2)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (1)')
def test_addon_deleted(self):
app = self.apps[0]
app.delete()
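        # Deleting the app should also remove its re-review queue entry.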
eq_(RereviewQueue.objects.filter(addon=app).exists(), False)
class TestRereviewQueueES(mkt.site.tests.ESTestCase, TestRereviewQueue):
def setUp(self):
super(TestRereviewQueueES, self).setUp()
self.create_switch('reviewer-tools-elasticsearch')
self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestUpdateQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin,
XSSMixin):
def setUp(self):
super(TestUpdateQueue, self).setUp()
app1 = app_factory(is_packaged=True, name='XXX',
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
app2 = app_factory(is_packaged=True, name='YYY',
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
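        # Give each app a pending 1.1 version so both appear in this queue.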
version_factory(addon=app1, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
version_factory(addon=app2, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
self.apps = list(Webapp.objects.order_by('id'))
self.url = reverse('reviewers.apps.queue_updates')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestUpdateQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_template_links(self):
self.apps[0].versions.latest().update(nomination=self.days_ago(2))
self.apps[1].versions.latest().update(nomination=self.days_ago(1))
if self.uses_es():
self.reindex(Webapp)
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
expected = [
(unicode(self.apps[0].name), self.review_url(self.apps[0])),
(unicode(self.apps[1].name), self.review_url(self.apps[1])),
]
check_links(expected, links, verify=False)
def test_action_buttons_public_senior_reviewer(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
self.login_as_senior_reviewer()
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_public(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Reject', 'reject'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_reject(self):
self.apps[0].versions.latest().files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Request Re-review', 'manual_rereview'),
(u'Escalate', 'escalate'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (2)')
def test_queue_count_senior_reviewer(self):
self.login_as_senior_reviewer()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (2)')
eq_(links[3].text, u'Escalations (0)')
def test_escalated_not_in_queue(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.apps[0])
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_([a.app.id for a in res.context['addons']],
[app.id for app in self.apps[1:]])
doc = pq(res.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (1)')
eq_(links[3].text, u'Escalations (1)')
def test_order(self):
self.apps[0].update(created=self.days_ago(10))
self.apps[1].update(created=self.days_ago(5))
self.apps[0].versions.latest().update(nomination=self.days_ago(1))
self.apps[1].versions.latest().update(nomination=self.days_ago(4))
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
apps = list(res.context['addons'])
eq_(apps[0].app.id, self.apps[1].id)
eq_(apps[1].app.id, self.apps[0].id)
def test_only_updates_in_queue(self):
# Add new packaged app, which should only show up in the pending queue.
app = app_factory(is_packaged=True, name='ZZZ',
status=mkt.STATUS_PENDING,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING})
self.apps.append(app)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
apps = [a.app for a in res.context['addons']]
assert app not in apps, (
'Unexpected: Found a new packaged app in the updates queue.')
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (2)')
def test_approved_update_in_queue(self):
app = app_factory(is_packaged=True, name='YYY',
status=mkt.STATUS_APPROVED,
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': self.days_ago(2)})
self.apps.append(app)
File.objects.filter(version__addon=app).update(status=app.status)
version_factory(addon=app, version='1.1', created=self.days_ago(1),
nomination=self.days_ago(1),
file_kw={'status': mkt.STATUS_PENDING})
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
assert app.id in [a.app.id for a in res.context['addons']]
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
def test_update_queue_with_empty_nomination(self):
app = app_factory(is_packaged=True, name='YYY',
status=mkt.STATUS_NULL,
version_kw={'version': '1.0',
'created': self.days_ago(2),
'nomination': None})
self.apps.append(app)
first_version = app.latest_version
version_factory(addon=app, version='1.1', created=self.days_ago(1),
nomination=None,
file_kw={'status': mkt.STATUS_PENDING})
# Now that we have a version with nomination=None, reset app status.
app.update(status=mkt.STATUS_APPROVED)
File.objects.filter(version=first_version).update(status=app.status)
# Safeguard: we /really/ want to test with nomination=None.
eq_(app.latest_version.reload().nomination, None)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
assert app.id in [a.app.id for a in res.context['addons']]
eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)')
def test_deleted_version_not_in_queue(self):
"""
This tests that an app with a prior pending version that got
deleted doesn't trigger the app to remain in the review queue.
"""
app = self.apps[0]
# File is PENDING and delete current version.
old_ver = app.versions.order_by('id')[0]
old_ver.files.latest().update(status=mkt.STATUS_PENDING)
old_ver.delete()
# "Approve" the app.
app.versions.latest().files.latest().update(status=mkt.STATUS_PUBLIC)
eq_(app.reload().status, mkt.STATUS_PUBLIC)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_(res.status_code, 200)
# Verify that our app has 2 versions.
eq_(Version.with_deleted.filter(addon=app).count(), 2)
# Verify the apps in the context are what we expect.
doc = pq(res.content)
eq_(doc('.tabnav li a')[2].text, u'Updates (1)')
apps = [a.app.id for a in res.context['addons']]
ok_(app.id not in apps)
ok_(self.apps[1].id in apps)
def test_waiting_time(self):
"""Check objects show queue objects' created."""
r = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(r.content)('td time')]
expected_waiting_times = [isotime(app.latest_version.nomination)
for app in self.apps]
self.assertSetEqual(expected_waiting_times, waiting_times)
class TestUpdateQueueES(mkt.site.tests.ESTestCase, TestUpdateQueue):
def setUp(self):
super(TestUpdateQueueES, self).setUp()
self.create_switch('reviewer-tools-elasticsearch')
self.reindex(Webapp)
@mock.patch('mkt.versions.models.Version.is_privileged', False)
class TestEscalationQueue(AppReviewerTest, AccessMixin, FlagsMixin,
SearchMixin, XSSMixin):
def setUp(self):
super(TestEscalationQueue, self).setUp()
self.apps = [app_factory(name='XXX'),
app_factory(name='YYY'),
app_factory(name='ZZZ')]
EscalationQueue.objects.create(addon=self.apps[0]).update(
created=self.days_ago(5))
EscalationQueue.objects.create(addon=self.apps[1]).update(
created=self.days_ago(3))
EscalationQueue.objects.create(addon=self.apps[2]).update(
created=self.days_ago(1))
self.apps[0].update(created=self.days_ago(15))
self.apps[1].update(created=self.days_ago(13))
self.apps[2].update(created=self.days_ago(11))
self.login_as_senior_reviewer()
self.url = reverse('reviewers.apps.queue_escalated')
def tearDown(self):
if self.uses_es():
unindex_webapps([app.id for app in self.apps])
super(TestEscalationQueue, self).tearDown()
def review_url(self, app):
return reverse('reviewers.apps.review', args=[app.app_slug])
def test_flag_blocked(self):
# Blocklisted apps should only be in the update queue, so this flag
# check is here rather than in FlagsMixin.
self.apps[0].update(status=mkt.STATUS_BLOCKED)
if self.uses_es():
self.reindex(Webapp)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tds = pq(res.content)('#addon-queue tbody tr td.flags')
flags = tds('div.sprite-reviewer-blocked')
eq_(flags.length, 1)
def test_no_access_regular_reviewer(self):
self.login_as_editor()
res = self.client.get(self.url)
eq_(res.status_code, 403)
def test_template_links(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a')
apps = [rq.addon for rq in
EscalationQueue.objects.all().order_by('addon__created')]
expected = [
(unicode(apps[0].name), self.review_url(apps[0])),
(unicode(apps[1].name), self.review_url(apps[1])),
(unicode(apps[2].name), self.review_url(apps[2])),
]
check_links(expected, links, verify=False)
def test_waiting_time(self):
"""Check objects show queue objects' created."""
r = self.client.get(self.url)
waiting_times = [wait.attrib['isotime'] for wait in
pq(r.content)('td time')]
expected_waiting_times = [
isotime(app.escalationqueue_set.all()[0].created)
for app in self.apps]
self.assertSetEqual(expected_waiting_times, waiting_times)
def test_action_buttons_public(self):
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Reject', 'reject'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Clear Escalation', 'clear_escalation'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_action_buttons_reject(self):
self.apps[0].update(status=mkt.STATUS_REJECTED)
self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED)
r = self.client.get(self.review_url(self.apps[0]))
eq_(r.status_code, 200)
actions = pq(r.content)('#review-actions input')
expected = [
(u'Approve', 'public'),
(u'Ban app', 'disable'),
(u'Request Re-review', 'manual_rereview'),
(u'Clear Escalation', 'clear_escalation'),
(u'Message developer', 'info'),
(u'Private comment', 'comment'),
]
self.check_actions(expected, actions)
def test_invalid_page(self):
r = self.client.get(self.url, {'page': 999})
eq_(r.status_code, 200)
eq_(r.context['pager'].number, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Escalations (3)')
def test_addon_deleted(self):
app = self.apps[0]
app.delete()
eq_(EscalationQueue.objects.filter(addon=app).exists(), False)
class TestEscalationQueueES(mkt.site.tests.ESTestCase, TestEscalationQueue):
def setUp(self):
super(TestEscalationQueueES, self).setUp()
self.create_switch('reviewer-tools-elasticsearch')
self.reindex(Webapp)
class TestReviewTransaction(AttachmentManagementMixin,
mkt.site.tests.MockEsMixin,
mkt.site.tests.MockBrowserIdMixin,
test.TransactionTestCase,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestReviewTransaction, self).setUp()
mkt.site.tests.TestCase.grant_permission(
user_factory(email='editor'), 'Apps:Review')
self.mock_browser_id()
def get_app(self):
return Webapp.objects.get(id=337141)
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
@mock.patch('lib.crypto.packaged.sign_app')
def test_public_sign(self, sign_mock, json_mock, update_cached_manifests):
self.app = self.get_app()
self.version = self.app.latest_version
self.version.files.all().update(status=mkt.STATUS_PENDING)
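        # Put a dummy file in private storage and make sure nothing is in
        # public storage yet.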
with private_storage.open(
self.version.files.all()[0].file_path, 'w') as f:
f.write('.')
public_storage.delete(self.version.files.all()[0].file_path)
self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
_current_version=None, _signal=False)
eq_(self.get_app().status, mkt.STATUS_PENDING)
update_cached_manifests.reset_mock()
sign_mock.return_value = None # Didn't fail.
json_mock.return_value = {'name': 'Something'}
self.login('[email protected]')
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
resp = self.client.post(
reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
eq_(resp.status_code, 302)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(update_cached_manifests.delay.call_count, 1)
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.get_manifest_json')
@mock.patch('lib.crypto.packaged.sign')
def test_public_sign_failure(self, sign_mock, json_mock,
update_cached_manifests):
self.app = self.get_app()
self.version = self.app.latest_version
self.version.files.all().update(status=mkt.STATUS_PENDING)
self.app.update(status=mkt.STATUS_PENDING, is_packaged=True,
_current_version=None, _signal=False)
eq_(self.get_app().status, mkt.STATUS_PENDING)
sign_mock.side_effect = packaged.SigningError
json_mock.return_value = {'name': 'Something'}
self.login('[email protected]')
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
resp = self.client.post(
reverse('reviewers.apps.review', args=[self.app.app_slug]), data)
eq_(resp.status_code, 302)
eq_(self.get_app().status, mkt.STATUS_PENDING)
eq_(update_cached_manifests.delay.call_count, 0)
class TestReviewMixin(object):
    # E.g. commreply+12e0caffc4ca4174a6f62300c0ff180a@marketplace.firefox.com.
COMM_REPLY_RE = r'^commreply\+[a-f0-9]+\@marketplace\.firefox\.com$'
def post(self, data, queue='pending'):
res = self.client.post(self.url, data)
self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))
def _check_email(self, msg, subject, to=None):
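        # Default the recipients to the app's authors when `to` is omitted.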
if to:
eq_(msg.to, to)
else:
eq_(msg.to, list(self.app.authors.values_list('email', flat=True)))
assert re.match(self.COMM_REPLY_RE, msg.extra_headers['Reply-To'])
eq_(msg.cc, [])
eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL)
if subject:
eq_(msg.subject, '%s: %s' % (subject, self.app.name))
def _get_mail(self, email):
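        # Return the first outbox message whose recipient starts with `email`.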
return filter(lambda x: x.to[0].startswith(email), mail.outbox)[0]
def _check_email_dev_and_contact(self, subject, outbox_len=2):
"""
        Helper for checking that the developer and the Mozilla contact
        both get emailed.
"""
eq_(len(mail.outbox), outbox_len)
# Developer.
self._check_email(self._get_mail('steamcube'), subject)
# Mozilla contact.
self._check_email(self._get_mail('contact'), subject,
to=[self.mozilla_contact])
def _check_thread(self):
thread = self.app.threads
eq_(thread.count(), 1)
thread = thread.get()
perms = ('developer', 'reviewer', 'staff')
for key in perms:
assert getattr(thread, 'read_permission_%s' % key)
def _check_email_body(self, msg=None):
if not msg:
msg = mail.outbox[0]
body = msg.message().as_string()
url = self.app.get_url_path()
        assert url in body, 'Could not find app detail URL in %s' % msg
def _check_log(self, action):
assert AppLog.objects.filter(
addon=self.app, activity_log__action=action.id).exists(), (
"Didn't find `%s` action in logs." % action.short)
def _check_score(self, reviewed_type):
scores = ReviewerScore.objects.all()
assert len(scores) > 0
eq_(scores[0].score, mkt.REVIEWED_SCORES[reviewed_type])
eq_(scores[0].note_key, reviewed_type)
class TestReviewApp(SetupFilesMixin, AppReviewerTest, TestReviewMixin,
AccessMixin, AttachmentManagementMixin,
TestedonManagementMixin):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestReviewApp, self).setUp()
self.mozilla_contact = '[email protected]'
self.app = self.get_app()
self.app = make_game(self.app, True)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact)
self.version = self.app.latest_version
self.version.files.all().update(status=mkt.STATUS_PENDING)
self.file = self.version.all_files[0]
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
self.setup_files()
def get_app(self):
return Webapp.objects.get(id=337141)
def test_review_viewing_ping(self):
eq_(self.client.post(reverse('reviewers.review_viewing')).status_code,
200)
@mock.patch('mkt.webapps.models.Webapp.in_rereview_queue')
def test_rereview(self, is_rereview_queue):
is_rereview_queue.return_value = True
content = pq(self.client.get(self.url).content)
assert content('#queue-rereview').length
@mock.patch('mkt.webapps.models.Webapp.in_escalation_queue')
def test_escalated(self, in_escalation_queue):
in_escalation_queue.return_value = True
content = pq(self.client.get(self.url).content)
assert content('#queue-escalation').length
def test_cannot_review_my_app(self):
with self.settings(ALLOW_SELF_REVIEWS=False):
self.app.addonuser_set.create(
user=UserProfile.objects.get(email='[email protected]'))
res = self.client.head(self.url)
self.assert3xx(res, reverse('reviewers.home'))
res = self.client.post(self.url)
self.assert3xx(res, reverse('reviewers.home'))
def test_cannot_review_blocklisted_app(self):
self.app.update(status=mkt.STATUS_BLOCKED)
res = self.client.get(self.url)
self.assert3xx(res, reverse('reviewers.home'))
res = self.client.post(self.url)
self.assert3xx(res, reverse('reviewers.home'))
def test_review_no_latest_version(self):
self.app.versions.all().delete()
self.app.reload()
eq_(self.app.latest_version, None)
eq_(self.app.current_version, None)
response = self.client.get(self.url)
eq_(response.status_code, 200)
doc = pq(response.content)
assert not doc('input[name=action][value=info]').length
assert not doc('input[name=action][value=comment]').length
assert not doc('input[name=action][value=public]').length
assert not doc('input[name=action][value=reject]').length
# Also try with a packaged app.
self.app.update(is_packaged=True)
response = self.client.get(self.url)
eq_(response.status_code, 200)
def test_sr_can_review_blocklisted_app(self):
self.app.update(status=mkt.STATUS_BLOCKED)
self.login_as_senior_reviewer()
eq_(self.client.get(self.url).status_code, 200)
data = {'action': 'public', 'comments': 'yo'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data)
self.assert3xx(res, reverse('reviewers.apps.queue_pending'))
def test_pending_to_reject_w_device_overrides(self):
# This shouldn't be possible unless there's form hacking.
AddonDeviceType.objects.create(addon=self.app,
device_type=mkt.DEVICE_DESKTOP.id)
AddonDeviceType.objects.create(addon=self.app,
device_type=mkt.DEVICE_TABLET.id)
eq_(self.app.publish_type, mkt.PUBLISH_IMMEDIATE)
data = {'action': 'reject', 'comments': 'something',
'device_override': [mkt.DEVICE_DESKTOP.id]}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_REJECTED)
eq_(set([o.id for o in app.device_types]),
set([mkt.DEVICE_DESKTOP.id, mkt.DEVICE_TABLET.id]))
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
def test_pending_to_public_w_requirements_overrides(self, storefront_mock):
data = {'action': 'public', 'comments': 'something',
'has_sms': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert not self.app.latest_version.features.has_sms
self.post(data)
app = self.get_app()
assert app.latest_version.features.has_sms
eq_(app.publish_type, mkt.PUBLISH_PRIVATE)
eq_(app.status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE)
# A reviewer changing features shouldn't generate a re-review.
eq_(RereviewQueue.objects.count(), 0)
assert not storefront_mock.called
def test_pending_to_reject_w_requirements_overrides(self):
# Rejecting an app doesn't let you override features requirements.
data = {'action': 'reject', 'comments': 'something',
'has_sms': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert not self.app.latest_version.features.has_sms
self.post(data)
app = self.get_app()
assert not app.latest_version.features.has_sms
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_REJECTED)
    def test_pending_to_public_w_requirements_overrides_nothing_changed(self):
self.version.features.update(has_sms=True)
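        # The requirement is already set, so approving with the same value
        # should not log a features override.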
data = {'action': 'public', 'comments': 'something',
'has_sms': True}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
assert self.app.latest_version.features.has_sms
self.post(data)
app = self.get_app()
assert app.latest_version.features.has_sms
eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
eq_(app.status, mkt.STATUS_PUBLIC)
action_id = mkt.LOG.REVIEW_FEATURES_OVERRIDE.id
assert not AppLog.objects.filter(
addon=self.app, activity_log__action=action_id).exists()
@mock.patch('mkt.reviewers.views.messages.success', new=mock.Mock)
def test_incomplete_cant_approve(self):
self.app.update(status=mkt.STATUS_NULL)
self.app.latest_version.files.update(status=mkt.STATUS_NULL)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
# Still incomplete.
eq_(self.get_app().status, mkt.STATUS_NULL)
def test_notification_email_translation(self):
# https://bugzilla.mozilla.org/show_bug.cgi?id=1127790
raise SkipTest
"""Test that the app name is translated with the app's default_locale
and not the reviewer's when we are sending notification emails."""
original_name = unicode(self.app.name)
fr_translation = u'Mais allô quoi!'
es_translation = u'¿Dónde está la biblioteca?'
self.app.name = {
'fr': fr_translation,
'es': es_translation,
}
self.app.default_locale = 'fr'
self.app.save()
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es')
eq_(translation.get_language(), 'es')
eq_(len(mail.outbox), 2)
msg = mail.outbox[0]
assert original_name not in msg.subject
assert es_translation not in msg.subject
assert fr_translation in msg.subject
assert original_name not in msg.body
assert es_translation not in msg.body
assert fr_translation in msg.body
@mock.patch('lib.crypto.packaged.sign')
def test_require_sig_for_public(self, sign):
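        # Signing fails, so the app must stay pending instead of going public.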
sign.side_effect = packaged.SigningError
self.get_app().update(is_packaged=True)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.client.post(self.url, data)
eq_(self.get_app().status, mkt.STATUS_PENDING)
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
def test_pending_to_public_no_mozilla_contact(self, storefront_mock):
self.app.update(mozilla_contact='')
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
eq_(len(mail.outbox), 1)
        self._check_email(mail.outbox[0], 'Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
assert storefront_mock.called
@mock.patch('mkt.reviewers.views.messages.success')
def test_pending_to_escalation(self, messages):
data = {'action': 'escalate', 'comments': 'soup her man'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(EscalationQueue.objects.count(), 1)
self._check_log(mkt.LOG.ESCALATE_MANUAL)
# Test 2 emails: 1 to dev, 1 to admin.
eq_(len(mail.outbox), 2)
self._check_email(self._get_mail('steamcube'), 'Escalated')
self._check_email(
self._get_mail('snrreviewer'), 'Escalated',
to=[self.snr_reviewer_user.email])
eq_(messages.call_args_list[0][0][1], 'Review successfully processed.')
def test_pending_to_disable_senior_reviewer(self):
self.login_as_senior_reviewer()
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_DISABLED)
eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.APP_DISABLED)
self._check_email_dev_and_contact('Banned')
def test_pending_to_disable(self):
# Only senior reviewers can ban apps.
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data)
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(len(mail.outbox), 0)
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
def test_escalation_to_public(self, storefront_mock):
EscalationQueue.objects.create(addon=self.app)
eq_(self.app.status, mkt.STATUS_PENDING)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
eq_(EscalationQueue.objects.count(), 0)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
assert storefront_mock.called
def test_escalation_to_reject(self):
EscalationQueue.objects.create(addon=self.app)
eq_(self.app.status, mkt.STATUS_PENDING)
files = list(self.version.files.values_list('id', flat=True))
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
app = self.get_app()
eq_(app.status, mkt.STATUS_REJECTED)
eq_(File.objects.filter(id__in=files)[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.REJECT_VERSION)
eq_(EscalationQueue.objects.count(), 0)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
def test_escalation_to_disable_senior_reviewer(self):
self.login_as_senior_reviewer()
EscalationQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
app = self.get_app()
eq_(app.status, mkt.STATUS_DISABLED)
eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.APP_DISABLED)
eq_(EscalationQueue.objects.count(), 0)
self._check_email_dev_and_contact('Banned')
def test_escalation_to_disable(self):
EscalationQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data, queue='escalated')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(EscalationQueue.objects.count(), 1)
eq_(len(mail.outbox), 0)
def test_clear_escalation(self):
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
EscalationQueue.objects.create(addon=self.app)
data = {'action': 'clear_escalation', 'comments': 'all clear'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='escalated')
eq_(EscalationQueue.objects.count(), 0)
self._check_log(mkt.LOG.ESCALATION_CLEARED)
# Ensure we don't send email to developer on clearing escalations.
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_rereview_to_reject(self):
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(self.get_app().status, mkt.STATUS_REJECTED)
self._check_log(mkt.LOG.REJECT_VERSION)
eq_(RereviewQueue.objects.count(), 0)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_disable_senior_reviewer(self):
self.login_as_senior_reviewer()
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(self.get_app().status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.APP_DISABLED)
eq_(RereviewQueue.objects.filter(addon=self.app).count(), 0)
self._check_email_dev_and_contact('Banned')
def test_rereview_to_disable(self):
RereviewQueue.objects.create(addon=self.app)
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'disable', 'comments': 'banned ur app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
res = self.client.post(self.url, data, queue='rereview')
eq_(res.status_code, 200)
ok_('action' in res.context['form'].errors)
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(RereviewQueue.objects.filter(addon=self.app).count(), 1)
eq_(len(mail.outbox), 0)
def test_manual_rereview(self):
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
data = {'action': 'manual_rereview', 'comments': 'man dem'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
# The app status shouldn't change.
eq_(self.get_app().status, mkt.STATUS_PUBLIC)
eq_(RereviewQueue.objects.count(), 1)
self._check_log(mkt.LOG.REREVIEW_MANUAL)
# Ensure we don't send email to developer on manual rereviews.
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
def test_clear_rereview(self):
self.app.update(status=mkt.STATUS_PUBLIC)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
RereviewQueue.objects.create(addon=self.app)
data = {'action': 'clear_rereview', 'comments': 'all clear'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(RereviewQueue.objects.count(), 0)
self._check_log(mkt.LOG.REREVIEW_CLEARED)
# Ensure we don't send emails to the developer on clearing re-reviews.
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_clear_rereview_unlisted(self):
self.app.update(status=mkt.STATUS_UNLISTED)
self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC)
RereviewQueue.objects.create(addon=self.app)
data = {'action': 'clear_rereview', 'comments': 'all clear'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(RereviewQueue.objects.count(), 0)
self._check_log(mkt.LOG.REREVIEW_CLEARED)
# Ensure we don't send emails to the developer on clearing re-reviews.
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW)
def test_rereview_to_escalation(self):
RereviewQueue.objects.create(addon=self.app)
data = {'action': 'escalate', 'comments': 'soup her man'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data, queue='rereview')
eq_(EscalationQueue.objects.count(), 1)
self._check_log(mkt.LOG.ESCALATE_MANUAL)
# Test 2 emails: 1 to dev, 1 to admin.
eq_(len(mail.outbox), 2)
self._check_email(self._get_mail('steamcube'), 'Escalated')
self._check_email(
self._get_mail('snrreviewer'), 'Escalated',
to=[self.snr_reviewer_user.email])
def test_more_information(self):
# Test the same for all queues.
data = {'action': 'info', 'comments': 'Knead moor in faux'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().status, mkt.STATUS_PENDING)
self._check_log(mkt.LOG.REQUEST_INFORMATION)
vqs = self.get_app().versions.all()
eq_(vqs.count(), 1)
eq_(vqs.filter(has_info_request=True).count(), 1)
self._check_email_dev_and_contact('Reviewer comment')
def test_multi_cc_email(self):
# Test multiple mozilla_contact emails via more information.
contacts = [user_factory(email=u'á').email,
user_factory(email=u'ç').email]
self.mozilla_contact = ', '.join(contacts)
self.app.update(mozilla_contact=self.mozilla_contact)
data = {'action': 'info', 'comments': 'Knead moor in faux'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(len(mail.outbox), 3)
subject = 'Reviewer comment'
self._check_email(self._get_mail('steamcube'), subject)
self._check_email(self._get_mail(contacts[0]), subject,
to=[contacts[0]])
self._check_email(self._get_mail(contacts[1]), subject,
to=[contacts[1]])
def test_comment(self):
# Test the same for all queues.
data = {'action': 'comment', 'comments': 'mmm, nice app'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], None, to=[self.mozilla_contact])
self._check_log(mkt.LOG.COMMENT_VERSION)
def test_receipt_no_node(self):
res = self.client.get(self.url)
eq_(len(pq(res.content)('#receipt-check-result')), 0)
def test_receipt_has_node(self):
self.get_app().update(premium_type=mkt.ADDON_PREMIUM)
res = self.client.get(self.url)
eq_(len(pq(res.content)('.reviewers-desktop #receipt-check-result')),
1)
eq_(len(pq(res.content)('.reviewers-mobile #receipt-check-result')),
1)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json(self, mock_get):
m = mock.Mock()
m.content = 'the manifest contents <script>'
m.headers = CaseInsensitiveDict(
{'content-type': 'application/x-web-app-manifest+json <script>'})
mock_get.return_value = m
expected = {
            'content': 'the manifest contents &lt;script&gt;',
            'headers': {'content-type':
                        'application/x-web-app-manifest+json &lt;script&gt;'},
'success': True,
'permissions': {}
}
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content), expected)
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_unicode(self, mock_get):
m = mock.Mock()
m.content = u'كك some foreign ish'
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content), {'content': u'كك some foreign ish',
'headers': {}, 'success': True,
'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding(self, mock_get):
m = mock.Mock()
m.content = open(self.manifest_path('non-utf8.webapp')).read()
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
data = json.loads(r.content)
assert u'"name": "W2MO\u017d"' in data['content']
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_encoding_empty(self, mock_get):
m = mock.Mock()
m.content = ''
m.headers = CaseInsensitiveDict({})
mock_get.return_value = m
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content), {'content': u'', 'headers': {},
'success': True, 'permissions': {}})
@mock.patch('mkt.reviewers.views.requests.get')
def test_manifest_json_traceback_in_response(self, mock_get):
m = mock.Mock()
m.content = {'name': 'Some name'}
m.headers = CaseInsensitiveDict({})
mock_get.side_effect = requests.exceptions.SSLError
mock_get.return_value = m
# We should not 500 on a traceback.
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
data = json.loads(r.content)
assert data['content'], 'There should be a content with the traceback'
eq_(data['headers'], {})
@mock.patch('mkt.reviewers.views.json.dumps')
def test_manifest_json_packaged(self, mock_):
# Test that when the app is packaged, _mini_manifest is called.
mock_.return_value = '{}'
self.get_app().update(is_packaged=True)
res = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(res.status_code, 200)
assert mock_.called
@mock.patch('mkt.reviewers.views._get_manifest_json')
def test_manifest_json_perms(self, mock_):
mock_.return_value = {
'permissions': {
"foo": {"description": "foo"},
"camera": {"description": "<script>"}
}
}
self.get_app().update(is_packaged=True)
r = self.client.get(reverse('reviewers.apps.review.manifest',
args=[self.app.app_slug]))
eq_(r.status_code, 200)
eq_(json.loads(r.content)['permissions'],
{'foo': {'description': 'foo', 'type': 'web'},
             'camera': {'description': '&lt;script&gt;', 'type': 'priv'}})
def test_abuse(self):
AbuseReport.objects.create(addon=self.app, message='!@#$')
res = self.client.get(self.url)
doc = pq(res.content)
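        # The report count shows in both desktop and mobile summary blocks.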
dd = doc('.reviewers-desktop #summary dd.abuse-reports')
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
dd = doc('.reviewers-mobile #summary dd.abuse-reports')
eq_(dd.text(), u'1')
eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug]))
def _attachment_form_data(self, num=1, action='comment'):
data = {'action': action,
'comments': 'mmm, nice app'}
data.update(self._attachment_management_form(num=num))
data.update(self._attachments(num))
return data
@override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR)
@mock.patch('mkt.site.storage_utils.LocalFileStorage.save')
def test_no_attachments(self, save_mock):
""" Test addition of no attachment """
data = self._attachment_form_data(num=0, action='public')
data.update(self._testedon_management_form())
self.post(data)
eq_(save_mock.called, False, save_mock.call_args_list)
def test_idn_app_domain(self):
response = self.client.get(self.url)
assert 'IDN domain!' not in response.content
self.get_app().update(app_domain=u'http://www.allïzom.org')
response = self.client.get(self.url)
assert 'IDN domain!' in response.content
def test_xss_domain(self):
        # It shouldn't be possible to have this in the app domain since it
        # would never validate, but better safe than sorry.
self.get_app().update(app_domain=u'<script>alert(42)</script>')
response = self.client.get(self.url)
assert '<script>alert(42)</script>' not in response.content
        assert '&lt;script&gt;alert(42)&lt;/script&gt;' in response.content
def test_priority_flag_cleared_for_public(self):
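        # Approving the app should clear its priority review flag.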
self.get_app().update(priority_review=True)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().priority_review, False)
def test_priority_flag_uncleared_for_reject(self):
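        # Rejecting the app should leave the priority review flag set.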
self.get_app().update(priority_review=True)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(self.get_app().priority_review, True)
def test_versions_history_pagination(self):
self.app.update(is_packaged=True)
version_factory(addon=self.app, version='2.0')
version_factory(addon=self.app, version='3.0')
# Mock paginate to paginate with only 2 versions to limit the
# number of versions this test has to create.
with mock.patch('mkt.reviewers.views.paginate',
lambda req, objs, limit: paginate(req, objs, 2)):
content = pq(self.client.get(self.url).content)
eq_(len(content('#review-files tr.listing-body')), 2)
eq_(len(content('#review-files-paginate a[rel=next]')), 1)
eq_(len(content('#review-files-paginate a[rel=prev]')), 0)
link = content('#review-files-paginate a[rel=next]')[0].attrib['href']
eq_(link, '%s?page=2#history' % self.url)
# Look at page 2.
with mock.patch('mkt.reviewers.views.paginate',
lambda req, objs, limit: paginate(req, objs, 2)):
content = pq(self.client.get(link).content)
eq_(len(content('#review-files tr.listing-body')), 1)
eq_(len(content('#review-files-paginate a[rel=next]')), 0)
eq_(len(content('#review-files-paginate a[rel=prev]')), 1)
eq_(content('#review-files-paginate a[rel=prev]')[0].attrib['href'],
'%s?page=1#history' % self.url)
class TestCannedResponses(AppReviewerTest):
def setUp(self):
super(TestCannedResponses, self).setUp()
self.login_as_editor()
self.app = app_factory(name='XXX', status=mkt.STATUS_PENDING)
self.cr = CannedResponse.objects.create(
name=u'app reason', response=u'app reason body',
sort_group=u'public')
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def test_ok(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
form = r.context['form']
choices = form.fields['canned_response'].choices[1][1]
# choices is grouped by the sort_group, where choices[0] is the
# default "Choose a response..." option.
# Within that, it's paired by [group, [[response, name],...]].
# So above, choices[1][1] gets the first real group's list of
# responses.
eq_(len(choices), 1)
assert self.cr.response in choices[0]
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApproveHostedApp(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin, TestedonManagementMixin):
"""
A separate test class for apps going to an approved state. All other state
transitions are tested above.
We're doing this to make the mocks easier to handle.
"""
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestApproveHostedApp, self).setUp()
self.mozilla_contact = '[email protected]'
self.app = self.get_app()
self.file = self.app.latest_version.files.all()[0]
self.file.update(status=mkt.STATUS_PENDING)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact,
_current_version=None)
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
return Webapp.objects.get(id=337141)
def _check_message(self, msg):
eq_(msg.call_args_list[0][0][1],
'"Web App Review" successfully processed (+60 points, 60 total).')
def test_pending_to_public(self, update_name, update_locales,
update_cached_manifests,
index_webapps, messages, storefront_mock):
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_message(messages)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
eq_(update_name.call_count, 0) # Not a packaged app.
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
# App is not packaged, no need to call update_cached_manifests.
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 1)
def test_pending_to_hidden(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock):
self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
self._check_message(messages)
eq_(update_name.call_count, 0) # Not a packaged app.
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
# App is not packaged, no need to call update_cached_manifests.
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 1)
def test_pending_to_approved(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock):
self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
# File status is PUBLIC since it is the only version.
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_message(messages)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        # The app is private but can still be installed by team members,
# so we should call those:
eq_(update_name.call_count, 0) # Not a packaged app.
eq_(update_locales.call_count, 1)
# App is not packaged, no need to call update_cached_manifests.
eq_(update_cached_manifests.delay.call_count, 0)
# App is private so we don't send this yet.
eq_(storefront_mock.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
def test_pending_to_reject(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock):
index_webapps.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
data = {'action': 'reject', 'comments': 'suxor'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
eq_(index_webapps.delay.call_count, 1)
app = self.get_app()
eq_(app.status, mkt.STATUS_REJECTED)
eq_(self.file.reload().status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_message(messages)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
eq_(update_name.call_count, 0) # Not a packaged app.
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedApp(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin,
TestedonManagementMixin):
"""
A separate test class for packaged apps going to an approved state.
We're doing this to make the mocks easier to handle.
"""
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestApprovePackagedApp, self).setUp()
self.mozilla_contact = '[email protected]'
self.app = self.get_app()
self.file = self.app.latest_version.files.all()[0]
self.file.update(status=mkt.STATUS_PENDING)
self.app.update(status=mkt.STATUS_PENDING,
mozilla_contact=self.mozilla_contact,
_current_version=None, is_packaged=True)
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
return Webapp.objects.get(id=337141)
def _check_message(self, msg):
eq_(msg.call_args_list[0][0][1],
'"Packaged App Review" successfully processed '
'(+60 points, 60 total).')
def test_pending_to_public(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock, sign_mock):
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_hidden(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock, sign_mock):
self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 1)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_approved(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock, sign_mock):
self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)
def test_pending_to_rejected(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock, sign_mock):
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_REJECTED)
eq_(self.file.reload().status, mkt.STATUS_DISABLED)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_pending_to_approved_app_private_prior_version_rejected(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
"""
Test that everything works out ok when v1.0 was rejected and developer
submitted v1.1 that is then approved. This should still be considered a
packaged review (not an update) and set the approved version to PUBLIC
        since the prior version is DISABLED. See bug 1075042.
"""
self.app.update(status=mkt.STATUS_REJECTED,
publish_type=mkt.PUBLISH_PRIVATE)
self.file.update(status=mkt.STATUS_DISABLED)
self.new_version = version_factory(
addon=self.app, version='1.1',
file_kw={'status': mkt.STATUS_PENDING})
index_webapps.delay.reset_mock()
update_cached_manifests.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(storefront_mock.call_count, 0)
eq_(self.app.current_version, None)
eq_(self.app.latest_version, self.new_version)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(app.latest_version, self.new_version)
eq_(app.current_version, self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.webapps.models.Webapp.set_iarc_storefront_data')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedVersions(AppReviewerTest, TestReviewMixin,
AttachmentManagementMixin,
TestedonManagementMixin):
"""
A separate test class for packaged apps with a 2nd version going to an
approved state.
We're doing this to make the mocks easier to handle.
"""
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestApprovePackagedVersions, self).setUp()
self.mozilla_contact = '[email protected]'
self.app = self.get_app()
self.file = self.app.latest_version.files.all()[0]
self.app.update(status=mkt.STATUS_PUBLIC,
mozilla_contact=self.mozilla_contact,
is_packaged=True)
self.new_version = version_factory(
addon=self.app, version='2.0',
file_kw={'status': mkt.STATUS_PENDING})
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def get_app(self):
return Webapp.objects.get(id=337141)
def _check_message(self, msg):
eq_(msg.call_args_list[0][0][1],
'"Updated Packaged App Review" successfully processed '
'(+40 points, 40 total).')
def test_version_pending_to_public(self, update_name, update_locales,
update_cached_manifests, index_webapps,
messages, storefront_mock, sign_mock):
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
eq_(app.current_version, self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 1)
eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved(self, update_name, update_locales,
update_cached_manifests,
index_webapps, messages,
storefront_mock, sign_mock):
self.app.update(publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_public_app_unlisted(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
self.app.update(status=mkt.STATUS_UNLISTED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
eq_(app.current_version, self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 1)
eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved_app_unlisted(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
self.app.update(status=mkt.STATUS_UNLISTED,
publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_public_app_private(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
self.app.update(status=mkt.STATUS_APPROVED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
eq_(app.current_version, self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
self._check_log(mkt.LOG.APPROVE_VERSION)
self._check_email_dev_and_contact('Approved')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 1)
eq_(sign_mock.call_args[0][0], app.current_version.pk)
def test_version_pending_to_approved_app_private(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
self.app.update(status=mkt.STATUS_APPROVED,
publish_type=mkt.PUBLISH_PRIVATE)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'public', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED)
self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
self._check_email_dev_and_contact('Approved but private')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 1)
eq_(update_locales.call_count, 1)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 1)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_args[0][0], self.new_version.pk)
def test_version_pending_to_rejected_app_public(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
self.app.update(status=mkt.STATUS_PUBLIC)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_PUBLIC)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_version_pending_to_rejected_app_unlisted(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
self.app.update(status=mkt.STATUS_UNLISTED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_UNLISTED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_count, 0)
def test_version_pending_to_rejected_app_private(
self, update_name, update_locales, update_cached_manifests,
index_webapps, messages, storefront_mock, sign_mock):
self.app.update(status=mkt.STATUS_APPROVED)
index_webapps.delay.reset_mock()
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
data = {'action': 'reject', 'comments': 'something'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self.post(data)
app = self.get_app()
eq_(app.status, mkt.STATUS_APPROVED)
ok_(app.current_version != self.new_version)
eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
eq_(self.new_version.all_files[0].status, mkt.STATUS_DISABLED)
self._check_log(mkt.LOG.REJECT_VERSION)
self._check_email_dev_and_contact('Rejected')
self._check_email_body()
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE)
self._check_message(messages)
eq_(update_name.call_count, 0)
eq_(update_locales.call_count, 0)
eq_(index_webapps.delay.call_count, 1)
eq_(update_cached_manifests.delay.call_count, 0)
eq_(storefront_mock.call_count, 0)
eq_(sign_mock.call_count, 0)
class TestReviewLog(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestReviewLog, self).setUp()
# Note: if `created` is not specified, `app_factory` uses a randomly
# generated timestamp.
self.apps = [app_factory(name='XXX', created=days_ago(3),
status=mkt.STATUS_PENDING),
app_factory(name='YYY', created=days_ago(2),
status=mkt.STATUS_PENDING)]
self.url = reverse('reviewers.apps.logs')
patcher = mock.patch.object(settings, 'TASK_USER_ID',
self.admin_user.id)
patcher.start()
self.addCleanup(patcher.stop)
def get_user(self):
return self.reviewer_user
def make_approvals(self):
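        # Log one rejection per app, each created on a different day.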
d = 1
for app in self.apps:
days_ago = self.days_ago(d)
mkt.log(mkt.LOG.REJECT_VERSION, app, app.latest_version,
user=self.get_user(), details={'comments': 'youwin'},
created=days_ago)
            # Throw in a few task logs that shouldn't get queried.
mkt.log(mkt.LOG.REREVIEW_MANIFEST_CHANGE, app, app.latest_version,
user=self.admin_user, details={'comments': 'foo'},
created=days_ago)
d += 1
def make_an_approval(self, action, comment='youwin', user=None, app=None):
if not user:
user = self.get_user()
if not app:
app = self.apps[0]
mkt.log(action, app, app.latest_version, user=user,
details={'comments': comment})
def test_basic(self):
self.make_approvals()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
assert doc('#log-filter button'), 'No filters.'
# Should have 2 showing.
rows = doc('tbody tr')
logs = rows.filter(':not(.hide)')
eq_(logs.length, 2)
# Ensure that the app links are valid.
eq_(logs.find('.name .app-link').eq(0).attr('href'),
self.apps[0].get_url_path())
eq_(logs.find('.name .app-link').eq(1).attr('href'),
self.apps[1].get_url_path())
eq_(rows.filter('.hide').eq(0).text(), 'youwin')
def test_search_app_soft_deleted(self):
self.make_approvals()
self.apps[0].update(status=mkt.STATUS_DELETED)
res = self.client.get(self.url)
eq_(res.status_code, 200)
doc = pq(res.content)
all_reviews = [d.attrib.get('data-addonid')
for d in doc('#log-listing tbody tr')]
assert str(self.apps[0].pk) in all_reviews, (
'Soft deleted review did not show up in listing')
def test_xss(self):
a = self.apps[0]
a.name = '<script>alert("xss")</script>'
a.save()
mkt.log(mkt.LOG.REJECT_VERSION, a, a.latest_version,
user=self.get_user(), details={'comments': 'xss!'})
r = self.client.get(self.url)
eq_(r.status_code, 200)
inner_html = pq(r.content)('#log-listing tbody td').eq(1).html()
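        # The app name should be escaped in the log, never a raw tag.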
        assert '&lt;script&gt;' in inner_html
        assert '<script>' not in inner_html
def test_end_filter(self):
"""
Let's use today as an end-day filter and make sure we see stuff if we
filter.
"""
self.make_approvals()
# Make sure we show the stuff we just made.
date = time.strftime('%Y-%m-%d')
r = self.client.get(self.url, dict(end=date))
eq_(r.status_code, 200)
doc = pq(r.content)('#log-listing tbody')
eq_(doc('tr:not(.hide)').length, 2)
eq_(doc('tr.hide').eq(0).text(), 'youwin')
def test_end_filter_wrong(self):
"""
        Pass an invalid end-day filter and make sure nothing breaks.
"""
self.make_approvals()
r = self.client.get(self.url, dict(end='wrong!'))
# If this is broken, we'll get a traceback.
eq_(r.status_code, 200)
eq_(pq(r.content)('#log-listing tr:not(.hide)').length, 3)
def test_search_comment_exists(self):
"""Search by comment."""
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
r = self.client.get(self.url, dict(search='hello'))
eq_(r.status_code, 200)
eq_(pq(r.content)('#log-listing tbody tr.hide').eq(0).text(), 'hello')
def test_search_comment_doesnt_exist(self):
"""Search by comment, with no results."""
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello')
r = self.client.get(self.url, dict(search='bye'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
def test_search_author_exists(self):
"""Search by author."""
self.make_approvals()
user = UserProfile.objects.get(email='[email protected]')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user, comment='hi')
r = self.client.get(self.url, dict(search='regular'))
eq_(r.status_code, 200)
rows = pq(r.content)('#log-listing tbody tr')
eq_(rows.filter(':not(.hide)').length, 1)
eq_(rows.filter('.hide').eq(0).text(), 'hi')
def test_search_author_doesnt_exist(self):
"""Search by author, with no results."""
self.make_approvals()
user = UserProfile.objects.get(email='[email protected]')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user)
r = self.client.get(self.url, dict(search='wrong'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
def test_search_addon_exists(self):
"""Search by add-on name."""
self.make_approvals()
app = self.apps[0]
r = self.client.get(self.url, dict(search=app.name))
eq_(r.status_code, 200)
tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
eq_(tr.length, 1)
eq_(tr.siblings('.comments').text(), 'youwin')
def test_search_addon_by_slug_exists(self):
"""Search by app slug."""
app = self.apps[0]
app.app_slug = 'a-fox-was-sly'
app.save()
self.make_approvals()
r = self.client.get(self.url, dict(search='fox'))
eq_(r.status_code, 200)
tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id)
eq_(tr.length, 1)
eq_(tr.siblings('.comments').text(), 'youwin')
def test_search_addon_doesnt_exist(self):
"""Search by add-on name, with no results."""
self.make_approvals()
r = self.client.get(self.url, dict(search='zzz'))
eq_(r.status_code, 200)
eq_(pq(r.content)('.no-results').length, 1)
@mock.patch('mkt.developers.models.ActivityLog.arguments', new=mock.Mock)
def test_addon_missing(self):
self.make_approvals()
r = self.client.get(self.url)
eq_(pq(r.content)('#log-listing tr td').eq(1).text(),
'App has been deleted.')
def test_request_info_logs(self):
self.make_an_approval(mkt.LOG.REQUEST_INFORMATION)
r = self.client.get(self.url)
eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
'More information requested')
def test_escalate_logs(self):
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
r = self.client.get(self.url)
eq_(pq(r.content)('#log-listing tr td a').eq(1).text(),
'Reviewer escalation')
def test_no_double_encode(self):
version = self.apps[0].latest_version
version.update(version='<foo>')
self.make_an_approval(mkt.LOG.ESCALATE_MANUAL)
r = self.client.get(self.url)
assert '<foo>' in pq(r.content)('#log-listing tr td').eq(1).text(), (
'Double-encoded string was found in reviewer log.')
class TestMotd(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestMotd, self).setUp()
self.url = reverse('reviewers.apps.motd')
self.key = u'mkt_reviewers_motd'
set_config(self.key, u'original value')
def test_perms_not_editor(self):
self.client.logout()
req = self.client.get(self.url, follow=True)
self.assert3xx(req, '%s?to=%s' % (reverse('users.login'), self.url))
self.client.login('[email protected]')
eq_(self.client.get(self.url).status_code, 403)
def test_perms_not_motd(self):
# Any type of reviewer can see the MOTD.
self.login_as_editor()
req = self.client.get(self.url)
eq_(req.status_code, 200)
eq_(req.context['form'], None)
# No redirect means it didn't save.
eq_(self.client.post(self.url, dict(motd='motd')).status_code, 200)
eq_(get_config(self.key), u'original value')
def test_motd_change(self):
# Only users in the MOTD group can POST.
user = self.reviewer_user
self.grant_permission(user, 'AppReviewerMOTD:Edit')
self.login_as_editor()
# Get is a 200 with a form.
req = self.client.get(self.url)
eq_(req.status_code, 200)
eq_(req.context['form'].initial['motd'], u'original value')
# Empty post throws an error.
req = self.client.post(self.url, dict(motd=''))
eq_(req.status_code, 200) # Didn't redirect after save.
eq_(pq(req.content)('#editor-motd .errorlist').text(),
'This field is required.')
# A real post now.
req = self.client.post(self.url, dict(motd='new motd'))
self.assert3xx(req, self.url)
eq_(get_config(self.key), u'new motd')
class TestReviewAppComm(AppReviewerTest, AttachmentManagementMixin,
TestReviewMixin, TestedonManagementMixin):
"""
Integration test that notes are created and that emails are
sent to the right groups of people.
"""
def setUp(self):
super(TestReviewAppComm, self).setUp()
self.app = app_factory(rated=True, status=mkt.STATUS_PENDING,
mozilla_contact='[email protected]')
self.app.addonuser_set.create(user=user_factory(email='steamcube'))
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
self.mozilla_contact = '[email protected]'
def _post(self, data, queue='pending'):
res = self.client.post(self.url, data)
self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue))
def _get_note(self):
eq_(self.app.threads.count(), 1)
thread = self.app.threads.all()[0]
eq_(thread.notes.count(), 1)
return thread.notes.all()[0]
def test_email_cc(self):
"""
        Email is sent to cc'ed people (those who have posted on the thread).
"""
poster = user_factory()
thread, note = create_comm_note(
self.app, self.app.latest_version, poster, 'lgtm')
data = {'action': 'public', 'comments': 'gud jerb'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self._post(data)
# Test emails.
self._check_email_dev_and_contact(None, outbox_len=5)
# Some person who joined the thread.
self._check_email(
self._get_mail(poster.email), 'Approved', to=[poster.email])
def test_approve(self):
"""
On approval, send an email to [developer, mozilla contact].
"""
data = {'action': 'public', 'comments': 'gud jerb'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.APPROVAL)
eq_(note.body, 'gud jerb')
# Test emails.
self._check_email_dev_and_contact(None)
def test_reject(self):
"""
On rejection, send an email to [developer, mozilla contact].
"""
data = {'action': 'reject', 'comments': 'rubesh'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.REJECTION)
eq_(note.body, 'rubesh')
# Test emails.
self._check_email_dev_and_contact(None)
def test_info(self):
"""
On info request, send an email to [developer, mozilla contact].
"""
data = {'action': 'info', 'comments': 'huh'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.MORE_INFO_REQUIRED)
eq_(note.body, 'huh')
# Test emails.
self._check_email_dev_and_contact(None)
def test_escalate(self):
"""
On escalation, send an email to senior reviewers and developer.
"""
data = {'action': 'escalate', 'comments': 'soup her man'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.ESCALATION)
eq_(note.body, 'soup her man')
# Test emails.
eq_(len(mail.outbox), 2)
self._check_email( # Senior reviewer.
self._get_mail(self.snr_reviewer_user.email), 'Escalated',
to=[self.snr_reviewer_user.email])
self._check_email(self._get_mail('steamcube'), 'Escalated')
def test_comment(self):
"""
        On reviewer comment, send an email to the Mozilla contact, but not
        to the developer.
"""
data = {'action': 'comment', 'comments': 'huh'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.REVIEWER_COMMENT)
eq_(note.body, 'huh')
# Test emails.
eq_(len(mail.outbox), 1)
self._check_email(mail.outbox[0], 'Private reviewer comment',
to=[self.mozilla_contact])
def test_disable(self):
"""
On banning, send an email to [developer, mozilla contact].
"""
self.login_as_admin()
data = {'action': 'disable', 'comments': 'u dun it'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form())
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.DISABLED)
eq_(note.body, 'u dun it')
# Test emails.
self._check_email_dev_and_contact(None)
def test_attachments(self):
data = {'action': 'comment', 'comments': 'huh'}
data.update(self._attachment_management_form(num=2))
data.update(self._attachments(num=2))
data.update(self._testedon_management_form())
self._post(data)
# Test attachments.
note = self._get_note()
eq_(note.attachments.count(), 2)
def test_tested_on_one(self):
"""Tested 'Tested on' message appended to note body."""
data = {'action': 'reject', 'comments': 'rubesh'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form(num=1))
data.update(self._platforms(1))
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.REJECTION)
eq_(note.body, u'rubesh\n\n'
u'Tested on \xd0esktop platform on PC with version 34')
def test_tested_on_two(self):
"""Tested two 'Tested on' messages appended to note body."""
data = {'action': 'reject', 'comments': 'rubesh'}
data.update(self._attachment_management_form(num=0))
data.update(self._testedon_management_form(num=2))
data.update(self._platforms(2))
self._post(data)
# Test notes.
note = self._get_note()
eq_(note.note_type, comm.REJECTION)
eq_(note.body, u'rubesh\n\n'
u'Tested on \xd0esktop platform on PC with version 34; '
u'FirefoxOS platform on ZT\xc8 Open with version 1.3<')
class TestModeratedQueue(mkt.site.tests.TestCase, AccessMixin):
def setUp(self):
super(TestModeratedQueue, self).setUp()
self.app = app_factory()
self.moderator_user = user_factory(email='moderator')
self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
user_factory(email='regular')
user1 = user_factory()
user2 = user_factory()
self.url = reverse('reviewers.apps.queue_moderated')
self.review1 = Review.objects.create(addon=self.app, body='body',
user=user1, rating=3,
editorreview=True)
ReviewFlag.objects.create(review=self.review1, flag=ReviewFlag.SPAM,
user=user1)
self.review2 = Review.objects.create(addon=self.app, body='body',
user=user2, rating=4,
editorreview=True)
ReviewFlag.objects.create(review=self.review2, flag=ReviewFlag.SUPPORT,
user=user2)
self.login(self.moderator_user)
def _post(self, action):
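        # Post the moderation formset with `action` set on the first review.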
ctx = self.client.get(self.url).context
data_formset = formset(initial(ctx['reviews_formset'].forms[0]))
data_formset['form-0-action'] = action
res = self.client.post(self.url, data_formset)
self.assert3xx(res, self.url)
def _get_logs(self, action):
return ActivityLog.objects.filter(action=action.id)
def test_anonymous_flagger(self):
ReviewFlag.objects.all()[0].update(user=None)
ReviewFlag.objects.all()[1].delete()
res = self.client.get(self.url)
txt = pq(res.content)('.reviews-flagged-reasons li div span').text()
teststring = u'Flagged by an anonymous user on'
ok_(txt.startswith(teststring),
'"%s" doesn\'t start with "%s"' % (txt, teststring))
def test_setup(self):
eq_(Review.objects.filter(editorreview=True).count(), 2)
eq_(ReviewFlag.objects.filter(flag=ReviewFlag.SPAM).count(), 1)
res = self.client.get(self.url)
doc = pq(res.content)('#reviews-flagged')
# Test the default action is "skip".
eq_(doc('.reviewers-desktop #id_form-0-action_1:checked').length, 1)
def test_skip(self):
# Skip the first review, which still leaves two.
self._post(mkt.ratings.REVIEW_MODERATE_SKIP)
res = self.client.get(self.url)
eq_(len(res.context['page'].object_list), 2)
def test_delete(self):
# Delete the first review, which leaves one.
self._post(mkt.ratings.REVIEW_MODERATE_DELETE)
res = self.client.get(self.url)
eq_(len(res.context['page'].object_list), 1)
eq_(self._get_logs(mkt.LOG.DELETE_REVIEW).count(), 1)
def test_keep(self):
# Keep the first review, which leaves one.
self._post(mkt.ratings.REVIEW_MODERATE_KEEP)
res = self.client.get(self.url)
eq_(len(res.context['page'].object_list), 1)
eq_(self._get_logs(mkt.LOG.APPROVE_REVIEW).count(), 1)
def test_no_reviews(self):
Review.objects.all().delete()
res = self.client.get(self.url)
eq_(res.status_code, 200)
eq_(pq(res.content)('#reviews-flagged .no-results').length, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (2)')
def test_queue_count_reviewer_and_moderator(self):
self.grant_permission(self.moderator_user, 'Apps:Review')
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Reviewing (0)')
eq_(links[4].text, u'Moderated Reviews (2)')
def test_deleted_app(self):
"Test that a deleted app doesn't break the queue."
self.app.delete()
r = self.client.get(self.url)
eq_(r.status_code, 200)
def test_queue_count_deleted_app(self):
self.app.delete()
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (0)')
class AbuseQueueMixin(object):
def _setUp(self):
self.abuseviewer_user = user_factory(email='abuser')
self.grant_permission(self.abuseviewer_user, self.perm)
self.login(self.abuseviewer_user)
user_factory(email='regular')
self.url = reverse(self.view_name)
def _post(self, action, form_index=0):
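        # Post the abuse-report formset with `action` set on the given form.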
ctx = self.client.get(self.url).context
data_formset = formset(initial(ctx['abuse_formset'].forms[0]))
data_formset['form-%s-action' % (form_index)] = action
res = self.client.post(self.url, data_formset)
self.assert3xx(res, self.url)
def _get_logs(self, action):
return ActivityLog.objects.filter(action=action.id)
def test_anonymous_flagger(self):
AbuseReport.objects.all()[0].update(reporter=None)
res = self.client.get(self.url)
txt = pq(res.content)('.abuse-reports-reports li div span').text()
teststring = u'Submitted by an anonymous user on'
ok_(txt.startswith(teststring),
'"%s" doesn\'t start with "%s"' % (txt, teststring))
def test_no_reviews(self):
AbuseReport.objects.all().delete()
res = self.client.get(self.url)
eq_(res.status_code, 200)
eq_(pq(res.content)('#abuse-reports .no-results').length, 1)
def test_queue_count(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
txt = pq(r.content)('.tabnav li a')[0].text
teststring = u'Abuse Reports (2)'
ok_(txt.endswith(teststring),
'"%s" doesn\'t start with "%s"' % (txt, teststring))
def test_skip(self):
        # Skip the first app/website's reports; 2 apps/sites still remain.
self._post(mkt.abuse.forms.ABUSE_REPORT_SKIP)
res = self.client.get(self.url)
eq_(len(res.context['page'].object_list), 2)
def test_first_read(self):
        # Mark the first app/website's reports as read, which leaves one.
self._post(mkt.abuse.forms.ABUSE_REPORT_READ)
res = self.client.get(self.url)
eq_(len(res.context['page'].object_list), 1)
# There are two abuse reports for app1/website1, so two log entries.
eq_(self._get_logs(self.log_const).count(), 2)
# Check the remaining abuse report remains unread.
eq_(AbuseReport.objects.filter(read=False).count(), 1)
def test_first_flag(self):
        # Flag the first app/website's reports.
self._post(mkt.abuse.forms.ABUSE_REPORT_FLAG)
res = self.client.get(self.url)
# Check one is left.
eq_(len(res.context['page'].object_list), 1)
# Check the object is flagged.
eq_(RereviewQueue.objects.count(), 1)
# As flagging marks read too, there should be 2 log entries.
eq_(self._get_logs(self.log_const).count(), 2)
# Check the remaining abuse report remains unread.
eq_(AbuseReport.objects.filter(read=False).count(), 1)
def test_xss(self):
xss = '<script>alert("xss")</script>'
AbuseReport.objects.all()[0].update(message=xss)
res = self.client.get(self.url)
eq_(res.status_code, 200)
tbody = pq(res.content)(
'#abuse-reports .abuse-reports-reports').html()
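        # The report message should appear escaped, never as a raw tag.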
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody
def test_deleted_website(self):
"Test that a deleted app/website doesn't break the queue."
AbuseReport.objects.all()[0].object.delete()
r = self.client.get(self.url)
eq_(r.status_code, 200)
txt = pq(r.content)('.tabnav li a')[0].text
teststring = u'Abuse Reports (1)'
ok_(txt.endswith(teststring),
'"%s" doesn\'t start with "%s"' % (txt, teststring))
class TestAppAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
AbuseQueueMixin):
perm = 'Apps:ReadAbuse'
view_name = 'reviewers.apps.queue_abuse'
log_const = mkt.LOG.APP_ABUSE_MARKREAD
def setUp(self):
super(TestAppAbuseQueue, self).setUp()
self._setUp()
@classmethod
def setUpTestData(cls):
app1 = app_factory()
app2 = app_factory()
# Add some extra apps, which shouldn't show up.
app_factory()
app_factory()
user1 = user_factory()
user2 = user_factory()
AbuseReport.objects.create(reporter=user1, ip_address='123.45.67.89',
addon=app1, message='bad')
AbuseReport.objects.create(reporter=user2, ip_address='123.01.67.89',
addon=app1, message='terrible')
AbuseReport.objects.create(reporter=user1, ip_address='123.01.02.89',
addon=app2, message='the worst')
def test_setup(self):
eq_(AbuseReport.objects.filter(read=False).count(), 3)
eq_(AbuseReport.objects.filter(addon=Webapp.objects.all()[0]).count(),
2)
res = self.client.get(self.url)
# Check there are 2 apps listed.
eq_(len(res.context['page'].object_list), 2)
def test_queue_count_reviewer_and_moderator(self):
self.grant_permission(self.abuseviewer_user, 'Apps:Review')
r = self.client.get(self.url)
eq_(r.status_code, 200)
doc = pq(r.content)
links = doc('.tabnav li a')
eq_(links[0].text, u'Apps (0)')
eq_(links[1].text, u'Re-reviews (0)')
eq_(links[2].text, u'Updates (0)')
eq_(links[3].text, u'Reviewing (0)')
eq_(links[4].text, u'Abuse Reports (2)')
class TestWebsiteAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
AbuseQueueMixin):
perm = 'Websites:ReadAbuse'
view_name = 'reviewers.websites.queue_abuse'
log_const = mkt.LOG.WEBSITE_ABUSE_MARKREAD
def setUp(self):
super(TestWebsiteAbuseQueue, self).setUp()
self._setUp()
@classmethod
def setUpTestData(cls):
website1 = website_factory()
website2 = website_factory()
# Add some extra sites, which shouldn't show up.
website_factory()
website_factory()
user1 = user_factory()
user2 = user_factory()
AbuseReport.objects.create(reporter=user1, ip_address='123.45.67.89',
website=website1, message='bad')
AbuseReport.objects.create(reporter=user2, ip_address='123.01.67.89',
website=website1, message='terrible')
AbuseReport.objects.create(reporter=user1, ip_address='123.01.02.89',
website=website2, message='the worst')
cls.website1 = website1
def test_setup(self):
eq_(AbuseReport.objects.filter(read=False).count(), 3)
eq_(AbuseReport.objects.filter(website=self.website1).count(), 2)
res = self.client.get(self.url)
# Check there are 2 websites listed.
eq_(len(res.context['page'].object_list), 2)
def test_first_flag(self):
# No re-review flagging for Websites yet - no re-review queue!
raise SkipTest()
class TestGetSigned(BasePackagedAppTest, mkt.site.tests.TestCase):
def setUp(self):
super(TestGetSigned, self).setUp()
self.url = reverse('reviewers.signed', args=[self.app.app_slug,
self.version.pk])
self.grant_permission(user_factory(email='editor'), 'Apps:Review')
self.login('[email protected]')
def test_not_logged_in(self):
self.client.logout()
self.assertLoginRequired(self.client.get(self.url))
def test_not_reviewer(self):
self.client.logout()
self.login(user_factory())
eq_(self.client.get(self.url).status_code, 403)
@override_settings(
DEFAULT_FILE_STORAGE='mkt.site.storage_utils.LocalFileStorage')
@mock.patch('lib.crypto.packaged.sign')
def test_reviewer_sign_arguments_local(self, sign_mock):
sign_mock.side_effect = mock_sign
self.setup_files()
res = self.client.get(self.url)
sign_mock.assert_called_with(self.version.pk, reviewer=True)
eq_(res.status_code, 200)
@override_settings(
DEFAULT_FILE_STORAGE='mkt.site.storage_utils.S3BotoPrivateStorage')
@mock.patch('lib.crypto.packaged.sign')
def test_reviewer_sign_arguments_storage(self, sign_mock):
sign_mock.side_effect = mock_sign
self.setup_files()
res = self.client.get(self.url)
sign_mock.assert_called_with(self.version.pk, reviewer=True)
self.assert3xx(res, private_storage.url(
self.file.signed_reviewer_file_path))
@mock.patch.object(packaged, 'sign', mock_sign)
def test_reviewer(self):
if not settings.XSENDFILE:
raise SkipTest()
self.setup_files()
res = self.client.get(self.url)
eq_(res.status_code, 200)
file_ = self.app.current_version.all_files[0]
eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])
def test_not_packaged(self):
self.app.update(is_packaged=False)
res = self.client.get(self.url)
eq_(res.status_code, 404)
def test_wrong_version(self):
self.url = reverse('reviewers.signed', args=[self.app.app_slug, 0])
res = self.client.get(self.url)
eq_(res.status_code, 404)
def test_token_good(self):
if not settings.XSENDFILE:
raise SkipTest()
token = Token(data={'app_id': self.app.id})
token.save()
self.setup_files()
self.client.logout()
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 200)
file_ = self.app.current_version.all_files[0]
eq_(res['x-sendfile'], file_.signed_reviewer_file_path)
eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1])
# Test token doesn't work the 2nd time.
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 403)
def test_token_bad(self):
token = Token(data={'app_id': 'abcdef'})
token.save()
self.setup_files()
self.client.logout()
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 403)
class TestMiniManifestView(BasePackagedAppTest):
def setUp(self):
super(TestMiniManifestView, self).setUp()
self.app = Webapp.objects.get(pk=337141)
self.app.update(is_packaged=True)
self.version = self.app.versions.latest()
self.file = self.version.all_files[0]
self.file.update(filename='mozball.zip')
self.url = reverse('reviewers.mini_manifest', args=[self.app.app_slug,
self.version.pk])
self.grant_permission(user_factory(email='editor'), 'Apps:Review')
self.login('[email protected]')
def test_not_logged_in(self):
self.client.logout()
self.assertLoginRequired(self.client.get(self.url))
def test_not_reviewer(self):
self.client.logout()
self.login(user_factory())
eq_(self.client.get(self.url).status_code, 403)
def test_not_packaged(self):
self.app.update(is_packaged=False)
res = self.client.get(self.url)
eq_(res.status_code, 404)
def test_wrong_version(self):
url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, 0])
res = self.client.get(url)
eq_(res.status_code, 404)
def test_reviewer(self):
self.setup_files()
manifest = self.app.get_manifest_json(self.file)
res = self.client.get(self.url)
eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
data = json.loads(res.content)
eq_(data['name'], manifest['name'])
eq_(data['developer']['name'], 'Mozilla Marketplace')
eq_(data['package_path'],
absolutify(reverse('reviewers.signed',
args=[self.app.app_slug, self.version.id])))
def test_rejected(self):
# Rejected sets file.status to DISABLED and moves to a guarded path.
self.setup_files()
self.app.update(status=mkt.STATUS_REJECTED)
self.file.update(status=mkt.STATUS_DISABLED)
manifest = self.app.get_manifest_json(self.file)
res = self.client.get(self.url)
eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
data = json.loads(res.content)
eq_(data['name'], manifest['name'])
eq_(data['developer']['name'], 'Mozilla Marketplace')
eq_(data['package_path'],
absolutify(reverse('reviewers.signed',
args=[self.app.app_slug,
self.version.id])))
def test_minifest_name_matches_manifest_name(self):
self.setup_files()
self.app.name = 'XXX'
self.app.save()
manifest = self.app.get_manifest_json(self.file)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['name'], manifest['name'])
def test_token_good(self):
token = Token(data={'app_id': self.app.id})
token.save()
self.setup_files()
self.client.logout()
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 200)
eq_(res['Content-type'], MANIFEST_CONTENT_TYPE)
data = json.loads(res.content)
ok_('token=' in data['package_path'])
# Test token doesn't work the 2nd time.
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 403)
def test_token_bad(self):
token = Token(data={'app_id': 'abcdef'})
token.save()
self.setup_files()
self.client.logout()
res = self.client.get(urlparams(self.url, token=token.token))
eq_(res.status_code, 403)
class TestReviewersScores(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestReviewersScores, self).setUp()
self.user = self.reviewer_user
self.url = reverse('reviewers.performance', args=[self.user.email])
def test_404(self):
res = self.client.get(reverse('reviewers.performance', args=['poop']))
eq_(res.status_code, 404)
def test_with_email(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
eq_(res.context['profile'].id, self.user.id)
def test_without_email(self):
res = self.client.get(reverse('reviewers.performance'))
eq_(res.status_code, 200)
eq_(res.context['profile'].id, self.user.id)
def test_no_reviews(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
assert u'No review points awarded yet' in res.content
class TestQueueSort(AppReviewerTest):
def setUp(self):
        """Create and set up apps for some filtering fun."""
        super(TestQueueSort, self).setUp()
self.apps = [app_factory(name='Lillard',
status=mkt.STATUS_PENDING,
is_packaged=False,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_FREE),
app_factory(name='Batum',
status=mkt.STATUS_PENDING,
is_packaged=True,
version_kw={'version': '1.0',
'has_editor_comment': True,
'has_info_request': True},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_PREMIUM)]
# Set up app attributes.
self.apps[0].update(created=self.days_ago(2))
self.apps[1].update(created=self.days_ago(5))
self.apps[0].addonuser_set.create(user=user_factory(email='XXX'))
self.apps[1].addonuser_set.create(user=user_factory(email='illmatic'))
self.apps[0].addondevicetype_set.create(
device_type=mkt.DEVICE_DESKTOP.id)
self.apps[1].addondevicetype_set.create(
device_type=mkt.DEVICE_MOBILE.id)
self.url = reverse('reviewers.apps.queue_pending')
def test_do_sort_webapp(self):
"""
Test that apps are sorted in order specified in GET params.
"""
rf = RequestFactory()
qs = Webapp.objects.all()
# Test apps are sorted by created/asc by default.
req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
# Test sorting by created, descending.
req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
# Test sorting by app name.
req = rf.get(self.url, {'sort': 'name', 'order': 'asc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[1], self.apps[0]])
req = rf.get(self.url, {'sort': 'name', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[0], self.apps[1]])
def test_do_sort_version_nom(self):
"""Tests version nomination sort order."""
url = reverse('reviewers.apps.queue_pending')
user = UserProfile.objects.get(email='[email protected]')
version_0 = self.apps[0].versions.get()
version_0.update(nomination=days_ago(1))
version_1 = self.apps[1].versions.get()
version_1.update(nomination=days_ago(2))
        # Throw in some disabled versions; they shouldn't affect order.
version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[0],
nomination=days_ago(10))
version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
nomination=days_ago(1))
version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1],
nomination=days_ago(20))
req = mkt.site.tests.req_factory_factory(
url, user=user, data={'sort': 'nomination'})
res = queue_apps(req)
doc = pq(res.content)
# Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
eq_(doc('tbody tr')[0].get('data-addon'), str(version_1.addon.id))
eq_(doc('tbody tr')[2].get('data-addon'), str(version_0.addon.id))
req = mkt.site.tests.req_factory_factory(
url, user=user, data={'sort': 'nomination', 'order': 'desc'})
res = queue_apps(req)
doc = pq(res.content)
# Desktop and mobile (hidden on desktop) alternate, so we jump by 2.
eq_(doc('tbody tr')[0].get('data-addon'), str(version_0.addon.id))
eq_(doc('tbody tr')[2].get('data-addon'), str(version_1.addon.id))
def test_do_sort_queue_object(self):
"""Tests sorting queue object."""
rf = RequestFactory()
url = reverse('reviewers.apps.queue_rereview')
earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
later_rrq.created += timedelta(days=1)
later_rrq.save()
request = rf.get(url, {'sort': 'created'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
# Assert the order that RereviewQueue objects were created is
# maintained.
eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'created', 'order': 'desc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'name', 'order': 'asc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([later_rrq.addon, earlier_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'name', 'order': 'desc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([earlier_rrq.addon, later_rrq.addon], list(apps))
def test_sort_with_priority_review(self):
"""Tests the sorts are correct with a priority review flagged app."""
# Set up the priority review flagged app.
self.apps.append(app_factory(name='Foxkeh',
status=mkt.STATUS_PENDING,
is_packaged=False,
version_kw={'version': '1.0'},
file_kw={'status': mkt.STATUS_PENDING},
premium_type=mkt.ADDON_FREE,
priority_review=True))
# Set up app attributes.
self.apps[2].update(created=self.days_ago(1))
self.apps[2].addonuser_set.create(
user=user_factory(email='[email protected]'))
self.apps[2].addondevicetype_set.create(
device_type=mkt.DEVICE_DESKTOP.id)
# And check it also comes out top of waiting time with Webapp model.
rf = RequestFactory()
qs = Webapp.objects.all()
# Test apps are sorted by created/asc by default.
req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
# Test sorting by created, descending.
req = rf.get(self.url, {'sort': 'created', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs)
eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
# And with Version model.
version_0 = self.apps[0].versions.get()
version_0.update(nomination=days_ago(1))
version_1 = self.apps[1].versions.get()
version_1.update(nomination=days_ago(2))
qs = (Version.objects.filter(
files__status=mkt.STATUS_PENDING,
addon__disabled_by_user=False,
addon__status=mkt.STATUS_PENDING)
.order_by('nomination', 'created')
.select_related('addon', 'files').no_transforms())
req = rf.get(self.url, {'sort': 'nomination'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]])
req = rf.get(self.url, {'sort': 'nomination', 'order': 'desc'})
sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination')
eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]])
# And with Rereview model.
url = reverse('reviewers.apps.queue_rereview')
earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0])
earlier_rrq.created += timedelta(days=1)
earlier_rrq.save()
later_rrq = RereviewQueue.objects.create(addon=self.apps[1])
later_rrq.created += timedelta(days=2)
later_rrq.save()
pri_rrq = RereviewQueue.objects.create(addon=self.apps[2])
pri_rrq.save()
request = rf.get(url, {'sort': 'created'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([pri_rrq.addon, earlier_rrq.addon, later_rrq.addon], list(apps))
request = rf.get(url, {'sort': 'created', 'order': 'desc'})
apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all())
eq_([pri_rrq.addon, later_rrq.addon, earlier_rrq.addon], list(apps))
class TestAppsReviewing(AppReviewerTest, AccessMixin):
def setUp(self):
super(TestAppsReviewing, self).setUp()
self.url = reverse('reviewers.apps.apps_reviewing')
self.apps = [app_factory(name='Antelope',
status=mkt.STATUS_PENDING),
app_factory(name='Bear',
status=mkt.STATUS_PENDING),
app_factory(name='Cougar',
status=mkt.STATUS_PENDING)]
def _view_app(self, app_id):
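        # Mark the app as currently being viewed by the logged-in reviewer.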
self.client.post(reverse('reviewers.review_viewing'), {
'addon_id': app_id})
def test_no_apps_reviewing(self):
res = self.client.get(self.url)
eq_(len(res.context['apps']), 0)
def test_apps_reviewing(self):
self._view_app(self.apps[0].id)
res = self.client.get(self.url)
eq_(len(res.context['apps']), 1)
def test_multiple_reviewers_no_cross_streams(self):
self._view_app(self.apps[0].id)
self._view_app(self.apps[1].id)
res = self.client.get(self.url)
eq_(len(res.context['apps']), 2)
        # Now view an app as another user; only their viewed app should show.
self.login('[email protected]')
self._view_app(self.apps[2].id)
res = self.client.get(self.url)
eq_(len(res.context['apps']), 1)
# Check original user again to make sure app list didn't increment.
self.login_as_editor()
res = self.client.get(self.url)
eq_(len(res.context['apps']), 2)
class TestLeaderboard(AppReviewerTest):
def setUp(self):
super(TestLeaderboard, self).setUp()
self.url = reverse('reviewers.leaderboard')
mkt.set_user(self.reviewer_user)
def _award_points(self, user, score):
ReviewerScore.objects.create(user=user, note_key=mkt.REVIEWED_MANUAL,
score=score, note='Thing.')
def test_leaderboard_ranks(self):
users = (self.reviewer_user,
self.regular_user,
user_factory(email='clouserw'))
self._award_points(users[0], mkt.REVIEWED_LEVELS[0]['points'] - 1)
self._award_points(users[1], mkt.REVIEWED_LEVELS[0]['points'] + 1)
self._award_points(users[2], mkt.REVIEWED_LEVELS[0]['points'] + 2)
def get_cells():
doc = pq(self.client.get(self.url).content.decode('utf-8'))
cells = doc('#leaderboard > tbody > tr > .name, '
'#leaderboard > tbody > tr > .level')
return [cells.eq(i).text() for i in range(0, cells.length)]
eq_(get_cells(),
[users[2].display_name,
users[1].display_name,
mkt.REVIEWED_LEVELS[0]['name'],
users[0].display_name])
self._award_points(users[0], 1)
eq_(get_cells(),
[users[2].display_name,
users[1].display_name,
users[0].display_name,
mkt.REVIEWED_LEVELS[0]['name']])
self._award_points(users[0], -1)
self._award_points(users[2], (mkt.REVIEWED_LEVELS[1]['points'] -
mkt.REVIEWED_LEVELS[0]['points']))
eq_(get_cells(),
[users[2].display_name,
mkt.REVIEWED_LEVELS[1]['name'],
users[1].display_name,
mkt.REVIEWED_LEVELS[0]['name'],
users[0].display_name])
class TestReviewPage(mkt.site.tests.TestCase):
def setUp(self):
super(TestReviewPage, self).setUp()
self.app = app_factory(status=mkt.STATUS_PENDING)
self.reviewer = user_factory(email='editor')
self.grant_permission(self.reviewer, 'Apps:Review')
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
def test_iarc_ratingless_disable_approve_btn(self):
self.app.update(status=mkt.STATUS_NULL)
req = req_factory_factory(self.url, user=self.reviewer)
res = app_review(req, app_slug=self.app.app_slug)
doc = pq(res.content)
assert (doc('#review-actions input[value=public]')
.parents('li').hasClass('disabled'))
assert not (doc('#review-actions input[value=reject]')
.parents('li').hasClass('disabled'))
def test_iarc_content_ratings(self):
for body in [mkt.ratingsbodies.CLASSIND.id, mkt.ratingsbodies.USK.id]:
self.app.content_ratings.create(ratings_body=body, rating=0)
req = req_factory_factory(self.url, user=self.reviewer)
res = app_review(req, app_slug=self.app.app_slug)
doc = pq(res.content)
eq_(doc('.reviewers-desktop .content-rating').length, 2)
eq_(doc('.reviewers-mobile .content-rating').length, 2)
class TestAbusePage(AppReviewerTest):
def setUp(self):
super(TestAbusePage, self).setUp()
self.app = app_factory(name=u'My app é <script>alert(5)</script>')
self.url = reverse('reviewers.apps.review.abuse',
args=[self.app.app_slug])
AbuseReport.objects.create(addon=self.app, message=self.app.name)
def testXSS(self):
from django.utils.encoding import smart_unicode
from jinja2.utils import escape
content = smart_unicode(self.client.get(self.url).content)
ok_(not unicode(self.app.name) in content)
ok_(unicode(escape(self.app.name)) in content)
class TestReviewTranslate(RestOAuth):
def setUp(self):
super(TestReviewTranslate, self).setUp()
self.grant_permission(self.profile, 'Apps:ModerateReview')
self.create_switch('reviews-translate')
user = user_factory(email='diego')
app = app_factory(app_slug='myapp~-_')
self.review = app.reviews.create(title=u'yes', body=u'oui',
addon=app, user=user,
editorreview=True, rating=4)
def test_regular_call(self):
res = self.client.get(reverse('reviewers.review_translate',
args=[self.review.addon.app_slug,
self.review.id, 'fr']))
self.assert3xx(res, 'https://translate.google.com/#auto/fr/oui', 302)
@mock.patch('mkt.reviewers.views.requests')
def test_ajax_call(self, requests):
# Mock requests.
response = mock.Mock(status_code=200)
response.json.return_value = {
u'data': {
u'translations': [{
u'translatedText': u'oui',
u'detectedSourceLanguage': u'fr'
}]
}
}
requests.get.return_value = response
# Call translation.
review = self.review
url = reverse('reviewers.review_translate',
args=[review.addon.app_slug, review.id, 'fr'])
res = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
eq_(res.status_code, 200)
eq_(res.content, '{"body": "oui", "title": "oui"}')
@mock.patch('mkt.reviewers.views.requests')
def test_invalid_api_key(self, requests):
# Mock requests.
response = mock.Mock(status_code=400)
response.json.return_value = {
'error': {
'code': 400,
'errors': [
{'domain': 'usageLimits',
'message': 'Bad Request',
'reason': 'keyInvalid'}
],
'message': 'Bad Request'
}
}
requests.get.return_value = response
# Call translation.
review = self.review
res = self.client.get(
reverse('reviewers.review_translate',
args=[review.addon.app_slug, review.id, 'fr']),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
eq_(res.status_code, 400)
class TestAdditionalReviewListingAccess(mkt.site.tests.TestCase):
def setUp(self):
super(TestAdditionalReviewListingAccess, self).setUp()
self.user = user_factory()
self.login(self.user)
def url(self):
return reverse('reviewers.apps.additional_review', args=[QUEUE_TARAKO])
def listing(self):
return self.client.get(self.url())
def test_regular_user_has_no_access(self):
eq_(self.listing().status_code, 403)
def test_regular_reviewer_has_no_access(self):
self.grant_permission(self.user, 'Apps:Review')
eq_(self.listing().status_code, 403)
def test_tarako_reviewer_has_access(self):
self.grant_permission(self.user, 'Apps:ReviewTarako')
eq_(self.listing().status_code, 200)
class TestReviewHistory(mkt.site.tests.TestCase, CommTestMixin):
def setUp(self):
super(TestReviewHistory, self).setUp()
self.app = self.addon = app_factory()
self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])
self.grant_permission(user_factory(email='editor'), 'Apps:Review')
self.login('[email protected]')
self._thread_factory()
def test_comm_url(self):
r = self.client.get(self.url)
doc = pq(r.content)
eq_(doc('#history .item-history').attr('data-comm-app-url'),
reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
'?limit=1&serializer=simple')
def test_comm_url_multiple_thread(self):
self._thread_factory()
r = self.client.get(self.url)
doc = pq(r.content)
eq_(doc('#history .item-history').attr('data-comm-app-url'),
reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
'?limit=2&serializer=simple')
def test_comm_url_no_encode(self):
self.addon = app_factory(app_slug='台北')
self._thread_factory()
url = reverse('reviewers.apps.review', args=[self.addon.app_slug])
r = self.client.get(url)
doc = pq(r.content)
eq_(doc('#history .item-history').attr('data-comm-app-url'),
reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) +
'?limit=1&serializer=simple')
class ModerateLogTest(mkt.site.tests.TestCase):
def setUp(self):
super(ModerateLogTest, self).setUp()
self.review = Review.objects.create(addon=app_factory(), body='body',
user=user_factory(), rating=4,
editorreview=True)
self.moderator_user = user_factory(email='moderator')
self.grant_permission(self.moderator_user, 'Apps:ModerateReview')
mkt.set_user(self.moderator_user)
self.login(self.moderator_user)
self.admin_user = user_factory(email='admin')
self.grant_permission(self.admin_user, '*:*')
user_factory(email='regular')
class TestModerateLog(ModerateLogTest, AccessMixin):
def setUp(self):
super(TestModerateLog, self).setUp()
self.url = reverse('reviewers.apps.moderatelog')
def test_log(self):
r = self.client.get(self.url)
eq_(r.status_code, 200)
def test_start_filter(self):
r = self.client.get(self.url, dict(start='2011-01-01'))
eq_(r.status_code, 200)
def test_enddate_filter(self):
"""
        Make sure that if our end date is 1/1/2011, we include items from
1/1/2011.
"""
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
created=datetime(2011, 1, 1))
r = self.client.get(self.url, dict(end='2011-01-01'))
eq_(r.status_code, 200)
eq_(pq(r.content)('tbody td').eq(0).text(), 'Jan 1, 2011, 12:00:00 AM')
def test_action_filter(self):
"""
Based on setup we should see only two items if we filter for deleted
reviews.
"""
for i in xrange(2):
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review)
r = self.client.get(self.url, dict(search='deleted'))
eq_(pq(r.content)('tbody tr').length, 2)
def test_no_results(self):
r = self.client.get(self.url, dict(end='2004-01-01'))
no_results = 'No events found for this period.'
assert no_results in r.content, 'Expected no results to be found.'
def test_display_name_xss(self):
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon,
user=self.admin_user)
self.admin_user.display_name = '<script>alert("xss")</script>'
self.admin_user.save()
assert '<script>' in self.admin_user.display_name, (
'Expected <script> to be in display name')
r = self.client.get(self.url)
pq(r.content)('#log-listing tbody td').eq(1).html()
assert '<script>' not in r.content
        assert '&lt;script&gt;' in r.content
class TestModerateLogDetail(ModerateLogTest, AccessMixin):
def setUp(self):
super(TestModerateLogDetail, self).setUp()
# AccessMixin needs a url property.
self.url = self._url(0)
def _url(self, id):
return reverse('reviewers.apps.moderatelog.detail', args=[id])
def test_detail_page(self):
mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review)
e_id = ActivityLog.objects.editor_events()[0].id
r = self.client.get(self._url(e_id))
eq_(r.status_code, 200)
def test_undelete_selfmoderation(self):
e_id = mkt.log(
mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
self.review.delete()
r = self.client.post(self._url(e_id), {'action': 'undelete'})
eq_(r.status_code, 302)
self.review = Review.objects.get(id=self.review.id)
assert not self.review.deleted, 'Review should be undeleted now.'
def test_undelete_admin(self):
e_id = mkt.log(
mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id
self.review.delete()
self.client.logout()
self.login(self.admin_user)
r = self.client.post(self._url(e_id), {'action': 'undelete'})
eq_(r.status_code, 302)
self.review = Review.objects.get(id=self.review.id)
assert not self.review.deleted, 'Review should be undeleted now.'
def test_undelete_unauthorized(self):
# Delete as admin (or any other user than the reviewer).
e_id = mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review,
user=self.admin_user).id
self.review.delete()
# Try to undelete as normal reviewer.
r = self.client.post(self._url(e_id), {'action': 'undelete'})
eq_(r.status_code, 403)
self.review = Review.with_deleted.get(id=self.review.id)
        assert self.review.deleted, "Review shouldn't have been undeleted."
| {
"content_hash": "039b16d82bdbd000555773dd7a022003",
"timestamp": "",
"source": "github",
"line_count": 4338,
"max_line_length": 79,
"avg_line_length": 40.82618718303365,
"alnum_prop": 0.5935156744059987,
"repo_name": "tsl143/zamboni",
"id": "c025ec76c4ecf91492b02bcedecd79b69242da3f",
"size": "177140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mkt/reviewers/tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "357511"
},
{
"name": "HTML",
"bytes": "2330810"
},
{
"name": "JavaScript",
"bytes": "536153"
},
{
"name": "Makefile",
"bytes": "4281"
},
{
"name": "Python",
"bytes": "4365804"
},
{
"name": "Shell",
"bytes": "11156"
},
{
"name": "Smarty",
"bytes": "1159"
}
],
"symlink_target": ""
} |
"""Class MirroredStrategy implementing DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import threading
import six
from tensorflow.contrib.distribute.python import cross_tower_ops as cross_tower_ops_lib
from tensorflow.contrib.distribute.python import shared_variable_creator
from tensorflow.contrib.distribute.python import values
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import context
from tensorflow.python.eager import tape
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.training import coordinator
from tensorflow.python.training import device_util
from tensorflow.python.training import distribute as distribute_lib
# TODO(josh11b): Replace asserts in this file with if ...: raise ...
@contextlib.contextmanager
def _enter_graph(g):
if context.executing_eagerly():
with g.as_default(), context.eager_mode():
yield
else:
with g.as_default():
yield
def _cpu_device(device):
cpu_device = tf_device.DeviceSpec.from_string(device)
cpu_device.merge_from(tf_device.DeviceSpec(device_type="CPU", device_index=0))
return cpu_device.to_string()
class _RequestedStop(Exception):
pass
class MirroredStrategy(distribute_lib.DistributionStrategy):
"""Mirrors vars to distribute across multiple devices on a single machine.
This strategy uses one tower per device and sync replication.
"""
def __init__(self,
devices=None,
num_gpus=None,
cross_tower_ops=None,
prefetch_on_device=None):
super(MirroredStrategy, self).__init__()
# Convert `num_gpus` into `devices`, shouldn't specify both.
if devices is None:
if num_gpus is None:
num_gpus = context.num_gpus()
devices = ["/device:GPU:%d" % d for d in range(num_gpus)]
elif num_gpus is not None:
raise ValueError("Must only specify one of `devices` and `num_gpus`.")
assert devices, "Must specify at least one device."
assert len(set(devices)) == len(devices), (
"No duplicates allowed in `devices` argument.")
# TODO(josh11b): Require at least 2 devices?
self._devices = [device_util.resolve(d) for d in devices]
self._canonical_device_set = set(self._devices)
self._device_index = values.PerDevice(
dict((d, i) for i, d in enumerate(devices)))
self._cross_tower_ops = cross_tower_ops
self._prefetch_on_device = prefetch_on_device
# TODO(yuefengz): consider setting the default device.
def _create_variable(self, next_creator, *args, **kwargs):
"""Create a mirrored variable. See `DistributionStrategy.scope`."""
# Figure out what collections this variable should be added to.
# We'll add the MirroredVariable to those collections instead.
collections = kwargs.pop("collections", None)
if collections is None:
collections = [ops.GraphKeys.GLOBAL_VARIABLES]
kwargs["collections"] = []
colocate_with = kwargs.pop("colocate_with", None)
devices = self._get_devices_from(colocate_with)
tower_local = kwargs.pop("tower_local_reduce_method", None)
if tower_local is not None:
kwargs["trainable"] = False
# TODO(josh11b,apassos): It would be better if variable initialization
# was never recorded on the tape instead of having to do this manually
# here.
with tape.stop_recording():
index = {}
for i, d in enumerate(devices):
with ops.device(d):
if i > 0:
# Give replicas meaningful distinct names:
var0name = index[devices[0]].name.split(":")[0]
# We append a / to variable names created on towers with id > 0 to
# ensure that we ignore the name scope and instead use the given
# name as the absolute name of the variable.
kwargs["name"] = "%s/replica_%d/" % (var0name, i)
# Initialize replicas with the same value:
if context.executing_eagerly():
kwargs["initial_value"] = array_ops.identity(
index[devices[0]].value())
else:
def initial_value_fn(device=d):
with ops.device(device):
return array_ops.identity(index[devices[0]].initial_value)
kwargs["initial_value"] = initial_value_fn
with context.context().device_policy(context.DEVICE_PLACEMENT_SILENT):
v = next_creator(*args, **kwargs)
assert not isinstance(v, values.DistributedVariable)
index[d] = v
if tower_local is None:
result = values.MirroredVariable(index, index[devices[0]])
else:
result = values.TowerLocalVariable(
index, index[devices[0]], tower_local)
if not context.executing_eagerly():
g = ops.get_default_graph()
# If "trainable" is True, next_creator() will add the member variables
# to the TRAINABLE_VARIABLES collection, so we manually remove
# them and replace with the MirroredVariable. We can't set
# "trainable" to False for next_creator() since that causes functions
# like implicit_gradients to skip those variables.
if kwargs.get("trainable", True):
collections.append(ops.GraphKeys.TRAINABLE_VARIABLES)
l = g.get_collection_ref(ops.GraphKeys.TRAINABLE_VARIABLES)
for v in index.values():
l.remove(v)
g.add_to_collections(collections, result)
return result
def distribute_dataset(self, dataset_fn):
return values.PerDeviceDataset(
self._call_dataset_fn(dataset_fn), self._devices,
self._prefetch_on_device)
def _broadcast(self, tensor, destinations):
# TODO(josh11b): In eager mode, use one thread per device, or async mode.
return self._get_cross_tower_ops().broadcast(tensor, destinations or
self._devices)
def _call_for_each_tower(self, fn, *args, **kwargs):
"""Run `fn` in separate threads, once per tower/worker device.
Args:
fn: function to run (will be run once per device, each in its own thread).
*args: positional arguments for `fn`
**kwargs: keyword arguments for `fn`.
`"run_concurrently"`: Boolean indicating whether executions of `fn`
can be run concurrently (under eager execution only), defaults to
`True`.
Returns:
Merged return value of `fn` across all towers.
Raises:
RuntimeError: If fn() calls get_tower_context().merge_call() a different
number of times for when called for different devices.
"""
run_concurrently = kwargs.pop("run_concurrently", True)
if not context.executing_eagerly():
# Lots of TF library code isn't thread-safe in graph mode, and
# there is little to be gained by turning on multithreading when
# constructing a graph.
run_concurrently = False
# Needed for per-thread device, etc. contexts in graph mode.
ops.get_default_graph().switch_to_thread_local()
elif run_concurrently is None:
run_concurrently = True
coord = coordinator.Coordinator(
clean_stop_exception_types=(_RequestedStop,))
shared_variable_store = {}
# TODO(isaprykin): Create these threads once instead of during every run()
# call.
threads = []
for index, d in enumerate(self._devices):
variable_creator_fn = shared_variable_creator.make_fn(
shared_variable_store, index)
t = MirroredStrategy._MirroredTowerThread(
self, coord, d, variable_creator_fn, fn,
*values.select_device(d, args), **values.select_device(d, kwargs))
threads.append(t)
for t in threads:
t.start()
# When `fn` starts `should_run` event is set on _MirroredTowerThread
# (`MTT`) threads. The execution waits until
# `MTT.has_paused` is set, which indicates that either `fn` is
# complete or a `get_tower_context().merge_call()` is called. If `fn` is
# complete, then `MTT.done` is set to True. Otherwise, arguments
# of `get_tower_context().merge_call` from all paused threads are grouped
# and the `merge_fn` is performed. Results of the
# `get_tower_context().merge_call` are then set to `MTT.merge_result`.
# Each such `get_tower_context().merge_call` call returns the
# `MTT.merge_result` for that thread when `MTT.should_run` event
# is reset again. Execution of `fn` resumes.
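    # Schematically (illustrative comment), one round of the handshake with a
    # single tower thread t looks like:
    #   main thread: t.should_run.set()  -> t runs fn until merge_call()/return
    #   t:           t.has_paused.set()  -> main thread wakes, inspects t.done
    #   main thread: runs merge_fn, stores t.merge_result, t.should_run.set()
    #   t:           merge_call() returns t.merge_result and fn resumes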
try:
with coord.stop_on_exception():
all_done = False
while not all_done and not coord.should_stop():
done = []
if run_concurrently:
for t in threads:
t.should_run.set()
for t in threads:
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
else:
for t in threads:
t.should_run.set()
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
if coord.should_stop():
return None
all_done = all(done)
if not all_done:
if any(done):
raise RuntimeError("Some towers made a different number of "
"tower_context().merge_call() calls.")
# get_tower_context().merge_call() case
merge_args = values.regroup(
{t.device: t.merge_args for t in threads})
merge_kwargs = values.regroup(
{t.device: t.merge_kwargs for t in threads})
# We capture the name_scope of the MTT when we call merge_fn
# to ensure that if we have opened a name scope in the MTT,
# it will be respected when executing the merge function. We only
# capture the name_scope from the first MTT and assume it is
# the same for all other MTTs.
mtt_captured_name_scope = threads[0].captured_name_scope
with ops.name_scope(mtt_captured_name_scope):
merge_result = threads[0].merge_fn(
self, *merge_args, **merge_kwargs)
for t in threads:
t.merge_result = values.select_device(t.device, merge_result)
finally:
for t in threads:
t.should_run.set()
coord.join(threads)
return values.regroup({t.device: t.main_result for t in threads})
def map(self, map_over, fn, *args, **kwargs):
# TODO(josh11b): In eager mode, use one thread per device.
index = {}
i = 0
for m in map_over:
d = self._devices[i % len(self._devices)]
with ops.device(d):
l = index.get(d, [])
l.append(fn(m,
*values.select_device_mirrored(d, args),
**values.select_device_mirrored(d, kwargs)))
index[d] = l
# TODO(josh11b): Need a values.regroup equivalent that handles MapOutput
# in addition to PerDevice data.
return values.PerDevice({k: values.MapOutput(v) for k, v in index.items()})
def configure(self, session_config=None):
if self._cross_tower_ops is None:
self._cross_tower_ops = cross_tower_ops_lib.choose_the_best(
self._devices, session_config=session_config)
def _get_cross_tower_ops(self):
if self._cross_tower_ops is None:
self._cross_tower_ops = (
cross_tower_ops_lib.ReductionToOneDeviceCrossTowerOps())
return self._cross_tower_ops
def _reduce(self, method_string, value, destinations):
if len(self._devices) == 1 and not isinstance(value, values.PerDevice):
value = values.PerDevice({self._devices[0]: value})
assert isinstance(value, values.PerDevice)
return self._get_cross_tower_ops().reduce(
method_string, value, destinations=destinations)
def _batch_reduce(self, method_string, value_destination_pairs):
return self._get_cross_tower_ops().batch_reduce(method_string,
value_destination_pairs)
def _update(self, var, fn, *args, **kwargs):
# TODO(josh11b): Also support TowerLocalVariables here? If so, args and
# kwargs don't need to be mirrored.
assert isinstance(var, values.MirroredVariable)
# TODO(josh11b): In eager mode, use one thread per device.
updates = {}
for d, v in var._index.items(): # pylint: disable=protected-access
name = "update_%d" % self._device_index.get(d)
with ops.device(d), distribute_lib.UpdateContext(d), ops.name_scope(name):
updates[d] = fn(v,
*values.select_device_mirrored(d, args),
**values.select_device_mirrored(d, kwargs))
return values.regroup(updates, values.Mirrored)
def _update_non_slot(self, colocate_with, fn, *args, **kwargs):
assert isinstance(colocate_with, list)
# TODO(josh11b): In eager mode, use one thread per device.
updates = {}
for d in colocate_with:
name = "update_%d" % self._device_index.get(d)
with ops.device(d), distribute_lib.UpdateContext(d), ops.name_scope(name):
updates[d] = fn(*values.select_device_mirrored(d, args),
**values.select_device_mirrored(d, kwargs))
return values.regroup(updates, values.Mirrored)
def _fetch(self, val, destination, fn):
"""Return a copy of `val` or `fn(val)` on `destination`."""
if isinstance(val, values.TowerLocalVariable):
val = self.reduce(val.reduce_method, val, destinations=destination)
with ops.device(destination):
return fn(self.unwrap(val)[0])
assert isinstance(val, values.Mirrored), (
"val = %s (type %s)" % (val, val.__class__.__name__))
if val.on_device(destination):
with ops.device(destination):
# Use an identity here to make sure we are returning a tensor
# instead of e.g. a variable object.
return array_ops.identity(fn(val.get(destination)))
device = None
for d in self._devices:
if val.on_device(d):
device = d
break
assert device is not None, (
"Could not find destination %s in list of devices %s." %
(destination, val.devices))
with ops.device(device):
v = fn(val.get(device))
with ops.device(destination):
return array_ops.identity(v)
def _unwrap(self, val):
if isinstance(val, values.DistributedValues):
# Return in a deterministic order.
if set(val.devices) == self._canonical_device_set:
return [val.get(device=d) for d in self._devices]
return [val.get(device=d) for d in sorted(val.devices)]
return [val]
@property
def is_single_tower(self):
return len(self._devices) == 1
@property
def num_towers(self):
return len(self._devices)
def _worker_device_index(self):
return self._device_index
@property
def worker_devices(self):
# Make a copy to prevent users from accidentally mutating our copy.
return list(self._devices)
@property
def parameter_devices(self):
return list(self._devices)
def non_slot_devices(self, var_list):
del var_list
return list(self._devices)
def _get_devices_from(self, colocate_with=None):
if colocate_with is None:
return self._devices
elif isinstance(colocate_with, values.DistributedValues):
# pylint: disable=protected-access
return list(colocate_with._index.keys())
elif isinstance(colocate_with, six.string_types):
return [device_util.resolve(colocate_with)]
elif isinstance(colocate_with, list):
return [device_util.resolve(d) for d in colocate_with]
else:
return colocate_with
class _MirroredTowerThread(threading.Thread):
"""A thread that runs() a function on a device."""
def __init__(self, dist, coord, device, variable_creator_fn, fn, *args,
**kwargs):
super(MirroredStrategy._MirroredTowerThread, self).__init__() # pylint: disable=protected-access
self.coord = coord
self.distribution = dist
self.device = device
self.tower_id = dist.worker_devices.index(device)
self.variable_creator_fn = variable_creator_fn
# State needed to run and return the results of `fn`.
self.main_fn = fn
self.main_args = args
self.main_kwargs = kwargs
self.main_result = None
self.done = False
# State needed to run the next merge_call() (if any) requested via
# TowerContext.
self.merge_fn = None
self.merge_args = None
self.merge_kwargs = None
self.merge_result = None
self.captured_name_scope = None
# We use a thread.Event for the main thread to signal when this
# thread should start running (`should_run`), and another for
# this thread to transfer control back to the main thread
# (`has_paused`, either when it gets to a
# `get_tower_context().merge_call` or when `fn` returns). In
# either case the event starts cleared, is signaled by calling
# set(). The receiving thread waits for the signal by calling
# wait() and then immediately clearing the event using clear().
self.should_run = threading.Event()
self.has_paused = threading.Event()
# These fields have to do with inheriting various contexts from the
# parent thread:
# pylint: disable=protected-access
self.context_mode = context.context()._eager_context.mode
if not context.context()._context_handle:
context.context()._initialize_handle_and_devices()
self.context_device_policy = (
pywrap_tensorflow.TFE_ContextGetDevicePlacementPolicy(
context.context()._context_handle))
self.graph = ops.get_default_graph()
self._variable_creator_stack = self.graph._variable_creator_stack[:]
self._captured_var_scope = variable_scope.get_variable_scope()
# Adding a "/" at end lets us re-enter this scope later.
self._name_scope = self.graph.get_name_scope()
if self._name_scope:
self._name_scope += "/"
if self.tower_id > 0:
if not self._name_scope:
self._name_scope = ""
self._name_scope += "tower_%d/" % self.tower_id
def run(self):
# pylint: disable=protected-access
self.graph._variable_creator_stack = self._variable_creator_stack
self.should_run.wait()
self.should_run.clear()
try:
if self.coord.should_stop():
return
with self.coord.stop_on_exception(), \
context.context()._mode(self.context_mode), \
context.context().device_policy(self.context_device_policy), \
_enter_graph(self.graph), \
MirroredTowerContext(self.distribution, self.tower_id), \
ops.device(self.device), \
ops.name_scope(self._name_scope), \
variable_scope.variable_scope(
self._captured_var_scope, reuse=self.tower_id > 0), \
variable_scope.variable_creator_scope(self.variable_creator_fn):
self.main_result = self.main_fn(*self.main_args, **self.main_kwargs)
self.done = True
finally:
self.has_paused.set()
class MirroredTowerContext(distribute_lib.TowerContext):
"""TowerContext used in MirroredStrategy.call_for_each_tower().
Opened in `_MirroredTowerThread`, to allow the user to invoke
`MirroredStrategy`'s specific implementation of `merge_call()`,
which works by delegating the function and its arguments to
the main thread (the one that invoked
`MirroredStrategy.call_for_each_tower()`).
"""
def _merge_call(self, fn, *args, **kwargs):
"""Delegate to the main thread to actually perform merge_call()."""
t = threading.current_thread() # a _MirroredTowerThread
t.merge_fn = fn
t.merge_args = args
t.merge_kwargs = kwargs
t.captured_name_scope = t.graph.get_name_scope()
# Adding a "/" at end lets us re-enter this scope later.
if t.captured_name_scope:
t.captured_name_scope += "/"
t.has_paused.set()
t.should_run.wait()
t.should_run.clear()
if t.coord.should_stop():
raise _RequestedStop()
return t.merge_result
@property
def device(self):
distribute_lib.require_tower_context(self)
return self._distribution_strategy.worker_devices[self._tower_id]
| {
"content_hash": "da207c7166ef97b5c8abb3976c1f9729",
"timestamp": "",
"source": "github",
"line_count": 513,
"max_line_length": 103,
"avg_line_length": 40.15204678362573,
"alnum_prop": 0.6417613360520439,
"repo_name": "yanchen036/tensorflow",
"id": "cef0a2907b85d230606eb530a0e94549b6b95e53",
"size": "21287",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/distribute/python/mirrored_strategy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8458"
},
{
"name": "C",
"bytes": "203125"
},
{
"name": "C++",
"bytes": "31304292"
},
{
"name": "CMake",
"bytes": "653409"
},
{
"name": "Go",
"bytes": "1000126"
},
{
"name": "Java",
"bytes": "444071"
},
{
"name": "Jupyter Notebook",
"bytes": "1940755"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "40136"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "63210"
},
{
"name": "Perl",
"bytes": "7546"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "29015899"
},
{
"name": "Ruby",
"bytes": "327"
},
{
"name": "Shell",
"bytes": "384247"
}
],
"symlink_target": ""
} |
import atexit
import sys
import time
from typing import Dict
from urllib3.exceptions import HTTPError
from polyaxon import pkg, settings
from polyaxon.agents.base import BaseAgent
from polyaxon.auxiliaries import V1PolyaxonInitContainer, V1PolyaxonSidecarContainer
from polyaxon.lifecycle import V1StatusCondition, V1Statuses
from polyaxon.schemas.types import V1ConnectionType
from polyaxon.utils.versions import clean_version_for_check
from polyaxon_sdk import V1Agent
from polyaxon_sdk.rest import ApiException
class Agent(BaseAgent):
def __init__(self, owner, agent_uuid):
super().__init__(sleep_interval=None)
self.owner = owner
self.agent_uuid = agent_uuid
self._register()
def _register(self):
print("Agent is starting.")
try:
agent = self.get_info()
self._check_status(agent)
self.sync()
self.log_agent_running()
print("Agent is running.")
except (ApiException, HTTPError) as e:
self.log_agent_failed(
message="Could not start the agent {}.".format(repr(e))
)
sys.exit(1)
atexit.register(self._wait)
def _wait(self):
if not self._graceful_shutdown:
self.log_agent_warning()
time.sleep(1)
def get_info(self):
return self.client.agents_v1.get_agent(owner=self.owner, uuid=self.agent_uuid)
def get_state(self):
return self.client.agents_v1.get_agent_state(
owner=self.owner, uuid=self.agent_uuid
)
def log_agent_status(self, status: str, reason: str = None, message: str = None):
status_condition = V1StatusCondition.get_condition(
type=status, status=True, reason=reason, message=message
)
self.client.agents_v1.create_agent_status(
owner=self.owner,
uuid=self.agent_uuid,
body={"condition": status_condition},
async_req=True,
)
def sync(self):
self.client.agents_v1.sync_agent(
owner=self.owner,
agent_uuid=self.agent_uuid,
body=V1Agent(
content=settings.AGENT_CONFIG.to_dict(dump=True),
version=clean_version_for_check(pkg.VERSION),
version_api=self.spawner.k8s_manager.get_version(),
),
)
def sync_compatible_updates(self, compatible_updates: Dict):
if compatible_updates and settings.AGENT_CONFIG:
init = compatible_updates.get("init")
if init:
init = V1PolyaxonInitContainer.from_dict(init)
settings.AGENT_CONFIG.init = settings.AGENT_CONFIG.init.patch(init)
sidecar = compatible_updates.get("sidecar")
if sidecar:
sidecar = V1PolyaxonSidecarContainer.from_dict(sidecar)
settings.AGENT_CONFIG.sidecar = settings.AGENT_CONFIG.sidecar.patch(
sidecar
)
connections = compatible_updates.get("connections")
if connections:
settings.AGENT_CONFIG.connections = [
V1ConnectionType.from_dict(c) for c in connections
]
self.content = settings.AGENT_CONFIG.to_dict(dump=True)
self.sync()
def log_agent_running(self):
self.log_agent_status(status=V1Statuses.RUNNING, reason="AgentLogger")
def log_agent_failed(self, message=None):
self.log_agent_status(
status=V1Statuses.FAILED, reason="AgentLogger", message=message
)
def log_agent_warning(self):
self.log_agent_status(
status=V1Statuses.WARNING,
reason="AgentLogger",
message="The agent was interrupted, please check your deployment.",
)
| {
"content_hash": "fa09781b9912202c24bb526fe579dc38",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 86,
"avg_line_length": 34.285714285714285,
"alnum_prop": 0.6125,
"repo_name": "polyaxon/polyaxon",
"id": "f04b8cfd4ed605ba293eaa844e51e3b5902f105e",
"size": "4445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/polyaxon/agents/agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1989"
},
{
"name": "Python",
"bytes": "5201898"
},
{
"name": "Shell",
"bytes": "1565"
}
],
"symlink_target": ""
} |
from allauth.socialaccount import app_settings
from allauth.socialaccount.models import SocialApp, SocialToken
class Provider(object):
def get_login_url(self, request, next=None, **kwargs):
"""
Builds the URL to redirect to when initiating a login for this
provider.
"""
raise NotImplementedError, "get_login_url() for " + self.name
def get_app(self, request):
return SocialApp.objects.get_current(self.id)
def media_js(self, request):
"""
Some providers may require extra scripts (e.g. a Facebook connect)
"""
return ''
def wrap_account(self, social_account):
return self.account_class(social_account)
def get_settings(self):
return app_settings.PROVIDERS.get(self.id, {})
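# Illustrative sketch of a concrete provider subclass (hypothetical name and
# URL, not part of allauth):
#
#     class ExampleProvider(Provider):
#         id = 'example'
#         name = 'Example'
#
#         def get_login_url(self, request, next=None, **kwargs):
#             return 'https://example.com/oauth/login?next=%s' % (next or '')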
class ProviderAccount(object):
def __init__(self, social_account):
self.account = social_account
def build_token_args(self, social_app, social_token):
return {}
def update_token(self, social_app, social_token):
pass
def request_url(self, url, args={}, callback=None):
        raise NotImplementedError, 'request_url(url, args) for %s' % self.account.get_provider().id
def has_valid_authentication(self):
        raise NotImplementedError, 'has_valid_authentication() for %s' % self.account.get_provider().id
def get_token_args(self, app=None):
social_app = app if app else SocialApp.objects.get_current(self.account.get_provider().id)
try:
social_token = social_app.socialtoken_set.get(account=self.account)
return self.build_token_args(social_app, social_token)
except SocialToken.DoesNotExist:
return {}
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
def get_brand(self):
"""
Returns a dict containing an id and name identifying the
brand. Useful when displaying logos next to accounts in
templates.
For most providers, these are identical to the provider. For
        OpenID however, the brand can be derived from the OpenID identity
url.
"""
provider = self.account.get_provider()
return dict(id=provider.id,
name=provider.name)
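    # Illustrative (hypothetical provider): the returned dict looks like
    # {'id': 'example', 'name': 'Example'}.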
def __unicode__(self):
return self.get_brand()['name']
| {
"content_hash": "cd3878a4ec21a8f88a2be88c710d1988",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 98,
"avg_line_length": 32.65277777777778,
"alnum_prop": 0.6316461080391322,
"repo_name": "rawjam/django-allauth",
"id": "571f8e533d4603ceb0a2baec233da8e031ef2d80",
"size": "2351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "allauth/socialaccount/providers/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "693"
},
{
"name": "HTML",
"bytes": "49842"
},
{
"name": "Python",
"bytes": "336850"
}
],
"symlink_target": ""
} |
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
import numpy as np
### <summary>
### Basic template algorithm simply initializes the date range and cash. This is a skeleton
### framework you can use for designing an algorithm.
### </summary>
### <meta name="tag" content="using data" />
### <meta name="tag" content="using quantconnect" />
### <meta name="tag" content="trading and orders" />
class BasicTemplateAlgorithm(QCAlgorithm):
'''Basic template algorithm simply initializes the date range and cash'''
def Initialize(self):
        '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.'''
self.SetStartDate(2013,10, 7) #Set Start Date
self.SetEndDate(2013,10,11) #Set End Date
self.SetCash(100000) #Set Strategy Cash
# Find more symbols here: http://quantconnect.com/data
self.AddEquity("SPY", Resolution.Second)
self.Debug("numpy test >>> print numpy.pi: " + str(np.pi))
def OnData(self, data):
'''OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
Arguments:
data: Slice object keyed by symbol containing the stock data
'''
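        # Illustrative (assumption, not required by this template): a bar for a
        # subscribed symbol could be read via data["SPY"].Close before trading.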
if not self.Portfolio.Invested:
self.SetHoldings("SPY", 1) | {
"content_hash": "59c8d32f99af804f8870a76808290121",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 151,
"avg_line_length": 43.549019607843135,
"alnum_prop": 0.7172444844664565,
"repo_name": "redmeros/Lean",
"id": "30084c11f8e83cb9e58d1678486190bbd738bef4",
"size": "2223",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Algorithm.Python/BasicTemplateAlgorithm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3280"
},
{
"name": "C#",
"bytes": "5870523"
},
{
"name": "F#",
"bytes": "1723"
},
{
"name": "Java",
"bytes": "852"
},
{
"name": "Python",
"bytes": "122953"
},
{
"name": "Shell",
"bytes": "2845"
},
{
"name": "Visual Basic",
"bytes": "2448"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, division
from os.path import join
import contextlib
import os
import shutil
import subprocess
import re
import shlex
import runpy
import zipfile
import tarfile
import platform
import click
import pysftp
import fnmatch
# https://virtualenv.pypa.io/en/latest/userguide.html#windows-notes
# scripts and executables on Windows go in ENV\Scripts\ instead of ENV/bin/
if platform.system() == "Windows":
VENV_BIN = "Scripts"
else:
VENV_BIN = "bin"
if platform.system() == "Windows":
def Archive(name):
a = zipfile.ZipFile(name, "w")
a.add = a.write
return a
else:
def Archive(name):
return tarfile.open(name, "w:gz")
RELEASE_DIR = join(os.path.dirname(os.path.realpath(__file__)))
DIST_DIR = join(RELEASE_DIR, "dist")
ROOT_DIR = os.path.normpath(join(RELEASE_DIR, ".."))
VERSION_FILE = join(ROOT_DIR, "netlib/version.py")
BUILD_DIR = join(RELEASE_DIR, "build")
PYINSTALLER_TEMP = join(BUILD_DIR, "pyinstaller")
PYINSTALLER_DIST = join(BUILD_DIR, "binaries")
VENV_DIR = join(BUILD_DIR, "venv")
VENV_PIP = join(VENV_DIR, VENV_BIN, "pip")
VENV_PYINSTALLER = join(VENV_DIR, VENV_BIN, "pyinstaller")
project = {
"name": "mitmproxy",
"tools": ["pathod", "pathoc", "mitmproxy", "mitmdump", "mitmweb"],
"dir": ROOT_DIR,
"python_version": "py2"
}
if platform.system() == "Windows":
project["tools"].remove("mitmproxy")
def get_version():
return runpy.run_path(VERSION_FILE)["VERSION"]
def get_snapshot_version():
last_tag, tag_dist, commit = git("describe --tags --long").strip().rsplit(b"-", 2)
tag_dist = int(tag_dist)
if tag_dist == 0:
return get_version()
else:
return "{version}dev{tag_dist:04}-{commit}".format(
version=get_version(), # this should already be the next version
tag_dist=tag_dist,
commit=commit
)
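# Illustrative example (hypothetical values): with get_version() == "0.17",
# 23 commits since the last tag and commit id g1234abc, this returns
# "0.17dev0023-g1234abc".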
def archive_name():
platform_tag = {
"Darwin": "osx",
"Windows": "win32",
"Linux": "linux"
}.get(platform.system(), platform.system())
if platform.system() == "Windows":
ext = "zip"
else:
ext = "tar.gz"
return "{project}-{version}-{platform}.{ext}".format(
project=project["name"],
version=get_version(),
platform=platform_tag,
ext=ext
)
def wheel_name():
return "{project}-{version}-{py_version}-none-any.whl".format(
project=project["name"],
version=get_version(),
py_version=project["python_version"]
)
@contextlib.contextmanager
def empty_pythonpath():
"""
Make sure that the regular python installation is not on the python path,
which would give us access to modules installed outside of our virtualenv.
"""
pythonpath = os.environ.get("PYTHONPATH", "")
os.environ["PYTHONPATH"] = ""
yield
os.environ["PYTHONPATH"] = pythonpath
@contextlib.contextmanager
def chdir(path):
old_dir = os.getcwd()
os.chdir(path)
yield
os.chdir(old_dir)
def git(args):
with chdir(ROOT_DIR):
return subprocess.check_output(["git"] + shlex.split(args))
@click.group(chain=True)
def cli():
"""
mitmproxy build tool
"""
pass
@cli.command("contributors")
def contributors():
"""
Update CONTRIBUTORS.md
"""
with chdir(ROOT_DIR):
print("Updating CONTRIBUTORS...")
contributors_data = git("shortlog -n -s")
with open("CONTRIBUTORS", "w") as f:
f.write(contributors_data)
@cli.command("set-version")
@click.argument('version')
def set_version(version):
"""
Update version information
"""
print("Update versions...")
version = ", ".join(version.split("."))
print("Update %s..." % VERSION_FILE)
with open(VERSION_FILE, "rb") as f:
content = f.read()
new_content = re.sub(
r"IVERSION\s*=\s*\([\d,\s]+\)", "IVERSION = (%s)" % version,
content
)
with open(VERSION_FILE, "wb") as f:
f.write(new_content)
@cli.command("wheels")
def wheels():
"""
Build wheels
"""
with empty_pythonpath():
print("Building release...")
if os.path.exists(DIST_DIR):
shutil.rmtree(DIST_DIR)
print("Creating wheel for %s ..." % project["name"])
subprocess.check_call(
[
"python", "./setup.py", "-q",
"bdist_wheel", "--dist-dir", DIST_DIR,
],
cwd=project["dir"]
)
print("Creating virtualenv for test install...")
if os.path.exists(VENV_DIR):
shutil.rmtree(VENV_DIR)
subprocess.check_call(["virtualenv", "-q", VENV_DIR])
with chdir(DIST_DIR):
print("Installing %s..." % project["name"])
subprocess.check_call([VENV_PIP, "install", "-q", wheel_name()])
print("Running binaries...")
for tool in project["tools"]:
tool = join(VENV_DIR, VENV_BIN, tool)
print("> %s --version" % tool)
print(subprocess.check_output([tool, "--version"]))
print("Virtualenv available for further testing:")
print("source %s" % os.path.normpath(join(VENV_DIR, VENV_BIN, "activate")))
@cli.command("bdist")
@click.option("--use-existing-wheels/--no-use-existing-wheels", default=False)
@click.argument("pyinstaller_version", envvar="PYINSTALLER_VERSION", default="PyInstaller~=3.1.1")
@click.pass_context
def bdist(ctx, use_existing_wheels, pyinstaller_version):
"""
Build a binary distribution
"""
if os.path.exists(PYINSTALLER_TEMP):
shutil.rmtree(PYINSTALLER_TEMP)
if os.path.exists(PYINSTALLER_DIST):
shutil.rmtree(PYINSTALLER_DIST)
if not use_existing_wheels:
ctx.invoke(wheels)
print("Installing PyInstaller...")
subprocess.check_call([VENV_PIP, "install", "-q", pyinstaller_version])
if project["tools"]:
with Archive(join(DIST_DIR, archive_name())) as archive:
for tool in project["tools"]:
spec = join(RELEASE_DIR, "specs/%s.spec" % tool)
print("Building %s binary..." % tool)
subprocess.check_call(
[
VENV_PYINSTALLER,
"--clean",
"--workpath", PYINSTALLER_TEMP,
"--distpath", PYINSTALLER_DIST,
# This is PyInstaller, so setting a
# different log level obviously breaks it :-)
# "--log-level", "WARN",
spec
]
)
# Test if it works at all O:-)
executable = join(PYINSTALLER_DIST, tool)
if platform.system() == "Windows":
executable += ".exe"
print("> %s --version" % executable)
subprocess.check_call([executable, "--version"])
archive.add(executable, os.path.basename(executable))
print("Packed {}.".format(archive_name()))
@cli.command("upload-release")
@click.option('--username', prompt=True)
@click.password_option(confirmation_prompt=False)
@click.option('--repository', default="pypi")
def upload_release(username, password, repository):
"""
Upload wheels to PyPI
"""
    filename = wheel_name()
    print("Uploading {} to {}...".format(filename, repository))
    subprocess.check_call([
        "twine",
        "upload",
        "-u", username,
        "-p", password,
        "-r", repository,
        join(DIST_DIR, filename)
    ])
@cli.command("upload-snapshot")
@click.option("--host", envvar="SNAPSHOT_HOST", prompt=True)
@click.option("--port", envvar="SNAPSHOT_PORT", type=int, default=22)
@click.option("--user", envvar="SNAPSHOT_USER", prompt=True)
@click.option("--private-key", default=join(RELEASE_DIR, "rtool.pem"))
@click.option("--private-key-password", envvar="SNAPSHOT_PASS", prompt=True, hide_input=True)
@click.option("--wheel/--no-wheel", default=False)
@click.option("--bdist/--no-bdist", default=False)
def upload_snapshot(host, port, user, private_key, private_key_password, wheel, bdist):
"""
Upload snapshot to snapshot server
"""
with pysftp.Connection(host=host,
port=port,
username=user,
private_key=private_key,
private_key_pass=private_key_password) as sftp:
        # Only a single project is defined in this tool; iterate over it so
        # the upload steps below can refer to its config as `conf`.
        for conf in (project,):
dir_name = "snapshots/v{}".format(get_version())
sftp.makedirs(dir_name)
with sftp.cd(dir_name):
files = []
if wheel:
files.append(wheel_name())
if bdist and conf["tools"]:
files.append(archive_name())
for f in files:
local_path = join(DIST_DIR, f)
remote_filename = f.replace(get_version(), get_snapshot_version())
symlink_path = "../{}".format(f.replace(get_version(), "latest"))
# Delete old versions
old_version = f.replace(get_version(), "*")
for f_old in sftp.listdir():
if fnmatch.fnmatch(f_old, old_version):
print("Removing {}...".format(f_old))
sftp.remove(f_old)
# Upload new version
print("Uploading {} as {}...".format(f, remote_filename))
with click.progressbar(length=os.stat(local_path).st_size) as bar:
sftp.put(
local_path,
"." + remote_filename,
callback=lambda done, total: bar.update(done - bar.pos)
)
# We hide the file during upload.
sftp.rename("." + remote_filename, remote_filename)
# update symlink for the latest release
if sftp.lexists(symlink_path):
print("Removing {}...".format(symlink_path))
sftp.remove(symlink_path)
sftp.symlink("v{}/{}".format(get_version(), remote_filename), symlink_path)
@cli.command("wizard")
@click.option('--next-version', prompt=True)
@click.option('--username', prompt="PyPI Username")
@click.password_option(confirmation_prompt=False, prompt="PyPI Password")
@click.option('--repository', default="pypi")
@click.pass_context
def wizard(ctx, next_version, username, password, repository):
"""
Interactive Release Wizard
"""
is_dirty = git("status --porcelain")
if is_dirty:
raise RuntimeError("Repository is not clean.")
# update contributors file
ctx.invoke(contributors)
# Build test release
ctx.invoke(bdist)
try:
click.confirm("Please test the release now. Is it ok?", abort=True)
except click.Abort:
# undo changes
git("checkout CONTRIBUTORS")
raise
# Everything ok - let's ship it!
git("tag v{}".format(get_version()))
git("push --tags")
ctx.invoke(
upload_release,
username=username, password=password, repository=repository
)
click.confirm("Now please wait until CI has built binaries. Finished?")
# version bump commit
ctx.invoke(set_version, version=next_version)
git("commit -a -m \"bump version\"")
git("push")
click.echo("All done!")
if __name__ == "__main__":
cli()
| {
"content_hash": "95b98e110219933e2564acdde15c4c56",
"timestamp": "",
"source": "github",
"line_count": 373,
"max_line_length": 98,
"avg_line_length": 31.514745308310992,
"alnum_prop": 0.5659719268396427,
"repo_name": "ParthGanatra/mitmproxy",
"id": "6177b3f331bce404034d7aab9ad8cd156f11f676",
"size": "11777",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "release/rtool.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "410"
},
{
"name": "CSS",
"bytes": "211484"
},
{
"name": "HTML",
"bytes": "57472"
},
{
"name": "JavaScript",
"bytes": "1755960"
},
{
"name": "Python",
"bytes": "1190792"
},
{
"name": "Shell",
"bytes": "3990"
}
],
"symlink_target": ""
} |
"""
Test suite for SocketServer.py.
"""
import contextlib
import imp
import os
import select
import signal
import socket
import errno
import tempfile
import unittest
import SocketServer
import test.test_support
from test.test_support import reap_children, reap_threads, verbose
try:
import threading
except ImportError:
threading = None
test.test_support.requires("network")
TEST_STR = "hello world\n"
HOST = test.test_support.HOST
HAVE_UNIX_SOCKETS = hasattr(socket, "AF_UNIX")
requires_unix_sockets = unittest.skipUnless(HAVE_UNIX_SOCKETS,
'requires Unix sockets')
HAVE_FORKING = hasattr(os, "fork") and os.name != "os2"
requires_forking = unittest.skipUnless(HAVE_FORKING, 'requires forking')
def signal_alarm(n):
"""Call signal.alarm when it exists (i.e. not on Windows)."""
if hasattr(signal, 'alarm'):
signal.alarm(n)
# Remember real select() to avoid interferences with mocking
_real_select = select.select
def receive(sock, n, timeout=20):
r, w, x = _real_select([sock], [], [], timeout)
if sock in r:
return sock.recv(n)
else:
raise RuntimeError, "timed out on %r" % (sock,)
if HAVE_UNIX_SOCKETS:
class ForkingUnixStreamServer(SocketServer.ForkingMixIn,
SocketServer.UnixStreamServer):
pass
class ForkingUnixDatagramServer(SocketServer.ForkingMixIn,
SocketServer.UnixDatagramServer):
pass
@contextlib.contextmanager
def simple_subprocess(testcase):
pid = os.fork()
if pid == 0:
# Don't raise an exception; it would be caught by the test harness.
os._exit(72)
yield None
pid2, status = os.waitpid(pid, 0)
testcase.assertEqual(pid2, pid)
testcase.assertEqual(72 << 8, status)
@unittest.skipUnless(threading, 'Threading required for this test.')
class SocketServerTest(unittest.TestCase):
"""Test all socket servers."""
def setUp(self):
signal_alarm(60) # Kill deadlocks after 60 seconds.
self.port_seed = 0
self.test_files = []
def tearDown(self):
signal_alarm(0) # Didn't deadlock.
reap_children()
for fn in self.test_files:
try:
os.remove(fn)
except os.error:
pass
self.test_files[:] = []
def pickaddr(self, proto):
if proto == socket.AF_INET:
return (HOST, 0)
else:
# XXX: We need a way to tell AF_UNIX to pick its own name
# like AF_INET provides port==0.
dir = None
if os.name == 'os2':
dir = '\socket'
fn = tempfile.mktemp(prefix='unix_socket.', dir=dir)
if os.name == 'os2':
# AF_UNIX socket names on OS/2 require a specific prefix
# which can't include a drive letter and must also use
# backslashes as directory separators
if fn[1] == ':':
fn = fn[2:]
if fn[0] in (os.sep, os.altsep):
fn = fn[1:]
if os.sep == '/':
fn = fn.replace(os.sep, os.altsep)
else:
fn = fn.replace(os.altsep, os.sep)
self.test_files.append(fn)
return fn
def make_server(self, addr, svrcls, hdlrbase):
class MyServer(svrcls):
def handle_error(self, request, client_address):
self.close_request(request)
self.server_close()
raise
class MyHandler(hdlrbase):
def handle(self):
line = self.rfile.readline()
self.wfile.write(line)
if verbose: print "creating server"
server = MyServer(addr, MyHandler)
self.assertEqual(server.server_address, server.socket.getsockname())
return server
@reap_threads
def run_server(self, svrcls, hdlrbase, testfunc):
server = self.make_server(self.pickaddr(svrcls.address_family),
svrcls, hdlrbase)
# We had the OS pick a port, so pull the real address out of
# the server.
addr = server.server_address
if verbose:
print "server created"
print "ADDR =", addr
print "CLASS =", svrcls
t = threading.Thread(
name='%s serving' % svrcls,
target=server.serve_forever,
# Short poll interval to make the test finish quickly.
# Time between requests is short enough that we won't wake
# up spuriously too many times.
kwargs={'poll_interval':0.01})
t.daemon = True # In case this function raises.
t.start()
if verbose: print "server running"
for i in range(3):
if verbose: print "test client", i
testfunc(svrcls.address_family, addr)
if verbose: print "waiting for server"
server.shutdown()
t.join()
if verbose: print "done"
def stream_examine(self, proto, addr):
s = socket.socket(proto, socket.SOCK_STREAM)
s.connect(addr)
s.sendall(TEST_STR)
buf = data = receive(s, 100)
while data and '\n' not in buf:
data = receive(s, 100)
buf += data
self.assertEqual(buf, TEST_STR)
s.close()
def dgram_examine(self, proto, addr):
s = socket.socket(proto, socket.SOCK_DGRAM)
s.sendto(TEST_STR, addr)
buf = data = receive(s, 100)
while data and '\n' not in buf:
data = receive(s, 100)
buf += data
self.assertEqual(buf, TEST_STR)
s.close()
def test_TCPServer(self):
self.run_server(SocketServer.TCPServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
def test_ThreadingTCPServer(self):
self.run_server(SocketServer.ThreadingTCPServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
@requires_forking
def test_ForkingTCPServer(self):
with simple_subprocess(self):
self.run_server(SocketServer.ForkingTCPServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
@requires_unix_sockets
def test_UnixStreamServer(self):
self.run_server(SocketServer.UnixStreamServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
@requires_unix_sockets
def test_ThreadingUnixStreamServer(self):
self.run_server(SocketServer.ThreadingUnixStreamServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
@requires_unix_sockets
@requires_forking
def test_ForkingUnixStreamServer(self):
with simple_subprocess(self):
self.run_server(ForkingUnixStreamServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
def test_UDPServer(self):
self.run_server(SocketServer.UDPServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
def test_ThreadingUDPServer(self):
self.run_server(SocketServer.ThreadingUDPServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
@requires_forking
def test_ForkingUDPServer(self):
with simple_subprocess(self):
self.run_server(SocketServer.ForkingUDPServer,
SocketServer.DatagramRequestHandler,
self.dgram_examine)
@contextlib.contextmanager
def mocked_select_module(self):
"""Mocks the select.select() call to raise EINTR for first call"""
old_select = select.select
class MockSelect:
def __init__(self):
self.called = 0
def __call__(self, *args):
self.called += 1
if self.called == 1:
# raise the exception on first call
raise select.error(errno.EINTR, os.strerror(errno.EINTR))
else:
# Return real select value for consecutive calls
return old_select(*args)
select.select = MockSelect()
try:
yield select.select
finally:
select.select = old_select
def test_InterruptServerSelectCall(self):
with self.mocked_select_module() as mock_select:
pid = self.run_server(SocketServer.TCPServer,
SocketServer.StreamRequestHandler,
self.stream_examine)
# Make sure select was called again:
self.assertGreater(mock_select.called, 1)
# Alas, on Linux (at least) recvfrom() doesn't return a meaningful
# client address so this cannot work:
# @requires_unix_sockets
# def test_UnixDatagramServer(self):
# self.run_server(SocketServer.UnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
#
# @requires_unix_sockets
# def test_ThreadingUnixDatagramServer(self):
# self.run_server(SocketServer.ThreadingUnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
#
# @requires_unix_sockets
# @requires_forking
# def test_ForkingUnixDatagramServer(self):
# self.run_server(SocketServer.ForkingUnixDatagramServer,
# SocketServer.DatagramRequestHandler,
# self.dgram_examine)
@reap_threads
def test_shutdown(self):
# Issue #2302: shutdown() should always succeed in making an
# other thread leave serve_forever().
class MyServer(SocketServer.TCPServer):
pass
class MyHandler(SocketServer.StreamRequestHandler):
pass
threads = []
for i in range(20):
s = MyServer((HOST, 0), MyHandler)
t = threading.Thread(
name='MyServer serving',
target=s.serve_forever,
kwargs={'poll_interval':0.01})
t.daemon = True # In case this function raises.
threads.append((t, s))
for t, s in threads:
t.start()
s.shutdown()
for t, s in threads:
t.join()
def test_main():
if imp.lock_held():
# If the import lock is held, the threads will hang
raise unittest.SkipTest("can't run when import lock is held")
test.test_support.run_unittest(SocketServerTest)
if __name__ == "__main__":
test_main()
| {
"content_hash": "1854ccf9e4ff968b460774888dbf8388",
"timestamp": "",
"source": "github",
"line_count": 326,
"max_line_length": 77,
"avg_line_length": 33.65337423312884,
"alnum_prop": 0.571780147662018,
"repo_name": "albertjan/pypyjs",
"id": "83f5e3f72074920231ca9313adaf6b9a32611367",
"size": "10971",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "website/js/pypy.js-0.2.0/lib/modules/test/test_socketserver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4486"
},
{
"name": "HTML",
"bytes": "5137"
},
{
"name": "JavaScript",
"bytes": "40140"
},
{
"name": "Makefile",
"bytes": "5929"
},
{
"name": "Python",
"bytes": "14932329"
}
],
"symlink_target": ""
} |
from nexus import settings,ProjectManager,Job
from nexus import generate_physical_system
from nexus import loop,linear,vmc,dmc
from qmcpack_calculations import standard_qmc
#general settings for nexus
settings(
pseudo_dir = './pseudopotentials',# directory with all pseudopotentials
sleep = 3, # check on runs every 'sleep' seconds
generate_only = 0, # only make input files
status_only = 0, # only show status of runs
machine = 'node16', # local machine is 16 core workstation
)
#generate the graphene physical system
graphene = generate_physical_system(
structure = 'graphite_aa', # graphite keyword
cell = 'hex', # hexagonal cell shape
tiling = (2,2,1), # tiling of primitive cell
constants = (2.462,10.0), # a,c constants
units = 'A', # in Angstrom
kgrid = (1,1,1), # Monkhorst-Pack grid
kshift = (.5,.5,.5), # and shift
C = 4 # C has 4 valence electrons
)
#generate the simulations for the qmc workflow
qsims = standard_qmc(
# subdirectory of runs
directory = 'graphene_test',
# description of the physical system
system = graphene,
pseudos = ['C.BFD.upf', # pwscf PP file
'C.BFD.xml'], # qmcpack PP file
# job parameters
scfjob = Job(cores=16), # cores to run scf
nscfjob = Job(cores=16), # cores to run non-scf
optjob = Job(cores=16), # cores for optimization
qmcjob = Job(cores=16), # cores for qmc
# dft parameters (pwscf)
functional = 'lda', # dft functional
ecut = 150 , # planewave energy cutoff (Ry)
conv_thr = 1e-6, # scf convergence threshold (Ry)
mixing_beta = .7, # charge mixing factor
scf_kgrid = (8,8,8), # MP grid of primitive cell
scf_kshift = (1,1,1), # to converge charge density
# qmc wavefunction parameters (qmcpack)
meshfactor = 1.0, # bspline grid spacing, larger is finer
jastrows = [
dict(type = 'J1', # 1-body
function = 'bspline', # bspline jastrow
size = 8), # with 8 knots
dict(type = 'J2', # 2-body
function = 'bspline', # bspline jastrow
size = 8) # with 8 knots
],
# opt parameters (qmcpack)
perform_opt = True, # produce optimal jastrows
block_opt = False, # if true, ignores opt and qmc
skip_submit_opt = False, # if true, writes input files, does not run opt
opt_kpoint = 'L', # supercell k-point for the optimization
opt_calcs = [ # qmcpack input parameters for opt
loop(max = 4, # No. of loop iterations
qmc = linear( # linearized optimization method
energy = 0.0, # cost function
unreweightedvariance = 1.0, # is all unreweighted variance
reweightedvariance = 0.0, # no energy or r.w. var.
timestep = 0.5, # vmc timestep (1/Ha)
warmupsteps = 100, # MC steps before data collected
samples = 16000,# samples used for cost function
stepsbetweensamples = 10, # steps between uncorr. samples
blocks = 10, # ignore this
minwalkers = 0.1,# and this
bigchange = 15.0,# and this
alloweddifference = 1e-4 # and this, for now
)
),
loop(max = 4,
qmc = linear( # same as above, except
energy = 0.5, # cost function
unreweightedvariance = 0.0, # is 50/50 energy and r.w. var.
reweightedvariance = 0.5,
timestep = 0.5,
warmupsteps = 100,
samples = 64000,# and there are more samples
stepsbetweensamples = 10,
blocks = 10,
minwalkers = 0.1,
bigchange = 15.0,
alloweddifference = 1.0e-4
)
)
],
# qmc parameters (qmcpack)
block_qmc = False, # if true, ignores qmc
skip_submit_qmc = False, # if true, writes input file, does not run qmc
qmc_calcs = [ # qmcpack input parameters for qmc
vmc( # vmc parameters
timestep = 0.5, # vmc timestep (1/Ha)
warmupsteps = 100, # No. of MC steps before data is collected
blocks = 200, # No. of data blocks recorded in scalar.dat
steps = 10, # No. of steps per block
substeps = 3, # MC steps taken w/o computing E_local
samplesperthread = 40 # No. of dmc walkers per thread
),
dmc( # dmc parameters
timestep = 0.01, # dmc timestep (1/Ha)
warmupsteps = 50, # No. of MC steps before data is collected
blocks = 400, # No. of data blocks recorded in scalar.dat
steps = 5, # No. of steps per block
nonlocalmoves = True # use Casula's T-moves
), # (retains variational principle for NLPP's)
],
# return a list or object containing simulations
return_list = False
)
#the project manager monitors all runs
pm = ProjectManager()
# give it the simulation objects
pm.add_simulations(qsims.list())
# run all the simulations
pm.run_project()
# print out the total energy
performed_runs = not settings.generate_only and not settings.status_only
if performed_runs:
# get the qmcpack analyzer object
# it contains all of the statistically analyzed data from the run
qa = qsims.qmc.load_analyzer_image()
# get the local energy from dmc.dat
le = qa.dmc[1].dmc.LocalEnergy # dmc series 1, dmc.dat, local energy
# print the total energy for the 8 atom system
print 'The DMC ground state energy for graphene is:'
print ' {0} +/- {1} Ha'.format(le.mean,le.error)
#end if
| {
"content_hash": "9adfeb10630857cc84238e50b6c142ec",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 79,
"avg_line_length": 44.601351351351354,
"alnum_prop": 0.5149219815179519,
"repo_name": "habanero-rice/hclib",
"id": "b476114712a3de1a9d25281b6d26a07e4cc11856",
"size": "6625",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/performance-regression/full-apps/qmcpack/nexus/examples/simple_qmc/graphene/graphene_example.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "64597"
},
{
"name": "C",
"bytes": "10643011"
},
{
"name": "C++",
"bytes": "15721482"
},
{
"name": "CMake",
"bytes": "257955"
},
{
"name": "CSS",
"bytes": "20536"
},
{
"name": "Cuda",
"bytes": "630404"
},
{
"name": "Fortran",
"bytes": "260512"
},
{
"name": "HTML",
"bytes": "470710"
},
{
"name": "M4",
"bytes": "4028"
},
{
"name": "MATLAB",
"bytes": "6509"
},
{
"name": "Makefile",
"bytes": "260753"
},
{
"name": "Objective-C",
"bytes": "1671681"
},
{
"name": "Perl",
"bytes": "183420"
},
{
"name": "PostScript",
"bytes": "4546458"
},
{
"name": "Python",
"bytes": "1734658"
},
{
"name": "Raku",
"bytes": "183"
},
{
"name": "Roff",
"bytes": "5051370"
},
{
"name": "Shell",
"bytes": "113750"
},
{
"name": "TeX",
"bytes": "205379"
},
{
"name": "xBase",
"bytes": "5062"
}
],
"symlink_target": ""
} |
"""
796 API Trading Example/DEMO in Python
After getToken.
"""
import urllib.request, urllib.error, urllib.parse
import time
import base64
import hashlib
import hmac
import http.client
import json
import os
def get_796_token(appid,apikey,secretkey):
timestamp = time.time()#"1414142919" #time.time()
params = {"apikey": apikey, "appid": appid, "secretkey": secretkey, "timestamp": str(timestamp)}
params = sorted(iter(params.items()), key=lambda d: d[0], reverse=False)
message = urllib.parse.urlencode(params)
print("secretkey=",secretkey)
print("message=",message)
s = hmac.new(secretkey.encode('utf-8'),
message.encode('utf-8'),
digestmod=hashlib.sha1).hexdigest()
print("hex=",s)
sig = base64.b64encode(s.encode('utf-8'))
print("sig=",sig)
payload = urllib.parse.urlencode({'appid': appid, 'apikey': apikey, 'timestamp': timestamp, 'sig': sig})
c = http.client.HTTPSConnection('796.com')
c.request("GET", "/oauth/token?"+payload)
r = c.getresponse()
if r.status == 200:
data = r.read()
jsonDict = json.loads(data.decode('utf-8'));
errno = jsonDict['errno']
if errno=="0":
return jsonDict['data']['access_token']
return None
def getUserInfo(sAccessToken):
sUrl = "/v1/user/get_info?access_token=%s" % (sAccessToken)
c = http.client.HTTPSConnection('796.com')
c.request("GET", sUrl)
r = c.getresponse()
print("r.status=",r.status)
print(r.read())
def getUserInfo1(sAccessToken):
sUrl = "https://796.com/v1/user/get_info?access_token=%s" % (sAccessToken)
response = urllib.request.urlopen(sUrl)
print(response.read())
def getUserInfo2(sAccessToken):
import requests
sUrl = "https://796.com/v1/user/get_info?access_token=%s" % (sAccessToken)
response = requests.get(sUrl, timeout=20)
print(response.content)
def getUserInfoError(sAccessToken):
"""
    May return {u'msg': u'Access_token repealed', u'errno': u'-102', u'data': []}
"""
import urllib.request, urllib.parse, urllib.error
payload = urllib.parse.urlencode({'access_token': sAccessToken})
c = http.client.HTTPSConnection('796.com')
c.request("GET", "/v1/user/get_info?"+payload)
r = c.getresponse()
data = r.read()
jsonDict = json.loads(data.decode('utf-8'));
print(jsonDict)
def testHMacSHA(secretkey,message):
print("secretkey=",secretkey)
print("message=",message)
s = hmac.new(secretkey, message.encode('utf-8'),
digestmod=hashlib.sha1).hexdigest()
print("hex=",s)
if __name__ == "__main__":
app_id = os.environ.get("APP_ID_796", None)
api_key = os.environ.get("API_KEY_796", None)
api_secret = os.environ.get("API_SECRET_796", None)
testHMacSHA(b"HF94bR940e1d9YZwfgickG5HR07SFJQGscgO+E3vFPQGwSzyGtUQLxIh6blv",
"apikey=5999a1ce-4312-8a3c-75a5-327c-f5cf5251&appid=11040&secretkey=HF94bR940e1d9YZwfgickG5HR07SFJQGscgO%2BE3vFPQGwSzyGtUQLxIh6blv×tamp=1414142919")
access_token = get_796_token(appid = app_id,
apikey=api_key,
secretkey=api_secret)
print("access_token=",access_token)
getUserInfo(access_token)
getUserInfo1(access_token)
getUserInfo2(access_token)
getUserInfoError(access_token)
| {
"content_hash": "aa1e46563558e0a9c98ae234d49b4240",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 161,
"avg_line_length": 33.71,
"alnum_prop": 0.6481756155443489,
"repo_name": "frrp/cryptoexchange",
"id": "198f45529cc89d3e67ff73bc2433348056ca8cc8",
"size": "3417",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cryptoexchange/api796.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "66504"
}
],
"symlink_target": ""
} |
import unittest
from ggrc.utils import structures
class TestCaseInsensitiveDict(unittest.TestCase):
def setUp(self):
self.ci_dict = structures.CaseInsensitiveDict()
def test_basic_dict_functions(self):
self.ci_dict["Hello"] = "World"
self.assertEqual(self.ci_dict["Hello"], "World")
self.ci_dict["empty"] = []
self.assertEqual(self.ci_dict["empty"], [])
self.ci_dict["EMpTY"].append(56)
self.assertEqual(self.ci_dict["EmpTy"], [56])
def get_(dict_, key):
return dict_[key]
self.assertRaises(KeyError, get_, self.ci_dict, "non existent key")
self.assertRaises(KeyError, get_, self.ci_dict, None)
def test_in_function(self):
self.ci_dict["Hello"] = "World"
self.assertTrue("Hello" in self.ci_dict)
self.assertTrue("hello" in self.ci_dict)
self.assertTrue("he" not in self.ci_dict)
def test_items(self):
"""Test that items return sames cases as they were set."""
self.ci_dict["Hello"] = "World"
self.ci_dict["HELLO"] = "World"
self.ci_dict["fOO"] = "bar"
self.assertEqual(
sorted(self.ci_dict.items()),
sorted([("HELLO", "World"), ("fOO", "bar")])
)
def test_lower_items(self):
"""Test that lower_items does not change values."""
self.ci_dict["Hello"] = "World"
self.ci_dict["FOO"] = "BAR"
self.assertEqual(
sorted(self.ci_dict.lower_items()),
sorted([("hello", "World"), ("foo", "BAR")])
)
class TestCaseInsensitiveDefDict(unittest.TestCase):
def setUp(self):
self.ci_dict = structures.CaseInsensitiveDefaultDict(list)
def test_basic_dict_functions(self):
self.ci_dict["Hello"] = "World"
self.assertEqual(self.ci_dict["Hello"], "World")
self.assertEqual(self.ci_dict["empty"], [])
self.ci_dict["empty"].append(55)
self.assertEqual(self.ci_dict["empty"], [55])
self.ci_dict["EMpTY"].append(56)
self.assertEqual(self.ci_dict["EmpTy"], [55, 56])
def test_in_function(self):
self.ci_dict["Hello"] = "World"
self.assertTrue("Hello" in self.ci_dict)
self.assertTrue("hello" in self.ci_dict)
self.assertTrue("he" not in self.ci_dict)
def test_items(self):
"""Test that items return sames cases as they were set."""
self.ci_dict["Hello"] = "World"
self.ci_dict["HELLO"] = "World"
self.ci_dict["fOO"] = "bar"
self.assertEqual(
sorted(self.ci_dict.items()),
sorted([("HELLO", "World"), ("fOO", "bar")])
)
def test_lower_items(self):
"""Test that lower_items does not change values."""
self.ci_dict["Hello"] = "World"
self.ci_dict["FOO"] = "BAR"
self.assertEqual(
sorted(self.ci_dict.lower_items()),
sorted([("hello", "World"), ("foo", "BAR")])
)
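# A minimal sketch of the behavior exercised by the tests above, added for
# reference only; it is NOT the actual ggrc.utils.structures implementation
# and assumes plain string keys. Lookups go through a lowered key while the
# original casing of the most recent write is kept for items().
class _CaseInsensitiveDictSketch(object):
  def __init__(self):
    self._store = {}  # lowered key -> (original key, value)
  def __setitem__(self, key, value):
    self._store[key.lower()] = (key, value)
  def __getitem__(self, key):
    return self._store[key.lower()][1]
  def __contains__(self, key):
    return key.lower() in self._store
  def items(self):
    # Values are (original key, value) pairs, so the stored casing survives.
    return self._store.values()
  def lower_items(self):
    return [(lowered, pair[1]) for lowered, pair in self._store.items()]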
| {
"content_hash": "6194b8fb25aa73deb5460ef04cee1f61",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 71,
"avg_line_length": 29.75,
"alnum_prop": 0.6214833759590793,
"repo_name": "edofic/ggrc-core",
"id": "3a6e7615e8156da06ad313d8f8cdfdba717bf760",
"size": "2850",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "test/unit/ggrc/utils/test_structures.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "191076"
},
{
"name": "Cucumber",
"bytes": "136322"
},
{
"name": "HTML",
"bytes": "1069698"
},
{
"name": "JavaScript",
"bytes": "1704619"
},
{
"name": "Makefile",
"bytes": "7103"
},
{
"name": "Mako",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "2385925"
},
{
"name": "Shell",
"bytes": "30802"
}
],
"symlink_target": ""
} |
import unittest
import mock
from ...management.rules_configs import RulesConfigs
class TestRulesConfigs(unittest.TestCase):
def test_init_with_optionals(self):
t = RulesConfigs(
domain="domain", token="jwttoken", telemetry=False, timeout=(10, 2)
)
self.assertEqual(t.client.options.timeout, (10, 2))
telemetry_header = t.client.base_headers.get("Auth0-Client", None)
self.assertEqual(telemetry_header, None)
@mock.patch("auth0.v3.management.rules_configs.RestClient")
def test_all(self, mock_rc):
mock_instance = mock_rc.return_value
c = RulesConfigs(domain="domain", token="jwttoken")
c.all()
args, kwargs = mock_instance.get.call_args
self.assertEqual("https://domain/api/v2/rules-configs", args[0])
@mock.patch("auth0.v3.management.rules_configs.RestClient")
def test_unset(self, mock_rc):
mock_instance = mock_rc.return_value
c = RulesConfigs(domain="domain", token="jwttoken")
c.unset("an-id")
mock_instance.delete.assert_called_with(
"https://domain/api/v2/rules-configs/an-id"
)
@mock.patch("auth0.v3.management.rules_configs.RestClient")
def test_set(self, mock_rc):
mock_instance = mock_rc.return_value
g = RulesConfigs(domain="domain", token="jwttoken")
g.set("key", "MY_RULES_CONFIG_VALUES")
args, kwargs = mock_instance.put.call_args
self.assertEqual("https://domain/api/v2/rules-configs/key", args[0])
| {
"content_hash": "88f577b8728e347be2dd650c4d2d79f6",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 32.125,
"alnum_prop": 0.6465629053177692,
"repo_name": "auth0/auth0-python",
"id": "bfe08293303957c5c6ed6fc301f70a6e54207555",
"size": "1542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "auth0/v3/test/management/test_rules_configs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "437970"
},
{
"name": "Shell",
"bytes": "232"
}
],
"symlink_target": ""
} |
from django import forms
from .models import Link
class LinkCreationForm(forms.Form):
link = forms.URLField(label='url to be shortened')
| {
"content_hash": "c088144f05c31cc3c70ed7ba936ae07e",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 54,
"avg_line_length": 16.22222222222222,
"alnum_prop": 0.7465753424657534,
"repo_name": "xpostudio4/django-url-shortener",
"id": "bcbe673bfdab925c4e8dafcb335653f86af9f4eb",
"size": "146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shortener/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5255"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals, absolute_import, division
from armet.connectors.flask import resources as flask_resources
from armet.connectors.sqlalchemy import resources as sqlalchemy_resources
from alchemist import db
__all__ = [
'Resource',
'ModelResource',
]
class Resource(flask_resources.Resource):
@property
def session(self):
return db.session
def route(self, *args, **kwargs):
try:
# Continue on with the cycle.
result = super(Resource, self).route(*args, **kwargs)
# Commit the session.
db.session.commit()
# Return the result.
return result
except:
# Something occurred; rollback the session.
db.session.rollback()
# Re-raise the exception.
raise
class ModelResourceOptions(object):
def __init__(self, meta, name, bases):
#! SQLAlchemy session used to perform operations on the models.
self.session = db.session
class ModelResource(sqlalchemy_resources.ModelResource):
def route(self, *args, **kwargs):
return Resource.route(self, *args, **kwargs)
| {
"content_hash": "4f449c5d99008b9bc3a663342c805868",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 73,
"avg_line_length": 25.106382978723403,
"alnum_prop": 0.6322033898305085,
"repo_name": "concordusapps/alchemist-armet",
"id": "fdcf7b4c83246cae774b42afcb8bc4c9bc596d62",
"size": "1204",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alchemist_armet/resources.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3764"
}
],
"symlink_target": ""
} |
"""
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| {
"content_hash": "c6ca508ec417a8ea949d4c7c8ae7d125",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 79,
"avg_line_length": 48.61904761904762,
"alnum_prop": 0.7894221351616063,
"repo_name": "asnelzin/django-template",
"id": "9476749503f5f3051adad92c75e4339108ce697e",
"size": "1021",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "project_name/wsgi.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "2500"
},
{
"name": "Python",
"bytes": "8383"
}
],
"symlink_target": ""
} |
from linear_solver.core import solve_linear_system
| {
"content_hash": "5fcfd306a080143f5cd4398d616c4966",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 50,
"avg_line_length": 51,
"alnum_prop": 0.8431372549019608,
"repo_name": "tcmoore3/linear_solver",
"id": "683f40564e7c16bd41d29d97280f2aa9a12cf161",
"size": "51",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linear_solver/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "3814"
},
{
"name": "Python",
"bytes": "6462"
}
],
"symlink_target": ""
} |
print "deprecated since version 1.1.0 of mtools. Use 'mloginfo <logfile> --distinct' instead."
| {
"content_hash": "973d493f1bf57b2adbbcce560b7ddce5",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 94,
"avg_line_length": 95,
"alnum_prop": 0.7473684210526316,
"repo_name": "corymintz/mtools",
"id": "9a59e67ca00c3efe41fc7c77112b0cf98258381e",
"size": "118",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mtools/mlogdistinct/mlogdistinct.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5256"
},
{
"name": "HTML",
"bytes": "1947058"
},
{
"name": "JavaScript",
"bytes": "16468"
},
{
"name": "Python",
"bytes": "323672"
}
],
"symlink_target": ""
} |
"""
Class representing a Cloudstack instance. This module uses the csapi
library which calls the cloudstack API. For more information refer to
the Cloudstack documentation at https://github.com/syed/PerfKitBenchmarker.git
"""
import logging
from perfkitbenchmarker import flags
from perfkitbenchmarker import linux_virtual_machine as linux_vm
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers.cloudstack import cloudstack_disk
from perfkitbenchmarker.providers.cloudstack import cloudstack_network
from perfkitbenchmarker.providers.cloudstack import util
from perfkitbenchmarker import providers
UBUNTU_IMAGE = 'Ubuntu 14.04.2 HVM base (64bit)'
RHEL_IMAGE = 'CentOS 7 HVM base (64bit)'
FLAGS = flags.FLAGS
class CloudStackVirtualMachine(virtual_machine.BaseVirtualMachine):
"""Object representing a CloudStack Virtual Machine."""
CLOUD = providers.CLOUDSTACK
DEFAULT_ZONE = 'QC-1'
DEFAULT_MACHINE_TYPE = '1vCPU.1GB'
DEFAULT_IMAGE = 'Ubuntu 14.04.2 HVM base (64bit)'
DEFAULT_USER_NAME = 'cca-user'
DEFAULT_PROJECT = 'cloudops-Engineering'
def __init__(self, vm_spec):
"""Initialize a CloudStack virtual machine.
Args:
vm_spec: virtual_machine.BaseVirtualMachineSpec object of the vm.
"""
super(CloudStackVirtualMachine, self).__init__(vm_spec)
self.network = cloudstack_network.CloudStackNetwork.GetNetwork(self)
self.cs = util.CsClient(FLAGS.CS_API_URL,
FLAGS.CS_API_KEY,
FLAGS.CS_API_SECRET)
self.project_id = None
if FLAGS.project:
project = self.cs.get_project(FLAGS.project)
assert project, "Project not found"
self.project_id = project['id']
zone = self.cs.get_zone(self.zone)
assert zone, "Zone not found"
self.zone_id = zone['id']
self.user_name = self.DEFAULT_USER_NAME
self.image = self.image or self.DEFAULT_IMAGE
self.disk_counter = 0
@vm_util.Retry(max_retries=3)
def _CreateDependencies(self):
"""Create VM dependencies."""
# Create an ssh keypair
with open(self.ssh_public_key) as keyfd:
self.ssh_keypair_name = 'perfkit-sshkey-%s' % FLAGS.run_uri
pub_key = keyfd.read()
if not self.cs.get_ssh_keypair(self.ssh_keypair_name, self.project_id):
res = self.cs.register_ssh_keypair(self.ssh_keypair_name,
pub_key,
self.project_id)
assert res, "Unable to create ssh keypair"
# Allocate a public ip
network_id = self.network.id
if self.network.is_vpc:
network_id = self.network.vpc_id
public_ip = self.cs.alloc_public_ip(network_id, self.network.is_vpc)
if public_ip:
self.ip_address = public_ip['ipaddress']
self.ip_address_id = public_ip['id']
else:
logging.warn("Unable to allocate public IP")
def _DeleteDependencies(self):
"""Delete VM dependencies."""
# Remove the keypair
if self.cs.get_ssh_keypair(self.ssh_keypair_name, self.project_id):
self.cs.unregister_ssh_keypair(self.ssh_keypair_name, self.project_id)
# Remove the IP
if self.ip_address_id:
self.cs.release_public_ip(self.ip_address_id)
@vm_util.Retry(max_retries=3)
def _Create(self):
"""Create a Cloudstack VM instance."""
service_offering = self.cs.get_serviceoffering(self.machine_type)
assert service_offering, "No service offering found"
template = self.cs.get_template(self.image, self.project_id)
assert template, "No template found"
network_id = self.network.id
vm = None
vm = self.cs.create_vm(self.name,
self.zone_id,
service_offering['id'],
template['id'],
[network_id],
self.ssh_keypair_name,
self.project_id)
assert vm, "Unable to create VM"
self._vm = vm
self.id = vm['virtualmachine']['id']
@vm_util.Retry(max_retries=3)
def _PostCreate(self):
"""Get the instance's data."""
    # associate the public IP created with the VM id
network_interface = self._vm['virtualmachine']['nic'][0]
self.internal_ip = network_interface['ipaddress']
# Create a Static NAT rule
if not self.cs.snat_rule_exists(self.ip_address_id, self.id):
snat_rule = self.cs.enable_static_nat(self.ip_address_id,
self.id,
self.network.id)
assert snat_rule, "Unable to create static NAT"
def _Delete(self):
"""Delete the VM instance."""
# Delete the VM
self.cs.delete_vm(self.id)
def _Exists(self):
"""Returns true if the VM exists."""
    # Check if the VM exists
vm = self.cs.get_virtual_machine(self.name, self.project_id)
if vm and 'id' in vm:
return True
return False
def CreateScratchDisk(self, disk_spec):
"""Create a VM's scratch disk.
Args:
disk_spec: virtual_machine.BaseDiskSpec object of the disk.
"""
    # Cloudstack doesn't really have a concept of local or remote disks. A VM
    # starts with one disk, and all other volumes have to be attached via the
    # API.
self.disks = []
for i in xrange(disk_spec.num_striped_disks):
name = 'disk-%s-%d-%d' % (self.name, i + 1, self.disk_counter)
scratch_disk = cloudstack_disk.CloudStackDisk(disk_spec,
name,
self.zone_id,
self.project_id)
self.disks.append(scratch_disk)
self.disk_counter += 1
self._CreateScratchDiskFromDisks(disk_spec, self.disks)
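    # Illustration only (the VM name is hypothetical): for a VM named
    # "pkb-vm-0", the loop above creates volumes "disk-pkb-vm-0-1-0",
    # "disk-pkb-vm-0-2-1", and so on, since the middle index counts striped
    # disks from 1 while disk_counter keeps increasing across calls.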
class DebianBasedCloudStackVirtualMachine(CloudStackVirtualMachine,
linux_vm.DebianMixin):
DEFAULT_IMAGE = UBUNTU_IMAGE
class RhelBasedCloudStackVirtualMachine(CloudStackVirtualMachine,
linux_vm.RhelMixin):
DEFAULT_IMAGE = RHEL_IMAGE
| {
"content_hash": "cc4a96206809ac8180ca701919b057cc",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 79,
"avg_line_length": 31.14,
"alnum_prop": 0.6212267180475273,
"repo_name": "xiaolihope/PerfKitBenchmarker-1.7.0",
"id": "c5e5fa94fc259faa88d6ae8b6c5d6691b3421521",
"size": "6838",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "perfkitbenchmarker/providers/cloudstack/cloudstack_virtual_machine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "1547"
},
{
"name": "Python",
"bytes": "1727478"
},
{
"name": "Shell",
"bytes": "23457"
}
],
"symlink_target": ""
} |
"""
CLI interface for nova bare-metal management.
"""
import os
import sys
from oslo.config import cfg
from nova import config
from nova.openstack.common import cliutils
from nova.openstack.common import log as logging
from nova import version
from nova.virt.baremetal.db import migration as bmdb_migration
CONF = cfg.CONF
# Decorators for actions
def args(*args, **kwargs):
def _decorator(func):
func.__dict__.setdefault('args', []).insert(0, (args, kwargs))
return func
return _decorator
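# Illustrative note, not part of the original module: the @args decorator
# above only records argparse specs on the function object. For the decorated
# sync() defined below, for example,
#   sync.args == [(('--version',), {'dest': 'version', 'metavar': '<version>',
#                                   'help': 'Bare-metal Database version'})]
# and add_command_parsers() later replays those specs into a subparser.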
class BareMetalDbCommands(object):
"""Class for managing the bare-metal database."""
def __init__(self):
pass
@args('--version', dest='version', metavar='<version>',
help='Bare-metal Database version')
def sync(self, version=None):
"""Sync the database up to the most recent version."""
bmdb_migration.db_sync(version)
def version(self):
"""Print the current database version."""
v = bmdb_migration.db_version()
print(v)
# return for unittest
return v
CATEGORIES = {
'db': BareMetalDbCommands,
}
def methods_of(obj):
"""Get all callable methods of an object that don't start with underscore.
    Returns a list of tuples of the form (method_name, method).
"""
result = []
for i in dir(obj):
if callable(getattr(obj, i)) and not i.startswith('_'):
result.append((i, getattr(obj, i)))
return result
def add_command_parsers(subparsers):
parser = subparsers.add_parser('bash-completion')
parser.add_argument('query_category', nargs='?')
for category in CATEGORIES:
command_object = CATEGORIES[category]()
parser = subparsers.add_parser(category)
parser.set_defaults(command_object=command_object)
category_subparsers = parser.add_subparsers(dest='action')
for (action, action_fn) in methods_of(command_object):
parser = category_subparsers.add_parser(action)
action_kwargs = []
for args, kwargs in getattr(action_fn, 'args', []):
action_kwargs.append(kwargs['dest'])
kwargs['dest'] = 'action_kwarg_' + kwargs['dest']
parser.add_argument(*args, **kwargs)
parser.set_defaults(action_fn=action_fn)
parser.set_defaults(action_kwargs=action_kwargs)
parser.add_argument('action_args', nargs='*')
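# Illustration only (the program name and version value are made up): with the
# parsers built above, an invocation like
#   <prog> db sync --version 10
# selects the 'db' category, binds action_fn to BareMetalDbCommands.sync and
# action_kwarg_version to '10'; main() below then calls sync(version=u'10').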
category_opt = cfg.SubCommandOpt('category',
title='Command categories',
help='Available categories',
handler=add_command_parsers)
def main():
"""Parse options and call the appropriate class/method."""
CONF.register_cli_opt(category_opt)
try:
config.parse_args(sys.argv)
logging.setup("nova")
except cfg.ConfigFilesNotFoundError:
cfgfile = CONF.config_file[-1] if CONF.config_file else None
if cfgfile and not os.access(cfgfile, os.R_OK):
st = os.stat(cfgfile)
print(_("Could not read %s. Re-running with sudo") % cfgfile)
try:
os.execvp('sudo', ['sudo', '-u', '#%s' % st.st_uid] + sys.argv)
except Exception:
print(_('sudo failed, continuing as if nothing happened'))
print(_('Please re-run nova-manage as root.'))
return(2)
if CONF.category.name == "version":
print(version.version_string_with_package())
return(0)
if CONF.category.name == "bash-completion":
if not CONF.category.query_category:
print(" ".join(CATEGORIES.keys()))
elif CONF.category.query_category in CATEGORIES:
fn = CATEGORIES[CONF.category.query_category]
command_object = fn()
actions = methods_of(command_object)
print(" ".join([k for (k, v) in actions]))
return(0)
fn = CONF.category.action_fn
fn_args = [arg.decode('utf-8') for arg in CONF.category.action_args]
fn_kwargs = {}
for k in CONF.category.action_kwargs:
v = getattr(CONF.category, 'action_kwarg_' + k)
if v is None:
continue
if isinstance(v, basestring):
v = v.decode('utf-8')
fn_kwargs[k] = v
# call the action with the remaining arguments
# check arguments
try:
cliutils.validate_args(fn, *fn_args, **fn_kwargs)
except cliutils.MissingArgs as e:
print(fn.__doc__)
print(e)
return(1)
try:
fn(*fn_args, **fn_kwargs)
return(0)
except Exception:
print(_("Command failed, please check log for more info"))
raise
| {
"content_hash": "49c68c025b703972f814f23fbd5e33cc",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 79,
"avg_line_length": 30.545454545454547,
"alnum_prop": 0.5986394557823129,
"repo_name": "DirectXMan12/nova-hacking",
"id": "e28289f8d1f011af30df4b13d654a6cc0f4daab0",
"size": "7164",
"binary": false,
"copies": "4",
"ref": "refs/heads/feature_novnc_krb",
"path": "nova/cmd/baremetal_manage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "10361785"
},
{
"name": "Shell",
"bytes": "17485"
}
],
"symlink_target": ""
} |
from motorengine import Document, StringField, IntField, DateTimeField, \
ReferenceField
class BaseDocument(Document):
def to_dict(self):
data = super(Document, self).to_son()
data['id'] = self._id
return data
class School(BaseDocument):
name = StringField(required=True)
verifier = StringField()
class User(BaseDocument):
mobile = StringField(required=True)
password = StringField(required=True)
nickname = StringField(required=True)
gender = IntField(required=True, default=1)
description = StringField()
avatar_url = StringField()
school_id = ReferenceField(reference_document_type=School)
like_count = IntField(required=True, default=0)
follower_count = IntField(required=True, default=0)
following_count = IntField(required=True, default=0)
create_time = DateTimeField(required=True, auto_now_on_insert=True, auto_now_on_update=False)
def to_dict(self):
data = super(User, self).to_dict()
del data['password']
return data
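# Illustrative only (field values are made up): for a saved document such as
# User(mobile='555', password='secret', nickname='bob'), to_dict() returns the
# document fields plus the 'id' added by BaseDocument.to_dict(), with
# 'password' removed so it never leaks into API responses.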
| {
"content_hash": "bbeb58aa58d64c1c8a8b4852167bfcaa",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 97,
"avg_line_length": 29.13888888888889,
"alnum_prop": 0.6882745471877979,
"repo_name": "INAP-LABS/noc-orchestrator",
"id": "48c4cc2f840418dc110c4e82833f2fc5e1589b54",
"size": "1065",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "old_historico/sandbox/api_tornado/temp/tornaREST-master/data/collections.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "729"
},
{
"name": "Python",
"bytes": "82719"
},
{
"name": "Shell",
"bytes": "6417"
}
],
"symlink_target": ""
} |
"""Defines executor tasks handlers for MapReduce implementation."""
# pylint: disable=protected-access
# pylint: disable=g-bad-name
import datetime
import logging
import math
import os
import random
import sys
import time
import traceback
from mapreduce.third_party import simplejson
from google.appengine.ext import ndb
from google.appengine import runtime
from google.appengine.api import datastore_errors
from google.appengine.api import logservice
from google.appengine.api import modules
from google.appengine.api import taskqueue
from google.appengine.ext import db
from mapreduce import base_handler
from mapreduce import context
from mapreduce import errors
from mapreduce import input_readers
from mapreduce import model
from mapreduce import operation
from mapreduce import output_writers
from mapreduce import parameters
from mapreduce import util
from mapreduce.api import map_job
from mapreduce.api.map_job import shard_life_cycle
from google.appengine.runtime import apiproxy_errors
# pylint: disable=g-import-not-at-top
try:
import cloudstorage
# In 25 runtime, the above code will be scrubbed to import the stub version
  # of cloudstorage. All occurrences of the following if condition in the MR
  # codebase are to tell it apart.
  # TODO(user): Remove after 25 runtime MR is abandoned.
if hasattr(cloudstorage, "_STUB"):
cloudstorage = None
except ImportError:
cloudstorage = None # CloudStorage library not available
# A guide to logging.
# log.critical: messages user absolutely should see, e.g. failed job.
# log.error: exceptions during processing user data, or unexpected
# errors detected by mr framework.
# log.warning: errors mr framework knows how to handle.
# log.info: other expected events.
# Set of strings of various test-injected faults.
_TEST_INJECTED_FAULTS = set()
def _run_task_hook(hooks, method, task, queue_name):
"""Invokes hooks.method(task, queue_name).
Args:
hooks: A hooks.Hooks instance or None.
method: The name of the method to invoke on the hooks class e.g.
"enqueue_kickoff_task".
task: The taskqueue.Task to pass to the hook method.
queue_name: The name of the queue to pass to the hook method.
Returns:
True if the hooks.Hooks instance handled the method, False otherwise.
"""
if hooks is not None:
try:
getattr(hooks, method)(task, queue_name)
except NotImplementedError:
# Use the default task addition implementation.
return False
return True
return False
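# Illustrative sketch, not part of the original module: _run_task_hook above
# only requires that the hooks object expose methods with the names used later
# in this file (e.g. "enqueue_worker_task") taking (task, queue_name); raising
# NotImplementedError makes the framework fall back to its default task.add()
# path. The class below is an example only.
class _ExampleLoggingHooks(object):
  def enqueue_worker_task(self, task, queue_name):
    # Observe the task, then let the framework perform the actual enqueue.
    logging.info("About to enqueue %s on queue %s", task.name, queue_name)
    raise NotImplementedError()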
class MapperWorkerCallbackHandler(base_handler.HugeTaskHandler):
"""Callback handler for mapreduce worker task."""
# These directives instruct self.__return() how to set state and enqueue task.
_TASK_DIRECTIVE = util._enum(
# Task is running as expected.
PROCEED_TASK="proceed_task",
      # Need to retry task. Lock was NOT acquired when the error occurred.
      # Don't change payload or datastore.
      RETRY_TASK="retry_task",
      # Need to retry task. Lock was acquired when the error occurred.
# Don't change payload or datastore.
RETRY_SLICE="retry_slice",
# Drop the task (due to duplicated task). Must log permanent drop.
DROP_TASK="drop_task",
# See handlers.MapperWorkerCallbackHandler._attempt_slice_recovery.
RECOVER_SLICE="recover_slice",
# Need to retry the shard.
RETRY_SHARD="retry_shard",
# Need to drop task and fail the shard. Log permanent failure.
FAIL_TASK="fail_task",
# Need to abort the shard.
ABORT_SHARD="abort_shard")
def __init__(self, *args):
"""Constructor."""
super(MapperWorkerCallbackHandler, self).__init__(*args)
self._time = time.time
def _drop_gracefully(self):
"""Drop worker task gracefully.
Set current shard_state to failed. Controller logic will take care of
other shards and the entire MR.
"""
shard_id = self.request.headers[util._MR_SHARD_ID_TASK_HEADER]
mr_id = self.request.headers[util._MR_ID_TASK_HEADER]
shard_state, mr_state = db.get([
model.ShardState.get_key_by_shard_id(shard_id),
model.MapreduceState.get_key_by_job_id(mr_id)])
if shard_state and shard_state.active:
shard_state.set_for_failure()
config = util.create_datastore_write_config(mr_state.mapreduce_spec)
shard_state.put(config=config)
def _try_acquire_lease(self, shard_state, tstate):
"""Validate datastore and the task payload are consistent.
If so, attempt to get a lease on this slice's execution.
See model.ShardState doc on slice_start_time.
Args:
shard_state: model.ShardState from datastore.
      tstate: model.TransientShardState from taskqueue payload.
Returns:
A _TASK_DIRECTIVE enum. PROCEED_TASK if lock is acquired.
RETRY_TASK if task should be retried, DROP_TASK if task should
      be dropped. Only old tasks (compared to datastore state)
      will be dropped. Future tasks are retried until they naturally
      become old, so the MR never gets stuck.
"""
# Controller will tally shard_states and properly handle the situation.
if not shard_state:
logging.warning("State not found for shard %s; Possible spurious task "
"execution. Dropping this task.",
tstate.shard_id)
return self._TASK_DIRECTIVE.DROP_TASK
if not shard_state.active:
logging.warning("Shard %s is not active. Possible spurious task "
"execution. Dropping this task.", tstate.shard_id)
logging.warning(str(shard_state))
return self._TASK_DIRECTIVE.DROP_TASK
# Validate shard retry count.
if shard_state.retries > tstate.retries:
logging.warning(
"Got shard %s from previous shard retry %s. Possible spurious "
"task execution. Dropping this task.",
tstate.shard_id,
tstate.retries)
logging.warning(str(shard_state))
return self._TASK_DIRECTIVE.DROP_TASK
elif shard_state.retries < tstate.retries:
# By the end of last slice, task enqueue succeeded but datastore commit
# failed. That transaction will be retried and adding the same task
# will pass.
logging.warning(
"ShardState for %s is behind slice. Waiting for it to catch up",
shard_state.shard_id)
return self._TASK_DIRECTIVE.RETRY_TASK
# Validate slice id.
# Taskqueue executes old successful tasks.
if shard_state.slice_id > tstate.slice_id:
logging.warning(
"Task %s-%s is behind ShardState %s. Dropping task.""",
tstate.shard_id, tstate.slice_id, shard_state.slice_id)
return self._TASK_DIRECTIVE.DROP_TASK
# By the end of last slice, task enqueue succeeded but datastore commit
# failed. That transaction will be retried and adding the same task
# will pass. User data is duplicated in this case.
elif shard_state.slice_id < tstate.slice_id:
logging.warning(
"Task %s-%s is ahead of ShardState %s. Waiting for it to catch up.",
tstate.shard_id, tstate.slice_id, shard_state.slice_id)
return self._TASK_DIRECTIVE.RETRY_TASK
# Check potential duplicated tasks for the same slice.
# See model.ShardState doc.
if shard_state.slice_start_time:
countdown = self._wait_time(shard_state,
parameters._LEASE_DURATION_SEC)
if countdown > 0:
logging.warning(
"Last retry of slice %s-%s may be still running."
"Will try again in %s seconds", tstate.shard_id, tstate.slice_id,
countdown)
# TODO(user): There might be a better way. Taskqueue's countdown
# only applies to add new tasks, not retry of tasks.
# Reduce contention.
time.sleep(countdown)
return self._TASK_DIRECTIVE.RETRY_TASK
# lease could have expired. Verify with logs API.
else:
if self._wait_time(shard_state,
parameters._MAX_LEASE_DURATION_SEC):
if not self._has_old_request_ended(shard_state):
logging.warning(
"Last retry of slice %s-%s is still in flight with request_id "
"%s. Will try again later.", tstate.shard_id, tstate.slice_id,
shard_state.slice_request_id)
return self._TASK_DIRECTIVE.RETRY_TASK
else:
logging.warning(
"Last retry of slice %s-%s has no log entry and has"
"timed out after %s seconds",
tstate.shard_id, tstate.slice_id,
parameters._MAX_LEASE_DURATION_SEC)
# Lease expired or slice_start_time not set.
config = util.create_datastore_write_config(tstate.mapreduce_spec)
@db.transactional(retries=5)
def _tx():
"""Use datastore to set slice_start_time to now.
      If this fails for any reason, raise an error to retry the task (hence all
      the previous validation code). The task will eventually die naturally.
Raises:
Rollback: If the shard state is missing.
Returns:
A _TASK_DIRECTIVE enum.
"""
fresh_state = model.ShardState.get_by_shard_id(tstate.shard_id)
if not fresh_state:
logging.warning("ShardState missing.")
raise db.Rollback()
if (fresh_state.active and
fresh_state.slice_id == shard_state.slice_id and
fresh_state.slice_start_time == shard_state.slice_start_time):
shard_state.slice_start_time = datetime.datetime.now()
shard_state.slice_request_id = os.environ.get("REQUEST_LOG_ID")
shard_state.acquired_once = True
shard_state.put(config=config)
return self._TASK_DIRECTIVE.PROCEED_TASK
else:
logging.warning(
"Contention on slice %s-%s execution. Will retry again.",
tstate.shard_id, tstate.slice_id)
# One proposer should win. In case all lost, back off arbitrarily.
time.sleep(random.randrange(1, 5))
return self._TASK_DIRECTIVE.RETRY_TASK
return _tx()
def _has_old_request_ended(self, shard_state):
"""Whether previous slice retry has ended according to Logs API.
Args:
shard_state: shard state.
Returns:
True if the request of previous slice retry has ended. False if it has
not or unknown.
"""
assert shard_state.slice_start_time is not None
assert shard_state.slice_request_id is not None
request_ids = [shard_state.slice_request_id]
logs = list(logservice.fetch(
request_ids=request_ids,
# TODO(user): Remove after b/8173230 is fixed.
module_versions=[(os.environ["CURRENT_MODULE_ID"],
modules.get_current_version_name())]))
if not logs or not logs[0].finished:
return False
return True
def _wait_time(self, shard_state, secs, now=datetime.datetime.now):
"""Time to wait until slice_start_time is secs ago from now.
Args:
shard_state: shard state.
secs: duration in seconds.
now: a func that gets now.
Returns:
      0 if no wait. A positive int in seconds otherwise. Always rounded up.
"""
assert shard_state.slice_start_time is not None
delta = now() - shard_state.slice_start_time
duration = datetime.timedelta(seconds=secs)
if delta < duration:
return util.total_seconds(duration - delta)
else:
return 0
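  # Worked example for _wait_time above (numbers are illustrative only): if
  # slice_start_time was set 10 seconds ago and secs=30, it returns the ~20
  # seconds remaining in the window; once 30 or more seconds have elapsed
  # since slice_start_time it returns 0.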
def _try_free_lease(self, shard_state, slice_retry=False):
"""Try to free lease.
A lightweight transaction to update shard_state and unset
slice_start_time to allow the next retry to happen without blocking.
We don't care if this fails or not because the lease will expire
anyway.
Under normal execution, _save_state_and_schedule_next is the exit point.
It updates/saves shard state and schedules the next slice or returns.
Other exit points are:
1. _are_states_consistent: at the beginning of handle, checks
if datastore states and the task are in sync.
If not, raise or return.
2. _attempt_slice_retry: may raise exception to taskqueue.
3. _save_state_and_schedule_next: may raise exception when taskqueue/db
unreachable.
This handler should try to free the lease on every exceptional exit point.
Args:
shard_state: model.ShardState.
slice_retry: whether to count this as a failed slice execution.
"""
@db.transactional
def _tx():
fresh_state = model.ShardState.get_by_shard_id(shard_state.shard_id)
if fresh_state and fresh_state.active:
# Free lease.
fresh_state.slice_start_time = None
fresh_state.slice_request_id = None
if slice_retry:
fresh_state.slice_retries += 1
fresh_state.put()
try:
_tx()
# pylint: disable=broad-except
except Exception, e:
logging.warning(e)
logging.warning(
"Release lock for shard %s failed. Wait for lease to expire.",
shard_state.shard_id)
def _maintain_LC(self, obj, slice_id, last_slice=False, begin_slice=True,
shard_ctx=None, slice_ctx=None):
"""Makes sure shard life cycle interface are respected.
Args:
obj: the obj that may have implemented _ShardLifeCycle.
slice_id: current slice_id
last_slice: whether this is the last slice.
begin_slice: whether this is the beginning or the end of a slice.
shard_ctx: shard ctx for dependency injection. If None, it will be read
from self.
slice_ctx: slice ctx for dependency injection. If None, it will be read
from self.
"""
if obj is None or not isinstance(obj, shard_life_cycle._ShardLifeCycle):
return
shard_context = shard_ctx or self.shard_context
slice_context = slice_ctx or self.slice_context
if begin_slice:
if slice_id == 0:
obj.begin_shard(shard_context)
obj.begin_slice(slice_context)
else:
obj.end_slice(slice_context)
if last_slice:
obj.end_shard(shard_context)
def handle(self):
"""Handle request.
This method has to be careful to pass the same ShardState instance to
its subroutines calls if the calls mutate or read from ShardState.
Note especially that Context instance caches and updates the ShardState
instance.
Returns:
Set HTTP status code and always returns None.
"""
# Reconstruct basic states.
self._start_time = self._time()
shard_id = self.request.headers[util._MR_SHARD_ID_TASK_HEADER]
mr_id = self.request.headers[util._MR_ID_TASK_HEADER]
spec = model.MapreduceSpec._get_mapreduce_spec(mr_id)
shard_state, control = db.get([
model.ShardState.get_key_by_shard_id(shard_id),
model.MapreduceControl.get_key_by_job_id(mr_id),
])
# Set context before any IO code is called.
ctx = context.Context(spec, shard_state,
task_retry_count=self.task_retry_count())
context.Context._set(ctx)
# Unmarshall input reader, output writer, and other transient states.
tstate = model.TransientShardState.from_request(self.request)
# Try acquire a lease on the shard.
if shard_state:
is_this_a_retry = shard_state.acquired_once
task_directive = self._try_acquire_lease(shard_state, tstate)
if task_directive in (self._TASK_DIRECTIVE.RETRY_TASK,
self._TASK_DIRECTIVE.DROP_TASK):
return self.__return(shard_state, tstate, task_directive)
assert task_directive == self._TASK_DIRECTIVE.PROCEED_TASK
# Abort shard if received signal.
if control and control.command == model.MapreduceControl.ABORT:
task_directive = self._TASK_DIRECTIVE.ABORT_SHARD
return self.__return(shard_state, tstate, task_directive)
# Retry shard if user disabled slice retry.
if (is_this_a_retry and
parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS <= 1):
task_directive = self._TASK_DIRECTIVE.RETRY_SHARD
return self.__return(shard_state, tstate, task_directive)
# TODO(user): Find a better way to set these per thread configs.
# E.g. what if user change it?
util._set_ndb_cache_policy()
job_config = map_job.JobConfig._to_map_job_config(
spec,
os.environ.get("HTTP_X_APPENGINE_QUEUENAME"))
job_context = map_job.JobContext(job_config)
self.shard_context = map_job.ShardContext(job_context, shard_state)
self.slice_context = map_job.SliceContext(self.shard_context,
shard_state,
tstate)
try:
slice_id = tstate.slice_id
self._maintain_LC(tstate.handler, slice_id)
self._maintain_LC(tstate.input_reader, slice_id)
self._maintain_LC(tstate.output_writer, slice_id)
if is_this_a_retry:
task_directive = self._attempt_slice_recovery(shard_state, tstate)
if task_directive != self._TASK_DIRECTIVE.PROCEED_TASK:
return self.__return(shard_state, tstate, task_directive)
last_slice = self._process_inputs(
tstate.input_reader, shard_state, tstate, ctx)
self._maintain_LC(tstate.handler, slice_id, last_slice, False)
self._maintain_LC(tstate.input_reader, slice_id, last_slice, False)
self._maintain_LC(tstate.output_writer, slice_id, last_slice, False)
ctx.flush()
if last_slice:
# Since there was no exception raised, we can finalize output writer
# safely. Otherwise writer might be stuck in some bad state.
if (tstate.output_writer and
isinstance(tstate.output_writer, output_writers.OutputWriter)):
# It's possible that finalization is successful but
# saving state failed. In this case this shard will retry upon
# finalization error.
# TODO(user): make finalize method idempotent!
tstate.output_writer.finalize(ctx, shard_state)
shard_state.set_for_success()
# pylint: disable=broad-except
except Exception, e:
logging.warning("Shard %s got error.", shard_state.shard_id)
logging.error(traceback.format_exc())
# Fail fast.
if type(e) is errors.FailJobError:
logging.error("Got FailJobError.")
task_directive = self._TASK_DIRECTIVE.FAIL_TASK
else:
task_directive = self._TASK_DIRECTIVE.RETRY_SLICE
self.__return(shard_state, tstate, task_directive)
def __return(self, shard_state, tstate, task_directive):
"""Handler should always call this as the last statement."""
task_directive = self._set_state(shard_state, tstate, task_directive)
self._save_state_and_schedule_next(shard_state, tstate, task_directive)
def _process_inputs(self,
input_reader,
shard_state,
tstate,
ctx):
"""Read inputs, process them, and write out outputs.
This is the core logic of MapReduce. It reads inputs from input reader,
invokes user specified mapper function, and writes output with
output writer. It also updates shard_state accordingly.
e.g. if shard processing is done, set shard_state.active to False.
If errors.FailJobError is caught, it will fail this MR job.
All other exceptions will be logged and raised to taskqueue for retry
until the number of retries exceeds a limit.
Args:
input_reader: input reader.
shard_state: shard state.
tstate: transient shard state.
ctx: mapreduce context.
Returns:
Whether this shard has finished processing all its input split.
"""
processing_limit = self._processing_limit(tstate.mapreduce_spec)
if processing_limit == 0:
return
finished_shard = True
# Input reader may not be an iterator. It is only a container.
iterator = iter(input_reader)
while True:
try:
entity = iterator.next()
except StopIteration:
break
# Reading input got exception. If we assume
      # 1. The input reader has done enough retries.
# 2. The input reader can still serialize correctly after this exception.
# 3. The input reader, upon resume, will try to re-read this failed
# record.
# 4. This exception doesn't imply the input reader is permanently stuck.
# we can serialize current slice immediately to avoid duplicated
# outputs.
# TODO(user): Validate these assumptions on all readers. MR should
# also have a way to detect fake forward progress.
if isinstance(entity, db.Model):
shard_state.last_work_item = repr(entity.key())
elif isinstance(entity, ndb.Model):
shard_state.last_work_item = repr(entity.key)
else:
shard_state.last_work_item = repr(entity)[:100]
processing_limit -= 1
if not self._process_datum(
entity, input_reader, ctx, tstate):
finished_shard = False
break
elif processing_limit == 0:
finished_shard = False
break
# Flush context and its pools.
self.slice_context.incr(
context.COUNTER_MAPPER_WALLTIME_MS,
int((self._time() - self._start_time)*1000))
return finished_shard
def _process_datum(self, data, input_reader, ctx, transient_shard_state):
"""Process a single data piece.
Call mapper handler on the data.
Args:
data: a datum to process.
input_reader: input reader.
ctx: mapreduce context
transient_shard_state: transient shard state.
Returns:
True if scan should be continued, False if scan should be stopped.
"""
if data is not input_readers.ALLOW_CHECKPOINT:
self.slice_context.incr(context.COUNTER_MAPPER_CALLS)
handler = transient_shard_state.handler
if isinstance(handler, map_job.Mapper):
handler(self.slice_context, data)
else:
if input_reader.expand_parameters:
result = handler(*data)
else:
result = handler(data)
if util.is_generator(result):
for output in result:
if isinstance(output, operation.Operation):
output(ctx)
else:
output_writer = transient_shard_state.output_writer
if not output_writer:
logging.warning(
"Handler yielded %s, but no output writer is set.", output)
else:
output_writer.write(output)
if self._time() - self._start_time >= parameters.config._SLICE_DURATION_SEC:
return False
return True
def _set_state(self, shard_state, tstate, task_directive):
"""Set shard_state and tstate based on task_directive.
Args:
shard_state: model.ShardState for current shard.
tstate: model.TransientShardState for current shard.
task_directive: self._TASK_DIRECTIVE for current shard.
Returns:
A _TASK_DIRECTIVE enum.
PROCEED_TASK if task should proceed normally.
RETRY_SHARD if shard should be retried.
RETRY_SLICE if slice should be retried.
      FAIL_TASK if shard should fail.
RECOVER_SLICE if slice should be recovered.
ABORT_SHARD if shard should be aborted.
RETRY_TASK if task should be retried.
DROP_TASK if task should be dropped.
"""
if task_directive in (self._TASK_DIRECTIVE.RETRY_TASK,
self._TASK_DIRECTIVE.DROP_TASK):
return task_directive
if task_directive == self._TASK_DIRECTIVE.ABORT_SHARD:
shard_state.set_for_abort()
return task_directive
if task_directive == self._TASK_DIRECTIVE.PROCEED_TASK:
shard_state.advance_for_next_slice()
tstate.advance_for_next_slice()
return task_directive
if task_directive == self._TASK_DIRECTIVE.RECOVER_SLICE:
tstate.advance_for_next_slice(recovery_slice=True)
shard_state.advance_for_next_slice(recovery_slice=True)
return task_directive
if task_directive == self._TASK_DIRECTIVE.RETRY_SLICE:
task_directive = self._attempt_slice_retry(shard_state, tstate)
if task_directive == self._TASK_DIRECTIVE.RETRY_SHARD:
task_directive = self._attempt_shard_retry(shard_state, tstate)
if task_directive == self._TASK_DIRECTIVE.FAIL_TASK:
shard_state.set_for_failure()
return task_directive
def _save_state_and_schedule_next(self, shard_state, tstate, task_directive):
"""Save state and schedule task.
Save shard state to datastore.
Schedule next slice if needed.
Set HTTP response code.
No modification to any shard_state or tstate.
Args:
shard_state: model.ShardState for current shard.
tstate: model.TransientShardState for current shard.
task_directive: enum _TASK_DIRECTIVE.
Returns:
The task to retry if applicable.
"""
spec = tstate.mapreduce_spec
if task_directive == self._TASK_DIRECTIVE.DROP_TASK:
return
if task_directive in (self._TASK_DIRECTIVE.RETRY_SLICE,
self._TASK_DIRECTIVE.RETRY_TASK):
# Set HTTP code to 500.
return self.retry_task()
elif task_directive == self._TASK_DIRECTIVE.ABORT_SHARD:
logging.info("Aborting shard %d of job '%s'",
shard_state.shard_number, shard_state.mapreduce_id)
task = None
elif task_directive == self._TASK_DIRECTIVE.FAIL_TASK:
logging.critical("Shard %s failed permanently.", shard_state.shard_id)
task = None
elif task_directive == self._TASK_DIRECTIVE.RETRY_SHARD:
logging.warning("Shard %s is going to be attempted for the %s time.",
shard_state.shard_id,
shard_state.retries + 1)
task = self._state_to_task(tstate, shard_state)
elif task_directive == self._TASK_DIRECTIVE.RECOVER_SLICE:
logging.warning("Shard %s slice %s is being recovered.",
shard_state.shard_id,
shard_state.slice_id)
task = self._state_to_task(tstate, shard_state)
else:
assert task_directive == self._TASK_DIRECTIVE.PROCEED_TASK
countdown = self._get_countdown_for_next_slice(spec)
task = self._state_to_task(tstate, shard_state, countdown=countdown)
# Prepare parameters for db transaction and taskqueue.
queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME",
# For test only.
# TODO(user): Remove this.
"default")
config = util.create_datastore_write_config(spec)
@db.transactional(retries=5)
def _tx():
"""The Transaction helper."""
fresh_shard_state = model.ShardState.get_by_shard_id(tstate.shard_id)
if not fresh_shard_state:
raise db.Rollback()
if (not fresh_shard_state.active or
"worker_active_state_collision" in _TEST_INJECTED_FAULTS):
logging.warning("Shard %s is not active. Possible spurious task "
"execution. Dropping this task.", tstate.shard_id)
logging.warning("Datastore's %s", str(fresh_shard_state))
logging.warning("Slice's %s", str(shard_state))
return
fresh_shard_state.copy_from(shard_state)
fresh_shard_state.put(config=config)
# Add task in the same datastore transaction.
# This way we guarantee taskqueue is never behind datastore states.
# Old tasks will be dropped.
      # Future tasks won't run until datastore state catches up.
if fresh_shard_state.active:
# Not adding task transactionally.
# transactional enqueue requires tasks with no name.
self._add_task(task, spec, queue_name)
try:
_tx()
except (datastore_errors.Error,
taskqueue.Error,
runtime.DeadlineExceededError,
apiproxy_errors.Error), e:
logging.warning(
"Can't transactionally continue shard. "
"Will retry slice %s %s for the %s time.",
tstate.shard_id,
tstate.slice_id,
self.task_retry_count() + 1)
self._try_free_lease(shard_state)
raise e
def _attempt_slice_recovery(self, shard_state, tstate):
"""Recover a slice.
This is run when a slice had been previously attempted and output
may have been written. If an output writer requires slice recovery,
we run those logic to remove output duplicates. Otherwise we just retry
the slice.
If recovery is needed, then the entire slice will be dedicated
to recovery logic. No data processing will take place. Thus we call
the slice "recovery slice". This is needed for correctness:
An output writer instance can be out of sync from its physical
    medium only when the slice dies after acquiring the shard lock but before
committing shard state to db. The worst failure case is when
shard state failed to commit after the NAMED task for the next slice was
added. Thus, recovery slice has a special logic to increment current
slice_id n to n+2. If the task for n+1 had been added, it will be dropped
because it is behind shard state.
Args:
shard_state: an instance of Model.ShardState.
tstate: an instance of Model.TransientShardState.
Returns:
_TASK_DIRECTIVE.PROCEED_TASK to continue with this retry.
_TASK_DIRECTIVE.RECOVER_SLICE to recover this slice.
The next slice will start at the same input as
this slice but output to a new instance of output writer.
Combining outputs from all writer instances is up to implementation.
"""
mapper_spec = tstate.mapreduce_spec.mapper
if not (tstate.output_writer and
tstate.output_writer._supports_slice_recovery(mapper_spec)):
return self._TASK_DIRECTIVE.PROCEED_TASK
tstate.output_writer = tstate.output_writer._recover(
tstate.mapreduce_spec, shard_state.shard_number,
shard_state.retries + 1)
return self._TASK_DIRECTIVE.RECOVER_SLICE
def _attempt_shard_retry(self, shard_state, tstate):
"""Whether to retry shard.
This method may modify shard_state and tstate to prepare for retry or fail.
Args:
shard_state: model.ShardState for current shard.
tstate: model.TransientShardState for current shard.
Returns:
A _TASK_DIRECTIVE enum. RETRY_SHARD if shard should be retried.
FAIL_TASK otherwise.
"""
shard_attempts = shard_state.retries + 1
if shard_attempts >= parameters.config.SHARD_MAX_ATTEMPTS:
logging.warning(
"Shard attempt %s exceeded %s max attempts.",
shard_attempts, parameters.config.SHARD_MAX_ATTEMPTS)
return self._TASK_DIRECTIVE.FAIL_TASK
if tstate.output_writer and (
not tstate.output_writer._supports_shard_retry(tstate)):
logging.warning("Output writer %s does not support shard retry.",
tstate.output_writer.__class__.__name__)
return self._TASK_DIRECTIVE.FAIL_TASK
shard_state.reset_for_retry()
logging.warning("Shard %s attempt %s failed with up to %s attempts.",
shard_state.shard_id,
shard_state.retries,
parameters.config.SHARD_MAX_ATTEMPTS)
output_writer = None
if tstate.output_writer:
output_writer = tstate.output_writer.create(
tstate.mapreduce_spec, shard_state.shard_number, shard_attempts + 1)
tstate.reset_for_retry(output_writer)
return self._TASK_DIRECTIVE.RETRY_SHARD
def _attempt_slice_retry(self, shard_state, tstate):
"""Attempt to retry this slice.
This method may modify shard_state and tstate to prepare for retry or fail.
Args:
shard_state: model.ShardState for current shard.
tstate: model.TransientShardState for current shard.
Returns:
A _TASK_DIRECTIVE enum. RETRY_SLICE if slice should be retried.
RETRY_SHARD if shard retry should be attempted.
"""
if (shard_state.slice_retries + 1 <
parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS):
logging.warning(
"Slice %s %s failed for the %s of up to %s attempts "
"(%s of %s taskqueue execution attempts). "
"Will retry now.",
tstate.shard_id,
tstate.slice_id,
shard_state.slice_retries + 1,
parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS,
self.task_retry_count() + 1,
parameters.config.TASK_MAX_ATTEMPTS)
# Clear info related to current exception. Otherwise, the real
# callstack that includes a frame for this method will show up
# in log.
sys.exc_clear()
self._try_free_lease(shard_state, slice_retry=True)
return self._TASK_DIRECTIVE.RETRY_SLICE
if parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS > 0:
logging.warning("Slice attempt %s exceeded %s max attempts.",
self.task_retry_count() + 1,
parameters.config.TASK_MAX_DATA_PROCESSING_ATTEMPTS)
return self._TASK_DIRECTIVE.RETRY_SHARD
@staticmethod
def get_task_name(shard_id, slice_id, retry=0):
"""Compute single worker task name.
Args:
shard_id: shard id.
slice_id: slice id.
retry: current shard retry count.
Returns:
task name which should be used to process specified shard/slice.
"""
# Prefix the task name with something unique to this framework's
# namespace so we don't conflict with user tasks on the queue.
return "appengine-mrshard-%s-%s-retry-%s" % (
shard_id, slice_id, retry)
def _get_countdown_for_next_slice(self, spec):
"""Get countdown for next slice's task.
When user sets processing rate, we set countdown to delay task execution.
Args:
spec: model.MapreduceSpec
Returns:
countdown in int.
"""
countdown = 0
if self._processing_limit(spec) != -1:
countdown = max(
int(parameters.config._SLICE_DURATION_SEC -
(self._time() - self._start_time)), 0)
return countdown
@classmethod
def _state_to_task(cls,
tstate,
shard_state,
eta=None,
countdown=None):
"""Generate task for slice according to current states.
Args:
tstate: An instance of TransientShardState.
shard_state: An instance of ShardState.
eta: Absolute time when the MR should execute. May not be specified
if 'countdown' is also supplied. This may be timezone-aware or
timezone-naive.
countdown: Time in seconds into the future that this MR should execute.
Defaults to zero.
Returns:
A model.HugeTask instance for the slice specified by current states.
"""
base_path = tstate.base_path
task_name = MapperWorkerCallbackHandler.get_task_name(
tstate.shard_id,
tstate.slice_id,
tstate.retries)
headers = util._get_task_headers(tstate.mapreduce_spec.mapreduce_id)
headers[util._MR_SHARD_ID_TASK_HEADER] = tstate.shard_id
worker_task = model.HugeTask(
url=base_path + "/worker_callback/" + tstate.shard_id,
params=tstate.to_dict(),
name=task_name,
eta=eta,
countdown=countdown,
parent=shard_state,
headers=headers)
return worker_task
@classmethod
def _add_task(cls,
worker_task,
mapreduce_spec,
queue_name):
"""Schedule slice scanning by adding it to the task queue.
Args:
worker_task: a model.HugeTask task for slice. This is NOT a taskqueue
task.
mapreduce_spec: an instance of model.MapreduceSpec.
queue_name: Optional queue to run on; uses the current queue of
execution or the default queue if unspecified.
"""
if not _run_task_hook(mapreduce_spec.get_hooks(),
"enqueue_worker_task",
worker_task,
queue_name):
try:
# Not adding transactionally because worker_task has name.
# Named task is not allowed for transactional add.
worker_task.add(queue_name)
except (taskqueue.TombstonedTaskError,
taskqueue.TaskAlreadyExistsError), e:
logging.warning("Task %r already exists. %s: %s",
worker_task.name,
e.__class__,
e)
def _processing_limit(self, spec):
"""Get the limit on the number of map calls allowed by this slice.
Args:
spec: a Mapreduce spec.
Returns:
The limit as a positive int if specified by user. -1 otherwise.
"""
processing_rate = float(spec.mapper.params.get("processing_rate", 0))
slice_processing_limit = -1
if processing_rate > 0:
slice_processing_limit = int(math.ceil(
parameters.config._SLICE_DURATION_SEC*processing_rate/
int(spec.mapper.shard_count)))
return slice_processing_limit
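  # A worked illustration of the limit above (all numbers are hypothetical):
  # with processing_rate=1000 entities/sec, shard_count=8 and a slice duration
  # of 15 seconds, each slice is capped at ceil(15 * 1000 / 8) = 1875 mapper
  # calls; _get_countdown_for_next_slice then delays the next slice when the
  # current one finishes before the slice duration elapses.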
# Deprecated. Only used by old test cases.
# TODO(user): clean up tests.
@classmethod
def _schedule_slice(cls,
shard_state,
tstate,
queue_name=None,
eta=None,
countdown=None):
"""Schedule slice scanning by adding it to the task queue.
Args:
shard_state: An instance of ShardState.
tstate: An instance of TransientShardState.
queue_name: Optional queue to run on; uses the current queue of
execution or the default queue if unspecified.
eta: Absolute time when the MR should execute. May not be specified
if 'countdown' is also supplied. This may be timezone-aware or
timezone-naive.
countdown: Time in seconds into the future that this MR should execute.
Defaults to zero.
"""
queue_name = queue_name or os.environ.get("HTTP_X_APPENGINE_QUEUENAME",
"default")
task = cls._state_to_task(tstate, shard_state, eta, countdown)
cls._add_task(task, tstate.mapreduce_spec, queue_name)
class ControllerCallbackHandler(base_handler.HugeTaskHandler):
"""Supervises mapreduce execution.
Is also responsible for gathering execution status from shards together.
  This task "runs continuously" by re-adding itself to the taskqueue if
  and only if the mapreduce is still active. A mapreduce is active if it has
actively running shards.
"""
def __init__(self, *args):
"""Constructor."""
super(ControllerCallbackHandler, self).__init__(*args)
self._time = time.time
def _drop_gracefully(self):
"""Gracefully drop controller task.
This method is called when decoding controller task payload failed.
Upon this we mark ShardState and MapreduceState as failed so all
tasks can stop.
Writing to datastore is forced (ignore read-only mode) because we
    badly want the tasks to stop, and if force_writes was False,
    the job would never have been started.
"""
mr_id = self.request.headers[util._MR_ID_TASK_HEADER]
state = model.MapreduceState.get_by_job_id(mr_id)
if not state or not state.active:
return
state.active = False
state.result_status = model.MapreduceState.RESULT_FAILED
config = util.create_datastore_write_config(state.mapreduce_spec)
puts = []
for ss in model.ShardState.find_all_by_mapreduce_state(state):
if ss.active:
ss.set_for_failure()
puts.append(ss)
# Avoid having too many shard states in memory.
if len(puts) > model.ShardState._MAX_STATES_IN_MEMORY:
db.put(puts, config=config)
puts = []
db.put(puts, config=config)
# Put mr_state only after all shard_states are put.
db.put(state, config=config)
def handle(self):
"""Handle request."""
spec = model.MapreduceSpec.from_json_str(
self.request.get("mapreduce_spec"))
state, control = db.get([
model.MapreduceState.get_key_by_job_id(spec.mapreduce_id),
model.MapreduceControl.get_key_by_job_id(spec.mapreduce_id),
])
if not state:
logging.warning("State not found for MR '%s'; dropping controller task.",
spec.mapreduce_id)
return
if not state.active:
logging.warning(
"MR %r is not active. Looks like spurious controller task execution.",
spec.mapreduce_id)
self._clean_up_mr(spec)
return
shard_states = model.ShardState.find_all_by_mapreduce_state(state)
self._update_state_from_shard_states(state, shard_states, control)
if state.active:
ControllerCallbackHandler.reschedule(
state, spec, self.serial_id() + 1)
def _update_state_from_shard_states(self, state, shard_states, control):
"""Update mr state by examing shard states.
Args:
state: current mapreduce state as MapreduceState.
shard_states: an iterator over shard states.
control: model.MapreduceControl entity.
"""
# Initialize vars.
state.active_shards, state.aborted_shards, state.failed_shards = 0, 0, 0
total_shards = 0
processed_counts = []
state.counters_map.clear()
# Tally across shard states once.
for s in shard_states:
total_shards += 1
if s.active:
state.active_shards += 1
if s.result_status == model.ShardState.RESULT_ABORTED:
state.aborted_shards += 1
elif s.result_status == model.ShardState.RESULT_FAILED:
state.failed_shards += 1
# Update stats in mapreduce state by aggregating stats from shard states.
state.counters_map.add_map(s.counters_map)
processed_counts.append(s.counters_map.get(context.COUNTER_MAPPER_CALLS))
state.set_processed_counts(processed_counts)
state.last_poll_time = datetime.datetime.utcfromtimestamp(self._time())
spec = state.mapreduce_spec
if total_shards != spec.mapper.shard_count:
logging.error("Found %d shard states. Expect %d. "
"Issuing abort command to job '%s'",
total_shards, spec.mapper.shard_count,
spec.mapreduce_id)
# We issue abort command to allow shards to stop themselves.
model.MapreduceControl.abort(spec.mapreduce_id)
# If any shard is active then the mr is active.
    # This way, the controller won't stop prematurely before all the shards
    # have finished.
state.active = bool(state.active_shards)
if not control and (state.failed_shards or state.aborted_shards):
# Issue abort command if there are failed shards.
model.MapreduceControl.abort(spec.mapreduce_id)
if not state.active:
# Set final result status derived from shard states.
if state.failed_shards or not total_shards:
state.result_status = model.MapreduceState.RESULT_FAILED
# It's important failed shards is checked before aborted shards
# because failed shards will trigger other shards to abort.
elif state.aborted_shards:
state.result_status = model.MapreduceState.RESULT_ABORTED
else:
state.result_status = model.MapreduceState.RESULT_SUCCESS
self._finalize_outputs(spec, state)
self._finalize_job(spec, state)
else:
@db.transactional(retries=5)
def _put_state():
"""The helper for storing the state."""
fresh_state = model.MapreduceState.get_by_job_id(spec.mapreduce_id)
# We don't check anything other than active because we are only
# updating stats. It's OK if they are briefly inconsistent.
if not fresh_state.active:
logging.warning(
"Job %s is not active. Looks like spurious task execution. "
"Dropping controller task.", spec.mapreduce_id)
return
config = util.create_datastore_write_config(spec)
state.put(config=config)
_put_state()
def serial_id(self):
"""Get serial unique identifier of this task from request.
Returns:
serial identifier as int.
"""
return int(self.request.get("serial_id"))
@classmethod
def _finalize_outputs(cls, mapreduce_spec, mapreduce_state):
"""Finalize outputs.
Args:
mapreduce_spec: an instance of MapreduceSpec.
mapreduce_state: an instance of MapreduceState.
"""
# Only finalize the output writers if the job is successful.
if (mapreduce_spec.mapper.output_writer_class() and
mapreduce_state.result_status == model.MapreduceState.RESULT_SUCCESS):
mapreduce_spec.mapper.output_writer_class().finalize_job(mapreduce_state)
@classmethod
def _finalize_job(cls, mapreduce_spec, mapreduce_state):
"""Finalize job execution.
Invokes done callback and save mapreduce state in a transaction,
and schedule necessary clean ups. This method is idempotent.
Args:
mapreduce_spec: an instance of MapreduceSpec
mapreduce_state: an instance of MapreduceState
"""
config = util.create_datastore_write_config(mapreduce_spec)
queue_name = util.get_queue_name(mapreduce_spec.params.get(
model.MapreduceSpec.PARAM_DONE_CALLBACK_QUEUE))
done_callback = mapreduce_spec.params.get(
model.MapreduceSpec.PARAM_DONE_CALLBACK)
done_task = None
if done_callback:
done_task = taskqueue.Task(
url=done_callback,
headers=util._get_task_headers(mapreduce_spec.mapreduce_id,
util.CALLBACK_MR_ID_TASK_HEADER),
method=mapreduce_spec.params.get("done_callback_method", "POST"))
@db.transactional(retries=5)
def _put_state():
"""Helper to store state."""
fresh_state = model.MapreduceState.get_by_job_id(
mapreduce_spec.mapreduce_id)
if not fresh_state.active:
logging.warning(
"Job %s is not active. Looks like spurious task execution. "
"Dropping task.", mapreduce_spec.mapreduce_id)
return
mapreduce_state.put(config=config)
# Enqueue done_callback if needed.
if done_task and not _run_task_hook(
mapreduce_spec.get_hooks(),
"enqueue_done_task",
done_task,
queue_name):
done_task.add(queue_name, transactional=True)
_put_state()
logging.info("Final result for job '%s' is '%s'",
mapreduce_spec.mapreduce_id, mapreduce_state.result_status)
cls._clean_up_mr(mapreduce_spec)
@classmethod
def _clean_up_mr(cls, mapreduce_spec):
FinalizeJobHandler.schedule(mapreduce_spec)
@staticmethod
def get_task_name(mapreduce_spec, serial_id):
"""Compute single controller task name.
Args:
mapreduce_spec: specification of the mapreduce.
serial_id: id of the invocation as int.
Returns:
task name which should be used to process specified shard/slice.
"""
# Prefix the task name with something unique to this framework's
# namespace so we don't conflict with user tasks on the queue.
return "appengine-mrcontrol-%s-%s" % (
mapreduce_spec.mapreduce_id, serial_id)
@staticmethod
def controller_parameters(mapreduce_spec, serial_id):
"""Fill in controller task parameters.
Returned parameters map is to be used as task payload, and it contains
all the data, required by controller to perform its function.
Args:
mapreduce_spec: specification of the mapreduce.
serial_id: id of the invocation as int.
Returns:
string->string map of parameters to be used as task payload.
"""
return {"mapreduce_spec": mapreduce_spec.to_json_str(),
"serial_id": str(serial_id)}
@classmethod
def reschedule(cls,
mapreduce_state,
mapreduce_spec,
serial_id,
queue_name=None):
"""Schedule new update status callback task.
Args:
mapreduce_state: mapreduce state as model.MapreduceState
mapreduce_spec: mapreduce specification as MapreduceSpec.
serial_id: id of the invocation as int.
queue_name: The queue to schedule this task on. Will use the current
queue of execution if not supplied.
"""
task_name = ControllerCallbackHandler.get_task_name(
mapreduce_spec, serial_id)
task_params = ControllerCallbackHandler.controller_parameters(
mapreduce_spec, serial_id)
if not queue_name:
queue_name = os.environ.get("HTTP_X_APPENGINE_QUEUENAME", "default")
controller_callback_task = model.HugeTask(
url=(mapreduce_spec.params["base_path"] + "/controller_callback/" +
mapreduce_spec.mapreduce_id),
name=task_name, params=task_params,
countdown=parameters.config._CONTROLLER_PERIOD_SEC,
parent=mapreduce_state,
headers=util._get_task_headers(mapreduce_spec.mapreduce_id))
if not _run_task_hook(mapreduce_spec.get_hooks(),
"enqueue_controller_task",
controller_callback_task,
queue_name):
try:
controller_callback_task.add(queue_name)
except (taskqueue.TombstonedTaskError,
taskqueue.TaskAlreadyExistsError), e:
logging.warning("Task %r with params %r already exists. %s: %s",
task_name, task_params, e.__class__, e)
class KickOffJobHandler(base_handler.TaskQueueHandler):
"""Taskqueue handler which kicks off a mapreduce processing.
This handler is idempotent.
Precondition:
The Model.MapreduceState entity for this mr is already created and
saved to datastore by StartJobHandler._start_map.
Request Parameters:
mapreduce_id: in string.
"""
# Datastore key used to save json serialized input readers.
_SERIALIZED_INPUT_READERS_KEY = "input_readers_for_mr_%s"
def handle(self):
"""Handles kick off request."""
# Get and verify mr state.
mr_id = self.request.get("mapreduce_id")
# Log the mr_id since this is started in an unnamed task
logging.info("Processing kickoff for job %s", mr_id)
state = model.MapreduceState.get_by_job_id(mr_id)
if not self._check_mr_state(state, mr_id):
return
# Create input readers.
readers, serialized_readers_entity = self._get_input_readers(state)
if readers is None:
# We don't have any data. Finish map.
logging.warning("Found no mapper input data to process.")
state.active = False
state.result_status = model.MapreduceState.RESULT_SUCCESS
ControllerCallbackHandler._finalize_job(
state.mapreduce_spec, state)
return False
# Create output writers.
self._setup_output_writer(state)
# Save states and make sure we use the saved input readers for
# subsequent operations.
result = self._save_states(state, serialized_readers_entity)
if result is None:
readers, _ = self._get_input_readers(state)
elif not result:
return
queue_name = self.request.headers.get("X-AppEngine-QueueName")
KickOffJobHandler._schedule_shards(state.mapreduce_spec, readers,
queue_name,
state.mapreduce_spec.params["base_path"],
state)
ControllerCallbackHandler.reschedule(
state, state.mapreduce_spec, serial_id=0, queue_name=queue_name)
def _drop_gracefully(self):
"""See parent."""
mr_id = self.request.get("mapreduce_id")
logging.error("Failed to kick off job %s", mr_id)
state = model.MapreduceState.get_by_job_id(mr_id)
if not self._check_mr_state(state, mr_id):
return
# Issue abort command just in case there are running tasks.
config = util.create_datastore_write_config(state.mapreduce_spec)
model.MapreduceControl.abort(mr_id, config=config)
# Finalize job and invoke callback.
state.active = False
state.result_status = model.MapreduceState.RESULT_FAILED
ControllerCallbackHandler._finalize_job(state.mapreduce_spec, state)
def _get_input_readers(self, state):
"""Get input readers.
Args:
state: a MapreduceState model.
Returns:
A tuple: (a list of input readers, a model._HugeTaskPayload entity).
The payload entity contains the json serialized input readers.
      (None, None) when input reader splitting returned no data to process.
"""
serialized_input_readers_key = (self._SERIALIZED_INPUT_READERS_KEY %
state.key().id_or_name())
serialized_input_readers = model._HugeTaskPayload.get_by_key_name(
serialized_input_readers_key, parent=state)
# Initialize input readers.
input_reader_class = state.mapreduce_spec.mapper.input_reader_class()
split_param = state.mapreduce_spec.mapper
if issubclass(input_reader_class, map_job.InputReader):
split_param = map_job.JobConfig._to_map_job_config(
state.mapreduce_spec,
os.environ.get("HTTP_X_APPENGINE_QUEUENAME"))
if serialized_input_readers is None:
readers = input_reader_class.split_input(split_param)
else:
readers = [input_reader_class.from_json_str(json) for json in
simplejson.loads(serialized_input_readers.payload)]
if not readers:
return None, None
# Update state and spec with actual shard count.
state.mapreduce_spec.mapper.shard_count = len(readers)
state.active_shards = len(readers)
# Prepare to save serialized input readers.
if serialized_input_readers is None:
# Use mr_state as parent so it can be easily cleaned up later.
serialized_input_readers = model._HugeTaskPayload(
key_name=serialized_input_readers_key, parent=state)
readers_json_str = [i.to_json_str() for i in readers]
serialized_input_readers.payload = simplejson.dumps(readers_json_str)
return readers, serialized_input_readers
def _setup_output_writer(self, state):
if not state.writer_state:
output_writer_class = state.mapreduce_spec.mapper.output_writer_class()
if output_writer_class:
output_writer_class.init_job(state)
@db.transactional
def _save_states(self, state, serialized_readers_entity):
"""Run transaction to save state.
Args:
state: a model.MapreduceState entity.
serialized_readers_entity: a model._HugeTaskPayload entity containing
json serialized input readers.
Returns:
False if a fatal error is encountered and this task should be dropped
immediately. True if transaction is successful. None if a previous
attempt of this same transaction has already succeeded.
"""
mr_id = state.key().id_or_name()
fresh_state = model.MapreduceState.get_by_job_id(mr_id)
if not self._check_mr_state(fresh_state, mr_id):
return False
if fresh_state.active_shards != 0:
logging.warning(
"Mapreduce %s already has active shards. Looks like spurious task "
"execution.", mr_id)
return None
config = util.create_datastore_write_config(state.mapreduce_spec)
db.put([state, serialized_readers_entity], config=config)
return True
@classmethod
def _schedule_shards(cls,
spec,
readers,
queue_name,
base_path,
mr_state):
"""Prepares shard states and schedules their execution.
    Even though this method does not schedule shard tasks and save shard
    states transactionally, it is safe for taskqueue to retry this logic
    because the initial shard_state for each shard is the same across retries.
This is an important yet reasonable assumption on model.ShardState.
Args:
spec: mapreduce specification as MapreduceSpec.
readers: list of InputReaders describing shard splits.
queue_name: The queue to run this job on.
base_path: The base url path of mapreduce callbacks.
mr_state: The MapReduceState of current job.
"""
# Create shard states.
shard_states = []
for shard_number, input_reader in enumerate(readers):
shard_state = model.ShardState.create_new(spec.mapreduce_id, shard_number)
shard_state.shard_description = str(input_reader)
shard_states.append(shard_state)
# Retrieves already existing shard states.
existing_shard_states = db.get(shard.key() for shard in shard_states)
existing_shard_keys = set(shard.key() for shard in existing_shard_states
if shard is not None)
    # Save shard states that do not already exist.
# Note: we could do this transactionally if necessary.
db.put((shard for shard in shard_states
if shard.key() not in existing_shard_keys),
config=util.create_datastore_write_config(spec))
# Create output writers.
writer_class = spec.mapper.output_writer_class()
writers = [None] * len(readers)
if writer_class:
for shard_number, shard_state in enumerate(shard_states):
writers[shard_number] = writer_class.create(
mr_state.mapreduce_spec,
shard_state.shard_number, shard_state.retries + 1,
mr_state.writer_state)
# Schedule ALL shard tasks.
# Since each task is named, _add_task will fall back gracefully if a
# task already exists.
for shard_number, (input_reader, output_writer) in enumerate(
zip(readers, writers)):
shard_id = model.ShardState.shard_id_from_number(
spec.mapreduce_id, shard_number)
task = MapperWorkerCallbackHandler._state_to_task(
model.TransientShardState(
base_path, spec, shard_id, 0, input_reader, input_reader,
output_writer=output_writer,
handler=spec.mapper.handler),
shard_states[shard_number])
MapperWorkerCallbackHandler._add_task(task,
spec,
queue_name)
@classmethod
def _check_mr_state(cls, state, mr_id):
"""Check MapreduceState.
Args:
      state: a MapreduceState instance.
mr_id: mapreduce id.
Returns:
True if state is valid. False if not and this task should be dropped.
"""
if state is None:
logging.warning(
"Mapreduce State for job %s is missing. Dropping Task.",
mr_id)
return False
if not state.active:
logging.warning(
"Mapreduce %s is not active. Looks like spurious task "
"execution. Dropping Task.", mr_id)
return False
return True
class StartJobHandler(base_handler.PostJsonHandler):
"""Command handler starts a mapreduce job.
  This handler allows a user to start a mapreduce via a web form. Its
  _start_map method can also be used independently to start a mapreduce.
"""
def handle(self):
"""Handles start request."""
# Mapper spec as form arguments.
mapreduce_name = self._get_required_param("name")
mapper_input_reader_spec = self._get_required_param("mapper_input_reader")
mapper_handler_spec = self._get_required_param("mapper_handler")
mapper_output_writer_spec = self.request.get("mapper_output_writer")
mapper_params = self._get_params(
"mapper_params_validator", "mapper_params.")
params = self._get_params(
"params_validator", "params.")
# Default values.
mr_params = map_job.JobConfig._get_default_mr_params()
mr_params.update(params)
if "queue_name" in mapper_params:
mr_params["queue_name"] = mapper_params["queue_name"]
# Set some mapper param defaults if not present.
mapper_params["processing_rate"] = int(mapper_params.get(
"processing_rate") or parameters.config.PROCESSING_RATE_PER_SEC)
# Validate the Mapper spec, handler, and input reader.
mapper_spec = model.MapperSpec(
mapper_handler_spec,
mapper_input_reader_spec,
mapper_params,
int(mapper_params.get("shard_count", parameters.config.SHARD_COUNT)),
output_writer_spec=mapper_output_writer_spec)
mapreduce_id = self._start_map(
mapreduce_name,
mapper_spec,
mr_params,
queue_name=mr_params["queue_name"],
_app=mapper_params.get("_app"))
self.json_response["mapreduce_id"] = mapreduce_id
def _get_params(self, validator_parameter, name_prefix):
"""Retrieves additional user-supplied params for the job and validates them.
Args:
validator_parameter: name of the request parameter which supplies
validator for this parameter set.
name_prefix: common prefix for all parameter names in the request.
Raises:
Any exception raised by the 'params_validator' request parameter if
the params fail to validate.
Returns:
The user parameters.
"""
params_validator = self.request.get(validator_parameter)
user_params = {}
for key in self.request.arguments():
if key.startswith(name_prefix):
values = self.request.get_all(key)
adjusted_key = key[len(name_prefix):]
if len(values) == 1:
user_params[adjusted_key] = values[0]
else:
user_params[adjusted_key] = values
if params_validator:
resolved_validator = util.for_name(params_validator)
resolved_validator(user_params)
return user_params
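  # A hedged sketch of the prefix stripping above, using names that handle()
  # already passes in: a request argument "mapper_params.processing_rate" with
  # value "100" is returned as {"processing_rate": "100"} when name_prefix is
  # "mapper_params.".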
def _get_required_param(self, param_name):
"""Get a required request parameter.
Args:
param_name: name of request parameter to fetch.
Returns:
parameter value
Raises:
errors.NotEnoughArgumentsError: if parameter is not specified.
"""
value = self.request.get(param_name)
if not value:
raise errors.NotEnoughArgumentsError(param_name + " not specified")
return value
@classmethod
def _start_map(cls,
name,
mapper_spec,
mapreduce_params,
queue_name,
eta=None,
countdown=None,
hooks_class_name=None,
_app=None,
in_xg_transaction=False):
# pylint: disable=g-doc-args
# pylint: disable=g-doc-return-or-yield
"""See control.start_map.
Requirements for this method:
1. The request that invokes this method can either be regular or
from taskqueue. So taskqueue specific headers can not be used.
2. Each invocation transactionally starts an isolated mapreduce job with
a unique id. MapreduceState should be immediately available after
returning. See control.start_map's doc on transactional.
3. Method should be lightweight.
"""
# Validate input reader.
mapper_input_reader_class = mapper_spec.input_reader_class()
mapper_input_reader_class.validate(mapper_spec)
# Validate output writer.
mapper_output_writer_class = mapper_spec.output_writer_class()
if mapper_output_writer_class:
mapper_output_writer_class.validate(mapper_spec)
# Create a new id and mr spec.
mapreduce_id = model.MapreduceState.new_mapreduce_id()
mapreduce_spec = model.MapreduceSpec(
name,
mapreduce_id,
mapper_spec.to_json(),
mapreduce_params,
hooks_class_name)
# Validate mapper handler.
ctx = context.Context(mapreduce_spec, None)
context.Context._set(ctx)
try:
# pylint: disable=pointless-statement
mapper_spec.handler
finally:
context.Context._set(None)
# Save states and enqueue task.
if in_xg_transaction:
propagation = db.MANDATORY
else:
propagation = db.INDEPENDENT
@db.transactional(propagation=propagation)
def _txn():
cls._create_and_save_state(mapreduce_spec, _app)
cls._add_kickoff_task(mapreduce_params["base_path"], mapreduce_spec, eta,
countdown, queue_name)
_txn()
return mapreduce_id
@classmethod
def _create_and_save_state(cls, mapreduce_spec, _app):
"""Save mapreduce state to datastore.
Save state to datastore so that UI can see it immediately.
Args:
mapreduce_spec: model.MapreduceSpec,
_app: app id if specified. None otherwise.
Returns:
The saved Mapreduce state.
"""
state = model.MapreduceState.create_new(mapreduce_spec.mapreduce_id)
state.mapreduce_spec = mapreduce_spec
state.active = True
state.active_shards = 0
if _app:
state.app_id = _app
config = util.create_datastore_write_config(mapreduce_spec)
state.put(config=config)
return state
@classmethod
def _add_kickoff_task(cls,
base_path,
mapreduce_spec,
eta,
countdown,
queue_name):
"""Enqueues a new kickoff task."""
params = {"mapreduce_id": mapreduce_spec.mapreduce_id}
# Task is not named so that it can be added within a transaction.
kickoff_task = taskqueue.Task(
url=base_path + "/kickoffjob_callback/" + mapreduce_spec.mapreduce_id,
headers=util._get_task_headers(mapreduce_spec.mapreduce_id),
params=params,
eta=eta,
countdown=countdown)
hooks = mapreduce_spec.get_hooks()
if hooks is not None:
try:
hooks.enqueue_kickoff_task(kickoff_task, queue_name)
return
except NotImplementedError:
pass
kickoff_task.add(queue_name, transactional=True)
class FinalizeJobHandler(base_handler.TaskQueueHandler):
"""Finalize map job by deleting all temporary entities."""
def handle(self):
mapreduce_id = self.request.get("mapreduce_id")
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
if mapreduce_state:
config = (
util.create_datastore_write_config(mapreduce_state.mapreduce_spec))
keys = [model.MapreduceControl.get_key_by_job_id(mapreduce_id)]
for ss in model.ShardState.find_all_by_mapreduce_state(mapreduce_state):
keys.extend(list(
model._HugeTaskPayload.all().ancestor(ss).run(keys_only=True)))
keys.extend(list(model._HugeTaskPayload.all().ancestor(
mapreduce_state).run(keys_only=True)))
db.delete(keys, config=config)
@classmethod
def schedule(cls, mapreduce_spec):
"""Schedule finalize task.
Args:
mapreduce_spec: mapreduce specification as MapreduceSpec.
"""
task_name = mapreduce_spec.mapreduce_id + "-finalize"
finalize_task = taskqueue.Task(
name=task_name,
url=(mapreduce_spec.params["base_path"] + "/finalizejob_callback/" +
mapreduce_spec.mapreduce_id),
params={"mapreduce_id": mapreduce_spec.mapreduce_id},
headers=util._get_task_headers(mapreduce_spec.mapreduce_id))
queue_name = util.get_queue_name(None)
if not _run_task_hook(mapreduce_spec.get_hooks(),
"enqueue_controller_task",
finalize_task,
queue_name):
try:
finalize_task.add(queue_name)
except (taskqueue.TombstonedTaskError,
taskqueue.TaskAlreadyExistsError), e:
logging.warning("Task %r already exists. %s: %s",
task_name, e.__class__, e)
class CleanUpJobHandler(base_handler.PostJsonHandler):
"""Command to kick off tasks to clean up a job's data."""
def handle(self):
mapreduce_id = self.request.get("mapreduce_id")
mapreduce_state = model.MapreduceState.get_by_job_id(mapreduce_id)
if mapreduce_state:
shard_keys = model.ShardState.calculate_keys_by_mapreduce_state(
mapreduce_state)
db.delete(shard_keys)
db.delete(mapreduce_state)
self.json_response["status"] = ("Job %s successfully cleaned up." %
mapreduce_id)
class AbortJobHandler(base_handler.PostJsonHandler):
"""Command to abort a running job."""
def handle(self):
model.MapreduceControl.abort(self.request.get("mapreduce_id"))
self.json_response["status"] = "Abort signal sent."
| {
"content_hash": "a31e6943d6c4cc17a241f072cd94a08e",
"timestamp": "",
"source": "github",
"line_count": 1826,
"max_line_length": 80,
"avg_line_length": 37.224534501642935,
"alnum_prop": 0.6546372035544048,
"repo_name": "singhj/locality-sensitive-hashing",
"id": "53b720c6342fa50fcd5d8e493e8781a3437ff7dc",
"size": "68593",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "mapreduce/handlers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19624"
},
{
"name": "HTML",
"bytes": "698473"
},
{
"name": "JavaScript",
"bytes": "35165"
},
{
"name": "Python",
"bytes": "1668137"
}
],
"symlink_target": ""
} |
from datetime import datetime
import math
import time
from rules.conditions.results.componentConditionStatus import ComponentConditionStatus
from rules.ruleResultChecker import RuleResultChecker
from data import dataRetriever
from rules.conditions.conditionsBuilder import ConditionsBuilder
from rules.conditions.processors.conditionProcessorFactory import ConditionProcessorFactory
class RuleProcessor(object):
def __init__(self, rule, data_dao, api_client):
self.api_client = api_client
self.data_dao = data_dao
self.rule = rule
self.rule_processing_start_date = int(math.floor(time.time()))
self.timestamps = None
self.list_of_results = {}
def process_rule(self):
print("Rule [META] = ", self.rule['meta'])
component_ids_with_observation = {}
for device_id, device_details in self.rule['meta']['devices'].items():
self.list_of_results[device_id] = []
for component in device_details['components']:
component_id = str(component['id'])
component_name = str(component['name'])
data_retrieve_params = dataRetriever.DataRetrieveParams(rule=self.rule, component_id=component_id)
conditions_builder = ConditionsBuilder(self.rule['conditions'], component_name)
self.__init_execution_timestamps__()
self.__add_previous_rule_executions_timestamps__(component_id)
for condition in conditions_builder.get_conditions_for_component():
condition_processor = ConditionProcessorFactory.get_condition_processor(self.data_dao, condition)
matching_data = condition_processor.process(data_retrieve_params, self.timestamps)
self.timestamps = condition_processor.timestamps
self.__check_component_condition_status(matching_data, component, device_id)
component_ids_with_observation[component_id] = self.__format_timestamp_for_processing(
'last_obs_trigger_time')
if self.is_rule_fulfilled():
print "Rule triggered, id - ", self.rule['id']
self.api_client.push_alert(self.rule, self.list_of_results.keys(), self.list_of_results.values())
else:
            print 'Rule not triggered - ', self.rule['id']
        # let's update execution time if no exception from push_alert
if len(component_ids_with_observation) > 0:
self.__save_last_processing_batch_dates(component_ids_with_observation)
def __init_execution_timestamps__(self):
self.timestamps = {
'current_execution_time': self.rule_processing_start_date,
'last_execution_time': None,
'last_obs_trigger_time': None
}
def __add_previous_rule_executions_timestamps__(self, component_id):
if component_id in self.rule["executions"]:
for key in self.rule["executions"][component_id]:
self.timestamps[key] = self.rule["executions"][component_id][key]
def __check_component_condition_status(self, matching_data, component, device_id):
component_condition_status = ComponentConditionStatus(component)
if len(matching_data) > 0:
component_condition_status.mark_passed(matching_data)
else:
component_condition_status.mark_failed()
self.list_of_results[device_id].append(component_condition_status)
def is_rule_fulfilled(self):
return RuleResultChecker(self.rule).is_fulfilled(self.list_of_results.values())
def __save_last_processing_batch_dates(self, component_ids_with_observation):
current_execution_time = self.__format_timestamp_for_processing('current_execution_time')
self.api_client.add_rule_executions(rule=self.rule,
component_ids_with_observation=component_ids_with_observation,
last_execution=current_execution_time)
def __format_timestamp_for_processing(self, key):
if self.timestamps[key]:
print ('Saving ' + key + ' - ' + str(
datetime.fromtimestamp(self.timestamps[key]).strftime('%Y-%m-%d %H:%M:%S')))
return self.timestamps[key]
| {
"content_hash": "b51d84cf61912f4e753f59c99e253dfa",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 117,
"avg_line_length": 48.550561797752806,
"alnum_prop": 0.6438324461930108,
"repo_name": "enableiot/iotanalytics-rule-engine",
"id": "cb9fd117a016cdaf4e509ec4e6943626c7bc3c76",
"size": "4906",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pydeps/rules/ruleProcessor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "67705"
},
{
"name": "Shell",
"bytes": "11665"
}
],
"symlink_target": ""
} |
"""
celery.contrib.batches
======================
Experimental task class that buffers messages and processes them as a list.
.. warning::
For this to work you have to set
:setting:`CELERYD_PREFETCH_MULTIPLIER` to zero, or some value where
the final multiplied value is higher than ``flush_every``.
In the future we hope to add the ability to direct batching tasks
to a channel with different QoS requirements than the task channel.
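A minimal sketch of the setting the warning refers to (this assumes an
``app`` instance has already been created)::
    app.conf.CELERYD_PREFETCH_MULTIPLIER = 0  # zero disables prefetch limits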
**Simple Example**
A click counter that flushes the buffer every 100 messages, and every 10
seconds. Does not do anything with the data, but can easily be modified
to store it in a database.
.. code-block:: python
# Flush after 100 messages, or 10 seconds.
@app.task(base=Batches, flush_every=100, flush_interval=10)
def count_click(requests):
from collections import Counter
count = Counter(request.kwargs['url'] for request in requests)
for url, count in count.items():
print('>>> Clicks: {0} -> {1}'.format(url, count))
Then you can ask for a click to be counted by doing::
>>> count_click.delay('http://example.com')
**Example returning results**
An interface to the Web of Trust API that flushes the buffer every 100
messages, and every 10 seconds.
.. code-block:: python
import requests
from urlparse import urlparse
from celery.contrib.batches import Batches
wot_api_target = 'https://api.mywot.com/0.4/public_link_json'
@app.task(base=Batches, flush_every=100, flush_interval=10)
def wot_api(requests):
sig = lambda url: url
        responses = wot_api_real(
(sig(*request.args, **request.kwargs) for request in requests)
)
# use mark_as_done to manually return response data
        for response, request in zip(responses, requests):
app.backend.mark_as_done(request.id, response)
def wot_api_real(urls):
domains = [urlparse(url).netloc for url in urls]
response = requests.get(
wot_api_target,
params={'hosts': ('/').join(set(domains)) + '/'}
)
return [response.json[domain] for domain in domains]
Using the API is done as follows::
>>> wot_api.delay('http://example.com')
.. note::
If you don't have an ``app`` instance then use the current app proxy
instead::
from celery import current_app
        current_app.backend.mark_as_done(request.id, response)
"""
from __future__ import absolute_import
from itertools import count
from celery.task import Task
from celery.five import Empty, Queue
from celery.utils.log import get_logger
from celery.worker.job import Request
from celery.utils import noop
__all__ = ['Batches']
logger = get_logger(__name__)
def consume_queue(queue):
"""Iterator yielding all immediately available items in a
:class:`Queue.Queue`.
The iterator stops as soon as the queue raises :exc:`Queue.Empty`.
*Examples*
>>> q = Queue()
>>> map(q.put, range(4))
>>> list(consume_queue(q))
[0, 1, 2, 3]
>>> list(consume_queue(q))
[]
"""
get = queue.get_nowait
while 1:
try:
yield get()
except Empty:
break
def apply_batches_task(task, args, loglevel, logfile):
task.push_request(loglevel=loglevel, logfile=logfile)
try:
result = task(*args)
except Exception as exc:
result = None
logger.error('Error: %r', exc, exc_info=True)
finally:
task.pop_request()
return result
class SimpleRequest(object):
"""Pickleable request."""
#: task id
id = None
#: task name
name = None
#: positional arguments
args = ()
#: keyword arguments
kwargs = {}
#: message delivery information.
delivery_info = None
#: worker node name
hostname = None
def __init__(self, id, name, args, kwargs, delivery_info, hostname):
self.id = id
self.name = name
self.args = args
self.kwargs = kwargs
self.delivery_info = delivery_info
self.hostname = hostname
@classmethod
def from_request(cls, request):
return cls(request.id, request.name, request.args,
request.kwargs, request.delivery_info, request.hostname)
class Batches(Task):
abstract = True
#: Maximum number of message in buffer.
flush_every = 10
#: Timeout in seconds before buffer is flushed anyway.
flush_interval = 30
def __init__(self):
self._buffer = Queue()
self._count = count(1)
self._tref = None
self._pool = None
def run(self, requests):
raise NotImplementedError('must implement run(requests)')
def Strategy(self, task, app, consumer):
self._pool = consumer.pool
hostname = consumer.hostname
eventer = consumer.event_dispatcher
Req = Request
connection_errors = consumer.connection_errors
timer = consumer.timer
put_buffer = self._buffer.put
flush_buffer = self._do_flush
def task_message_handler(message, body, ack, reject, callbacks, **kw):
request = Req(body, on_ack=ack, app=app, hostname=hostname,
events=eventer, task=task,
connection_errors=connection_errors,
delivery_info=message.delivery_info)
put_buffer(request)
if self._tref is None: # first request starts flush timer.
self._tref = timer.call_repeatedly(
self.flush_interval, flush_buffer,
)
if not next(self._count) % self.flush_every:
flush_buffer()
return task_message_handler
def flush(self, requests):
return self.apply_buffer(requests, ([SimpleRequest.from_request(r)
for r in requests], ))
def _do_flush(self):
logger.debug('Batches: Wake-up to flush buffer...')
requests = None
if self._buffer.qsize():
requests = list(consume_queue(self._buffer))
if requests:
logger.debug('Batches: Buffer complete: %s', len(requests))
self.flush(requests)
if not requests:
logger.debug('Batches: Cancelling timer: Nothing in buffer.')
if self._tref:
self._tref.cancel() # cancel timer.
self._tref = None
def apply_buffer(self, requests, args=(), kwargs={}):
acks_late = [], []
[acks_late[r.task.acks_late].append(r) for r in requests]
assert requests and (acks_late[True] or acks_late[False])
def on_accepted(pid, time_accepted):
[req.acknowledge() for req in acks_late[False]]
def on_return(result):
[req.acknowledge() for req in acks_late[True]]
return self._pool.apply_async(
apply_batches_task,
(self, args, 0, None),
accept_callback=on_accepted,
callback=acks_late[True] and on_return or noop,
)
| {
"content_hash": "3b2cb9fa05b683de1664559e5d279748",
"timestamp": "",
"source": "github",
"line_count": 248,
"max_line_length": 78,
"avg_line_length": 28.70967741935484,
"alnum_prop": 0.6043539325842696,
"repo_name": "johankaito/fufuka",
"id": "8cabc6f6168ca35754a05736a1be9223375ee7a8",
"size": "7144",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "microblog/flask/venv/lib/python2.7/site-packages/celery/contrib/batches.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "70167"
},
{
"name": "C",
"bytes": "993849"
},
{
"name": "C++",
"bytes": "4924114"
},
{
"name": "CSS",
"bytes": "57195"
},
{
"name": "Fortran",
"bytes": "10375"
},
{
"name": "HTML",
"bytes": "3832217"
},
{
"name": "Java",
"bytes": "608432"
},
{
"name": "JavaScript",
"bytes": "48304"
},
{
"name": "MATLAB",
"bytes": "4346"
},
{
"name": "Makefile",
"bytes": "575902"
},
{
"name": "Python",
"bytes": "41068291"
},
{
"name": "Shell",
"bytes": "952977"
},
{
"name": "XSLT",
"bytes": "46584"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: get_certificate
author: "John Westcott IV (@john-westcott-iv)"
version_added: "2.8"
short_description: Get a certificate from a host:port
description:
- Makes a secure connection and returns information about the presented certificate
- The module can use the cryptography Python library, or the pyOpenSSL Python
library. By default, it tries to detect which one is available. This can be
overridden with the I(select_crypto_backend) option. Please note that the PyOpenSSL
      backend was deprecated in Ansible 2.9 and will be removed in Ansible 2.13.
options:
host:
description:
- The host to get the cert for (IP is fine)
type: str
required: true
ca_cert:
description:
- A PEM file containing one or more root certificates; if present, the cert will be validated against these root certs.
      - Note that this only validates that the certificate is signed by the chain, not that the cert is valid for the host presenting it.
type: path
port:
description:
- The port to connect to
type: int
required: true
proxy_host:
description:
      - Proxy host used when getting a certificate.
type: str
version_added: 2.9
proxy_port:
description:
      - Proxy port used when getting a certificate.
type: int
default: 8080
version_added: 2.9
timeout:
description:
- The timeout in seconds
type: int
default: 10
select_crypto_backend:
description:
- Determines which crypto backend to use.
- The default choice is C(auto), which tries to use C(cryptography) if available, and falls back to C(pyopenssl).
- If set to C(pyopenssl), will try to use the L(pyOpenSSL,https://pypi.org/project/pyOpenSSL/) library.
- If set to C(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
type: str
default: auto
choices: [ auto, cryptography, pyopenssl ]
version_added: "2.9"
notes:
  - When using ca_cert on OS X it has been reported that in some conditions the validation will always succeed.
requirements:
- "python >= 2.7 when using C(proxy_host)"
- "cryptography >= 1.6 or pyOpenSSL >= 0.15"
'''
RETURN = '''
cert:
description: The certificate retrieved from the port
returned: success
type: str
expired:
description: Boolean indicating if the cert is expired
returned: success
type: bool
extensions:
description: Extensions applied to the cert
returned: success
type: list
issuer:
description: Information about the issuer of the cert
returned: success
type: dict
not_after:
description: Expiration date of the cert
returned: success
type: str
not_before:
description: Issue date of the cert
returned: success
type: str
serial_number:
description: The serial number of the cert
returned: success
type: str
signature_algorithm:
description: The algorithm used to sign the cert
returned: success
type: str
subject:
description: Information about the subject of the cert (OU, CN, etc)
returned: success
type: dict
version:
description: The version number of the certificate
returned: success
type: str
'''
EXAMPLES = '''
- name: Get the cert from an RDP port
get_certificate:
host: "1.2.3.4"
port: 3389
delegate_to: localhost
run_once: true
register: cert
- name: Get a cert from an https port
get_certificate:
host: "www.google.com"
port: 443
delegate_to: localhost
run_once: true
register: cert
- name: How many days until cert expires
debug:
msg: "cert expires in: {{ expire_days }} days."
vars:
expire_days: "{{ (( cert.not_after | to_datetime('%Y%m%d%H%M%SZ')) - (ansible_date_time.iso8601 | to_datetime('%Y-%m-%dT%H:%M:%SZ')) ).days }}"
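# A hedged sketch using the proxy options documented above; the proxy host
# and port below are illustrative placeholders, not real infrastructure.
- name: Get a cert through an HTTP proxy
  get_certificate:
    host: "www.google.com"
    port: 443
    proxy_host: "proxy.example.com"
    proxy_port: 8080
  delegate_to: localhost
  run_once: true
  register: proxied_cert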
'''
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_bytes
from ansible.module_utils import crypto as crypto_utils
from distutils.version import LooseVersion
from os.path import isfile
from socket import setdefaulttimeout, socket
from ssl import get_server_certificate, DER_cert_to_PEM_cert, CERT_NONE, CERT_OPTIONAL
import atexit
import base64
import datetime
import traceback
MINIMAL_PYOPENSSL_VERSION = '0.15'
MINIMAL_CRYPTOGRAPHY_VERSION = '1.6'
CREATE_DEFAULT_CONTEXT_IMP_ERR = None
try:
from ssl import create_default_context
except ImportError:
CREATE_DEFAULT_CONTEXT_IMP_ERR = traceback.format_exc()
HAS_CREATE_DEFAULT_CONTEXT = False
else:
HAS_CREATE_DEFAULT_CONTEXT = True
PYOPENSSL_IMP_ERR = None
try:
import OpenSSL
from OpenSSL import crypto
PYOPENSSL_VERSION = LooseVersion(OpenSSL.__version__)
except ImportError:
PYOPENSSL_IMP_ERR = traceback.format_exc()
PYOPENSSL_FOUND = False
else:
PYOPENSSL_FOUND = True
CRYPTOGRAPHY_IMP_ERR = None
try:
import cryptography
import cryptography.exceptions
import cryptography.x509
from cryptography.hazmat.backends import default_backend as cryptography_backend
CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
CRYPTOGRAPHY_FOUND = False
else:
CRYPTOGRAPHY_FOUND = True
def main():
module = AnsibleModule(
argument_spec=dict(
ca_cert=dict(type='path'),
host=dict(type='str', required=True),
port=dict(type='int', required=True),
proxy_host=dict(type='str'),
proxy_port=dict(type='int', default=8080),
timeout=dict(type='int', default=10),
select_crypto_backend=dict(type='str', choices=['auto', 'pyopenssl', 'cryptography'], default='auto'),
),
)
ca_cert = module.params.get('ca_cert')
host = module.params.get('host')
port = module.params.get('port')
proxy_host = module.params.get('proxy_host')
proxy_port = module.params.get('proxy_port')
timeout = module.params.get('timeout')
backend = module.params.get('select_crypto_backend')
if backend == 'auto':
# Detection what is possible
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# First try cryptography, then pyOpenSSL
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
backend = 'pyopenssl'
# Success?
if backend == 'auto':
module.fail_json(msg=("Can't detect any of the required Python libraries "
"cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
MINIMAL_CRYPTOGRAPHY_VERSION,
MINIMAL_PYOPENSSL_VERSION))
if backend == 'pyopenssl':
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSSL >= {0}'.format(MINIMAL_PYOPENSSL_VERSION)),
exception=PYOPENSSL_IMP_ERR)
module.deprecate('The module is using the PyOpenSSL backend. This backend has been deprecated', version='2.13')
elif backend == 'cryptography':
if not CRYPTOGRAPHY_FOUND:
module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
exception=CRYPTOGRAPHY_IMP_ERR)
result = dict(
changed=False,
)
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSSL >= 0.15'), exception=PYOPENSSL_IMP_ERR)
if timeout:
setdefaulttimeout(timeout)
if ca_cert:
if not isfile(ca_cert):
module.fail_json(msg="ca_cert file does not exist")
if proxy_host:
if not HAS_CREATE_DEFAULT_CONTEXT:
module.fail_json(msg='To use proxy_host, you must run the get_certificate module with Python 2.7 or newer.',
exception=CREATE_DEFAULT_CONTEXT_IMP_ERR)
try:
connect = "CONNECT %s:%s HTTP/1.0\r\n\r\n" % (host, port)
sock = socket()
atexit.register(sock.close)
sock.connect((proxy_host, proxy_port))
sock.send(connect.encode())
sock.recv(8192)
ctx = create_default_context()
ctx.check_hostname = False
ctx.verify_mode = CERT_NONE
if ca_cert:
ctx.verify_mode = CERT_OPTIONAL
ctx.load_verify_locations(cafile=ca_cert)
cert = ctx.wrap_socket(sock, server_hostname=host).getpeercert(True)
cert = DER_cert_to_PEM_cert(cert)
except Exception as e:
module.fail_json(msg="Failed to get cert from port with error: {0}".format(e))
else:
try:
cert = get_server_certificate((host, port), ca_certs=ca_cert)
except Exception as e:
module.fail_json(msg="Failed to get cert from port with error: {0}".format(e))
result['cert'] = cert
if backend == 'pyopenssl':
x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
result['subject'] = {}
for component in x509.get_subject().get_components():
result['subject'][component[0]] = component[1]
result['expired'] = x509.has_expired()
result['extensions'] = []
extension_count = x509.get_extension_count()
for index in range(0, extension_count):
extension = x509.get_extension(index)
result['extensions'].append({
'critical': extension.get_critical(),
'asn1_data': extension.get_data(),
'name': extension.get_short_name(),
})
result['issuer'] = {}
for component in x509.get_issuer().get_components():
result['issuer'][component[0]] = component[1]
result['not_after'] = x509.get_notAfter()
result['not_before'] = x509.get_notBefore()
result['serial_number'] = x509.get_serial_number()
result['signature_algorithm'] = x509.get_signature_algorithm()
result['version'] = x509.get_version()
elif backend == 'cryptography':
x509 = cryptography.x509.load_pem_x509_certificate(to_bytes(cert), cryptography_backend())
result['subject'] = {}
for attribute in x509.subject:
result['subject'][crypto_utils.cryptography_oid_to_name(attribute.oid, short=True)] = attribute.value
result['expired'] = x509.not_valid_after < datetime.datetime.utcnow()
result['extensions'] = []
for dotted_number, entry in crypto_utils.cryptography_get_extensions_from_cert(x509).items():
oid = cryptography.x509.oid.ObjectIdentifier(dotted_number)
result['extensions'].append({
'critical': entry['critical'],
'asn1_data': base64.b64decode(entry['value']),
'name': crypto_utils.cryptography_oid_to_name(oid, short=True),
})
result['issuer'] = {}
for attribute in x509.issuer:
result['issuer'][crypto_utils.cryptography_oid_to_name(attribute.oid, short=True)] = attribute.value
result['not_after'] = x509.not_valid_after.strftime('%Y%m%d%H%M%SZ')
result['not_before'] = x509.not_valid_before.strftime('%Y%m%d%H%M%SZ')
result['serial_number'] = x509.serial_number
result['signature_algorithm'] = crypto_utils.cryptography_oid_to_name(x509.signature_algorithm_oid)
# We need the -1 offset to get the same values as pyOpenSSL
if x509.version == cryptography.x509.Version.v1:
result['version'] = 1 - 1
elif x509.version == cryptography.x509.Version.v3:
result['version'] = 3 - 1
else:
result['version'] = "unknown"
module.exit_json(**result)
if __name__ == '__main__':
main()
| {
"content_hash": "3eb361ef8a4e733c8f056792ab5ee756",
"timestamp": "",
"source": "github",
"line_count": 355,
"max_line_length": 147,
"avg_line_length": 34.70422535211268,
"alnum_prop": 0.6354707792207792,
"repo_name": "thaim/ansible",
"id": "99883366a42fe49c07eb45cba2c52209b96bfda0",
"size": "12453",
"binary": false,
"copies": "7",
"ref": "refs/heads/fix-broken-link",
"path": "lib/ansible/modules/crypto/get_certificate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "246"
}
],
"symlink_target": ""
} |
"""
Base settings file, common to all environments.
These settings can be overridden in local.py.
"""
import datetime
import os
import json
import hashlib
import logging
from datetime import timedelta
from collections import OrderedDict
os_env = os.environ
def parent_dir(path):
'''Return the parent of a directory.'''
return os.path.abspath(os.path.join(path, os.pardir))
HERE = os.path.dirname(os.path.abspath(__file__))
BASE_PATH = parent_dir(HERE) # website/ directory
APP_PATH = parent_dir(BASE_PATH)
ADDON_PATH = os.path.join(APP_PATH, 'addons')
STATIC_FOLDER = os.path.join(BASE_PATH, 'static')
STATIC_URL_PATH = '/static'
ASSET_HASH_PATH = os.path.join(APP_PATH, 'webpack-assets.json')
ROOT = os.path.join(BASE_PATH, '..')
BCRYPT_LOG_ROUNDS = 12
# Logging level to use when DEBUG is False
LOG_LEVEL = logging.INFO
with open(os.path.join(APP_PATH, 'package.json'), 'r') as fobj:
VERSION = json.load(fobj)['version']
# Expiration time for verification key
EXPIRATION_TIME_DICT = {
'password': 24 * 60, # 24 hours in minutes for forgot and reset password
'confirm': 24 * 60, # 24 hours in minutes for confirm account and email
'claim': 30 * 24 * 60 # 30 days in minutes for claim contributor-ship
}
CITATION_STYLES_PATH = os.path.join(BASE_PATH, 'static', 'vendor', 'bower_components', 'styles')
# Minimum seconds between forgot password email attempts
SEND_EMAIL_THROTTLE = 30
# Minimum seconds between attempts to change password
CHANGE_PASSWORD_THROTTLE = 30
# Number of incorrect password attempts allowed before throttling.
INCORRECT_PASSWORD_ATTEMPTS_ALLOWED = 3
# Seconds that must elapse before updating a user's date_last_login field
DATE_LAST_LOGIN_THROTTLE = 60
# Seconds that must elapse before change password attempts are reset(currently 1 hour)
TIME_RESET_CHANGE_PASSWORD_ATTEMPTS = 3600
# Hours before pending embargo/retraction/registration automatically becomes active
RETRACTION_PENDING_TIME = datetime.timedelta(days=2)
EMBARGO_PENDING_TIME = datetime.timedelta(days=2)
EMBARGO_TERMINATION_PENDING_TIME = datetime.timedelta(days=2)
REGISTRATION_APPROVAL_TIME = datetime.timedelta(days=2)
# Date range for embargo periods
EMBARGO_END_DATE_MIN = datetime.timedelta(days=2)
EMBARGO_END_DATE_MAX = datetime.timedelta(days=1460) # Four years
# Question titles to be removed for anonymized VOL
ANONYMIZED_TITLES = ['Authors']
LOAD_BALANCER = False
PROXY_ADDRS = []
USE_POSTGRES = True
# May set these to True in local.py for development
DEV_MODE = False
DEBUG_MODE = False
SECURE_MODE = not DEBUG_MODE # Set secure cookie
PROTOCOL = 'https://' if SECURE_MODE else 'http://'
DOMAIN = PROTOCOL + 'localhost:5000/'
INTERNAL_DOMAIN = DOMAIN
API_DOMAIN = PROTOCOL + 'localhost:8000/'
PREPRINT_PROVIDER_DOMAINS = {
'enabled': False,
'prefix': PROTOCOL,
'suffix': '/'
}
# External Ember App Local Development
USE_EXTERNAL_EMBER = False
PROXY_EMBER_APPS = False
# http://docs.python-requests.org/en/master/user/advanced/#timeouts
EXTERNAL_EMBER_SERVER_TIMEOUT = 3.05
EXTERNAL_EMBER_APPS = {}
LOG_PATH = os.path.join(APP_PATH, 'logs')
TEMPLATES_PATH = os.path.join(BASE_PATH, 'templates')
ANALYTICS_PATH = os.path.join(BASE_PATH, 'analytics')
# User management & registration
CONFIRM_REGISTRATIONS_BY_EMAIL = True
ALLOW_REGISTRATION = True
ALLOW_LOGIN = True
SEARCH_ENGINE = 'elastic' # Can be 'elastic', or None
ELASTIC_URI = 'localhost:9200'
ELASTIC_TIMEOUT = 10
ELASTIC_INDEX = 'website'
ELASTIC_KWARGS = {
# 'use_ssl': False,
# 'verify_certs': True,
# 'ca_certs': None,
# 'client_cert': None,
# 'client_key': None
}
# Sessions
COOKIE_NAME = 'osf'
# TODO: Override OSF_COOKIE_DOMAIN in local.py in production
OSF_COOKIE_DOMAIN = None
# server-side verification timeout
OSF_SESSION_TIMEOUT = 30 * 24 * 60 * 60 # 30 days in seconds
# TODO: Override SECRET_KEY in local.py in production
SECRET_KEY = 'CHANGEME'
SESSION_COOKIE_SECURE = SECURE_MODE
SESSION_COOKIE_HTTPONLY = True
# local path to private key and cert for local development using https, overwrite in local.py
OSF_SERVER_KEY = None
OSF_SERVER_CERT = None
# Change if using `scripts/cron.py` to manage crontab
CRON_USER = None
# External services
USE_CDN_FOR_CLIENT_LIBS = True
USE_EMAIL = True
FROM_EMAIL = '[email protected]'
# support email
OSF_SUPPORT_EMAIL = '[email protected]'
# contact email
OSF_CONTACT_EMAIL = '[email protected]'
# prereg email
PREREG_EMAIL = '[email protected]'
# Default settings for fake email address generation
FAKE_EMAIL_NAME = 'freddiemercury'
FAKE_EMAIL_DOMAIN = 'cos.io'
# SMTP Settings
MAIL_SERVER = 'smtp.sendgrid.net'
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = '' # Set this in local.py
# OR, if using Sendgrid's API
# WARNING: If `SENDGRID_WHITELIST_MODE` is True,
# `tasks.send_email` would only email recipients included in `SENDGRID_EMAIL_WHITELIST`
SENDGRID_API_KEY = None
SENDGRID_WHITELIST_MODE = False
SENDGRID_EMAIL_WHITELIST = []
# Mailchimp
MAILCHIMP_API_KEY = None
MAILCHIMP_WEBHOOK_SECRET_KEY = 'CHANGEME' # OSF secret key to ensure webhook is secure
ENABLE_EMAIL_SUBSCRIPTIONS = True
MAILCHIMP_GENERAL_LIST = 'Open Science Framework General'
# Triggered emails
OSF_HELP_LIST = 'Open Science Framework Help'
PREREG_AGE_LIMIT = timedelta(weeks=12)
PREREG_WAIT_TIME = timedelta(weeks=2)
WAIT_BETWEEN_MAILS = timedelta(days=7)
NO_ADDON_WAIT_TIME = timedelta(weeks=8)
NO_LOGIN_WAIT_TIME = timedelta(weeks=4)
WELCOME_OSF4M_WAIT_TIME = timedelta(weeks=2)
NO_LOGIN_OSF4M_WAIT_TIME = timedelta(weeks=6)
NEW_PUBLIC_PROJECT_WAIT_TIME = timedelta(hours=24)
WELCOME_OSF4M_WAIT_TIME_GRACE = timedelta(days=12)
# TODO: Override in local.py
MAILGUN_API_KEY = None
# Use Celery for file rendering
USE_CELERY = True
# File rendering timeout (in ms)
MFR_TIMEOUT = 30000
# TODO: Override in local.py in production
DB_HOST = 'localhost'
DB_PORT = os_env.get('OSF_DB_PORT', 27017)
DB_NAME = 'osf20130903'
DB_USER = None
DB_PASS = None
# Cache settings
SESSION_HISTORY_LENGTH = 5
SESSION_HISTORY_IGNORE_RULES = [
lambda url: '/static/' in url,
lambda url: 'favicon' in url,
lambda url: url.startswith('/api/'),
]
# TODO: Configuration should not change between deploys - this should be dynamic.
CANONICAL_DOMAIN = 'openscienceframework.org'
COOKIE_DOMAIN = '.openscienceframework.org' # Beaker
SHORT_DOMAIN = 'osf.io'
# TODO: Combine Python and JavaScript config
# If you change COMMENT_MAXLENGTH, make sure you create a corresponding migration.
COMMENT_MAXLENGTH = 1000
# Profile image options
PROFILE_IMAGE_LARGE = 70
PROFILE_IMAGE_MEDIUM = 40
PROFILE_IMAGE_SMALL = 20
# Currently (8/21/2017) only gravatar supported.
PROFILE_IMAGE_PROVIDER = 'gravatar'
# Conference options
CONFERENCE_MIN_COUNT = 5
WIKI_WHITELIST = {
'tags': [
'a', 'abbr', 'acronym', 'b', 'bdo', 'big', 'blockquote', 'br',
'center', 'cite', 'code',
'dd', 'del', 'dfn', 'div', 'dl', 'dt', 'em', 'embed', 'font',
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'ins',
'kbd', 'li', 'object', 'ol', 'param', 'pre', 'p', 'q',
's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup',
'table', 'tbody', 'td', 'th', 'thead', 'tr', 'tt', 'ul', 'u',
'var', 'wbr',
],
'attributes': [
'align', 'alt', 'border', 'cite', 'class', 'dir',
'height', 'href', 'id', 'src', 'style', 'title', 'type', 'width',
'face', 'size', # font tags
'salign', 'align', 'wmode', 'target',
],
# Styles currently used in Reproducibility Project wiki pages
'styles': [
'top', 'left', 'width', 'height', 'position',
'background', 'font-size', 'text-align', 'z-index',
'list-style',
]
}
# Maps category identifier => Human-readable representation for use in
# titles, menus, etc.
# Use an OrderedDict so that menu items show in the correct order
NODE_CATEGORY_MAP = OrderedDict([
('analysis', 'Analysis'),
('communication', 'Communication'),
('data', 'Data'),
('hypothesis', 'Hypothesis'),
('instrumentation', 'Instrumentation'),
('methods and measures', 'Methods and Measures'),
('procedure', 'Procedure'),
('project', 'Project'),
('software', 'Software'),
('other', 'Other'),
('', 'Uncategorized')
])
# Add-ons
# Load addons from addons.json
with open(os.path.join(ROOT, 'addons.json')) as fp:
addon_settings = json.load(fp)
ADDONS_REQUESTED = addon_settings['addons']
ADDONS_ARCHIVABLE = addon_settings['addons_archivable']
ADDONS_COMMENTABLE = addon_settings['addons_commentable']
ADDONS_BASED_ON_IDS = addon_settings['addons_based_on_ids']
ADDONS_DESCRIPTION = addon_settings['addons_description']
ADDONS_URL = addon_settings['addons_url']
ADDONS_DEFAULT = addon_settings['addons_default']
ADDON_CATEGORIES = [
'documentation',
'storage',
'bibliography',
'other',
'security',
'citations',
]
SYSTEM_ADDED_ADDONS = {
'user': [],
'node': [],
}
KEEN = {
'public': {
'project_id': None,
'master_key': 'changeme',
'write_key': '',
'read_key': '',
},
'private': {
'project_id': '',
'write_key': '',
'read_key': '',
},
}
SENTRY_DSN = None
SENTRY_DSN_JS = None
MISSING_FILE_NAME = 'untitled'
# Project Organizer
ALL_MY_PROJECTS_ID = '-amp'
ALL_MY_REGISTRATIONS_ID = '-amr'
ALL_MY_PROJECTS_NAME = 'All my projects'
ALL_MY_REGISTRATIONS_NAME = 'All my registrations'
# Most Popular and New and Noteworthy Nodes
POPULAR_LINKS_NODE = None # TODO Override in local.py in production.
POPULAR_LINKS_REGISTRATIONS = None # TODO Override in local.py in production.
NEW_AND_NOTEWORTHY_LINKS_NODE = None # TODO Override in local.py in production.
MAX_POPULAR_PROJECTS = 10
NEW_AND_NOTEWORTHY_CONTRIBUTOR_BLACKLIST = [] # TODO Override in local.py in production.
# FOR EMERGENCIES ONLY: Setting this to True will disable forks, registrations,
# and uploads in order to save disk space.
DISK_SAVING_MODE = False
# Seconds before another notification email can be sent to a contributor when added to a project
CONTRIBUTOR_ADDED_EMAIL_THROTTLE = 24 * 3600
# Google Analytics
GOOGLE_ANALYTICS_ID = None
GOOGLE_SITE_VERIFICATION = None
DEFAULT_HMAC_SECRET = 'changeme'
DEFAULT_HMAC_ALGORITHM = hashlib.sha256
WATERBUTLER_URL = 'http://localhost:7777'
WATERBUTLER_INTERNAL_URL = WATERBUTLER_URL
WATERBUTLER_ADDRS = ['127.0.0.1']
####################
# Identifiers #
####################
DOI_URL_PREFIX = 'https://dx.doi.org/'
# General Format for DOIs
DOI_FORMAT = '{prefix}/osf.io/{guid}'
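# For example, DOI_FORMAT.format(prefix='10.5072', guid='abcde') yields
# '10.5072/osf.io/abcde' (prefix and guid here are illustrative values only).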
# ezid
EZID_DOI_NAMESPACE = 'doi:10.5072'
EZID_ARK_NAMESPACE = 'ark:99999'
EZID_USERNAME = None
EZID_PASSWORD = None
# datacite
DATACITE_USERNAME = None
DATACITE_PASSWORD = None
DATACITE_URL = None
DATACITE_PREFIX = '10.5072' # Datacite's test DOI prefix -- update in production
# Minting DOIs only works on Datacite's production server, so
# disable minting on staging and development environments by default
DATACITE_MINT_DOIS = not DEV_MODE
# crossref
CROSSREF_USERNAME = None
CROSSREF_PASSWORD = None
CROSSREF_URL = None # Location to POST crossref data. In production, change this to the production CrossRef API endpoint
CROSSREF_DEPOSITOR_EMAIL = 'None' # This email will receive confirmation/error messages from CrossRef on submission
ECSARXIV_CROSSREF_USERNAME = None
ECSARXIV_CROSSREF_PASSWORD = None
# Leave as `None` for production, test/staging/local envs must set
SHARE_PREPRINT_PROVIDER_PREPEND = None
SHARE_REGISTRATION_URL = ''
SHARE_URL = None
SHARE_API_TOKEN = None # Required to send project updates to SHARE
CAS_SERVER_URL = 'http://localhost:8080'
MFR_SERVER_URL = 'http://localhost:7778'
###### ARCHIVER ###########
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = 5 * 1024 ** 3 # 5 GB (5 * 1024 ** 3 bytes)
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
ENABLE_ARCHIVER = True
JWT_SECRET = 'changeme'
JWT_ALGORITHM = 'HS256'
##### CELERY #####
# Default RabbitMQ broker
RABBITMQ_USERNAME = os.environ.get('RABBITMQ_USERNAME', 'guest')
RABBITMQ_PASSWORD = os.environ.get('RABBITMQ_PASSWORD', 'guest')
RABBITMQ_HOST = os.environ.get('RABBITMQ_HOST', 'localhost')
RABBITMQ_PORT = os.environ.get('RABBITMQ_PORT', '5672')
RABBITMQ_VHOST = os.environ.get('RABBITMQ_VHOST', '/')
# Seconds, not an actual celery setting
CELERY_RETRY_BACKOFF_BASE = 5
class CeleryConfig:
"""
Celery Configuration
http://docs.celeryproject.org/en/latest/userguide/configuration.html
"""
timezone = 'UTC'
task_default_queue = 'celery'
task_low_queue = 'low'
task_med_queue = 'med'
task_high_queue = 'high'
low_pri_modules = {
'framework.analytics.tasks',
'framework.celery_tasks',
'scripts.osfstorage.usage_audit',
'scripts.stuck_registration_audit',
'scripts.analytics.tasks',
'scripts.populate_new_and_noteworthy_projects',
'scripts.populate_popular_projects_and_registrations',
'scripts.remind_draft_preregistrations',
'website.search.elastic_search',
'scripts.generate_sitemap',
'scripts.generate_prereg_csv',
'scripts.analytics.run_keen_summaries',
'scripts.analytics.run_keen_snapshots',
'scripts.analytics.run_keen_events',
}
med_pri_modules = {
'framework.email.tasks',
'scripts.send_queued_mails',
'scripts.triggered_mails',
'website.mailchimp_utils',
'website.notifications.tasks',
}
high_pri_modules = {
'scripts.approve_embargo_terminations',
'scripts.approve_registrations',
'scripts.embargo_registrations',
'scripts.premigrate_created_modified',
'scripts.refresh_addon_tokens',
'scripts.retract_registrations',
'website.archiver.tasks',
'scripts.add_missing_identifiers_to_preprints'
}
try:
from kombu import Queue, Exchange
except ImportError:
pass
else:
task_queues = (
Queue(task_low_queue, Exchange(task_low_queue), routing_key=task_low_queue,
consumer_arguments={'x-priority': -1}),
Queue(task_default_queue, Exchange(task_default_queue), routing_key=task_default_queue,
consumer_arguments={'x-priority': 0}),
Queue(task_med_queue, Exchange(task_med_queue), routing_key=task_med_queue,
consumer_arguments={'x-priority': 1}),
Queue(task_high_queue, Exchange(task_high_queue), routing_key=task_high_queue,
consumer_arguments={'x-priority': 10}),
)
task_default_exchange_type = 'direct'
task_routes = ('framework.celery_tasks.routers.CeleryRouter', )
task_ignore_result = True
task_store_errors_even_if_ignored = True
broker_url = os.environ.get('BROKER_URL', 'amqp://{}:{}@{}:{}/{}'.format(RABBITMQ_USERNAME, RABBITMQ_PASSWORD, RABBITMQ_HOST, RABBITMQ_PORT, RABBITMQ_VHOST))
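# With the default RABBITMQ_* values above, the fallback URL evaluates to
# 'amqp://guest:guest@localhost:5672//' (the trailing '//' encodes the '/'
# vhost); a BROKER_URL environment variable replaces the computed value.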
broker_use_ssl = False
# Default RabbitMQ backend
result_backend = os.environ.get('CELERY_RESULT_BACKEND', broker_url)
beat_scheduler = 'django_celery_beat.schedulers:DatabaseScheduler'
# Modules to import when celery launches
imports = (
'framework.celery_tasks',
'framework.email.tasks',
'website.mailchimp_utils',
'website.notifications.tasks',
'website.archiver.tasks',
'website.search.search',
'website.project.tasks',
'scripts.populate_new_and_noteworthy_projects',
'scripts.populate_popular_projects_and_registrations',
'scripts.refresh_addon_tokens',
'scripts.remind_draft_preregistrations',
'scripts.retract_registrations',
'scripts.embargo_registrations',
'scripts.approve_registrations',
'scripts.approve_embargo_terminations',
'scripts.triggered_mails',
'scripts.send_queued_mails',
'scripts.analytics.run_keen_summaries',
'scripts.analytics.run_keen_snapshots',
'scripts.analytics.run_keen_events',
'scripts.generate_sitemap',
'scripts.premigrate_created_modified',
'scripts.generate_prereg_csv',
'scripts.add_missing_identifiers_to_preprints',
)
# Modules that need metrics and release requirements
# imports += (
# 'scripts.osfstorage.usage_audit',
# 'scripts.stuck_registration_audit',
# 'scripts.analytics.tasks',
# 'scripts.analytics.upload',
# )
# celery.schedule will not be installed when running invoke requirements the first time.
try:
from celery.schedules import crontab
except ImportError:
pass
else:
# Setting up a scheduler, essentially replaces an independent cron job
# Note: these times must be in UTC
beat_schedule = {
'5-minute-emails': {
'task': 'website.notifications.tasks.send_users_email',
'schedule': crontab(minute='*/5'),
'args': ('email_transactional',),
},
'daily-emails': {
'task': 'website.notifications.tasks.send_users_email',
'schedule': crontab(minute=0, hour=5), # Daily at 12 a.m. EST
'args': ('email_digest',),
},
'refresh_addons': {
'task': 'scripts.refresh_addon_tokens',
'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m
'kwargs': {'dry_run': False, 'addons': {
'box': 60, # https://docs.box.com/docs/oauth-20#section-6-using-the-access-and-refresh-tokens
'googledrive': 14, # https://developers.google.com/identity/protocols/OAuth2#expiration
'mendeley': 14 # http://dev.mendeley.com/reference/topics/authorization_overview.html
}},
},
'retract_registrations': {
'task': 'scripts.retract_registrations',
'schedule': crontab(minute=0, hour=5), # Daily 12 a.m
'kwargs': {'dry_run': False},
},
'embargo_registrations': {
'task': 'scripts.embargo_registrations',
'schedule': crontab(minute=0, hour=5), # Daily 12 a.m
'kwargs': {'dry_run': False},
},
'add_missing_identifiers_to_preprints': {
'task': 'scripts.add_missing_identifiers_to_preprints',
'schedule': crontab(minute=0, hour=5), # Daily 12 a.m
'kwargs': {'dry_run': False},
},
'approve_registrations': {
'task': 'scripts.approve_registrations',
'schedule': crontab(minute=0, hour=5), # Daily 12 a.m
'kwargs': {'dry_run': False},
},
'approve_embargo_terminations': {
'task': 'scripts.approve_embargo_terminations',
'schedule': crontab(minute=0, hour=5), # Daily 12 a.m
'kwargs': {'dry_run': False},
},
'triggered_mails': {
'task': 'scripts.triggered_mails',
'schedule': crontab(minute=0, hour=5), # Daily 12 a.m
'kwargs': {'dry_run': False},
},
'send_queued_mails': {
'task': 'scripts.send_queued_mails',
'schedule': crontab(minute=0, hour=17), # Daily 12 p.m.
'kwargs': {'dry_run': False},
},
'prereg_reminder': {
'task': 'scripts.remind_draft_preregistrations',
'schedule': crontab(minute=0, hour=12), # Daily 12 p.m.
'kwargs': {'dry_run': False},
},
'new-and-noteworthy': {
'task': 'scripts.populate_new_and_noteworthy_projects',
'schedule': crontab(minute=0, hour=7, day_of_week=6), # Saturday 2:00 a.m.
'kwargs': {'dry_run': False}
},
'update_popular_nodes': {
'task': 'scripts.populate_popular_projects_and_registrations',
'schedule': crontab(minute=0, hour=7), # Daily 2:00 a.m.
'kwargs': {'dry_run': False}
},
'run_keen_summaries': {
'task': 'scripts.analytics.run_keen_summaries',
'schedule': crontab(minute=0, hour=6), # Daily 1:00 a.m.
'kwargs': {'yesterday': True}
},
'run_keen_snapshots': {
'task': 'scripts.analytics.run_keen_snapshots',
'schedule': crontab(minute=0, hour=8), # Daily 3:00 a.m.
},
'run_keen_events': {
'task': 'scripts.analytics.run_keen_events',
'schedule': crontab(minute=0, hour=9), # Daily 4:00 a.m.
'kwargs': {'yesterday': True}
},
'generate_sitemap': {
'task': 'scripts.generate_sitemap',
'schedule': crontab(minute=0, hour=5), # Daily 12:00 a.m.
},
'generate_prereg_csv': {
'task': 'scripts.generate_prereg_csv',
'schedule': crontab(minute=0, hour=10, day_of_week=0), # Sunday 5:00 a.m.
},
}
# Tasks that need metrics and release requirements
# beat_schedule.update({
# 'usage_audit': {
# 'task': 'scripts.osfstorage.usage_audit',
# 'schedule': crontab(minute=0, hour=5), # Daily 12 a.m
# 'kwargs': {'send_mail': True},
# },
# 'stuck_registration_audit': {
# 'task': 'scripts.stuck_registration_audit',
# 'schedule': crontab(minute=0, hour=11), # Daily 6 a.m
# 'kwargs': {},
# },
# })
WATERBUTLER_JWE_SALT = 'yusaltydough'
WATERBUTLER_JWE_SECRET = 'CirclesAre4Squares'
WATERBUTLER_JWT_SECRET = 'ILiekTrianglesALot'
WATERBUTLER_JWT_ALGORITHM = 'HS256'
WATERBUTLER_JWT_EXPIRATION = 15
SENSITIVE_DATA_SALT = 'yusaltydough'
SENSITIVE_DATA_SECRET = 'TrainglesAre5Squares'
DRAFT_REGISTRATION_APPROVAL_PERIOD = datetime.timedelta(days=10)
assert (DRAFT_REGISTRATION_APPROVAL_PERIOD > EMBARGO_END_DATE_MIN), 'The draft registration approval period should be more than the minimum embargo end date.'
# TODO: Remove references to this flag
ENABLE_INSTITUTIONS = True
ENABLE_VARNISH = False
ENABLE_ESI = False
VARNISH_SERVERS = [] # This should be set in local.py or cache invalidation won't work
ESI_MEDIA_TYPES = {'application/vnd.api+json', 'application/json'}
# Used for gathering meta information about the current build
GITHUB_API_TOKEN = None
# switch for disabling things that shouldn't happen during
# the modm to django migration
RUNNING_MIGRATION = False
# External Identity Provider
EXTERNAL_IDENTITY_PROFILE = {
'OrcidProfile': 'ORCID',
}
# Source: https://github.com/maxd/fake_email_validator/blob/master/config/fake_domains.list
BLACKLISTED_DOMAINS = [
'0-mail.com',
'0815.ru',
'0815.su',
'0clickemail.com',
'0wnd.net',
'0wnd.org',
'10mail.org',
'10minut.com.pl',
'10minutemail.cf',
'10minutemail.co.uk',
'10minutemail.co.za',
'10minutemail.com',
'10minutemail.de',
'10minutemail.eu',
'10minutemail.ga',
'10minutemail.gq',
'10minutemail.info',
'10minutemail.ml',
'10minutemail.net',
'10minutemail.org',
'10minutemail.ru',
'10minutemail.us',
'10minutesmail.co.uk',
'10minutesmail.com',
'10minutesmail.eu',
'10minutesmail.net',
'10minutesmail.org',
'10minutesmail.ru',
'10minutesmail.us',
'123-m.com',
'15qm-mail.red',
'15qm.com',
'1chuan.com',
'1mail.ml',
'1pad.de',
'1usemail.com',
'1zhuan.com',
'20mail.in',
'20mail.it',
'20minutemail.com',
'2prong.com',
'30minutemail.com',
'30minutesmail.com',
'33mail.com',
'3d-painting.com',
'3mail.ga',
'4mail.cf',
'4mail.ga',
'4warding.com',
'4warding.net',
'4warding.org',
'5mail.cf',
'5mail.ga',
'60minutemail.com',
'675hosting.com',
'675hosting.net',
'675hosting.org',
'6ip.us',
'6mail.cf',
'6mail.ga',
'6mail.ml',
'6paq.com',
'6url.com',
'75hosting.com',
'75hosting.net',
'75hosting.org',
'7mail.ga',
'7mail.ml',
'7mail7.com',
'7tags.com',
'8mail.cf',
'8mail.ga',
'8mail.ml',
'99experts.com',
'9mail.cf',
'9ox.net',
'a-bc.net',
'a45.in',
'abcmail.email',
'abusemail.de',
'abyssmail.com',
'acentri.com',
'advantimo.com',
'afrobacon.com',
'agedmail.com',
'ajaxapp.net',
'alivance.com',
'ama-trade.de',
'amail.com',
'amail4.me',
'amilegit.com',
'amiri.net',
'amiriindustries.com',
'anappthat.com',
'ano-mail.net',
'anobox.ru',
'anonbox.net',
'anonmails.de',
'anonymail.dk',
'anonymbox.com',
'antichef.com',
'antichef.net',
'antireg.ru',
'antispam.de',
'antispammail.de',
'appixie.com',
'armyspy.com',
'artman-conception.com',
'asdasd.ru',
'azmeil.tk',
'baxomale.ht.cx',
'beddly.com',
'beefmilk.com',
'beerolympics.se',
'bestemailaddress.net',
'bigprofessor.so',
'bigstring.com',
'binkmail.com',
'bio-muesli.net',
'bladesmail.net',
'bloatbox.com',
'bobmail.info',
'bodhi.lawlita.com',
'bofthew.com',
'bootybay.de',
'bossmail.de',
'boun.cr',
'bouncr.com',
'boxformail.in',
'boximail.com',
'boxtemp.com.br',
'breakthru.com',
'brefmail.com',
'brennendesreich.de',
'broadbandninja.com',
'bsnow.net',
'bspamfree.org',
'buffemail.com',
'bugmenot.com',
'bumpymail.com',
'bund.us',
'bundes-li.ga',
'burnthespam.info',
'burstmail.info',
'buymoreplays.com',
'buyusedlibrarybooks.org',
'byom.de',
'c2.hu',
'cachedot.net',
'card.zp.ua',
'casualdx.com',
'cbair.com',
'cdnqa.com',
'cek.pm',
'cellurl.com',
'cem.net',
'centermail.com',
'centermail.net',
'chammy.info',
'cheatmail.de',
'chewiemail.com',
'childsavetrust.org',
'chogmail.com',
'choicemail1.com',
'chong-mail.com',
'chong-mail.net',
'chong-mail.org',
'clixser.com',
'clrmail.com',
'cmail.net',
'cmail.org',
'coldemail.info',
'consumerriot.com',
'cool.fr.nf',
'correo.blogos.net',
'cosmorph.com',
'courriel.fr.nf',
'courrieltemporaire.com',
'crapmail.org',
'crazymailing.com',
'cubiclink.com',
'curryworld.de',
'cust.in',
'cuvox.de',
'd3p.dk',
'dacoolest.com',
'daintly.com',
'dandikmail.com',
'dayrep.com',
'dbunker.com',
'dcemail.com',
'deadaddress.com',
'deadfake.cf',
'deadfake.ga',
'deadfake.ml',
'deadfake.tk',
'deadspam.com',
'deagot.com',
'dealja.com',
'delikkt.de',
'despam.it',
'despammed.com',
'devnullmail.com',
'dfgh.net',
'digitalsanctuary.com',
'dingbone.com',
'dingfone.com',
'discard.cf',
'discard.email',
'discard.ga',
'discard.gq',
'discard.ml',
'discard.tk',
'discardmail.com',
'discardmail.de',
'dispomail.eu',
'disposable-email.ml',
'disposable.cf',
'disposable.ga',
'disposable.ml',
'disposableaddress.com',
'disposableemailaddresses.com',
'disposableinbox.com',
'dispose.it',
'disposeamail.com',
'disposemail.com',
'dispostable.com',
'divermail.com',
'dodgeit.com',
'dodgemail.de',
'dodgit.com',
'dodgit.org',
'dodsi.com',
'doiea.com',
'domozmail.com',
'donemail.ru',
'dontmail.net',
'dontreg.com',
'dontsendmespam.de',
'dotmsg.com',
'drdrb.com',
'drdrb.net',
'droplar.com',
'dropmail.me',
'duam.net',
'dudmail.com',
'dump-email.info',
'dumpandjunk.com',
'dumpmail.de',
'dumpyemail.com',
'duskmail.com',
'e-mail.com',
'e-mail.org',
'e4ward.com',
'easytrashmail.com',
'ee1.pl',
'ee2.pl',
'eelmail.com',
'einmalmail.de',
'einrot.com',
'einrot.de',
'eintagsmail.de',
'email-fake.cf',
'email-fake.com',
'email-fake.ga',
'email-fake.gq',
'email-fake.ml',
'email-fake.tk',
'email60.com',
'email64.com',
'emailage.cf',
'emailage.ga',
'emailage.gq',
'emailage.ml',
'emailage.tk',
'emaildienst.de',
'emailgo.de',
'emailias.com',
'emailigo.de',
'emailinfive.com',
'emaillime.com',
'emailmiser.com',
'emailproxsy.com',
'emails.ga',
'emailsensei.com',
'emailspam.cf',
'emailspam.ga',
'emailspam.gq',
'emailspam.ml',
'emailspam.tk',
'emailtemporanea.com',
'emailtemporanea.net',
'emailtemporar.ro',
'emailtemporario.com.br',
'emailthe.net',
'emailtmp.com',
'emailto.de',
'emailwarden.com',
'emailx.at.hm',
'emailxfer.com',
'emailz.cf',
'emailz.ga',
'emailz.gq',
'emailz.ml',
'emeil.in',
'emeil.ir',
'emeraldwebmail.com',
'emil.com',
'emkei.cf',
'emkei.ga',
'emkei.gq',
'emkei.ml',
'emkei.tk',
'emz.net',
'enterto.com',
'ephemail.net',
'ero-tube.org',
'etranquil.com',
'etranquil.net',
'etranquil.org',
'evopo.com',
'example.com',
'explodemail.com',
'express.net.ua',
'eyepaste.com',
'facebook-email.cf',
'facebook-email.ga',
'facebook-email.ml',
'facebookmail.gq',
'facebookmail.ml',
'fake-box.com',
'fake-mail.cf',
'fake-mail.ga',
'fake-mail.ml',
'fakeinbox.cf',
'fakeinbox.com',
'fakeinbox.ga',
'fakeinbox.ml',
'fakeinbox.tk',
'fakeinformation.com',
'fakemail.fr',
'fakemailgenerator.com',
'fakemailz.com',
'fammix.com',
'fansworldwide.de',
'fantasymail.de',
'fastacura.com',
'fastchevy.com',
'fastchrysler.com',
'fastkawasaki.com',
'fastmazda.com',
'fastmitsubishi.com',
'fastnissan.com',
'fastsubaru.com',
'fastsuzuki.com',
'fasttoyota.com',
'fastyamaha.com',
'fatflap.com',
'fdfdsfds.com',
'fightallspam.com',
'fiifke.de',
'filzmail.com',
'fivemail.de',
'fixmail.tk',
'fizmail.com',
'fleckens.hu',
'flurre.com',
'flurred.com',
'flurred.ru',
'flyspam.com',
'footard.com',
'forgetmail.com',
'forward.cat',
'fr33mail.info',
'frapmail.com',
'free-email.cf',
'free-email.ga',
'freemails.cf',
'freemails.ga',
'freemails.ml',
'freundin.ru',
'friendlymail.co.uk',
'front14.org',
'fuckingduh.com',
'fudgerub.com',
'fux0ringduh.com',
'fyii.de',
'garliclife.com',
'gehensiemirnichtaufdensack.de',
'gelitik.in',
'germanmails.biz',
'get-mail.cf',
'get-mail.ga',
'get-mail.ml',
'get-mail.tk',
'get1mail.com',
'get2mail.fr',
'getairmail.cf',
'getairmail.com',
'getairmail.ga',
'getairmail.gq',
'getairmail.ml',
'getairmail.tk',
'getmails.eu',
'getonemail.com',
'getonemail.net',
'gfcom.com',
'ghosttexter.de',
'giantmail.de',
'girlsundertheinfluence.com',
'gishpuppy.com',
'gmial.com',
'goemailgo.com',
'gorillaswithdirtyarmpits.com',
'gotmail.com',
'gotmail.net',
'gotmail.org',
'gowikibooks.com',
'gowikicampus.com',
'gowikicars.com',
'gowikifilms.com',
'gowikigames.com',
'gowikimusic.com',
'gowikinetwork.com',
'gowikitravel.com',
'gowikitv.com',
'grandmamail.com',
'grandmasmail.com',
'great-host.in',
'greensloth.com',
'grr.la',
'gsrv.co.uk',
'guerillamail.biz',
'guerillamail.com',
'guerillamail.de',
'guerillamail.net',
'guerillamail.org',
'guerillamailblock.com',
'guerrillamail.biz',
'guerrillamail.com',
'guerrillamail.de',
'guerrillamail.info',
'guerrillamail.net',
'guerrillamail.org',
'guerrillamailblock.com',
'gustr.com',
'h8s.org',
'hacccc.com',
'haltospam.com',
'haqed.com',
'harakirimail.com',
'hartbot.de',
'hat-geld.de',
'hatespam.org',
'headstrong.de',
'hellodream.mobi',
'herp.in',
'hidemail.de',
'hideme.be',
'hidzz.com',
'hiru-dea.com',
'hmamail.com',
'hochsitze.com',
'hopemail.biz',
'hot-mail.cf',
'hot-mail.ga',
'hot-mail.gq',
'hot-mail.ml',
'hot-mail.tk',
'hotpop.com',
'hulapla.de',
'hushmail.com',
'ieatspam.eu',
'ieatspam.info',
'ieh-mail.de',
'ihateyoualot.info',
'iheartspam.org',
'ikbenspamvrij.nl',
'imails.info',
'imgof.com',
'imgv.de',
'imstations.com',
'inbax.tk',
'inbox.si',
'inboxalias.com',
'inboxclean.com',
'inboxclean.org',
'inboxproxy.com',
'incognitomail.com',
'incognitomail.net',
'incognitomail.org',
'ineec.net',
'infocom.zp.ua',
'inoutmail.de',
'inoutmail.eu',
'inoutmail.info',
'inoutmail.net',
'insorg-mail.info',
'instant-mail.de',
'instantemailaddress.com',
'instantlyemail.com',
'ip6.li',
'ipoo.org',
'irish2me.com',
'iwi.net',
'jetable.com',
'jetable.fr.nf',
'jetable.net',
'jetable.org',
'jnxjn.com',
'jourrapide.com',
'junk1e.com',
'junkmail.com',
'junkmail.ga',
'junkmail.gq',
'jupimail.com',
'kasmail.com',
'kaspop.com',
'keepmymail.com',
'killmail.com',
'killmail.net',
'kimsdisk.com',
'kingsq.ga',
'kiois.com',
'kir.ch.tc',
'klassmaster.com',
'klassmaster.net',
'klzlk.com',
'kook.ml',
'koszmail.pl',
'kulturbetrieb.info',
'kurzepost.de',
'l33r.eu',
'labetteraverouge.at',
'lackmail.net',
'lags.us',
'landmail.co',
'lastmail.co',
'lawlita.com',
'lazyinbox.com',
'legitmail.club',
'letthemeatspam.com',
'lhsdv.com',
'libox.fr',
'lifebyfood.com',
'link2mail.net',
'litedrop.com',
'loadby.us',
'login-email.cf',
'login-email.ga',
'login-email.ml',
'login-email.tk',
'lol.ovpn.to',
'lolfreak.net',
'lookugly.com',
'lopl.co.cc',
'lortemail.dk',
'lovemeleaveme.com',
'lr78.com',
'lroid.com',
'lukop.dk',
'm21.cc',
'm4ilweb.info',
'maboard.com',
'mail-filter.com',
'mail-temporaire.fr',
'mail.by',
'mail.mezimages.net',
'mail.zp.ua',
'mail114.net',
'mail1a.de',
'mail21.cc',
'mail2rss.org',
'mail333.com',
'mail4trash.com',
'mailbidon.com',
'mailbiz.biz',
'mailblocks.com',
'mailblog.biz',
'mailbucket.org',
'mailcat.biz',
'mailcatch.com',
'mailde.de',
'mailde.info',
'maildrop.cc',
'maildrop.cf',
'maildrop.ga',
'maildrop.gq',
'maildrop.ml',
'maildu.de',
'maildx.com',
'maileater.com',
'mailed.ro',
'maileimer.de',
'mailexpire.com',
'mailfa.tk',
'mailforspam.com',
'mailfree.ga',
'mailfree.gq',
'mailfree.ml',
'mailfreeonline.com',
'mailfs.com',
'mailguard.me',
'mailhazard.com',
'mailhazard.us',
'mailhz.me',
'mailimate.com',
'mailin8r.com',
'mailinater.com',
'mailinator.com',
'mailinator.gq',
'mailinator.net',
'mailinator.org',
'mailinator.us',
'mailinator2.com',
'mailinator2.net',
'mailincubator.com',
'mailismagic.com',
'mailjunk.cf',
'mailjunk.ga',
'mailjunk.gq',
'mailjunk.ml',
'mailjunk.tk',
'mailmate.com',
'mailme.gq',
'mailme.ir',
'mailme.lv',
'mailme24.com',
'mailmetrash.com',
'mailmoat.com',
'mailms.com',
'mailnator.com',
'mailnesia.com',
'mailnull.com',
'mailorg.org',
'mailpick.biz',
'mailproxsy.com',
'mailquack.com',
'mailrock.biz',
'mailscrap.com',
'mailshell.com',
'mailsiphon.com',
'mailslapping.com',
'mailslite.com',
'mailspeed.ru',
'mailtemp.info',
'mailtome.de',
'mailtothis.com',
'mailtrash.net',
'mailtv.net',
'mailtv.tv',
'mailzilla.com',
'mailzilla.org',
'mailzilla.orgmbx.cc',
'makemetheking.com',
'mallinator.com',
'manifestgenerator.com',
'manybrain.com',
'mbx.cc',
'mciek.com',
'mega.zik.dj',
'meinspamschutz.de',
'meltmail.com',
'messagebeamer.de',
'mezimages.net',
'mfsa.ru',
'mierdamail.com',
'migmail.pl',
'migumail.com',
'mindless.com',
'ministry-of-silly-walks.de',
'mintemail.com',
'misterpinball.de',
'mjukglass.nu',
'moakt.com',
'mobi.web.id',
'mobileninja.co.uk',
'moburl.com',
'mohmal.com',
'moncourrier.fr.nf',
'monemail.fr.nf',
'monmail.fr.nf',
'monumentmail.com',
'msa.minsmail.com',
'mt2009.com',
'mt2014.com',
'mt2015.com',
'mx0.wwwnew.eu',
'my10minutemail.com',
'myalias.pw',
'mycard.net.ua',
'mycleaninbox.net',
'myemailboxy.com',
'mymail-in.net',
'mymailoasis.com',
'mynetstore.de',
'mypacks.net',
'mypartyclip.de',
'myphantomemail.com',
'mysamp.de',
'myspaceinc.com',
'myspaceinc.net',
'myspaceinc.org',
'myspacepimpedup.com',
'myspamless.com',
'mytemp.email',
'mytempemail.com',
'mytempmail.com',
'mytrashmail.com',
'nabuma.com',
'neomailbox.com',
'nepwk.com',
'nervmich.net',
'nervtmich.net',
'netmails.com',
'netmails.net',
'netzidiot.de',
'neverbox.com',
'nice-4u.com',
'nincsmail.com',
'nincsmail.hu',
'nmail.cf',
'nnh.com',
'no-spam.ws',
'noblepioneer.com',
'nobulk.com',
'noclickemail.com',
'nogmailspam.info',
'nomail.pw',
'nomail.xl.cx',
'nomail2me.com',
'nomorespamemails.com',
'nonspam.eu',
'nonspammer.de',
'noref.in',
'nospam.ze.tc',
'nospam4.us',
'nospamfor.us',
'nospammail.net',
'nospamthanks.info',
'notmailinator.com',
'notsharingmy.info',
'nowhere.org',
'nowmymail.com',
'nurfuerspam.de',
'nwldx.com',
'objectmail.com',
'obobbo.com',
'odaymail.com',
'odnorazovoe.ru',
'one-time.email',
'oneoffemail.com',
'oneoffmail.com',
'onewaymail.com',
'onlatedotcom.info',
'online.ms',
'oopi.org',
'opayq.com',
'opentrash.com',
'ordinaryamerican.net',
'otherinbox.com',
'ourklips.com',
'outlawspam.com',
'ovpn.to',
'owlpic.com',
'pancakemail.com',
'paplease.com',
'pepbot.com',
'pfui.ru',
'pimpedupmyspace.com',
'pjjkp.com',
'plexolan.de',
'poczta.onet.pl',
'politikerclub.de',
'poofy.org',
'pookmail.com',
'pop3.xyz',
'postalmail.biz',
'privacy.net',
'privatdemail.net',
'privy-mail.com',
'privymail.de',
'proxymail.eu',
'prtnx.com',
'prtz.eu',
'pubmail.io',
'punkass.com',
'putthisinyourspamdatabase.com',
'pwrby.com',
'q314.net',
'qisdo.com',
'qisoa.com',
'qoika.com',
'quickinbox.com',
'quickmail.nl',
'rainmail.biz',
'rcpt.at',
're-gister.com',
'reallymymail.com',
'realtyalerts.ca',
'recode.me',
'reconmail.com',
'recursor.net',
'recyclemail.dk',
'regbypass.com',
'regbypass.comsafe-mail.net',
'rejectmail.com',
'reliable-mail.com',
'remail.cf',
'remail.ga',
'renraku.in',
'rhyta.com',
'rklips.com',
'rmqkr.net',
'royal.net',
'rppkn.com',
'rtrtr.com',
's0ny.net',
'safe-mail.net',
'safersignup.de',
'safetymail.info',
'safetypost.de',
'sandelf.de',
'sayawaka-dea.info',
'saynotospams.com',
'scatmail.com',
'schafmail.de',
'schrott-email.de',
'secretemail.de',
'secure-mail.biz',
'secure-mail.cc',
'selfdestructingmail.com',
'selfdestructingmail.org',
'sendspamhere.com',
'senseless-entertainment.com',
'services391.com',
'sharedmailbox.org',
'sharklasers.com',
'shieldedmail.com',
'shieldemail.com',
'shiftmail.com',
'shitmail.me',
'shitmail.org',
'shitware.nl',
'shmeriously.com',
'shortmail.net',
'showslow.de',
'sibmail.com',
'sinnlos-mail.de',
'siteposter.net',
'skeefmail.com',
'slapsfromlastnight.com',
'slaskpost.se',
'slipry.net',
'slopsbox.com',
'slowslow.de',
'slushmail.com',
'smashmail.de',
'smellfear.com',
'smellrear.com',
'smoug.net',
'snakemail.com',
'sneakemail.com',
'sneakmail.de',
'snkmail.com',
'sofimail.com',
'sofort-mail.de',
'softpls.asia',
'sogetthis.com',
'soisz.com',
'solvemail.info',
'soodonims.com',
'spam.la',
'spam.su',
'spam4.me',
'spamail.de',
'spamarrest.com',
'spamavert.com',
'spambob.com',
'spambob.net',
'spambob.org',
'spambog.com',
'spambog.de',
'spambog.net',
'spambog.ru',
'spambooger.com',
'spambox.info',
'spambox.irishspringrealty.com',
'spambox.us',
'spambpg.com',
'spamcannon.com',
'spamcannon.net',
'spamcero.com',
'spamcon.org',
'spamcorptastic.com',
'spamcowboy.com',
'spamcowboy.net',
'spamcowboy.org',
'spamday.com',
'spamex.com',
'spamfighter.cf',
'spamfighter.ga',
'spamfighter.gq',
'spamfighter.ml',
'spamfighter.tk',
'spamfree.eu',
'spamfree24.com',
'spamfree24.de',
'spamfree24.eu',
'spamfree24.info',
'spamfree24.net',
'spamfree24.org',
'spamgoes.in',
'spamgourmet.com',
'spamgourmet.net',
'spamgourmet.org',
'spamherelots.com',
'spamhereplease.com',
'spamhole.com',
'spamify.com',
'spaminator.de',
'spamkill.info',
'spaml.com',
'spaml.de',
'spammotel.com',
'spamobox.com',
'spamoff.de',
'spamsalad.in',
'spamslicer.com',
'spamsphere.com',
'spamspot.com',
'spamstack.net',
'spamthis.co.uk',
'spamthisplease.com',
'spamtrail.com',
'spamtroll.net',
'speed.1s.fr',
'spikio.com',
'spoofmail.de',
'spybox.de',
'squizzy.de',
'ssoia.com',
'startkeys.com',
'stexsy.com',
'stinkefinger.net',
'stop-my-spam.cf',
'stop-my-spam.com',
'stop-my-spam.ga',
'stop-my-spam.ml',
'stop-my-spam.tk',
'streetwisemail.com',
'stuffmail.de',
'super-auswahl.de',
'supergreatmail.com',
'supermailer.jp',
'superrito.com',
'superstachel.de',
'suremail.info',
'sute.jp',
'svk.jp',
'sweetxxx.de',
'tafmail.com',
'tagyourself.com',
'talkinator.com',
'tapchicuoihoi.com',
'teewars.org',
'teleworm.com',
'teleworm.us',
'temp-mail.com',
'temp-mail.net',
'temp-mail.org',
'temp-mail.ru',
'temp15qm.com',
'tempail.com',
'tempalias.com',
'tempe-mail.com',
'tempemail.biz',
'tempemail.co.za',
'tempemail.com',
'tempemail.net',
'tempemail.org',
'tempinbox.co.uk',
'tempinbox.com',
'tempmail.de',
'tempmail.eu',
'tempmail.it',
'tempmail2.com',
'tempmaildemo.com',
'tempmailer.com',
'tempmailer.de',
'tempomail.fr',
'temporarily.de',
'temporarioemail.com.br',
'temporaryemail.net',
'temporaryemail.us',
'temporaryforwarding.com',
'temporaryinbox.com',
'temporarymailaddress.com',
'tempsky.com',
'tempthe.net',
'tempymail.com',
'test.com',
'thanksnospam.info',
'thankyou2010.com',
'thc.st',
'thecloudindex.com',
'thisisnotmyrealemail.com',
'thismail.net',
'thismail.ru',
'throam.com',
'throwam.com',
'throwawayemailaddress.com',
'throwawaymail.com',
'tilien.com',
'tittbit.in',
'tizi.com',
'tmail.ws',
'tmailinator.com',
'tmpeml.info',
'toiea.com',
'tokenmail.de',
'toomail.biz',
'topranklist.de',
'tormail.net',
'tormail.org',
'tradermail.info',
'trash-amil.com',
'trash-mail.at',
'trash-mail.cf',
'trash-mail.com',
'trash-mail.de',
'trash-mail.ga',
'trash-mail.gq',
'trash-mail.ml',
'trash-mail.tk',
'trash-me.com',
'trash2009.com',
'trash2010.com',
'trash2011.com',
'trashdevil.com',
'trashdevil.de',
'trashemail.de',
'trashmail.at',
'trashmail.com',
'trashmail.de',
'trashmail.me',
'trashmail.net',
'trashmail.org',
'trashmail.ws',
'trashmailer.com',
'trashymail.com',
'trashymail.net',
'trayna.com',
'trbvm.com',
'trialmail.de',
'trickmail.net',
'trillianpro.com',
'tryalert.com',
'turual.com',
'twinmail.de',
'twoweirdtricks.com',
'tyldd.com',
'ubismail.net',
'uggsrock.com',
'umail.net',
'unlimit.com',
'unmail.ru',
'upliftnow.com',
'uplipht.com',
'uroid.com',
'us.af',
'valemail.net',
'venompen.com',
'vermutlich.net',
'veryrealemail.com',
'vidchart.com',
'viditag.com',
'viewcastmedia.com',
'viewcastmedia.net',
'viewcastmedia.org',
'viralplays.com',
'vmail.me',
'voidbay.com',
'vomoto.com',
'vpn.st',
'vsimcard.com',
'vubby.com',
'w3internet.co.uk',
'walala.org',
'walkmail.net',
'watchever.biz',
'webemail.me',
'webm4il.info',
'webuser.in',
'wee.my',
'weg-werf-email.de',
'wegwerf-email-addressen.de',
'wegwerf-email.at',
'wegwerf-emails.de',
'wegwerfadresse.de',
'wegwerfemail.com',
'wegwerfemail.de',
'wegwerfmail.de',
'wegwerfmail.info',
'wegwerfmail.net',
'wegwerfmail.org',
'wem.com',
'wetrainbayarea.com',
'wetrainbayarea.org',
'wh4f.org',
'whatiaas.com',
'whatpaas.com',
'whatsaas.com',
'whopy.com',
'whyspam.me',
'wickmail.net',
'wilemail.com',
'willhackforfood.biz',
'willselfdestruct.com',
'winemaven.info',
'wmail.cf',
'writeme.com',
'wronghead.com',
'wuzup.net',
'wuzupmail.net',
'wwwnew.eu',
'wzukltd.com',
'xagloo.com',
'xemaps.com',
'xents.com',
'xmaily.com',
'xoxy.net',
'xww.ro',
'xyzfree.net',
'yapped.net',
'yep.it',
'yogamaven.com',
'yomail.info',
'yopmail.com',
'yopmail.fr',
'yopmail.gq',
'yopmail.net',
'yopmail.org',
'yoru-dea.com',
'you-spam.com',
'youmail.ga',
'yourdomain.com',
'ypmail.webarnak.fr.eu.org',
'yuurok.com',
'yyhmail.com',
'z1p.biz',
'za.com',
'zebins.com',
'zebins.eu',
'zehnminuten.de',
'zehnminutenmail.de',
'zetmail.com',
'zippymail.info',
'zoaxe.com',
'zoemail.com',
'zoemail.net',
'zoemail.org',
'zomg.info',
'zxcv.com',
'zxcvbnm.com',
'zzz.com',
]
# reCAPTCHA API
# NOTE: Using the recaptcha.net domain h/t https://github.com/google/recaptcha/issues/87#issuecomment-368252094
RECAPTCHA_SITE_KEY = None
RECAPTCHA_SECRET_KEY = None
RECAPTCHA_VERIFY_URL = 'https://recaptcha.net/recaptcha/api/siteverify'
# akismet spam check
AKISMET_APIKEY = None
SPAM_CHECK_ENABLED = False
SPAM_CHECK_PUBLIC_ONLY = True
SPAM_ACCOUNT_SUSPENSION_ENABLED = False
SPAM_ACCOUNT_SUSPENSION_THRESHOLD = timedelta(hours=24)
SPAM_FLAGGED_MAKE_NODE_PRIVATE = False
SPAM_FLAGGED_REMOVE_FROM_SEARCH = False
SHARE_API_TOKEN = None
# number of nodes that need to be affiliated with an institution before the institution logo is shown on the dashboard
INSTITUTION_DISPLAY_NODE_THRESHOLD = 5
# refresh campaign every 5 minutes
CAMPAIGN_REFRESH_THRESHOLD = 5 * 60 # 5 minutes in seconds
AWS_ACCESS_KEY_ID = None
AWS_SECRET_ACCESS_KEY = None
# sitemap default settings
SITEMAP_TO_S3 = False
SITEMAP_AWS_BUCKET = None
SITEMAP_URL_MAX = 25000
SITEMAP_INDEX_MAX = 50000
SITEMAP_STATIC_URLS = [
OrderedDict([('loc', ''), ('changefreq', 'yearly'), ('priority', '0.5')]),
OrderedDict([('loc', 'preprints'), ('changefreq', 'yearly'), ('priority', '0.5')]),
OrderedDict([('loc', 'prereg'), ('changefreq', 'yearly'), ('priority', '0.5')]),
OrderedDict([('loc', 'meetings'), ('changefreq', 'yearly'), ('priority', '0.5')]),
OrderedDict([('loc', 'registries'), ('changefreq', 'yearly'), ('priority', '0.5')]),
OrderedDict([('loc', 'reviews'), ('changefreq', 'yearly'), ('priority', '0.5')]),
OrderedDict([('loc', 'explore/activity'), ('changefreq', 'weekly'), ('priority', '0.5')]),
OrderedDict([('loc', 'support'), ('changefreq', 'yearly'), ('priority', '0.5')]),
OrderedDict([('loc', 'faq'), ('changefreq', 'yearly'), ('priority', '0.5')]),
]
SITEMAP_USER_CONFIG = OrderedDict([('loc', ''), ('changefreq', 'yearly'), ('priority', '0.5')])
SITEMAP_NODE_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'monthly'), ('priority', '0.5')])
SITEMAP_REGISTRATION_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'never'), ('priority', '0.5')])
SITEMAP_REVIEWS_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'never'), ('priority', '0.5')])
SITEMAP_PREPRINT_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'yearly'), ('priority', '0.5')])
SITEMAP_PREPRINT_FILE_CONFIG = OrderedDict([('loc', ''), ('lastmod', ''), ('changefreq', 'yearly'), ('priority', '0.5')])
# For preventing indexing of QA nodes by Elastic and SHARE
DO_NOT_INDEX_LIST = {
'tags': ['qatest', 'qa test'],
'titles': ['Bulk stress 201', 'Bulk stress 202', 'OSF API Registration test'],
}
CUSTOM_CITATIONS = {
'bluebook-law-review': 'bluebook',
'bluebook2': 'bluebook',
'bluebook-inline': 'bluebook'
}
# Email templates logo
OSF_LOGO = 'osf_logo'
OSF_PREPRINTS_LOGO = 'osf_preprints'
OSF_MEETINGS_LOGO = 'osf_meetings'
OSF_PREREG_LOGO = 'osf_prereg'
OSF_REGISTRIES_LOGO = 'osf_registries'
OSF_LOGO_LIST = [OSF_LOGO, OSF_PREPRINTS_LOGO, OSF_MEETINGS_LOGO, OSF_PREREG_LOGO, OSF_REGISTRIES_LOGO]
INSTITUTIONAL_LANDING_FLAG = 'institutions_nav_bar'
FOOTER_LINKS = {
'terms': 'https://github.com/CenterForOpenScience/centerforopenscience.org/blob/master/TERMS_OF_USE.md',
'privacyPolicy': 'https://github.com/CenterForOpenScience/centerforopenscience.org/blob/master/PRIVACY_POLICY.md',
'cookies': 'https://github.com/CenterForOpenScience/centerforopenscience.org/blob/master/PRIVACY_POLICY.md#f-cookies',
'cos': 'https://cos.io',
'statusPage': 'https://status.cos.io/',
'apiDocs': 'https://developer.osf.io/',
'topGuidelines': 'http://cos.io/top/',
'rpp': 'https://osf.io/ezcuj/wiki/home/',
'rpcb': 'https://osf.io/e81xl/wiki/home/',
'twitter': 'http://twitter.com/OSFramework',
'facebook': 'https://www.facebook.com/CenterForOpenScience/',
'googleGroup': 'https://groups.google.com/forum/#!forum/openscienceframework',
'github': 'https://www.github.com/centerforopenscience',
}
| {
"content_hash": "cccceadd3dc5f1ce4015db31bc67d9bd",
"timestamp": "",
"source": "github",
"line_count": 1935,
"max_line_length": 161,
"avg_line_length": 26.422739018087857,
"alnum_prop": 0.6023509622907214,
"repo_name": "sloria/osf.io",
"id": "b77a6a59b642019793d88bed4ffa7ed29c53e516",
"size": "51152",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "website/settings/defaults.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "109070"
},
{
"name": "Dockerfile",
"bytes": "8455"
},
{
"name": "HTML",
"bytes": "263083"
},
{
"name": "JavaScript",
"bytes": "1856674"
},
{
"name": "Mako",
"bytes": "690812"
},
{
"name": "Python",
"bytes": "8397175"
},
{
"name": "VCL",
"bytes": "13885"
}
],
"symlink_target": ""
} |
from lib import server
server.instance = server.TeaserServer()
server.instance.main()
| {
"content_hash": "1d42f199756f7fe09d51bd9c0a130d24",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 39,
"avg_line_length": 21.75,
"alnum_prop": 0.7931034482758621,
"repo_name": "Cibiv/Teaser",
"id": "a51156f29e10cd36c2cbd97a8db7a96e4f21f473",
"size": "109",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "159133"
},
{
"name": "CoffeeScript",
"bytes": "3311"
},
{
"name": "HTML",
"bytes": "8681658"
},
{
"name": "JavaScript",
"bytes": "1581386"
},
{
"name": "PHP",
"bytes": "27909"
},
{
"name": "Python",
"bytes": "278658"
},
{
"name": "Shell",
"bytes": "104"
}
],
"symlink_target": ""
} |
from twisted.internet import reactor
from scaletix.factory import ScaleFactory
from tests.test_application import TestFactory
__author__ = 'nacim'
if __name__ == '__main__':
mp_factory = ScaleFactory(TestFactory(), core=2)
# launch the new multiprocessing factory
port = reactor.listenTCP(8118, mp_factory)
reactor.run()
| {
"content_hash": "ae92513d778cf9f280a22a925be6a59f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 52,
"avg_line_length": 30.818181818181817,
"alnum_prop": 0.7227138643067846,
"repo_name": "Grindizer/scaletix",
"id": "b0fa782a1d4841fd924643fd70d2165f7e9e9164",
"size": "339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/run_test_server.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "24997"
},
{
"name": "Shell",
"bytes": "6466"
}
],
"symlink_target": ""
} |
__author__ = "Christopher and Cody Reichert"
__copyright__ = "Copyright 2015, SimplyRETS Inc. <[email protected]>"
__credits__ = ["Christopher Reichert", "Cody Reichert"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Christopher Reichert"
__email__ = "[email protected]"
__status__ = "Production"
class Property:
""" NOTE: This class is auto generated by the SimplyRets code
generator program. Do not edit the class manually.
"""
def __init__(self):
self.simplyRetsTypes = {
'roof': 'str',
'style': 'str',
'area': 'long',
'bathsFull': 'long',
'bathsHalf': 'long',
'stories': 'float',
'fireplaces': 'long',
'heating': 'str',
'bedrooms': 'long',
'interiorFeatures': 'str',
'lotSize': 'str',
'exteriorFeatures': 'str',
'subdivision': 'str',
'type': 'str',
'yearBuilt': 'long'
}
# Property roof description
self.roof = None # str
# Property style description or short string
self.style = None # str
# Square footage of the building associated with a listing
self.area = None # long
# Number of full bathrooms
self.bathsFull = None # long
# Number of half bathrooms
self.bathsHalf = None # long
# Number of stories or levels. Represented as a `double' to account for half stories.
self.stories = None # float
# Number of fireplaces
self.fireplaces = None # long
# Heating description or short string
self.heating = None # str
# Number of bedrooms
self.bedrooms = None # long
# The properties interior features
self.interiorFeatures = None # str
# Square footage of the entire property lot
self.lotSize = None # str
self.exteriorFeatures = None # str
# The subdivision or community name
self.subdivision = None # str
# Property type (Residential, Multi-Family, Rental)
self.type = None # str
# Year the property was built
self.yearBuilt = None # long
| {
"content_hash": "4686246cc16a1514f79485dadcca91ca",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 93,
"avg_line_length": 35.3968253968254,
"alnum_prop": 0.5668161434977579,
"repo_name": "SimplyRETS/simplyrets-python-sdk",
"id": "27646db2dff6c77f05bb12f4e2338b1b9e25a7ce",
"size": "2253",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simplyrets/models/Property.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30291"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.conf.urls import include, url
import spirit.topic.views
import spirit.admin.urls
import spirit.user.urls
import spirit.search.urls
import spirit.category.urls
import spirit.topic.urls
import spirit.comment.urls
patterns = [
url(r'^$', spirit.topic.views.index_active, name='index'),
url(r'^st/admin/', include(spirit.admin.urls, namespace='admin')),
url(r'^user/', include(spirit.user.urls, namespace='user')),
url(r'^search/', include(spirit.search.urls, namespace='search')),
url(r'^category/', include(spirit.category.urls, namespace='category')),
url(r'^topic/', include(spirit.topic.urls, namespace='topic')),
url(r'^comment/', include(spirit.comment.urls, namespace='comment')),
]
urlpatterns = [
url(r'^', include(patterns, namespace='spirit', app_name='spirit')),
]
| {
"content_hash": "b4a2dd26e57a8da2dfd999a1f49a1c66",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 76,
"avg_line_length": 32,
"alnum_prop": 0.7071759259259259,
"repo_name": "battlecat/Spirit",
"id": "a47ea209c5a36ceaf4eaea335d62a1661b640e06",
"size": "889",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "spirit/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "255435"
},
{
"name": "CoffeeScript",
"bytes": "128350"
},
{
"name": "HTML",
"bytes": "186692"
},
{
"name": "JavaScript",
"bytes": "28458"
},
{
"name": "Makefile",
"bytes": "187"
},
{
"name": "Nginx",
"bytes": "266"
},
{
"name": "Python",
"bytes": "708660"
}
],
"symlink_target": ""
} |
import rospy, tf, math
from lab3Helpers import *
from aStar import *
from std_msgs.msg import Header
from nav_msgs.msg import OccupancyGrid #format for reading the map. Data is stored by row.
from nav_msgs.msg import GridCells #format for publishing to rviz display
from geometry_msgs.msg import PoseWithCovarianceStamped #format for reading the start and goal points
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Point
#this is strictly path finding code, as this node will be used as such in later labs.
#This node automatically subscribes to the map topic and provides a service called getPath. getPath takes a
#starting point and a goal point and then publishes the frontier and the optimal path.
def readMap(msg):
global mapInfo #map information such as width and height, and cell sizes.
global mapData #the cells of the map, with 100 = impassable and 0 = empty, -1 = unexplored.
mapInfo = msg.info
mapData = msg.data
if(len(mapData) != mapInfo.width * mapInfo.height):
print "map size does not match data length."
def setStart(msg):
global start, pub_start, mapInfo, mapData
#define a point object that will represent our start point on the grid
point = msg.pose.pose.position #this needs to be adjusted depending on how the gridcells object is using the point object.
#set the starting point for the search to the gridpoint defined by the user mouse click.
start = globalToGrid(point, mapInfo)
#convert the point to a grid position
point.x = round(point.x/mapInfo.resolution) * mapInfo.resolution
point.y = round(point.y/mapInfo.resolution) * mapInfo.resolution
#define a new gridcells object
gridCells = GridCells()
#start constructing our message
gridCells.header = msg.header
gridCells.cell_width = mapInfo.resolution
gridCells.cell_height = mapInfo.resolution
cells = [point]
gridCells.cells = cells
pub_start.publish(gridCells)
print "startpoint set"
def setGoal(msg):
global goal, pub_goal
point = msg.pose.position
#set the goal point for the search to the gridpoint defined by the user mouse click.
goal = globalToGrid(point, mapInfo)
#point.x = round(point.x/mapInfo.resolution) * mapInfo.resolution
#point.y = round(point.y/mapInfo.resolution) * mapInfo.resolution
point = gridToGlobal(goal, mapInfo)
#define a new gridcells object
gridCells = GridCells()
#start constructing our message
gridCells.header = msg.header
gridCells.cell_width = mapInfo.resolution
gridCells.cell_height = mapInfo.resolution
cells = [point]
gridCells.cells = cells
pub_goal.publish(gridCells)
print "goal set"
# This is the program's main function
if __name__ == '__main__':
# Change this node name to include your username
rospy.init_node('Lab_3_node')
global mapInfo, mapData
global frontier, expanded, path, start, goal
global pub_start, pub_goal, pub_frontier, pub_path, pub_expanded, pub_waypoints
goal = (-1,-1)
start = (-2,-2)
#Set up the subscriptions to all of the necessary data
map_sum = rospy.Subscriber('map',OccupancyGrid, readMap, queue_size=1)
get_start = rospy.Subscriber('initialpose', PoseWithCovarianceStamped, setStart, queue_size=1)
get_goal = rospy.Subscriber('move_base_simple/goal', PoseStamped, setGoal, queue_size =1)
#set up all of the publications (start, goal, expanded, frontier, path)
pub_start = rospy.Publisher('/startpoint', GridCells)
pub_goal = rospy.Publisher('/goalpoint', GridCells)
pub_frontier = rospy.Publisher('/frontier',GridCells)
pub_expanded = rospy.Publisher('/expanded', GridCells)
pub_path = rospy.Publisher('/path', GridCells)
pub_waypoints = rospy.Publisher('/waypoints', GridCells)
# Use this command to make the program wait for some seconds
rospy.sleep(rospy.Duration(1, 0))
print "Starting pathfinder"
#print out our debug map, starting by making a list of all of the wall locations
#pubMap(pub_path, mapInfo, mapData)
lastGoal = (-1,-1)
lastStart = (-1,-1)
newMap = obstacleExpansion(0, mapInfo, mapData,pub_waypoints)
pubMap(pub_waypoints, mapInfo, newMap)
r = rospy.Rate(10)
while not rospy.is_shutdown():
if (goal != lastGoal) or (start != lastStart):
lastStart = start
lastGoal = goal
paths = aStar(start, goal, mapInfo, mapData, pub_frontier, pub_expanded)
publishGridList(paths[0], mapInfo, pub_path)
#publishGridList(paths[1], mapInfo, pub_waypoints)
r.sleep()
print "exiting pathfinder"
#go back up the stack and print all of the points where there is a 100 onto the map. See if the shape is rotated.
#we will need to make all of those functions anyway. | {
"content_hash": "e80c51554808c45606972295f0e04fcb",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 127,
"avg_line_length": 35.925925925925924,
"alnum_prop": 0.7096907216494845,
"repo_name": "cwgeary/rbe3002-d14-teamZTC",
"id": "e7f1379c3967c40de638c984685ba3a62498afd9",
"size": "4874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lab3/src/lab3.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "97625"
}
],
"symlink_target": ""
} |
import logging
from mhc_common import normalize_hla_allele_name
def read_hla_file(path, permissive_parsing=True):
"""
Read in HLA alleles and normalize them, returning a list of HLA allele
names.
"""
assert path.endswith(".hla"), \
"Expected HLA file %s to end with suffix .hla" % path
logging.info("Reading HLA file %s", path)
alleles = []
with open(path, 'r') as f:
contents = f.read()
for line in contents.split("\n"):
for raw_allele in line.split(","):
if permissive_parsing:
# get rid of surrounding whitespace
raw_allele = raw_allele.strip()
# sometimes we get extra columns with scores,
# ignore those
raw_allele = raw_allele.split(" ")[0]
raw_allele = raw_allele.split("\t")[0]
raw_allele = raw_allele.split("'")[0]
if len(raw_allele) > 0:
alleles.append(
normalize_hla_allele_name(
raw_allele))
return alleles
| {
"content_hash": "f44dd29cdd5e9c29ef39a29519e55ac5",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 74,
"avg_line_length": 36.70967741935484,
"alnum_prop": 0.5175746924428822,
"repo_name": "hammerlab/immuno",
"id": "a93a6e6232bc41f343b703019016e4ecf830f6c6",
"size": "1138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "immuno/hla_file.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1564"
},
{
"name": "HTML",
"bytes": "15077"
},
{
"name": "JavaScript",
"bytes": "48875"
},
{
"name": "Python",
"bytes": "226969"
},
{
"name": "Shell",
"bytes": "576"
}
],
"symlink_target": ""
} |
import pyphen
import sys
import re
hyphenator = pyphen.Pyphen(filename='patterns/hyph_la_classical.dic',left=2,right=2)
seenSegs = {}
line = 0
def comparenoncompletehyphens(original, obtained):
i = 0
for c in obtained:
if c == '-':
if original[i] == '-':
i = i + 1
else:
if original[i] == '-':
return False
else:
i = i + 1
return True
def printError(wrong, correct, base):
print('%s %% %s (not %s)' % (base, correct, wrong))
def dotest(filename, allhyphens=True):
global hyphenator, seenSegs
print('differences in '+filename+':')
linenum = 0
with open(filename, 'r') as f:
for line in f:
linenum += 1
line = line.strip()
line = re.sub('\s*\%.*', '', line)
base = line.replace('-', '')
if base in seenSegs and line != seenSegs[base][1]:
print('ERROR: line %d: test \'%s\' differs from test \'%s\' line %d in %s' % (linenum, line, seenSegs[base][1], seenSegs[base][0], seenSegs[base][2]))
else:
seenSegs[base] = (linenum, line, filename)
new = hyphenator.inserted(base)
if allhyphens:
if not line == new:
printError(new, line, base)
else:
if not comparenoncompletehyphens(line, new):
printError(new, line, base)
dotest('tests/nonliturgical/wordlist-classical-italian.txt')
print()
dotest('tests/nonliturgical/wordlist-classical-only.txt')
| {
"content_hash": "58cfeff69d965f17c2345587ee3c6e98",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 154,
"avg_line_length": 27.20408163265306,
"alnum_prop": 0.641410352588147,
"repo_name": "gregorio-project/hyphen-la",
"id": "6e88e32229733800c9a475d226bff4ab8a8f8a7b",
"size": "1357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/test-classical.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "204638"
},
{
"name": "Makefile",
"bytes": "1510"
},
{
"name": "Python",
"bytes": "14917"
},
{
"name": "Shell",
"bytes": "7507"
},
{
"name": "TeX",
"bytes": "323313"
}
],
"symlink_target": ""
} |
from oslo_config import cfg
from senlin.common.i18n import _
HEALTH_MANAGER_GROUP = cfg.OptGroup('health_manager')
HEALTH_MANAGER_OPTS = [
cfg.StrOpt('nova_control_exchange', default='nova',
help=_("Exchange name for nova notifications.")),
cfg.StrOpt('nova_notification_topic', default='versioned_notifications',
help=_("Topic name for nova notifications.")),
cfg.StrOpt('heat_control_exchange', default='heat',
help=_("Exchange name for heat notifications.")),
cfg.StrOpt('heat_notification_topic', default='notifications',
help=_("Topic name for heat notifications.")),
cfg.MultiStrOpt("enabled_endpoints", default=['nova', 'heat'],
help=_("Notification endpoints to enable.")),
cfg.IntOpt('workers',
default=1,
help=_('Number of senlin-health-manager processes.')),
cfg.IntOpt('threads',
default=1000,
deprecated_name='health_manager_thread_pool_size',
deprecated_group="DEFAULT",
help=_('Number of senlin-health-manager threads.')),
]
def register_opts(conf):
conf.register_group(HEALTH_MANAGER_GROUP)
conf.register_opts(HEALTH_MANAGER_OPTS, group=HEALTH_MANAGER_GROUP)
def list_opts():
return {
HEALTH_MANAGER_GROUP: HEALTH_MANAGER_OPTS
}
| {
"content_hash": "d8a0ecac6f8c9b86595236caa863e81a",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 76,
"avg_line_length": 38.138888888888886,
"alnum_prop": 0.6300072833211945,
"repo_name": "stackforge/senlin",
"id": "3c671e0a236b861b23a459523fc724d9af64dc0f",
"size": "1918",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "senlin/conf/health_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2145946"
},
{
"name": "Shell",
"bytes": "18730"
}
],
"symlink_target": ""
} |
"""Mac-only module to find the home file of a resource."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
from fontTools.misc import sstruct
import array
import calldll
import macfs, Res
def HomeResFile(res):
"""Return a path to the file in which resource 'res' lives."""
return GetFileLocation(res.HomeResFile())
def GetFileLocation(refNum):
"""Return a path to the open file identified with refNum."""
pb = ParamBlock(refNum)
return pb.getPath()
#
# Internal cruft, adapted from MoreFiles
#
_InterfaceLib = calldll.getlibrary("InterfaceLib")
GetVRefNum = calldll.newcall(_InterfaceLib.GetVRefNum, "None", "InShort", "OutShort")
_getInfo = calldll.newcall(_InterfaceLib.PBGetFCBInfoSync, "Short", "InLong")
_FCBPBFormat = """
qLink: l
qType: h
ioTrap: h
ioCmdAddr: l
ioCompletion: l
ioResult: h
ioNamePtr: l
ioVRefNum: h
ioRefNum: h
filler: h
ioFCBIndx: h
filler1: h
ioFCBFINm: l
ioFCBFlags: h
ioFCBStBlk: h
ioFCBEOF: l
ioFCBPLen: l
ioFCBCrPs: l
ioFCBVRefNum: h
ioFCBClpSiz: l
ioFCBParID: l
"""
class ParamBlock(object):
"""Wrapper for the very low level FCBPB record."""
def __init__(self, refNum):
self.__fileName = array.array("c", "\0" * 64)
sstruct.unpack(_FCBPBFormat,
"\0" * sstruct.calcsize(_FCBPBFormat), self)
self.ioNamePtr = self.__fileName.buffer_info()[0]
self.ioRefNum = refNum
self.ioVRefNum = GetVRefNum(refNum)
self.__haveInfo = 0
def getInfo(self):
if self.__haveInfo:
return
data = sstruct.pack(_FCBPBFormat, self)
buf = array.array("c", data)
ptr = buf.buffer_info()[0]
err = _getInfo(ptr)
if err:
raise Res.Error("can't get file info", err)
sstruct.unpack(_FCBPBFormat, buf.tostring(), self)
self.__haveInfo = 1
def getFileName(self):
self.getInfo()
data = self.__fileName.tostring()
return data[1:byteord(data[0])+1]
def getFSSpec(self):
self.getInfo()
vRefNum = self.ioVRefNum
parID = self.ioFCBParID
return macfs.FSSpec((vRefNum, parID, self.getFileName()))
def getPath(self):
return self.getFSSpec().as_pathname()
if __name__ == "__main__":
fond = Res.GetNamedResource("FOND", "Helvetica")
print(HomeResFile(fond))
| {
"content_hash": "44e5b76e0d32fd0594a727896d24fb33",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 85,
"avg_line_length": 23.729166666666668,
"alnum_prop": 0.6769095697980685,
"repo_name": "googlei18n/TachyFont",
"id": "a2d1c8c0c928f1406896d92e3f6e4126d4a97bd1",
"size": "2278",
"binary": false,
"copies": "11",
"ref": "refs/heads/main",
"path": "run_time/src/gae_server/third_party/old-fonttools-master/Lib/fontTools/misc/homeResFile.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "303868"
},
{
"name": "CSS",
"bytes": "313471"
},
{
"name": "HTML",
"bytes": "100691"
},
{
"name": "Java",
"bytes": "24368"
},
{
"name": "JavaScript",
"bytes": "1163591"
},
{
"name": "Python",
"bytes": "169129"
},
{
"name": "Shell",
"bytes": "11939"
}
],
"symlink_target": ""
} |
"""Debugger Wrapper Session Consisting of a Local Curses-based CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import shutil
import sys
import tempfile
# Google-internal import(s).
from tensorflow.python.debug.cli import analyzer_cli
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import stepper_cli
from tensorflow.python.debug.cli import ui_factory
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.wrappers import framework
_DUMP_ROOT_PREFIX = "tfdbg_"
class LocalCLIDebugWrapperSession(framework.BaseDebugWrapperSession):
"""Concrete subclass of BaseDebugWrapperSession implementing a local CLI.
This class has all the methods that a `session.Session` object has, in order
to support debugging with minimal code changes. Invoking its `run()` method
will launch the command-line interface (CLI) of tfdbg.
"""
def __init__(self, sess, dump_root=None, log_usage=True, ui_type="curses"):
"""Constructor of LocalCLIDebugWrapperSession.
Args:
sess: The TensorFlow `Session` object being wrapped.
dump_root: (`str`) optional path to the dump root directory. Must be a
directory that does not exist or an empty directory. If the directory
does not exist, it will be created by the debugger core during debug
`run()` calls and removed afterwards. If `None`, the debug dumps will
be at tfdbg_<random_string> under the system temp directory.
log_usage: (`bool`) whether the usage of this class is to be logged.
ui_type: (`str`) requested UI type. Currently supported:
(curses | readline)
Raises:
ValueError: If dump_root is an existing and non-empty directory or if
dump_root is a file.
"""
if log_usage:
pass # No logging for open-source.
framework.BaseDebugWrapperSession.__init__(self, sess)
if not dump_root:
self._dump_root = tempfile.mktemp(prefix=_DUMP_ROOT_PREFIX)
else:
if os.path.isfile(dump_root):
raise ValueError("dump_root path points to a file: %s" % dump_root)
elif os.path.isdir(dump_root) and os.listdir(dump_root):
raise ValueError("dump_root path points to a non-empty directory: %s" %
dump_root)
self._dump_root = dump_root
self._initialize_argparsers()
# Registered tensor filters.
self._tensor_filters = {}
# Below are the state variables of this wrapper object.
# _active_tensor_filter: what (if any) tensor filter is in effect. If such
# a filter is in effect, this object will call run() method of the
# underlying TensorFlow Session object until the filter passes. This is
# activated by the "-f" flag of the "run" command.
# _run_through_times: keeps track of how many times the wrapper needs to
# run through without stopping at the run-end CLI. It is activated by the
# "-t" option of the "run" command.
# _skip_debug: keeps track of whether the current run should be executed
# without debugging. It is activated by the "-n" option of the "run"
# command.
#
# _run_start_response: keeps track what OnRunStartResponse the wrapper
# should return at the next run-start callback. If this information is
# unavailable (i.e., is None), the run-start CLI will be launched to ask
# the user. This is the case, e.g., right before the first run starts.
self._active_tensor_filter = None
self._run_through_times = 1
self._skip_debug = False
self._run_start_response = None
self._is_run_start = True
self._ui_type = ui_type
def _initialize_argparsers(self):
self._argparsers = {}
ap = argparse.ArgumentParser(
description="Run through, with or without debug tensor watching.",
usage=argparse.SUPPRESS)
ap.add_argument(
"-t",
"--times",
dest="times",
type=int,
default=1,
help="How many Session.run() calls to proceed with.")
ap.add_argument(
"-n",
"--no_debug",
dest="no_debug",
action="store_true",
help="Run through without debug tensor watching.")
ap.add_argument(
"-f",
"--till_filter_pass",
dest="till_filter_pass",
type=str,
default="",
help="Run until a tensor in the graph passes the specified filter.")
self._argparsers["run"] = ap
ap = argparse.ArgumentParser(
description="Invoke stepper (cont, step, breakpoint, etc.)",
usage=argparse.SUPPRESS)
self._argparsers["invoke_stepper"] = ap
ap = argparse.ArgumentParser(
description="Display information about this Session.run() call.",
usage=argparse.SUPPRESS)
self._argparsers["run_info"] = ap
def add_tensor_filter(self, filter_name, tensor_filter):
"""Add a tensor filter.
Args:
filter_name: (`str`) name of the filter.
tensor_filter: (`callable`) the filter callable. See the doc string of
`DebugDumpDir.find()` for more details about its signature.
"""
self._tensor_filters[filter_name] = tensor_filter
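  # Illustrative filter (assumes numpy is imported as np elsewhere): a filter
  # is a callable with the (datum, tensor) signature that DebugDumpDir.find()
  # expects, returning True for dumped tensors that should stop a "run -f"
  # command.
  #
  #   def has_negative(datum, tensor):
  #     return np.any(tensor < 0)
  #
  #   wrapped_sess.add_tensor_filter("has_negative", has_negative)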
def on_session_init(self, request):
"""Overrides on-session-init callback.
Args:
request: An instance of `OnSessionInitRequest`.
Returns:
An instance of `OnSessionInitResponse`.
"""
return framework.OnSessionInitResponse(
framework.OnSessionInitAction.PROCEED)
def on_run_start(self, request):
"""Overrides on-run-start callback.
Invoke the CLI to let user choose what action to take:
`run` / `invoke_stepper`.
Args:
      request: An instance of `OnRunStartRequest`.
    Returns:
      An instance of `OnRunStartResponse`.
Raises:
RuntimeError: If user chooses to prematurely exit the debugger.
"""
self._is_run_start = True
self._update_run_calls_state(request.run_call_count, request.fetches,
request.feed_dict)
if self._active_tensor_filter:
# If we are running till a filter passes, we just need to keep running
# with the DEBUG_RUN option.
return framework.OnRunStartResponse(framework.OnRunStartAction.DEBUG_RUN,
self._get_run_debug_urls())
if self._run_call_count > 1 and not self._skip_debug:
if self._run_through_times > 0:
# Just run through without debugging.
return framework.OnRunStartResponse(
framework.OnRunStartAction.NON_DEBUG_RUN, [])
elif self._run_through_times == 0:
# It is the run at which the run-end CLI will be launched: activate
# debugging.
return framework.OnRunStartResponse(
framework.OnRunStartAction.DEBUG_RUN,
self._get_run_debug_urls())
if self._run_start_response is None:
self._prep_cli_for_run_start()
self._run_start_response = self._launch_cli()
if self._run_through_times > 1:
self._run_through_times -= 1
if self._run_start_response == debugger_cli_common.EXPLICIT_USER_EXIT:
# Explicit user "exit" command leads to sys.exit(1).
print(
"Note: user exited from debugger CLI: Calling sys.exit(1).",
file=sys.stderr)
sys.exit(1)
return self._run_start_response
def _prep_cli_for_run_start(self):
"""Prepare (but not launch) the CLI for run-start."""
self._run_cli = ui_factory.get_ui(self._ui_type)
help_intro = debugger_cli_common.RichTextLines([])
if self._run_call_count == 1:
# Show logo at the onset of the first run.
help_intro.extend(cli_shared.get_tfdbg_logo())
help_intro.extend(debugger_cli_common.RichTextLines("Upcoming run:"))
help_intro.extend(self._run_info)
self._run_cli.set_help_intro(help_intro)
# Create initial screen output detailing the run.
self._title = "run-start: " + self._run_description
self._init_command = "run_info"
self._title_color = "blue_on_white"
def on_run_end(self, request):
"""Overrides on-run-end callback.
Actions taken:
1) Load the debug dump.
2) Bring up the Analyzer CLI.
Args:
      request: An instance of OnRunEndRequest.
    Returns:
      An instance of OnRunEndResponse.
"""
self._is_run_start = False
if request.performed_action == framework.OnRunStartAction.DEBUG_RUN:
partition_graphs = None
if request.run_metadata and request.run_metadata.partition_graphs:
partition_graphs = request.run_metadata.partition_graphs
elif request.client_graph_def:
partition_graphs = [request.client_graph_def]
if request.tf_error and not os.path.isdir(self._dump_root):
# It is possible that the dump root may not exist due to errors that
# have occurred prior to graph execution (e.g., invalid device
# assignments), in which case we will just raise the exception as the
# unwrapped Session does.
raise request.tf_error
debug_dump = debug_data.DebugDumpDir(
self._dump_root, partition_graphs=partition_graphs)
debug_dump.set_python_graph(self._sess.graph)
passed_filter = None
if self._active_tensor_filter:
if not debug_dump.find(
self._tensor_filters[self._active_tensor_filter], first_n=1):
# No dumped tensor passes the filter in this run. Clean up the dump
# directory and move on.
self._remove_dump_root()
return framework.OnRunEndResponse()
else:
# Some dumped tensor(s) from this run passed the filter.
passed_filter = self._active_tensor_filter
self._active_tensor_filter = None
self._prep_cli_for_run_end(debug_dump, request.tf_error, passed_filter)
self._run_start_response = self._launch_cli()
# Clean up the dump generated by this run.
self._remove_dump_root()
else:
# No debug information to show following a non-debug run() call.
self._run_start_response = None
# Return placeholder response that currently holds no additional
# information.
return framework.OnRunEndResponse()
def _remove_dump_root(self):
if os.path.isdir(self._dump_root):
shutil.rmtree(self._dump_root)
def _prep_cli_for_run_end(self, debug_dump, tf_error, passed_filter):
"""Prepare (but not launch) CLI for run-end, with debug dump from the run.
Args:
debug_dump: (debug_data.DebugDumpDir) The debug dump directory from this
run.
tf_error: (None or OpError) OpError that happened during the run() call
(if any).
passed_filter: (None or str) Name of the tensor filter that just passed
and caused the preparation of this run-end CLI (if any).
"""
if tf_error:
help_intro = cli_shared.get_error_intro(tf_error)
self._init_command = "help"
self._title_color = "red_on_white"
else:
help_intro = None
self._init_command = "lt"
self._title_color = "black_on_white"
if passed_filter is not None:
# Some dumped tensor(s) from this run passed the filter.
self._init_command = "lt -f %s" % passed_filter
self._title_color = "red_on_white"
self._run_cli = analyzer_cli.create_analyzer_ui(
debug_dump, self._tensor_filters, ui_type=self._ui_type,
on_ui_exit=self._remove_dump_root)
# Get names of all dumped tensors.
dumped_tensor_names = []
for datum in debug_dump.dumped_tensor_data:
dumped_tensor_names.append("%s:%d" %
(datum.node_name, datum.output_slot))
# Tab completions for command "print_tensors".
self._run_cli.register_tab_comp_context(["print_tensor", "pt"],
dumped_tensor_names)
# Tab completion for commands "node_info", "list_inputs" and
# "list_outputs". The list comprehension is used below because nodes()
# output can be unicodes and they need to be converted to strs.
self._run_cli.register_tab_comp_context(
["node_info", "ni", "list_inputs", "li", "list_outputs", "lo"],
[str(node_name) for node_name in debug_dump.nodes()])
# TODO(cais): Reduce API surface area for aliases vis-a-vis tab
# completion contexts and registered command handlers.
self._title = "run-end: " + self._run_description
if help_intro:
self._run_cli.set_help_intro(help_intro)
def _launch_cli(self):
"""Launch the interactive command-line interface.
Returns:
The OnRunStartResponse specified by the user using the "run" command.
"""
self._register_this_run_info(self._run_cli)
response = self._run_cli.run_ui(
init_command=self._init_command,
title=self._title,
title_color=self._title_color)
return response
def _run_info_handler(self, args, screen_info=None):
output = debugger_cli_common.RichTextLines([])
if self._run_call_count == 1:
output.extend(cli_shared.get_tfdbg_logo())
output.extend(self._run_info)
if (not self._is_run_start and
debugger_cli_common.MAIN_MENU_KEY in output.annotations):
menu = output.annotations[debugger_cli_common.MAIN_MENU_KEY]
if "list_tensors" not in menu.captions():
menu.insert(
0, debugger_cli_common.MenuItem("list_tensors", "list_tensors"))
return output
def _run_handler(self, args, screen_info=None):
"""Command handler for "run" command during on-run-start."""
_ = screen_info # Currently unused.
parsed = self._argparsers["run"].parse_args(args)
if parsed.till_filter_pass:
# For the run-till-bad-numerical-value-appears mode, use the DEBUG_RUN
# option to access the intermediate tensors, and set the corresponding
# state flag of the class itself to True.
if parsed.till_filter_pass in self._tensor_filters:
action = framework.OnRunStartAction.DEBUG_RUN
self._active_tensor_filter = parsed.till_filter_pass
else:
# Handle invalid filter name.
return debugger_cli_common.RichTextLines(
["ERROR: tensor filter \"%s\" does not exist." %
parsed.till_filter_pass])
self._skip_debug = parsed.no_debug
self._run_through_times = parsed.times
if parsed.times > 1 or parsed.no_debug:
# If requested -t times > 1, the very next run will be a non-debug run.
action = framework.OnRunStartAction.NON_DEBUG_RUN
debug_urls = []
else:
action = framework.OnRunStartAction.DEBUG_RUN
debug_urls = self._get_run_debug_urls()
# Raise CommandLineExit exception to cause the CLI to exit.
raise debugger_cli_common.CommandLineExit(
exit_token=framework.OnRunStartResponse(action, debug_urls))
def _register_this_run_info(self, curses_cli):
curses_cli.register_command_handler(
"run",
self._run_handler,
self._argparsers["run"].format_help(),
prefix_aliases=["r"])
curses_cli.register_command_handler(
"invoke_stepper",
self._on_run_start_step_handler,
self._argparsers["invoke_stepper"].format_help(),
prefix_aliases=["s"])
curses_cli.register_command_handler(
"run_info",
self._run_info_handler,
self._argparsers["run_info"].format_help(),
prefix_aliases=["ri"])
if self._tensor_filters:
# Register tab completion for the filter names.
curses_cli.register_tab_comp_context(["run", "r"],
list(self._tensor_filters.keys()))
def _on_run_start_step_handler(self, args, screen_info=None):
"""Command handler for "invoke_stepper" command during on-run-start."""
_ = screen_info # Currently unused.
# No parsing is currently necessary for invoke_stepper. This may change
# in the future when the command has arguments.
# Raise CommandLineExit exception to cause the CLI to exit.
raise debugger_cli_common.CommandLineExit(
exit_token=framework.OnRunStartResponse(
framework.OnRunStartAction.INVOKE_STEPPER, []))
def _get_run_debug_urls(self):
"""Get the debug_urls value for the current run() call.
Returns:
debug_urls: (list of str) Debug URLs for the current run() call.
Currently, the list consists of only one URL that is a file:// URL.
"""
return ["file://" + self._dump_root]
def _update_run_calls_state(self, run_call_count, fetches, feed_dict):
"""Update the internal state with regard to run() call history.
Args:
run_call_count: (int) Number of run() calls that have occurred.
fetches: a node/tensor or a list of node/tensor that are the fetches of
the run() call. This is the same as the fetches argument to the run()
call.
      feed_dict: None or a dict. This is the feed_dict argument to the run()
call.
"""
self._run_call_count = run_call_count
self._run_description = cli_shared.get_run_short_description(run_call_count,
fetches,
feed_dict)
self._run_through_times -= 1
self._run_info = cli_shared.get_run_start_intro(run_call_count,
fetches,
feed_dict,
self._tensor_filters)
def invoke_node_stepper(self,
node_stepper,
restore_variable_values_on_exit=True):
"""Overrides method in base class to implement interactive node stepper.
Args:
node_stepper: (`stepper.NodeStepper`) The underlying NodeStepper API
object.
restore_variable_values_on_exit: (`bool`) Whether any variables whose
values have been altered during this node-stepper invocation should be
restored to their old values when this invocation ends.
Returns:
The same return values as the `Session.run()` call on the same fetches as
the NodeStepper.
"""
stepper = stepper_cli.NodeStepperCLI(node_stepper)
# On exiting the node-stepper CLI, the finalize method of the node_stepper
# object will be called, ensuring that the state of the graph will be the
# same as if the stepping did not happen.
# TODO(cais): Perhaps some users will want the effect of the interactive
# stepping and value injection to persist. When that happens, make the call
# to finalize optional.
stepper_ui = ui_factory.get_ui(
self._ui_type,
on_ui_exit=(node_stepper.restore_variable_values if
restore_variable_values_on_exit else None))
stepper_ui.register_command_handler(
"list_sorted_nodes",
stepper.list_sorted_nodes,
stepper.arg_parsers["list_sorted_nodes"].format_help(),
prefix_aliases=["lt", "lsn"])
stepper_ui.register_command_handler(
"cont",
stepper.cont,
stepper.arg_parsers["cont"].format_help(),
prefix_aliases=["ct", "c"])
stepper_ui.register_command_handler(
"step",
stepper.step,
stepper.arg_parsers["step"].format_help(),
prefix_aliases=["st", "s"])
stepper_ui.register_command_handler(
"print_tensor",
stepper.print_tensor,
stepper.arg_parsers["print_tensor"].format_help(),
prefix_aliases=["pt"])
stepper_ui.register_command_handler(
"inject_value",
stepper.inject_value,
stepper.arg_parsers["inject_value"].format_help(),
prefix_aliases=["inject", "override_value", "override"])
# Register tab completion candidates.
stepper_ui.register_tab_comp_context([
"cont", "ct", "c", "pt", "inject_value", "inject", "override_value",
"override"
], [str(elem) for elem in node_stepper.sorted_nodes()])
# TODO(cais): Tie up register_tab_comp_context to a single alias to shorten
# calls like this.
return stepper_ui.run_ui(
init_command="lt",
title="Node Stepper: " + self._run_description,
title_color="blue_on_white")
| {
"content_hash": "a20826fdf3854af718e6b8639b53802b",
"timestamp": "",
"source": "github",
"line_count": 551,
"max_line_length": 80,
"avg_line_length": 37.029038112522684,
"alnum_prop": 0.6418173797970886,
"repo_name": "MoamerEncsConcordiaCa/tensorflow",
"id": "1aab95152ad3262b34a53fca625d5f9981494e6d",
"size": "21092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/debug/wrappers/local_cli_wrapper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7583"
},
{
"name": "C",
"bytes": "176871"
},
{
"name": "C++",
"bytes": "22197536"
},
{
"name": "CMake",
"bytes": "137754"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "786935"
},
{
"name": "HTML",
"bytes": "579704"
},
{
"name": "Java",
"bytes": "286255"
},
{
"name": "JavaScript",
"bytes": "13406"
},
{
"name": "Jupyter Notebook",
"bytes": "1833623"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37227"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "64656"
},
{
"name": "Protocol Buffer",
"bytes": "207866"
},
{
"name": "Python",
"bytes": "19632899"
},
{
"name": "Shell",
"bytes": "334269"
},
{
"name": "TypeScript",
"bytes": "786973"
}
],
"symlink_target": ""
} |
import re
print "generating template instantiations"
with open("cloudproc.hpp","r") as infile:
with open("autogen_instantiations.cpp","w") as outfile:
while True:
line = infile.readline()
if not line: break
if line.startswith("template"):
funcline = infile.readline()
if ";" not in funcline: continue # skip inline templates
if "disable_autogen" in funcline: continue
if "instantiate:" in funcline:
types = [s.strip() for s in funcline.split("instantiate:")[1].split()]
else:
types = ["pcl::PointXYZ", "pcl::PointXYZRGB", "pcl::PointNormal"]
funcname = re.findall("(\w+)\(",funcline)[0]
funcsig = funcline.split(";")[0]
funcsig = funcsig.replace("typename","")
funcsig = funcsig.replace("TRAJOPT_API","")
funcsig = funcsig.replace(funcname, "%s<T>"%funcname)
for type in types:
funcsig_specialized = funcsig.replace("<T>","<%s>"%type)
outline = "template %s;\n"%(funcsig_specialized)
outfile.write(outline)
# outfile.write("PCL_INSTANTIATE(%s, CLOUDPROC_POINT_TYPES);\n"%(funcname))
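# Illustrative input/output (the function name below is hypothetical). For a
# header declaration such as
#   template <class T>
#   void TRAJOPT_API removeOutliers(typename pcl::PointCloud<T>::Ptr cloud);
# the loop above strips "typename"/"TRAJOPT_API", appends the explicit template
# argument to the function name, and writes one instantiation per point type:
#   template void  removeOutliers<pcl::PointXYZ>( pcl::PointCloud<pcl::PointXYZ>::Ptr cloud);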
| {
"content_hash": "f6a2875ef8d1dd931dcce9233bcd7193",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 99,
"avg_line_length": 48.214285714285715,
"alnum_prop": 0.5222222222222223,
"repo_name": "DeadSocks/trajopt",
"id": "ff2fab5a29de6a2021b2899c4676d1c77828184b",
"size": "1372",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/cloudproc/gen_instantiations.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "636"
},
{
"name": "C",
"bytes": "4474638"
},
{
"name": "C++",
"bytes": "18527353"
},
{
"name": "CMake",
"bytes": "273959"
},
{
"name": "Cuda",
"bytes": "94563"
},
{
"name": "FLUX",
"bytes": "8351"
},
{
"name": "GLSL",
"bytes": "281"
},
{
"name": "HTML",
"bytes": "1223"
},
{
"name": "JavaScript",
"bytes": "25197"
},
{
"name": "Logos",
"bytes": "1557205"
},
{
"name": "Lua",
"bytes": "16444"
},
{
"name": "M",
"bytes": "92"
},
{
"name": "Makefile",
"bytes": "7477"
},
{
"name": "Matlab",
"bytes": "1277"
},
{
"name": "Objective-C",
"bytes": "183885"
},
{
"name": "Objective-C++",
"bytes": "8171"
},
{
"name": "Python",
"bytes": "275905"
},
{
"name": "Shell",
"bytes": "314299"
},
{
"name": "TeX",
"bytes": "11618"
}
],
"symlink_target": ""
} |
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import assert_raises_message, eq_
class _DateProcessorTest(fixtures.TestBase):
def test_date_no_string(self):
assert_raises_message(
ValueError,
"Couldn't parse date string '2012' - value is not a string",
self.module.str_to_date, 2012
)
def test_datetime_no_string(self):
assert_raises_message(
ValueError,
"Couldn't parse datetime string '2012' - value is not a string",
self.module.str_to_datetime, 2012
)
def test_time_no_string(self):
assert_raises_message(
ValueError,
"Couldn't parse time string '2012' - value is not a string",
self.module.str_to_time, 2012
)
def test_date_invalid_string(self):
assert_raises_message(
ValueError,
"Couldn't parse date string: '5:a'",
self.module.str_to_date, "5:a"
)
def test_datetime_invalid_string(self):
assert_raises_message(
ValueError,
"Couldn't parse datetime string: '5:a'",
self.module.str_to_datetime, "5:a"
)
def test_time_invalid_string(self):
assert_raises_message(
ValueError,
"Couldn't parse time string: '5:a'",
self.module.str_to_time, "5:a"
)
class PyDateProcessorTest(_DateProcessorTest):
@classmethod
def setup_class(cls):
from sqlalchemy import processors
cls.module = type("util", (object,),
dict(
(k, staticmethod(v))
for k, v in list(processors.py_fallback().items())
)
)
class CDateProcessorTest(_DateProcessorTest):
__requires__ = ('cextensions',)
@classmethod
def setup_class(cls):
from sqlalchemy import cprocessors
cls.module = cprocessors
class _DistillArgsTest(fixtures.TestBase):
def test_distill_none(self):
eq_(
self.module._distill_params(None, None),
[]
)
def test_distill_no_multi_no_param(self):
eq_(
self.module._distill_params((), {}),
[]
)
def test_distill_dict_multi_none_param(self):
eq_(
self.module._distill_params(None, {"foo": "bar"}),
[{"foo": "bar"}]
)
def test_distill_dict_multi_empty_param(self):
eq_(
self.module._distill_params((), {"foo": "bar"}),
[{"foo": "bar"}]
)
def test_distill_single_dict(self):
eq_(
self.module._distill_params(({"foo": "bar"},), {}),
[{"foo": "bar"}]
)
def test_distill_single_list_strings(self):
eq_(
self.module._distill_params((["foo", "bar"],), {}),
[["foo", "bar"]]
)
def test_distill_single_list_tuples(self):
eq_(
self.module._distill_params(([("foo", "bar"), ("bat", "hoho")],), {}),
[('foo', 'bar'), ('bat', 'hoho')]
)
def test_distill_single_list_tuple(self):
eq_(
self.module._distill_params(([("foo", "bar")],), {}),
[('foo', 'bar')]
)
def test_distill_multi_list_tuple(self):
eq_(
self.module._distill_params(
([("foo", "bar")], [("bar", "bat")]),
{}
),
([('foo', 'bar')], [('bar', 'bat')])
)
def test_distill_multi_strings(self):
eq_(
self.module._distill_params(("foo", "bar"), {}),
[('foo', 'bar')]
)
def test_distill_single_list_dicts(self):
eq_(
self.module._distill_params(([{"foo": "bar"}, {"foo": "hoho"}],), {}),
[{'foo': 'bar'}, {'foo': 'hoho'}]
)
def test_distill_single_string(self):
eq_(
self.module._distill_params(("arg",), {}),
[["arg"]]
)
def test_distill_multi_string_tuple(self):
eq_(
self.module._distill_params((("arg", "arg"),), {}),
[("arg", "arg")]
)
class PyDistillArgsTest(_DistillArgsTest):
@classmethod
def setup_class(cls):
from sqlalchemy.engine import util
cls.module = type("util", (object,),
dict(
(k, staticmethod(v))
for k, v in list(util.py_fallback().items())
)
)
class CDistillArgsTest(_DistillArgsTest):
__requires__ = ('cextensions', )
@classmethod
def setup_class(cls):
from sqlalchemy import cutils as util
cls.module = util
| {
"content_hash": "b172100dbdfd6b55c4a4d421e3a56b85",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 82,
"avg_line_length": 28.113095238095237,
"alnum_prop": 0.5005293245818336,
"repo_name": "michaelBenin/sqlalchemy",
"id": "b1c482f09deb1f663f30179b1290923963c932f8",
"size": "4723",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "test/engine/test_processors.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
INCLUDE_FIELDS = '&include_fields=id,component,classification,' \
'creation_time,last_change_time,is_open,priority,' \
'severity,status,summary,assigned_to,creator,resolution,dupe_of'
def query_from_to(from_date, to_date, limit=0):
severity = ['blocker', 'critical', 'major', 'normal', 'minor', 'trivial']
resolution = ['FIXED', 'INVALID', 'WONTFIX', 'DUPLICATE', 'WORKSFORME', 'INCOMPLETE', 'SUPPORT', 'EXPIRED', 'MOVED']
# product = 'Firefox%20for%20Android'
product = 'Firefox'
# product = 'Thunderbird'
# product = 'Calendar'
status = 'RESOLVED'
partial_query = []
for s in severity:
query = Query()
query.severity = s
partial_query.append(query)
for r in resolution:
query = Query()
query.resolution = r
partial_query.append(query)
q = Query()
q.limit = limit
q.product = product
q.status = status
partial_query.append(q)
result = '&'.join([str(query) for query in partial_query])
return result + '&chfieldfrom=' + from_date + '&chfieldto=' + to_date
class Query(object):
@property
def component(self):
return self._component
@component.setter
def component(self, component):
self._component = component
@property
def creation_time(self):
return self._creation_time
@creation_time.setter
def creation_time(self, creation_time):
self._creation_time = creation_time
@property
def last_change_time(self):
return self._last_change_time
@last_change_time.setter
def last_change_time(self, last_change_time):
self._last_change_time = last_change_time
@property
def status(self):
return self._status
@status.setter
def status(self, status):
self._status = status
@property
def product(self):
return self._product
@product.setter
def product(self, product):
self._product = product
@property
def severity(self):
return self._severity
@severity.setter
def severity(self, severity):
self._severity = severity
@property
def limit(self):
return self._limit
@limit.setter
def limit(self, limit):
self._limit = limit
@property
def resolution(self):
return self._resolution
@resolution.setter
def resolution(self, resolution):
self._resolution = resolution
def __str__(self):
criteria = [
'{}={}'.format(key[1:], value) for (key, value) in self.__dict__.items() if value is not None
]
criteria.sort()
return '&'.join(criteria)
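# Rough usage sketch (dates and limit are illustrative). query_from_to() joins
# the severity/resolution criteria with the product/status query and appends
# the chfieldfrom/chfieldto window, yielding a ready-to-append query string.
if __name__ == '__main__':
    print(query_from_to('2017-01-01', '2017-02-01', limit=100))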
| {
"content_hash": "912a610b4488daa3c29c9e4a1f3938a1",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 120,
"avg_line_length": 24.536363636363635,
"alnum_prop": 0.6050389032975176,
"repo_name": "marquesarthur/BugAnalysisRecommender",
"id": "3df09ee2b53fb0e3e587aaf1120442ac7956a3cd",
"size": "2699",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dataset/bugzilla/query.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "16154"
},
{
"name": "Python",
"bytes": "529600"
}
],
"symlink_target": ""
} |
"""
Show how to set custom font properties.
For interactive users, you can also use kwargs to the text command,
which requires less typing. See examples/fonts_demo_kw.py
"""
from matplotlib.font_manager import FontProperties
from pylab import *
subplot(111, axisbg='w')
font0 = FontProperties()
alignment = {'horizontalalignment':'center', 'verticalalignment':'baseline'}
### Show family options
family = ['serif', 'sans-serif', 'cursive', 'fantasy', 'monospace']
font1 = font0.copy()
font1.set_size('large')
t = text(-0.8, 0.9, 'family', fontproperties=font1,
**alignment)
yp = [0.7, 0.5, 0.3, 0.1, -0.1, -0.3, -0.5]
for k in range(5):
font = font0.copy()
font.set_family(family[k])
if k == 2:
font.set_name('Script MT')
t = text(-0.8, yp[k], family[k], fontproperties=font,
**alignment)
### Show style options
style = ['normal', 'italic', 'oblique']
t = text(-0.4, 0.9, 'style', fontproperties=font1,
**alignment)
for k in range(3):
font = font0.copy()
font.set_family('sans-serif')
font.set_style(style[k])
t = text(-0.4, yp[k], style[k], fontproperties=font,
**alignment)
### Show variant options
variant= ['normal', 'small-caps']
t = text(0.0, 0.9, 'variant', fontproperties=font1,
**alignment)
for k in range(2):
font = font0.copy()
font.set_family('serif')
font.set_variant(variant[k])
t = text( 0.0, yp[k], variant[k], fontproperties=font,
**alignment)
### Show weight options
weight = ['light', 'normal', 'medium', 'semibold', 'bold', 'heavy', 'black']
t = text( 0.4, 0.9, 'weight', fontproperties=font1,
**alignment)
for k in range(7):
font = font0.copy()
font.set_weight(weight[k])
t = text( 0.4, yp[k], weight[k], fontproperties=font,
**alignment)
### Show size options
size = ['xx-small', 'x-small', 'small', 'medium', 'large',
'x-large', 'xx-large']
t = text( 0.8, 0.9, 'size', fontproperties=font1,
**alignment)
for k in range(7):
font = font0.copy()
font.set_size(size[k])
t = text( 0.8, yp[k], size[k], fontproperties=font,
**alignment)
### Show bold italic
font = font0.copy()
font.set_style('italic')
font.set_weight('bold')
font.set_size('x-small')
t = text(0, 0.1, 'bold italic', fontproperties=font,
**alignment)
font = font0.copy()
font.set_style('italic')
font.set_weight('bold')
font.set_size('medium')
t = text(0, 0.2, 'bold italic', fontproperties=font,
**alignment)
font = font0.copy()
font.set_style('italic')
font.set_weight('bold')
font.set_size('x-large')
t = text(0, 0.3, 'bold italic', fontproperties=font,
**alignment)
axis([-1,1,0,1])
show()
| {
"content_hash": "2574053705506064b507be0e75191deb",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 76,
"avg_line_length": 24.06140350877193,
"alnum_prop": 0.6084578928180824,
"repo_name": "yavalvas/yav_com",
"id": "562fb7f171e01c415284c651db3534d41879bc17",
"size": "2765",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "build/matplotlib/examples/pylab_examples/fonts_demo.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "85377"
},
{
"name": "C++",
"bytes": "568744"
},
{
"name": "CSS",
"bytes": "47585"
},
{
"name": "Erlang",
"bytes": "7112"
},
{
"name": "HTML",
"bytes": "14865"
},
{
"name": "JavaScript",
"bytes": "359937"
},
{
"name": "Objective-C",
"bytes": "188937"
},
{
"name": "Perl",
"bytes": "229498"
},
{
"name": "Python",
"bytes": "7684946"
},
{
"name": "Shell",
"bytes": "1805"
}
],
"symlink_target": ""
} |
from ducktape.tests.test import Test
from ducktape.mark.resource import cluster
from ducktape.mark import parametrize, matrix
from kafkatest.tests.kafka_test import KafkaTest
from kafkatest.services.performance.streams_performance import StreamsSimpleBenchmarkService
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService
from kafkatest.version import DEV_BRANCH
class StreamsSimpleBenchmarkTest(Test):
"""
Simple benchmark of Kafka Streams.
"""
def __init__(self, test_context):
super(StreamsSimpleBenchmarkTest, self).__init__(test_context)
self.num_records = 20000000L
self.replication = 1
self.num_threads = 1
@cluster(num_nodes=9)
@matrix(test=["produce", "consume", "count", "processstream", "processstreamwithsink", "processstreamwithstatestore", "processstreamwithcachedstatestore", "kstreamktablejoin", "kstreamkstreamjoin", "ktablektablejoin", "yahoo"], scale=[1, 3])
def test_simple_benchmark(self, test, scale):
"""
Run simple Kafka Streams benchmark
"""
self.driver = [None] * (scale + 1)
node = [None] * (scale)
data = [None] * (scale)
#############
# SETUP PHASE
#############
self.zk = ZookeeperService(self.test_context, num_nodes=1)
self.zk.start()
self.kafka = KafkaService(self.test_context, num_nodes=scale, zk=self.zk, version=DEV_BRANCH, topics={
'simpleBenchmarkSourceTopic' : { 'partitions': scale, 'replication-factor': self.replication },
'countTopic' : { 'partitions': scale, 'replication-factor': self.replication },
'simpleBenchmarkSinkTopic' : { 'partitions': scale, 'replication-factor': self.replication },
'joinSourceTopic1KStreamKStream' : { 'partitions': scale, 'replication-factor': self.replication },
'joinSourceTopic2KStreamKStream' : { 'partitions': scale, 'replication-factor': self.replication },
'joinSourceTopic1KStreamKTable' : { 'partitions': scale, 'replication-factor': self.replication },
'joinSourceTopic2KStreamKTable' : { 'partitions': scale, 'replication-factor': self.replication },
'joinSourceTopic1KTableKTable' : { 'partitions': scale, 'replication-factor': self.replication },
'joinSourceTopic2KTableKTable' : { 'partitions': scale, 'replication-factor': self.replication },
'yahooCampaigns' : { 'partitions': 20, 'replication-factor': self.replication },
'yahooEvents' : { 'partitions': 20, 'replication-factor': self.replication }
})
self.kafka.log_level = "INFO"
self.kafka.start()
################
# LOAD PHASE
################
self.load_driver = StreamsSimpleBenchmarkService(self.test_context, self.kafka,
self.num_records * scale, "true", test,
self.num_threads)
self.load_driver.start()
self.load_driver.wait()
self.load_driver.stop()
################
# RUN PHASE
################
for num in range(0, scale):
self.driver[num] = StreamsSimpleBenchmarkService(self.test_context, self.kafka,
self.num_records/(scale), "false", test,
self.num_threads)
self.driver[num].start()
#######################
# STOP + COLLECT PHASE
#######################
for num in range(0, scale):
self.driver[num].wait()
self.driver[num].stop()
node[num] = self.driver[num].node
node[num].account.ssh("grep Performance %s" % self.driver[num].STDOUT_FILE, allow_fail=False)
data[num] = self.driver[num].collect_data(node[num], "" )
final = {}
for num in range(0, scale):
for key in data[num]:
final[key + str(num)] = data[num][key]
return final
| {
"content_hash": "893b8a733db54be5a5c845eee6ca9939",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 245,
"avg_line_length": 47.59090909090909,
"alnum_prop": 0.5776026743075454,
"repo_name": "ErikKringen/kafka",
"id": "258b7c0a50aa1062f39cbfc570dbf9f28866f476",
"size": "4969",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "tests/kafkatest/benchmarks/streams/streams_simple_benchmark_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "27427"
},
{
"name": "HTML",
"bytes": "5443"
},
{
"name": "Java",
"bytes": "10352290"
},
{
"name": "Python",
"bytes": "597310"
},
{
"name": "Scala",
"bytes": "4759919"
},
{
"name": "Shell",
"bytes": "84077"
},
{
"name": "XSLT",
"bytes": "7116"
}
],
"symlink_target": ""
} |
from django.db import models
from cookiedjango.core.models import TimeStampedModel
class TwitterPost(TimeStampedModel):
tag = models.CharField(max_length=140)
text = models.TextField()
| {
"content_hash": "72068f82fd44eb75f066082f886e08e4",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 53,
"avg_line_length": 32.333333333333336,
"alnum_prop": 0.788659793814433,
"repo_name": "shanenater/shanecookie",
"id": "6ce4299cc1bef4fcb9d2c610733b29e1f5ae2c38",
"size": "194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cookiedjango/twitter/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1768"
},
{
"name": "HTML",
"bytes": "20381"
},
{
"name": "JavaScript",
"bytes": "3150"
},
{
"name": "Nginx",
"bytes": "1095"
},
{
"name": "Python",
"bytes": "41041"
},
{
"name": "Shell",
"bytes": "4535"
}
],
"symlink_target": ""
} |
"""
Implementation of Simple Storage Service support.
:py:class:`S3Target` is a subclass of the Target class to support S3 file
system operations. The `boto` library is required to use S3 targets.
"""
from __future__ import division
import datetime
import itertools
import logging
import os
import os.path
import time
from multiprocessing.pool import ThreadPool
try:
from urlparse import urlsplit
except ImportError:
from urllib.parse import urlsplit
import warnings
try:
from ConfigParser import NoSectionError
except ImportError:
from configparser import NoSectionError
from luigi import six
from luigi.six.moves import range
from luigi import configuration
from luigi.format import get_default_format
from luigi.parameter import Parameter
from luigi.target import FileAlreadyExists, FileSystem, FileSystemException, FileSystemTarget, AtomicLocalFile, MissingParentDirectory
from luigi.task import ExternalTask
logger = logging.getLogger('luigi-interface')
# two different ways of marking a directory
# with a suffix in S3
S3_DIRECTORY_MARKER_SUFFIX_0 = '_$folder$'
S3_DIRECTORY_MARKER_SUFFIX_1 = '/'
class InvalidDeleteException(FileSystemException):
pass
class FileNotFoundException(FileSystemException):
pass
class S3Client(FileSystem):
"""
boto-powered S3 client.
"""
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
# only import boto when needed to allow top-lvl s3 module import
import boto
import boto.s3.connection
from boto.s3.key import Key
options = self._get_s3_config()
options.update(kwargs)
        # Removing key args would break backwards compatibility
role_arn = options.get('aws_role_arn')
role_session_name = options.get('aws_role_session_name')
aws_session_token = None
if role_arn and role_session_name:
from boto import sts
sts_client = sts.STSConnection()
assumed_role = sts_client.assume_role(role_arn, role_session_name)
aws_secret_access_key = assumed_role.credentials.secret_key
aws_access_key_id = assumed_role.credentials.access_key
aws_session_token = assumed_role.credentials.session_token
else:
if not aws_access_key_id:
aws_access_key_id = options.get('aws_access_key_id')
if not aws_secret_access_key:
aws_secret_access_key = options.get('aws_secret_access_key')
for key in ['aws_access_key_id', 'aws_secret_access_key', 'aws_role_session_name', 'aws_role_arn']:
if key in options:
options.pop(key)
self.s3 = boto.s3.connection.S3Connection(aws_access_key_id,
aws_secret_access_key,
security_token=aws_session_token,
**options)
self.Key = Key
def exists(self, path):
"""
Does provided path exist on S3?
"""
(bucket, key) = self._path_to_bucket_and_key(path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# root always exists
if self._is_root(key):
return True
# file
s3_key = s3_bucket.get_key(key)
if s3_key:
return True
if self.isdir(path):
return True
logger.debug('Path %s does not exist', path)
return False
def remove(self, path, recursive=True):
"""
Remove a file or directory from S3.
"""
if not self.exists(path):
logger.debug('Could not delete %s; path does not exist', path)
return False
(bucket, key) = self._path_to_bucket_and_key(path)
# root
if self._is_root(key):
raise InvalidDeleteException('Cannot delete root of bucket at path %s' % path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# file
s3_key = s3_bucket.get_key(key)
if s3_key:
s3_bucket.delete_key(s3_key)
logger.debug('Deleting %s from bucket %s', key, bucket)
return True
if self.isdir(path) and not recursive:
raise InvalidDeleteException('Path %s is a directory. Must use recursive delete' % path)
delete_key_list = [
k for k in s3_bucket.list(self._add_path_delimiter(key))]
# delete the directory marker file if it exists
s3_dir_with_suffix_key = s3_bucket.get_key(key + S3_DIRECTORY_MARKER_SUFFIX_0)
if s3_dir_with_suffix_key:
delete_key_list.append(s3_dir_with_suffix_key)
if len(delete_key_list) > 0:
for k in delete_key_list:
logger.debug('Deleting %s from bucket %s', k, bucket)
s3_bucket.delete_keys(delete_key_list)
return True
return False
def get_key(self, path):
"""
Returns just the key from the path.
An s3 path is composed of a bucket and a key.
Suppose we have a path `s3://my_bucket/some/files/my_file`. The key is `some/files/my_file`.
"""
(bucket, key) = self._path_to_bucket_and_key(path)
s3_bucket = self.s3.get_bucket(bucket, validate=True)
return s3_bucket.get_key(key)
def put(self, local_path, destination_s3_path, **kwargs):
"""
Put an object stored locally to an S3 path.
:param kwargs: Keyword arguments are passed to the boto function `set_contents_from_filename`
"""
(bucket, key) = self._path_to_bucket_and_key(destination_s3_path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# put the file
s3_key = self.Key(s3_bucket)
s3_key.key = key
s3_key.set_contents_from_filename(local_path, **kwargs)
def put_string(self, content, destination_s3_path, **kwargs):
"""
Put a string to an S3 path.
:param kwargs: Keyword arguments are passed to the boto function `set_contents_from_string`
"""
(bucket, key) = self._path_to_bucket_and_key(destination_s3_path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# put the content
s3_key = self.Key(s3_bucket)
s3_key.key = key
s3_key.set_contents_from_string(content, **kwargs)
def put_multipart(self, local_path, destination_s3_path, part_size=67108864, **kwargs):
"""
Put an object stored locally to an S3 path
using S3 multi-part upload (for files > 5GB).
:param local_path: Path to source local file
:param destination_s3_path: URL for target S3 location
:param part_size: Part size in bytes. Default: 67108864 (64MB), must be >= 5MB and <= 5 GB.
:param kwargs: Keyword arguments are passed to the boto function `initiate_multipart_upload`
"""
# calculate number of parts to upload
# based on the size of the file
source_size = os.stat(local_path).st_size
if source_size <= part_size:
# fallback to standard, non-multipart strategy
return self.put(local_path, destination_s3_path, **kwargs)
(bucket, key) = self._path_to_bucket_and_key(destination_s3_path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# calculate the number of parts (int division).
# use modulo to avoid float precision issues
# for exactly-sized fits
num_parts = (source_size + part_size - 1) // part_size
mp = None
try:
mp = s3_bucket.initiate_multipart_upload(key, **kwargs)
for i in range(num_parts):
# upload a part at a time to S3
offset = part_size * i
bytes = min(part_size, source_size - offset)
with open(local_path, 'rb') as fp:
part_num = i + 1
logger.info('Uploading part %s/%s to %s', part_num, num_parts, destination_s3_path)
fp.seek(offset)
mp.upload_part_from_file(fp, part_num=part_num, size=bytes)
# finish the upload, making the file available in S3
mp.complete_upload()
except BaseException:
if mp:
logger.info('Canceling multipart s3 upload for %s', destination_s3_path)
# cancel the upload so we don't get charged for
# storage consumed by uploaded parts
mp.cancel_upload()
raise
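    # Rough usage sketch (bucket and file names are illustrative):
    #
    #   client = S3Client()
    #   client.put_multipart('/tmp/big_file.bin', 's3://my-bucket/big_file.bin')
    #
    # With the default 64MB part_size, a 200MB file is split into
    # (200MB + 64MB - 1) // 64MB = 4 parts; anything <= part_size falls back
    # to the plain put() above.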
def get(self, s3_path, destination_local_path):
"""
Get an object stored in S3 and write it to a local path.
"""
(bucket, key) = self._path_to_bucket_and_key(s3_path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# download the file
s3_key = self.Key(s3_bucket)
s3_key.key = key
s3_key.get_contents_to_filename(destination_local_path)
def get_as_string(self, s3_path):
"""
Get the contents of an object stored in S3 as a string.
"""
(bucket, key) = self._path_to_bucket_and_key(s3_path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# get the content
s3_key = self.Key(s3_bucket)
s3_key.key = key
contents = s3_key.get_contents_as_string()
return contents
def copy(self, source_path, destination_path, threads=100, start_time=None, end_time=None, part_size=67108864, **kwargs):
"""
Copy object(s) from one S3 location to another. Works for individual keys or entire directories.
When files are larger than `part_size`, multipart uploading will be used.
:param source_path: The `s3://` path of the directory or key to copy from
:param destination_path: The `s3://` path of the directory or key to copy to
:param threads: Optional argument to define the number of threads to use when copying (min: 3 threads)
:param start_time: Optional argument to copy files with modified dates after start_time
:param end_time: Optional argument to copy files with modified dates before end_time
:param part_size: Part size in bytes. Default: 67108864 (64MB), must be >= 5MB and <= 5 GB.
:param kwargs: Keyword arguments are passed to the boto function `copy_key`
:returns tuple (number_of_files_copied, total_size_copied_in_bytes)
"""
start = datetime.datetime.now()
(src_bucket, src_key) = self._path_to_bucket_and_key(source_path)
(dst_bucket, dst_key) = self._path_to_bucket_and_key(destination_path)
# As the S3 copy command is completely server side, there is no issue with issuing a lot of threads
# to issue a single API call per copy, however, this may in theory cause issues on systems with low ulimits for
# number of threads when copying really large files (e.g. with a ~100GB file this will open ~1500
# threads), or large directories. Around 100 threads seems to work well.
threads = 3 if threads < 3 else threads # don't allow threads to be less than 3
total_keys = 0
copy_pool = ThreadPool(processes=threads)
if self.isdir(source_path):
# The management pool is to ensure that there's no deadlock between the s3 copying threads, and the
# multipart_copy threads that monitors each group of s3 copy threads and returns a success once the entire file
# is copied. Without this, we could potentially fill up the pool with threads waiting to check if the s3 copies
# have completed, leaving no available threads to actually perform any copying.
copy_jobs = []
management_pool = ThreadPool(processes=threads)
(bucket, key) = self._path_to_bucket_and_key(source_path)
key_path = self._add_path_delimiter(key)
key_path_len = len(key_path)
total_size_bytes = 0
src_prefix = self._add_path_delimiter(src_key)
dst_prefix = self._add_path_delimiter(dst_key)
for item in self.list(source_path, start_time=start_time, end_time=end_time, return_key=True):
path = item.key[key_path_len:]
# prevents copy attempt of empty key in folder
if path != '' and path != '/':
total_keys += 1
total_size_bytes += item.size
job = management_pool.apply_async(self.__copy_multipart,
args=(copy_pool,
src_bucket, src_prefix + path,
dst_bucket, dst_prefix + path,
part_size),
kwds=kwargs)
copy_jobs.append(job)
# Wait for the pools to finish scheduling all the copies
management_pool.close()
management_pool.join()
copy_pool.close()
copy_pool.join()
# Raise any errors encountered in any of the copy processes
for result in copy_jobs:
result.get()
end = datetime.datetime.now()
duration = end - start
logger.info('%s : Complete : %s total keys copied in %s' %
(datetime.datetime.now(), total_keys, duration))
return total_keys, total_size_bytes
# If the file isn't a directory just perform a simple copy
else:
self.__copy_multipart(copy_pool, src_bucket, src_key, dst_bucket, dst_key, part_size, **kwargs)
# Close the pool
copy_pool.close()
copy_pool.join()
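    # Rough usage sketch (bucket names are illustrative):
    #
    #   client = S3Client()
    #   client.copy('s3://src-bucket/logs/', 's3://dst-bucket/logs-backup/', threads=100)
    #
    # Directory copies return (number_of_keys_copied, total_bytes_copied); each
    # key is copied server-side, switching to multipart copy when it is larger
    # than part_size.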
def __copy_multipart(self, pool, src_bucket, src_key, dst_bucket, dst_key, part_size, **kwargs):
"""
Copy a single S3 object to another S3 object, falling back to multipart copy where necessary
NOTE: This is a private method and should only be called from within the `luigi.s3.copy` method
:param pool: The threadpool to put the s3 copy processes onto
:param src_bucket: source bucket name
:param src_key: source key name
:param dst_bucket: destination bucket name
:param dst_key: destination key name
:param key_size: size of the key to copy in bytes
:param part_size: Part size in bytes. Must be >= 5MB and <= 5 GB.
:param kwargs: Keyword arguments are passed to the boto function `initiate_multipart_upload`
"""
source_bucket = self.s3.get_bucket(src_bucket, validate=True)
dest_bucket = self.s3.get_bucket(dst_bucket, validate=True)
key_size = source_bucket.lookup(src_key).size
# We can't do a multipart copy on an empty Key, so handle this specially.
# Also, don't bother using the multipart machinery if we're only dealing with a small non-multipart file
if key_size == 0 or key_size <= part_size:
result = pool.apply_async(dest_bucket.copy_key, args=(dst_key, src_bucket, src_key), kwds=kwargs)
# Bubble up any errors we may encounter
return result.get()
mp = None
try:
mp = dest_bucket.initiate_multipart_upload(dst_key, **kwargs)
cur_pos = 0
# Store the results from the apply_async in a list so we can check for failures
results = []
# Calculate the number of chunks the file will be
num_parts = (key_size + part_size - 1) // part_size
for i in range(num_parts):
# Issue an S3 copy request, one part at a time, from one S3 object to another
part_start = cur_pos
cur_pos += part_size
part_end = min(cur_pos - 1, key_size - 1)
part_num = i + 1
results.append(pool.apply_async(mp.copy_part_from_key, args=(src_bucket, src_key, part_num, part_start, part_end)))
logger.info('Requesting copy of %s/%s to %s/%s', part_num, num_parts, dst_bucket, dst_key)
logger.info('Waiting for multipart copy of %s/%s to finish', dst_bucket, dst_key)
# This will raise any exceptions in any of the copy threads
for result in results:
result.get()
# finish the copy, making the file available in S3
mp.complete_upload()
return mp.key_name
except:
logger.info('Error during multipart s3 copy for %s/%s to %s/%s...', src_bucket, src_key, dst_bucket, dst_key)
# cancel the copy so we don't get charged for storage consumed by copied parts
if mp:
mp.cancel_upload()
raise
def move(self, source_path, destination_path, **kwargs):
"""
Rename/move an object from one S3 location to another.
:param kwargs: Keyword arguments are passed to the boto function `copy_key`
"""
self.copy(source_path, destination_path, **kwargs)
self.remove(source_path)
def listdir(self, path, start_time=None, end_time=None, return_key=False):
"""
Get an iterable with S3 folder contents.
Iterable contains paths relative to queried path.
:param start_time: Optional argument to list files with modified dates after start_time
:param end_time: Optional argument to list files with modified dates before end_time
:param return_key: Optional argument, when set to True will return a boto.s3.key.Key (instead of the filename)
"""
(bucket, key) = self._path_to_bucket_and_key(path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
key_path = self._add_path_delimiter(key)
key_path_len = len(key_path)
for item in s3_bucket.list(prefix=key_path):
last_modified_date = time.strptime(item.last_modified, "%Y-%m-%dT%H:%M:%S.%fZ")
if (
(not start_time and not end_time) or # neither are defined, list all
(start_time and not end_time and start_time < last_modified_date) or # start defined, after start
(not start_time and end_time and last_modified_date < end_time) or # end defined, prior to end
(start_time and end_time and start_time < last_modified_date < end_time) # both defined, between
):
if return_key:
yield item
else:
yield self._add_path_delimiter(path) + item.key[key_path_len:]
def list(self, path, start_time=None, end_time=None, return_key=False): # backwards compat
key_path_len = len(self._add_path_delimiter(path))
for item in self.listdir(path, start_time=start_time, end_time=end_time, return_key=return_key):
if return_key:
yield item
else:
yield item[key_path_len:]
def isdir(self, path):
"""
Is the parameter S3 path a directory?
"""
(bucket, key) = self._path_to_bucket_and_key(path)
# grab and validate the bucket
s3_bucket = self.s3.get_bucket(bucket, validate=True)
# root is a directory
if self._is_root(key):
return True
for suffix in (S3_DIRECTORY_MARKER_SUFFIX_0,
S3_DIRECTORY_MARKER_SUFFIX_1):
s3_dir_with_suffix_key = s3_bucket.get_key(key + suffix)
if s3_dir_with_suffix_key:
return True
# files with this prefix
key_path = self._add_path_delimiter(key)
s3_bucket_list_result = list(itertools.islice(s3_bucket.list(prefix=key_path), 1))
if s3_bucket_list_result:
return True
return False
is_dir = isdir # compatibility with old version.
def mkdir(self, path, parents=True, raise_if_exists=False):
if raise_if_exists and self.isdir(path):
raise FileAlreadyExists()
_, key = self._path_to_bucket_and_key(path)
if self._is_root(key):
return # isdir raises if the bucket doesn't exist; nothing to do here.
key = self._add_path_delimiter(key)
if not parents and not self.isdir(os.path.dirname(key)):
raise MissingParentDirectory()
return self.put_string("", self._add_path_delimiter(path))
def _get_s3_config(self, key=None):
try:
config = dict(configuration.get_config().items('s3'))
except NoSectionError:
return {}
        # Cast numeric values (e.g. ports) to int so they can be read without having to specify dtypes
for k, v in six.iteritems(config):
try:
config[k] = int(v)
except ValueError:
pass
if key:
return config.get(key)
return config
def _path_to_bucket_and_key(self, path):
(scheme, netloc, path, query, fragment) = urlsplit(path)
path_without_initial_slash = path[1:]
return netloc, path_without_initial_slash
def _is_root(self, key):
return (len(key) == 0) or (key == '/')
def _add_path_delimiter(self, key):
return key if key[-1:] == '/' or key == '' else key + '/'
class AtomicS3File(AtomicLocalFile):
"""
An S3 file that writes to a temp file and puts to S3 on close.
:param kwargs: Keyword arguments are passed to the boto function `initiate_multipart_upload`
"""
def __init__(self, path, s3_client, **kwargs):
self.s3_client = s3_client
super(AtomicS3File, self).__init__(path)
self.s3_options = kwargs
def move_to_final_destination(self):
self.s3_client.put_multipart(self.tmp_path, self.path, **self.s3_options)
class ReadableS3File(object):
def __init__(self, s3_key):
self.s3_key = s3_key
self.buffer = []
self.closed = False
self.finished = False
def read(self, size=0):
f = self.s3_key.read(size=size)
# boto will loop on the key forever and it's not what is expected by
# the python io interface
# boto/boto#2805
if f == b'':
self.finished = True
if self.finished:
return b''
return f
def close(self):
self.s3_key.close()
self.closed = True
def __del__(self):
self.close()
def __exit__(self, exc_type, exc, traceback):
self.close()
def __enter__(self):
return self
def _add_to_buffer(self, line):
self.buffer.append(line)
def _flush_buffer(self):
output = b''.join(self.buffer)
self.buffer = []
return output
def readable(self):
return True
def writable(self):
return False
def seekable(self):
return False
def __iter__(self):
key_iter = self.s3_key.__iter__()
has_next = True
while has_next:
try:
# grab the next chunk
chunk = next(key_iter)
# split on newlines, preserving the newline
for line in chunk.splitlines(True):
if not line.endswith(os.linesep):
# no newline, so store in buffer
self._add_to_buffer(line)
else:
# newline found, send it out
if self.buffer:
self._add_to_buffer(line)
yield self._flush_buffer()
else:
yield line
except StopIteration:
# send out anything we have left in the buffer
output = self._flush_buffer()
if output:
yield output
has_next = False
self.close()
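# Worked example of the line buffering above (chunk boundaries are
# illustrative, and os.linesep is assumed to be '\n'): if boto yields the
# chunks [b"foo\nba", b"r\nbaz\n"], iteration yields b"foo\n", then b"bar\n"
# (the partial b"ba" is buffered until its newline arrives), then b"baz\n".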
class S3Target(FileSystemTarget):
"""
Target S3 file object
:param kwargs: Keyword arguments are passed to the boto function `initiate_multipart_upload`
"""
fs = None
def __init__(self, path, format=None, client=None, **kwargs):
super(S3Target, self).__init__(path)
if format is None:
format = get_default_format()
self.path = path
self.format = format
self.fs = client or S3Client()
self.s3_options = kwargs
def open(self, mode='r'):
if mode not in ('r', 'w'):
raise ValueError("Unsupported open mode '%s'" % mode)
if mode == 'r':
s3_key = self.fs.get_key(self.path)
if not s3_key:
raise FileNotFoundException("Could not find file at %s" % self.path)
fileobj = ReadableS3File(s3_key)
return self.format.pipe_reader(fileobj)
else:
return self.format.pipe_writer(AtomicS3File(self.path, self.fs, **self.s3_options))
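# Rough usage sketch inside a luigi task (bucket and key are illustrative):
#
#   class MyReport(luigi.Task):
#       def output(self):
#           return S3Target('s3://my-bucket/reports/report.csv')
#
#       def run(self):
#           with self.output().open('w') as f:
#               f.write('col_a,col_b\n')
#
# Writes go through AtomicS3File, so the key only appears in S3 once the
# temporary local file has been uploaded on close.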
class S3FlagTarget(S3Target):
"""
Defines a target directory with a flag-file (defaults to `_SUCCESS`) used
to signify job success.
This checks for two things:
* the path exists (just like the S3Target)
* the _SUCCESS file exists within the directory.
Because Hadoop outputs into a directory and not a single file,
the path is assumed to be a directory.
This is meant to be a handy alternative to AtomicS3File.
The AtomicFile approach can be burdensome for S3 since there are no directories, per se.
If we have 1,000,000 output files, then we have to rename 1,000,000 objects.
"""
fs = None
def __init__(self, path, format=None, client=None, flag='_SUCCESS'):
"""
Initializes a S3FlagTarget.
:param path: the directory where the files are stored.
:type path: str
:param client:
:type client:
:param flag:
:type flag: str
"""
if format is None:
format = get_default_format()
if path[-1] != "/":
raise ValueError("S3FlagTarget requires the path to be to a "
"directory. It must end with a slash ( / ).")
super(S3FlagTarget, self).__init__(path, format, client)
self.flag = flag
def exists(self):
hadoopSemaphore = self.path + self.flag
return self.fs.exists(hadoopSemaphore)
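# Rough usage sketch (the path is illustrative): for a Hadoop/EMR job that
# writes part files plus a _SUCCESS marker into s3://my-bucket/output/run_1/,
#
#   target = S3FlagTarget('s3://my-bucket/output/run_1/')
#   target.exists()  # True only once .../run_1/_SUCCESS is present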
class S3EmrTarget(S3FlagTarget):
"""
Deprecated. Use :py:class:`S3FlagTarget`
"""
def __init__(self, *args, **kwargs):
warnings.warn("S3EmrTarget is deprecated. Please use S3FlagTarget")
super(S3EmrTarget, self).__init__(*args, **kwargs)
class S3PathTask(ExternalTask):
"""
    An external task that requires the existence of a path in S3.
"""
path = Parameter()
def output(self):
return S3Target(self.path)
class S3EmrTask(ExternalTask):
"""
An external task that requires the existence of EMR output in S3.
"""
path = Parameter()
def output(self):
return S3EmrTarget(self.path)
class S3FlagTask(ExternalTask):
"""
    An external task that requires the existence of a flag file in S3.
"""
path = Parameter()
flag = Parameter(default=None)
def output(self):
return S3FlagTarget(self.path, flag=self.flag)
| {
"content_hash": "175e0dfef8c4e675fd517e240ee445d8",
"timestamp": "",
"source": "github",
"line_count": 778,
"max_line_length": 134,
"avg_line_length": 35.74421593830334,
"alnum_prop": 0.5881549138768025,
"repo_name": "jw0201/luigi",
"id": "d01995fb79c50b868d74578a08256f8449eaf68e",
"size": "28412",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "luigi/contrib/s3.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2162"
},
{
"name": "HTML",
"bytes": "39935"
},
{
"name": "JavaScript",
"bytes": "155710"
},
{
"name": "Python",
"bytes": "1720968"
},
{
"name": "Shell",
"bytes": "2627"
}
],
"symlink_target": ""
} |
import functools
import imath
import IECore
import Gaffer
import GafferUI
Gaffer.Metadata.registerNode(
Gaffer.EditScope,
"description",
"""
A container that interactive tools may make nodes in
as necessary.
""",
"icon", "editScopeNode.png",
"graphEditor:childrenViewable", True,
# Add + buttons for setting up via the GraphEditor
"noduleLayout:customGadget:setupButtonTop:gadgetType", "GafferUI.EditScopeUI.PlugAdder",
"noduleLayout:customGadget:setupButtonTop:section", "top",
"noduleLayout:customGadget:setupButtonBottom:gadgetType", "GafferUI.EditScopeUI.PlugAdder",
"noduleLayout:customGadget:setupButtonBottom:section", "bottom",
# Hide the Box + buttons until the node has been set up. Two sets of buttons at
# the same time is way too confusing.
"noduleLayout:customGadget:addButtonTop:visible", lambda node : "in" in node,
"noduleLayout:customGadget:addButtonBottom:visible", lambda node : "in" in node,
"noduleLayout:customGadget:addButtonLeft:visible", lambda node : "in" in node,
"noduleLayout:customGadget:addButtonRight:visible", lambda node : "in" in node,
plugs = {
"in" : [
"renameable", False,
"deletable", False,
],
"out" : [
"renameable", False,
"deletable", False,
],
},
)
# Disable editing of `EditScope.BoxIn` and `EditScope.BoxOut`
Gaffer.Metadata.registerValue( Gaffer.EditScope, "BoxIn.name", "readOnly", True )
Gaffer.Metadata.registerValue( Gaffer.EditScope, "BoxOut.name", "readOnly", True )
Gaffer.Metadata.registerValue( Gaffer.BoxIn, "renameable", lambda node : not isinstance( node.parent(), Gaffer.EditScope ) or node.getName() != "BoxIn" )
Gaffer.Metadata.registerValue( Gaffer.BoxOut, "renameable", lambda node : not isinstance( node.parent(), Gaffer.EditScope ) or node.getName() != "BoxOut" )
# EditScopePlugValueWidget
# ========================
class EditScopePlugValueWidget( GafferUI.PlugValueWidget ) :
def __init__( self, plug, **kw ) :
frame = GafferUI.Frame( borderWidth = 0 )
GafferUI.PlugValueWidget.__init__( self, frame, plug, **kw )
with frame :
with GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal, spacing = 4 ) :
GafferUI.Spacer( imath.V2i( 4, 1 ), imath.V2i( 4, 1 ) )
GafferUI.Label( "Edit Scope" )
self.__menuButton = GafferUI.MenuButton(
"",
menu = GafferUI.Menu( Gaffer.WeakMethod( self.__menuDefinition ) )
)
self.__menuButton._qtWidget().setFixedWidth( 100 )
self.__navigationMenuButton = GafferUI.MenuButton(
image = "navigationArrow.png",
hasFrame = False,
menu = GafferUI.Menu( Gaffer.WeakMethod( self.__navigationMenuDefinition ) )
)
GafferUI.Spacer( imath.V2i( 4, 1 ), imath.V2i( 4, 1 ) )
self._updateFromPlug()
def hasLabel( self ) :
return True
def _updateFromPlug( self ) :
editScope = self.__editScope()
editScopeActive = editScope is not None
self.__updateMenuButton( editScope )
self.__navigationMenuButton.setEnabled( editScopeActive )
if editScopeActive :
self.__editScopeNameChangedConnection = editScope.nameChangedSignal().connect(
Gaffer.WeakMethod( self.__editScopeNameChanged ), scoped = True
)
else :
self.__editScopeNameChangedConnection = None
if self._qtWidget().property( "editScopeActive" ) != editScopeActive :
self._qtWidget().setProperty( "editScopeActive", GafferUI._Variant.toVariant( editScopeActive ) )
self._repolish()
def __updateMenuButton( self, editScope ) :
self.__menuButton.setText( editScope.getName() if editScope is not None else "None" )
def __editScopeNameChanged( self, editScope ) :
self.__updateMenuButton( editScope )
def __editScope( self ) :
input = self.getPlug().getInput()
return input.ancestor( Gaffer.EditScope ) if input is not None else None
def __editScopePredicate( self, node ) :
if not isinstance( node, Gaffer.EditScope ) :
return False
if "out" not in node or not self.getPlug().acceptsInput( node["out"] ) :
return False
return True
def __connectEditScope( self, editScope, *ignored ) :
self.getPlug().setInput( editScope["out"] )
def __menuDefinition( self ) :
result = IECore.MenuDefinition()
node = self.getPlug().node()
if isinstance( node, GafferUI.View ) and self.getPlug() == node["editScope"] :
if node["in"].getInput() is None :
return
else :
node = node["in"].getInput().node()
currentEditScope = None
if self.getPlug().getInput() is not None :
currentEditScope = self.getPlug().getInput().parent()
def addItem( editScope, enabled = True ) :
result.append(
# The underscore suffix prevents collisions with a node and
				# its submenu if it has nested edit scopes.
"/%s_" % editScope.relativeName( editScope.scriptNode() ).replace( ".", "/" ),
{
"command" : functools.partial( Gaffer.WeakMethod( self.__connectEditScope ), editScope ),
"active" : enabled,
"label" : editScope.getName(),
"checkBox" : editScope == currentEditScope,
}
)
upstream = Gaffer.NodeAlgo.findAllUpstream( node, self.__editScopePredicate )
if self.__editScopePredicate( node ) :
upstream.insert( 0, node )
result.append( "/__UpstreamDivider__", { "divider" : True, "label" : "Upstream" } )
if upstream :
for editScope in reversed( upstream ) :
addItem( editScope )
else :
result.append( "/None Available", { "active" : False } )
downstream = Gaffer.NodeAlgo.findAllDownstream( node, self.__editScopePredicate )
if downstream :
result.append( "/__DownstreamDivider__", { "divider" : True, "label" : "Downstream" } )
for editScope in downstream :
addItem( editScope, enabled = False )
result.append( "/__NoneDivider__", { "divider" : True } )
result.append(
"/None", { "command" : functools.partial( self.getPlug().setInput, None ) },
)
return result
def __navigationMenuDefinition( self ) :
result = IECore.MenuDefinition()
editScope = self.__editScope()
if editScope is None :
result.append(
"/No EditScope Selected",
{ "active" : False },
)
return result
nodes = editScope.processors()
nodes.extend( self.__userNodes( editScope ) )
if nodes :
for node in nodes :
path = node.relativeName( editScope ).replace( ".", "/" )
result.append(
"/" + path,
{
"command" : functools.partial( GafferUI.NodeEditor.acquire, node )
}
)
else :
result.append(
"/EditScope is Empty",
{ "active" : False },
)
return result
@staticmethod
def __userNodes( editScope ) :
nodes = Gaffer.Metadata.nodesWithMetadata( editScope, "editScope:includeInNavigationMenu" )
return [ n for n in nodes if n.ancestor( Gaffer.EditScope ).isSame( editScope ) ]
| {
"content_hash": "dbfaec8595232abd29b04bd8bcdadcef",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 155,
"avg_line_length": 28.900862068965516,
"alnum_prop": 0.6850111856823267,
"repo_name": "ImageEngine/gaffer",
"id": "e1bc4680cce49251a22c7b8531b5a059b2ac69d5",
"size": "8501",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "python/GafferUI/EditScopeUI.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4486"
},
{
"name": "C++",
"bytes": "5353598"
},
{
"name": "CSS",
"bytes": "28027"
},
{
"name": "GLSL",
"bytes": "6250"
},
{
"name": "Python",
"bytes": "5296193"
},
{
"name": "Shell",
"bytes": "8008"
},
{
"name": "Slash",
"bytes": "41159"
}
],
"symlink_target": ""
} |
import sys
import subprocess
# Run the wrapped compiler command unchanged and remember its exit status.
args = sys.argv[1:]
#print args
ret = subprocess.call(args)
# Locate the dependency file passed via '-MF <path>', scanning from the end.
i = len(args) - 1
deplocation = None
while i >= 0:
    if args[i] == '-MF':
        deplocation = args[i + 1]
        break
    i = i - 1
if deplocation is None:
    # No dependency file was requested, so there is nothing to heal;
    # propagate the compiler's exit status instead of masking it with 0.
    exit(ret)
# The object directory sits two levels above the .d file, under '.obj/'.
objdir = '/'.join(deplocation.split('/')[:-2]) + '/.obj/'
depfile = open(deplocation)
lines = depfile.readlines()
depfile.close()
# "Heal" the dependency file: prefix the target on its first line with the
# object directory so make resolves the .o file in the right place.
if not lines[0].startswith(objdir):
    lines[0] = objdir + lines[0]
depfile = open(deplocation, 'w')
for line in lines:
    depfile.write(line)
depfile.close()
#subprocess.call(['head', "-n1", deplocation])
exit(ret)
| {
"content_hash": "f5aa4b9bc7cc5b6eee7fdb5c1522186a",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 57,
"avg_line_length": 20.928571428571427,
"alnum_prop": 0.6638225255972696,
"repo_name": "DimaKirk/distcc-gen-hosts",
"id": "0400756dea753db32cb10bbb6274eebfb15e77f0",
"size": "604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "healdep.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1796"
}
],
"symlink_target": ""
} |
"""Constraints definition for minimize."""
import numpy as np
from ._hessian_update_strategy import BFGS
from ._differentiable_functions import (
VectorFunction, LinearVectorFunction, IdentityVectorFunction)
from ._optimize import OptimizeWarning
from warnings import warn, catch_warnings, simplefilter
from numpy.testing import suppress_warnings
from scipy.sparse import issparse
def _arr_to_scalar(x):
# If x is a numpy array, return x.item(). This will
# fail if the array has more than one element.
return x.item() if isinstance(x, np.ndarray) else x
class NonlinearConstraint:
"""Nonlinear constraint on the variables.
The constraint has the general inequality form::
lb <= fun(x) <= ub
Here the vector of independent variables x is passed as ndarray of shape
(n,) and ``fun`` returns a vector with m components.
It is possible to use equal bounds to represent an equality constraint or
infinite bounds to represent a one-sided constraint.
Parameters
----------
fun : callable
The function defining the constraint.
The signature is ``fun(x) -> array_like, shape (m,)``.
lb, ub : array_like
Lower and upper bounds on the constraint. Each array must have the
shape (m,) or be a scalar, in the latter case a bound will be the same
for all components of the constraint. Use ``np.inf`` with an
appropriate sign to specify a one-sided constraint.
Set components of `lb` and `ub` equal to represent an equality
constraint. Note that you can mix constraints of different types:
interval, one-sided or equality, by setting different components of
`lb` and `ub` as necessary.
jac : {callable, '2-point', '3-point', 'cs'}, optional
Method of computing the Jacobian matrix (an m-by-n matrix,
where element (i, j) is the partial derivative of f[i] with
respect to x[j]). The keywords {'2-point', '3-point',
'cs'} select a finite difference scheme for the numerical estimation.
A callable must have the following signature:
``jac(x) -> {ndarray, sparse matrix}, shape (m, n)``.
Default is '2-point'.
hess : {callable, '2-point', '3-point', 'cs', HessianUpdateStrategy, None}, optional
Method for computing the Hessian matrix. The keywords
{'2-point', '3-point', 'cs'} select a finite difference scheme for
numerical estimation. Alternatively, objects implementing
`HessianUpdateStrategy` interface can be used to approximate the
Hessian. Currently available implementations are:
- `BFGS` (default option)
- `SR1`
A callable must return the Hessian matrix of ``dot(fun, v)`` and
must have the following signature:
``hess(x, v) -> {LinearOperator, sparse matrix, array_like}, shape (n, n)``.
Here ``v`` is ndarray with shape (m,) containing Lagrange multipliers.
keep_feasible : array_like of bool, optional
Whether to keep the constraint components feasible throughout
        iterations. A single value sets this property for all components.
Default is False. Has no effect for equality constraints.
finite_diff_rel_step: None or array_like, optional
Relative step size for the finite difference approximation. Default is
None, which will select a reasonable value automatically depending
on a finite difference scheme.
finite_diff_jac_sparsity: {None, array_like, sparse matrix}, optional
Defines the sparsity structure of the Jacobian matrix for finite
difference estimation, its shape must be (m, n). If the Jacobian has
only few non-zero elements in *each* row, providing the sparsity
structure will greatly speed up the computations. A zero entry means
that a corresponding element in the Jacobian is identically zero.
If provided, forces the use of 'lsmr' trust-region solver.
If None (default) then dense differencing will be used.
Notes
-----
Finite difference schemes {'2-point', '3-point', 'cs'} may be used for
approximating either the Jacobian or the Hessian. We, however, do not allow
its use for approximating both simultaneously. Hence whenever the Jacobian
is estimated via finite-differences, we require the Hessian to be estimated
using one of the quasi-Newton strategies.
The scheme 'cs' is potentially the most accurate, but requires the function
    to correctly handle complex inputs and be analytically continuable to the
complex plane. The scheme '3-point' is more accurate than '2-point' but
requires twice as many operations.
Examples
--------
Constrain ``x[0] < sin(x[1]) + 1.9``
>>> from scipy.optimize import NonlinearConstraint
>>> con = lambda x: x[0] - np.sin(x[1])
>>> nlc = NonlinearConstraint(con, -np.inf, 1.9)
"""
def __init__(self, fun, lb, ub, jac='2-point', hess=BFGS(),
keep_feasible=False, finite_diff_rel_step=None,
finite_diff_jac_sparsity=None):
self.fun = fun
self.lb = lb
self.ub = ub
self.finite_diff_rel_step = finite_diff_rel_step
self.finite_diff_jac_sparsity = finite_diff_jac_sparsity
self.jac = jac
self.hess = hess
self.keep_feasible = keep_feasible
class LinearConstraint:
"""Linear constraint on the variables.
The constraint has the general inequality form::
lb <= A.dot(x) <= ub
Here the vector of independent variables x is passed as ndarray of shape
(n,) and the matrix A has shape (m, n).
It is possible to use equal bounds to represent an equality constraint or
infinite bounds to represent a one-sided constraint.
Parameters
----------
A : {array_like, sparse matrix}, shape (m, n)
Matrix defining the constraint.
lb, ub : array_like, optional
Lower and upper limits on the constraint. Each array must have the
shape (m,) or be a scalar, in the latter case a bound will be the same
for all components of the constraint. Use ``np.inf`` with an
appropriate sign to specify a one-sided constraint.
Set components of `lb` and `ub` equal to represent an equality
constraint. Note that you can mix constraints of different types:
interval, one-sided or equality, by setting different components of
`lb` and `ub` as necessary. Defaults to ``lb = -np.inf``
and ``ub = np.inf`` (no limits).
keep_feasible : array_like of bool, optional
Whether to keep the constraint components feasible throughout
        iterations. A single value sets this property for all components.
Default is False. Has no effect for equality constraints.
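    Examples
    --------
    A minimal sketch (coefficients are illustrative) encoding the single
    inequality ``x[0] + 2*x[1] <= 4``:
    >>> from scipy.optimize import LinearConstraint
    >>> lc = LinearConstraint([[1, 2]], -np.inf, 4)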
"""
def _input_validation(self):
if self.A.ndim != 2:
message = "`A` must have exactly two dimensions."
raise ValueError(message)
try:
shape = self.A.shape[0:1]
self.lb = np.broadcast_to(self.lb, shape)
self.ub = np.broadcast_to(self.ub, shape)
self.keep_feasible = np.broadcast_to(self.keep_feasible, shape)
except ValueError:
message = ("`lb`, `ub`, and `keep_feasible` must be broadcastable "
"to shape `A.shape[0:1]`")
raise ValueError(message)
def __init__(self, A, lb=-np.inf, ub=np.inf, keep_feasible=False):
if not issparse(A):
# In some cases, if the constraint is not valid, this emits a
# VisibleDeprecationWarning about ragged nested sequences
# before eventually causing an error. `scipy.optimize.milp` would
# prefer that this just error out immediately so it can handle it
# rather than concerning the user.
with catch_warnings():
simplefilter("error")
self.A = np.atleast_2d(A).astype(np.float64)
else:
self.A = A
self.lb = np.atleast_1d(lb).astype(np.float64)
self.ub = np.atleast_1d(ub).astype(np.float64)
self.keep_feasible = np.atleast_1d(keep_feasible).astype(bool)
self._input_validation()
def residual(self, x):
"""
Calculate the residual between the constraint function and the limits
For a linear constraint of the form::
lb <= A@x <= ub
the lower and upper residuals between ``A@x`` and the limits are values
``sl`` and ``sb`` such that::
lb + sl == A@x == ub - sb
When all elements of ``sl`` and ``sb`` are positive, all elements of
the constraint are satisfied; a negative element in ``sl`` or ``sb``
indicates that the corresponding element of the constraint is not
satisfied.
Parameters
----------
x: array_like
Vector of independent variables
Returns
-------
sl, sb : array-like
The lower and upper residuals
"""
return self.A@x - self.lb, self.ub - self.A@x
class Bounds:
"""Bounds constraint on the variables.
The constraint has the general inequality form::
lb <= x <= ub
It is possible to use equal bounds to represent an equality constraint or
infinite bounds to represent a one-sided constraint.
Parameters
----------
lb, ub : array_like, optional
Lower and upper bounds on independent variables. `lb`, `ub`, and
`keep_feasible` must be the same shape or broadcastable.
Set components of `lb` and `ub` equal
to fix a variable. Use ``np.inf`` with an appropriate sign to disable
bounds on all or some variables. Note that you can mix constraints of
different types: interval, one-sided or equality, by setting different
components of `lb` and `ub` as necessary. Defaults to ``lb = -np.inf``
and ``ub = np.inf`` (no bounds).
keep_feasible : array_like of bool, optional
Whether to keep the constraint components feasible throughout
iterations. Must be broadcastable with `lb` and `ub`.
Default is False. Has no effect for equality constraints.
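    Examples
    --------
    A minimal sketch (values are illustrative) requiring ``0 <= x[0]`` and
    ``-1 <= x[1] <= 1``:
    >>> from scipy.optimize import Bounds
    >>> bounds = Bounds([0, -1], [np.inf, 1])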
"""
def _input_validation(self):
try:
res = np.broadcast_arrays(self.lb, self.ub, self.keep_feasible)
self.lb, self.ub, self.keep_feasible = res
except ValueError:
message = "`lb`, `ub`, and `keep_feasible` must be broadcastable."
raise ValueError(message)
def __init__(self, lb=-np.inf, ub=np.inf, keep_feasible=False):
self.lb = np.atleast_1d(lb)
self.ub = np.atleast_1d(ub)
self.keep_feasible = np.atleast_1d(keep_feasible).astype(bool)
self._input_validation()
def __repr__(self):
start = f"{type(self).__name__}({self.lb!r}, {self.ub!r}"
if np.any(self.keep_feasible):
end = f", keep_feasible={self.keep_feasible!r})"
else:
end = ")"
return start + end
def residual(self, x):
"""Calculate the residual (slack) between the input and the bounds
For a bound constraint of the form::
lb <= x <= ub
the lower and upper residuals between `x` and the bounds are values
``sl`` and ``sb`` such that::
lb + sl == x == ub - sb
When all elements of ``sl`` and ``sb`` are positive, all elements of
``x`` lie within the bounds; a negative element in ``sl`` or ``sb``
indicates that the corresponding element of ``x`` is out of bounds.
Parameters
----------
x: array_like
Vector of independent variables
Returns
-------
sl, sb : array-like
The lower and upper residuals
"""
return x - self.lb, self.ub - x
class PreparedConstraint:
"""Constraint prepared from a user defined constraint.
On creation it will check whether a constraint definition is valid and
the initial point is feasible. If created successfully, it will contain
the attributes listed below.
Parameters
----------
constraint : {NonlinearConstraint, LinearConstraint`, Bounds}
Constraint to check and prepare.
x0 : array_like
Initial vector of independent variables.
sparse_jacobian : bool or None, optional
If bool, then the Jacobian of the constraint will be converted
to the corresponded format if necessary. If None (default), such
conversion is not made.
finite_diff_bounds : 2-tuple, optional
Lower and upper bounds on the independent variables for the finite
difference approximation, if applicable. Defaults to no bounds.
Attributes
----------
fun : {VectorFunction, LinearVectorFunction, IdentityVectorFunction}
Function defining the constraint wrapped by one of the convenience
classes.
bounds : 2-tuple
Contains lower and upper bounds for the constraints --- lb and ub.
These are converted to ndarray and have a size equal to the number of
the constraints.
keep_feasible : ndarray
Array indicating which components must be kept feasible with a size
equal to the number of the constraints.
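    Examples
    --------
    A minimal sketch with a simple bound constraint (values are illustrative):
    >>> pc = PreparedConstraint(Bounds(0, 1), np.array([0.5]))
    >>> excess = pc.violation([2.0])  # amount by which 2.0 exceeds the bounds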
"""
def __init__(self, constraint, x0, sparse_jacobian=None,
finite_diff_bounds=(-np.inf, np.inf)):
if isinstance(constraint, NonlinearConstraint):
fun = VectorFunction(constraint.fun, x0,
constraint.jac, constraint.hess,
constraint.finite_diff_rel_step,
constraint.finite_diff_jac_sparsity,
finite_diff_bounds, sparse_jacobian)
elif isinstance(constraint, LinearConstraint):
fun = LinearVectorFunction(constraint.A, x0, sparse_jacobian)
elif isinstance(constraint, Bounds):
fun = IdentityVectorFunction(x0, sparse_jacobian)
else:
raise ValueError("`constraint` of an unknown type is passed.")
m = fun.m
lb = np.asarray(constraint.lb, dtype=float)
ub = np.asarray(constraint.ub, dtype=float)
keep_feasible = np.asarray(constraint.keep_feasible, dtype=bool)
lb = np.broadcast_to(lb, m)
ub = np.broadcast_to(ub, m)
keep_feasible = np.broadcast_to(keep_feasible, m)
if keep_feasible.shape != (m,):
raise ValueError("`keep_feasible` has a wrong shape.")
mask = keep_feasible & (lb != ub)
f0 = fun.f
if np.any(f0[mask] < lb[mask]) or np.any(f0[mask] > ub[mask]):
raise ValueError("`x0` is infeasible with respect to some "
"inequality constraint with `keep_feasible` "
"set to True.")
self.fun = fun
self.bounds = (lb, ub)
self.keep_feasible = keep_feasible
def violation(self, x):
"""How much the constraint is exceeded by.
Parameters
----------
x : array-like
Vector of independent variables
Returns
-------
excess : array-like
How much the constraint is exceeded by, for each of the
constraints specified by `PreparedConstraint.fun`.
"""
with suppress_warnings() as sup:
sup.filter(UserWarning)
ev = self.fun.fun(np.asarray(x))
excess_lb = np.maximum(self.bounds[0] - ev, 0)
excess_ub = np.maximum(ev - self.bounds[1], 0)
return excess_lb + excess_ub
def new_bounds_to_old(lb, ub, n):
"""Convert the new bounds representation to the old one.
The new representation is a tuple (lb, ub) and the old one is a list
containing n tuples, ith containing lower and upper bound on a ith
variable.
If any of the entries in lb/ub are -np.inf/np.inf they are replaced by
None.
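    For example (illustrative values):
    >>> new_bounds_to_old(0, [1, 2, np.inf], 3)
    [(0.0, 1.0), (0.0, 2.0), (0.0, None)]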
"""
lb = np.broadcast_to(lb, n)
ub = np.broadcast_to(ub, n)
lb = [float(x) if x > -np.inf else None for x in lb]
ub = [float(x) if x < np.inf else None for x in ub]
return list(zip(lb, ub))
def old_bound_to_new(bounds):
"""Convert the old bounds representation to the new one.
The new representation is a tuple (lb, ub) and the old one is a list
containing n tuples, ith containing lower and upper bound on a ith
variable.
If any of the entries in lb/ub are None they are replaced by
-np.inf/np.inf.
"""
lb, ub = zip(*bounds)
# Convert occurrences of None to -inf or inf, and replace occurrences of
# any numpy array x with x.item(). Then wrap the results in numpy arrays.
lb = np.array([float(_arr_to_scalar(x)) if x is not None else -np.inf
for x in lb])
ub = np.array([float(_arr_to_scalar(x)) if x is not None else np.inf
for x in ub])
return lb, ub
def strict_bounds(lb, ub, keep_feasible, n_vars):
"""Remove bounds which are not asked to be kept feasible."""
strict_lb = np.resize(lb, n_vars).astype(float)
strict_ub = np.resize(ub, n_vars).astype(float)
keep_feasible = np.resize(keep_feasible, n_vars)
strict_lb[~keep_feasible] = -np.inf
strict_ub[~keep_feasible] = np.inf
return strict_lb, strict_ub
def new_constraint_to_old(con, x0):
"""
Converts new-style constraint objects to old-style constraint dictionaries.
"""
if isinstance(con, NonlinearConstraint):
if (con.finite_diff_jac_sparsity is not None or
con.finite_diff_rel_step is not None or
not isinstance(con.hess, BFGS) or # misses user specified BFGS
con.keep_feasible):
            warn("Constraint options `finite_diff_jac_sparsity`, "
                 "`finite_diff_rel_step`, `keep_feasible`, and `hess` "
                 "are ignored by this method.", OptimizeWarning)
fun = con.fun
if callable(con.jac):
jac = con.jac
else:
jac = None
else: # LinearConstraint
if np.any(con.keep_feasible):
warn("Constraint option `keep_feasible` is ignored by this "
"method.", OptimizeWarning)
A = con.A
if issparse(A):
A = A.toarray()
fun = lambda x: np.dot(A, x)
jac = lambda x: A
# FIXME: when bugs in VectorFunction/LinearVectorFunction are worked out,
# use pcon.fun.fun and pcon.fun.jac. Until then, get fun/jac above.
pcon = PreparedConstraint(con, x0)
lb, ub = pcon.bounds
i_eq = lb == ub
i_bound_below = np.logical_xor(lb != -np.inf, i_eq)
i_bound_above = np.logical_xor(ub != np.inf, i_eq)
i_unbounded = np.logical_and(lb == -np.inf, ub == np.inf)
if np.any(i_unbounded):
warn("At least one constraint is unbounded above and below. Such "
"constraints are ignored.", OptimizeWarning)
ceq = []
if np.any(i_eq):
def f_eq(x):
y = np.array(fun(x)).flatten()
return y[i_eq] - lb[i_eq]
ceq = [{"type": "eq", "fun": f_eq}]
if jac is not None:
def j_eq(x):
dy = jac(x)
if issparse(dy):
dy = dy.toarray()
dy = np.atleast_2d(dy)
return dy[i_eq, :]
ceq[0]["jac"] = j_eq
cineq = []
n_bound_below = np.sum(i_bound_below)
n_bound_above = np.sum(i_bound_above)
if n_bound_below + n_bound_above:
def f_ineq(x):
y = np.zeros(n_bound_below + n_bound_above)
y_all = np.array(fun(x)).flatten()
y[:n_bound_below] = y_all[i_bound_below] - lb[i_bound_below]
y[n_bound_below:] = -(y_all[i_bound_above] - ub[i_bound_above])
return y
cineq = [{"type": "ineq", "fun": f_ineq}]
if jac is not None:
def j_ineq(x):
dy = np.zeros((n_bound_below + n_bound_above, len(x0)))
dy_all = jac(x)
if issparse(dy_all):
dy_all = dy_all.toarray()
dy_all = np.atleast_2d(dy_all)
dy[:n_bound_below, :] = dy_all[i_bound_below]
dy[n_bound_below:, :] = -dy_all[i_bound_above]
return dy
cineq[0]["jac"] = j_ineq
old_constraints = ceq + cineq
if len(old_constraints) > 1:
warn("Equality and inequality constraints are specified in the same "
"element of the constraint list. For efficient use with this "
"method, equality and inequality constraints should be specified "
"in separate elements of the constraint list. ", OptimizeWarning)
return old_constraints
def old_constraint_to_new(ic, con):
"""
Converts old-style constraint dictionaries to new-style constraint objects.
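    For example (an illustrative inequality dictionary):
    >>> nlc = old_constraint_to_new(0, {'type': 'ineq', 'fun': lambda x: 1 - x[0]})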
"""
# check type
try:
ctype = con['type'].lower()
except KeyError as e:
raise KeyError('Constraint %d has no type defined.' % ic) from e
except TypeError as e:
raise TypeError(
'Constraints must be a sequence of dictionaries.'
) from e
except AttributeError as e:
raise TypeError("Constraint's type must be a string.") from e
else:
if ctype not in ['eq', 'ineq']:
raise ValueError("Unknown constraint type '%s'." % con['type'])
if 'fun' not in con:
raise ValueError('Constraint %d has no function defined.' % ic)
lb = 0
if ctype == 'eq':
ub = 0
else:
ub = np.inf
jac = '2-point'
if 'args' in con:
args = con['args']
fun = lambda x: con['fun'](x, *args)
if 'jac' in con:
jac = lambda x: con['jac'](x, *args)
else:
fun = con['fun']
if 'jac' in con:
jac = con['jac']
return NonlinearConstraint(fun, lb, ub, jac)
| {
"content_hash": "6d05354c9f5e6b362fe4b67bf70b87d3",
"timestamp": "",
"source": "github",
"line_count": 569,
"max_line_length": 88,
"avg_line_length": 38.64674868189807,
"alnum_prop": 0.609549795361528,
"repo_name": "anntzer/scipy",
"id": "2bcd270ce1e4a1f182fb1126ad51934f1e226380",
"size": "21990",
"binary": false,
"copies": "9",
"ref": "refs/heads/main",
"path": "scipy/optimize/_constraints.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4615958"
},
{
"name": "C++",
"bytes": "961697"
},
{
"name": "Cython",
"bytes": "1059655"
},
{
"name": "Dockerfile",
"bytes": "10630"
},
{
"name": "Fortran",
"bytes": "5212087"
},
{
"name": "MATLAB",
"bytes": "4346"
},
{
"name": "Makefile",
"bytes": "778"
},
{
"name": "Meson",
"bytes": "151326"
},
{
"name": "Python",
"bytes": "15648288"
},
{
"name": "R",
"bytes": "3059"
},
{
"name": "Shell",
"bytes": "17744"
},
{
"name": "Starlark",
"bytes": "1757"
},
{
"name": "TeX",
"bytes": "52106"
}
],
"symlink_target": ""
} |
from omics_pipe.parameters.default_parameters import default_parameters
from omics_pipe.utils import *
p = Bunch(default_parameters)
def annovar(sample, annovar_flag):
'''Annotates variants with ANNOVAR variant annotator. Follows VarCall.
input:
.vcf
output:
.vcf
citation:
Wang K, Li M, Hakonarson H. ANNOVAR: Functional annotation of genetic variants from next-generation sequencing data Nucleic Acids Research, 38:e164, 2010
link:
http://www.openbioinformatics.org/annovar/
parameters from parameters file:
VARIANT_RESULTS:
ANNOVARDB:
ANNOVAR_OPTIONS:
ANNOVAR_OPTIONS2:
TEMP_DIR:
ANNOVAR_VERSION:
VCFTOOLS_VERSION:
'''
spawn_job(jobname = 'annovar', SAMPLE = sample, LOG_PATH = p.LOG_PATH, RESULTS_EMAIL = p.RESULTS_EMAIL, SCHEDULER = p.SCHEDULER, walltime = "240:00:00", queue = p.QUEUE, nodes = 1, ppn = 8, memory = "28gb", script = "/annovar.sh", args_list = [sample,p.VARIANT_RESULTS,p.ANNOVARDB,p.ANNOVAR_OPTIONS, p.ANNOVAR_OPTIONS2, p.TEMP_DIR, p.ANNOVAR_VERSION, p.VCFTOOLS_VERSION])
job_status(jobname = 'annovar', resultspath = p.VARIANT_RESULTS, SAMPLE = sample, outputfilename = sample + "/" + sample + ".vcf.gz", FLAG_PATH = p.FLAG_PATH)
return
if __name__ == '__main__':
annovar(sample, annovar_flag)
sys.exit(0) | {
"content_hash": "b982b450712ada34738e86eda9f8c894",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 375,
"avg_line_length": 39.666666666666664,
"alnum_prop": 0.6442577030812325,
"repo_name": "adammaikai/OmicsPipe2.0",
"id": "dbe6d0605b5685f6aeba2dd6e8a79395ea7e225e",
"size": "1451",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "build/lib.linux-x86_64-2.7/omics_pipe/modules/annovar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9660"
},
{
"name": "Groff",
"bytes": "126"
},
{
"name": "Perl",
"bytes": "3396"
},
{
"name": "Python",
"bytes": "543104"
},
{
"name": "R",
"bytes": "342554"
},
{
"name": "Shell",
"bytes": "260672"
}
],
"symlink_target": ""
} |
import os
from mi.logging import config
from mi.dataset.driver.adcpt_acfgm.dcl.pd0.adcpt_acfgm_dcl_pd0_driver_common import AdcptAcfgmDclPd0Driver
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.core.versioning import version
__author__ = "Jeff Roy"
@version("15.7.0")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
config.add_configuration(os.path.join(basePythonCodePath, 'res', 'config', 'mi-logging.yml'))
parser_config = {
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
'velocity': 'Velocity',
'engineering': 'Engineering',
'config': 'Config',
}
}
driver = AdcptAcfgmDclPd0Driver(sourceFilePath, particleDataHdlrObj, parser_config)
return driver.process()
| {
"content_hash": "855eeea00aab068df2234d2b76ba3450",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 106,
"avg_line_length": 30.23076923076923,
"alnum_prop": 0.7150127226463104,
"repo_name": "JeffRoy/mi-dataset",
"id": "9aaa684a594a9e32ca926c798b1c8f7d1ec8e886",
"size": "867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mi/dataset/driver/adcpt_acfgm/dcl/pd0/adcpt_acfgm_dcl_pd0_telemetered_driver.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "3610231"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from voprov.serializers.provn import VOProvNSerializer
from voprov.serializers.xml import VOProvXMLSerializer
from voprov.serializers.provjson import VOProvJSONSerializer
from prov import Error
__author__ = 'Jean-Francois Sornay'
__email__ = '[email protected]'
__all__ = [
'get'
]
class DoNotExist(Error):
"""Exception for the case a serializer is not available."""
pass
class Registry:
"""Registry of serializers."""
serializers = None
"""Property caching all available serializers in a dict."""
@staticmethod
def load_serializers():
"""Loads all available serializers into the registry."""
from voprov.serializers.provjson import VOProvJSONSerializer
from voprov.serializers.provn import VOProvNSerializer
from voprov.serializers.xml import VOProvXMLSerializer
from prov.serializers.provrdf import ProvRDFSerializer
Registry.serializers = {
'json': VOProvJSONSerializer,
'rdf': ProvRDFSerializer,
'provn': VOProvNSerializer,
'xml': VOProvXMLSerializer
}
def get(format_name):
"""
    Returns the serializer class for the specified format. Raises a DoNotExist
    error if no serializer is available for the given format.
"""
# Lazily initialize the list of serializers to avoid cyclic imports
if Registry.serializers is None:
Registry.load_serializers()
try:
return Registry.serializers[format_name]
except KeyError:
raise DoNotExist(
'No serializer available for the format "%s"' % format_name
)
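# A minimal usage sketch (format names as registered in Registry above):
#
#   serializer_cls = get('xml')    # -> VOProvXMLSerializer
#   serializer_cls = get('bogus')  # raises DoNotExist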
| {
"content_hash": "dc08b0cdc8de354c34e1ddd1c1863e6a",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 78,
"avg_line_length": 30.436363636363637,
"alnum_prop": 0.6804062126642771,
"repo_name": "sanguillon/voprov",
"id": "dc04d4d3e48b7e5041f276e879a7ac611281a7a3",
"size": "1698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "voprov/serializers/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2101119"
},
{
"name": "Python",
"bytes": "171065"
}
],
"symlink_target": ""
} |
import unittest
from nose.tools import *
from footy.test_data.test_data_paths import premier_league_2015_2016_path
from footy.src.clubs.club_gateway import ClubGateway
class ClubGatewayTest(unittest.TestCase):
def setUp(self):
self.gateway = ClubGateway(premier_league_2015_2016_path)
def test_get_all_clubs(self):
clubs = self.gateway.get_all()
self.assertEquals(20, len(clubs))
def test_get_all_clubs_includes_club_name(self):
clubs = self.gateway.get_all()
self.assertEquals("Arsenal", clubs[0].name)
self.assertEquals("Aston Villa", clubs[1].name)
self.assertEquals("Bournemouth", clubs[2].name)
| {
"content_hash": "614e27707439adee97d45d074a1cf9a8",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 73,
"avg_line_length": 33.65,
"alnum_prop": 0.6983655274888558,
"repo_name": "bryce-klinker/hello-python",
"id": "3b3d82864cc0776c4af3325f627956c088809c05",
"size": "673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "footy/test/clubs/test_club_gateway.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "2350"
},
{
"name": "Python",
"bytes": "23281"
}
],
"symlink_target": ""
} |
import posixpath
import xml.dom.minidom as xml
from xml.parsers.expat import ExpatError
from appengine_url_fetcher import AppEngineUrlFetcher
from docs_server_utils import StringIdentity
from file_system import FileSystem, FileNotFoundError, StatInfo, ToUnicode
from future import Future
import svn_constants
import url_constants
class _AsyncFetchFuture(object):
def __init__(self, paths, fetcher, binary, args=None):
def apply_args(path):
return path if args is None else '%s?%s' % (path, args)
# A list of tuples of the form (path, Future).
self._fetches = [(path, fetcher.FetchAsync(apply_args(path)))
for path in paths]
self._value = {}
self._error = None
self._binary = binary
def _ListDir(self, directory):
dom = xml.parseString(directory)
files = [elem.childNodes[0].data for elem in dom.getElementsByTagName('a')]
if '..' in files:
files.remove('..')
return files
def Get(self):
for path, future in self._fetches:
try:
result = future.Get()
except Exception as e:
raise FileNotFoundError(
'Error when fetching %s for Get: %s' % (path, e))
if result.status_code == 404:
raise FileNotFoundError('Got 404 when fetching %s for Get' % path)
elif path.endswith('/'):
self._value[path] = self._ListDir(result.content)
elif not self._binary:
self._value[path] = ToUnicode(result.content)
else:
self._value[path] = result.content
if self._error is not None:
raise self._error
return self._value
class SubversionFileSystem(FileSystem):
'''Class to fetch resources from src.chromium.org.
'''
@staticmethod
def Create(branch, revision=None):
if branch == 'trunk':
svn_path = 'trunk/src/%s' % svn_constants.EXTENSIONS_PATH
else:
svn_path = 'branches/%s/src/%s' % (branch,
svn_constants.EXTENSIONS_PATH)
return SubversionFileSystem(
AppEngineUrlFetcher('%s/%s' % (url_constants.SVN_URL, svn_path)),
AppEngineUrlFetcher('%s/%s' % (url_constants.VIEWVC_URL, svn_path)),
svn_path,
revision=revision)
def __init__(self, file_fetcher, stat_fetcher, svn_path, revision=None):
self._file_fetcher = file_fetcher
self._stat_fetcher = stat_fetcher
self._svn_path = svn_path
self._revision = revision
def Read(self, paths, binary=False):
args = None
if self._revision is not None:
# |fetcher| gets from svn.chromium.org which uses p= for version.
args = 'p=%s' % self._revision
return Future(delegate=_AsyncFetchFuture(paths,
self._file_fetcher,
binary,
args=args))
def _ParseHTML(self, html):
'''Unfortunately, the viewvc page has a stray </div> tag, so this takes care
of all mismatched tags.
'''
try:
return xml.parseString(html)
except ExpatError as e:
return self._ParseHTML('\n'.join(
line for (i, line) in enumerate(html.split('\n'))
if e.lineno != i + 1))
def _CreateStatInfo(self, html):
def inner_text(node):
'''Like node.innerText in JS DOM, but strips surrounding whitespace.
'''
text = []
if node.nodeValue:
text.append(node.nodeValue)
if hasattr(node, 'childNodes'):
for child_node in node.childNodes:
text.append(inner_text(child_node))
return ''.join(text).strip()
dom = self._ParseHTML(html)
# Try all of the tables until we find the one that contains the data.
for table in dom.getElementsByTagName('table'):
# Within the table there is a list of files. However, there may be some
# things beforehand; a header, "parent directory" list, etc. We will deal
# with that below by being generous and just ignoring such rows.
rows = table.getElementsByTagName('tr')
child_versions = {}
for row in rows:
# Within each row there are probably 5 cells; name, version, age,
# author, and last log entry. Maybe the columns will change; we're at
# the mercy viewvc, but this constant can be easily updated.
elements = row.getElementsByTagName('td')
if len(elements) != 5:
continue
name_element, version_element, _, __, ___ = elements
name = inner_text(name_element) # note: will end in / for directories
try:
version = int(inner_text(version_element))
except ValueError:
continue
child_versions[name] = version
if not child_versions:
continue
# Parent version is max version of all children, since it's SVN.
parent_version = max(child_versions.values())
# All versions in StatInfo need to be strings.
return StatInfo(str(parent_version),
dict((path, str(version))
for path, version in child_versions.iteritems()))
# Bleh, but, this data is so unreliable. There are actually some empty file
# listings caused by git/svn/something not cleaning up empty dirs.
return StatInfo('0', {})
def Stat(self, path):
directory, filename = posixpath.split(path)
directory += '/'
if self._revision is not None:
# |stat_fetch| uses viewvc which uses pathrev= for version.
directory += '?pathrev=%s' % self._revision
result = self._stat_fetcher.Fetch(directory)
if result.status_code == 404:
raise FileNotFoundError(
'Got 404 when fetching %s from %s for Stat' % (path, directory))
stat_info = self._CreateStatInfo(result.content)
if path.endswith('/'):
return stat_info
if filename not in stat_info.child_versions:
raise FileNotFoundError('%s was not in child versions' % filename)
return StatInfo(stat_info.child_versions[filename])
def GetIdentity(self):
# NOTE: no revision here, consider it just an implementation detail of the
# file version that is handled by Stat.
return '@'.join((self.__class__.__name__, StringIdentity(self._svn_path)))
| {
"content_hash": "2eda9520b3918675baea7e79f54dd66f",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 80,
"avg_line_length": 37.4969696969697,
"alnum_prop": 0.6272830127687086,
"repo_name": "jing-bao/pa-chromium",
"id": "c0ff68c9a1fd7e6c1d64c992b04a2075c20dd7c3",
"size": "6354",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "chrome/common/extensions/docs/server2/subversion_file_system.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from django.conf import settings as django_settings
SU_ADMIN_NAME = "Django Admin"
SU_ADMIN_MENU = [
]
class Settings(object):
"""Settings for internal use."""
    def __getattr__(self, name):
        if not hasattr(django_settings, name) and name not in globals():
            raise ValueError('{} is not found in settings'.format(name))
        # Prefer the Django settings value; fall back to the module default
        # only when absent there. Looking the fallback up lazily avoids a
        # KeyError for names defined only in the Django settings module.
        if hasattr(django_settings, name):
            return getattr(django_settings, name)
        return globals()[name]
settings = Settings()
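# A minimal usage sketch: attribute access prefers the Django settings module
# and falls back to the defaults defined above.
#
#   settings.SU_ADMIN_NAME  # "Django Admin" unless overridden in Django settings
#   settings.SU_ADMIN_MENU  # [] unless overridden in Django settings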
| {
"content_hash": "902f0220989418000372a01276b1e7ed",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 72,
"avg_line_length": 23.63157894736842,
"alnum_prop": 0.6458797327394209,
"repo_name": "gbdlin/django-admin-sidebar",
"id": "024af5987a25c715fd9986aa6a83a3e37c6b0a6b",
"size": "449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8986"
},
{
"name": "HTML",
"bytes": "5601"
},
{
"name": "Python",
"bytes": "4545"
}
],
"symlink_target": ""
} |
import MySQLdb
import collections
c = MySQLdb.connect()
f = collections.deque.popleft
# TODO not sure if this should be an error or not
f(None) # e 0
c # 0 MySQLConnection
if 1:
c # 4 MySQLConnection
x = 1 # 0 int
def g():
global x
x = ''
x # 0 int
g()
x # 0 <int|str>
x = '' # 0 str
g()
x # 0 <int|str>
def f():
global x
x = 1 # 4 int
g()
x # 4 <int|str>
# TODO another test: get a reference to the local scope (ex to the module), and change a local through that
| {
"content_hash": "a6fed78db594f9cff37bdae2a7c18a29",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 107,
"avg_line_length": 13.289473684210526,
"alnum_prop": 0.601980198019802,
"repo_name": "kmod/icbd",
"id": "732299f31ce2f70b4b1dbc995fcb6176873d9fc1",
"size": "505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "icbd/type_analyzer/tests/clearing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "33042"
},
{
"name": "C++",
"bytes": "35981"
},
{
"name": "CSS",
"bytes": "8888"
},
{
"name": "JavaScript",
"bytes": "3602"
},
{
"name": "Makefile",
"bytes": "48655"
},
{
"name": "Objective-C",
"bytes": "88"
},
{
"name": "Python",
"bytes": "10340340"
},
{
"name": "Shell",
"bytes": "18865"
}
],
"symlink_target": ""
} |