# Dataset dump of Python source files. The original viewer columns were:
# repo_name | path | copies | size | content | license | hash | line_mean |
# line_max | alpha_frac | autogenerated. Per-file numeric stats are dropped
# below; each file is introduced by a separator comment instead.

# ==== philipgian/pre-commit :: tests/make_archives_test.py (license: mit) ====
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path
import tarfile
import mock
import pytest
from pre_commit import make_archives
from pre_commit.util import cmd_output
from pre_commit.util import cwd
from testing.fixtures import git_dir
from testing.util import get_head_sha
from testing.util import skipif_slowtests_false
def test_make_archive(tempdir_factory):
output_dir = tempdir_factory.get()
git_path = git_dir(tempdir_factory)
    # Add a file to the git directory
with cwd(git_path):
cmd_output('touch', 'foo')
cmd_output('git', 'add', '.')
cmd_output('git', 'commit', '-m', 'foo')
# We'll use this sha
head_sha = get_head_sha('.')
# And check that this file doesn't exist
cmd_output('touch', 'bar')
cmd_output('git', 'add', '.')
cmd_output('git', 'commit', '-m', 'bar')
# Do the thing
archive_path = make_archives.make_archive(
'foo', git_path, head_sha, output_dir,
)
assert archive_path == os.path.join(output_dir, 'foo.tar.gz')
assert os.path.exists(archive_path)
extract_dir = tempdir_factory.get()
# Extract the tar
with tarfile.open(archive_path) as tf:
tf.extractall(extract_dir)
# Verify the contents of the tar
assert os.path.exists(os.path.join(extract_dir, 'foo'))
assert os.path.exists(os.path.join(extract_dir, 'foo', 'foo'))
assert not os.path.exists(os.path.join(extract_dir, 'foo', '.git'))
assert not os.path.exists(os.path.join(extract_dir, 'foo', 'bar'))
@skipif_slowtests_false
@pytest.mark.integration
def test_main(tempdir_factory):
path = tempdir_factory.get()
# Don't actually want to make these in the current repo
with mock.patch.object(make_archives, 'RESOURCES_DIR', path):
make_archives.main()
for archive, _, _ in make_archives.REPOS:
assert os.path.exists(os.path.join(path, archive + '.tar.gz'))
# ==== Ledoux/ShareYourSystem :: Pythonlogy/ShareYourSystem/Standards/Tutorials/_Drafts/Distance/__init__.py (license: mit) ====
#<ImportSpecificModules>
import ShareYourSystem as SYS
import numpy as np
import scipy.stats
from tables import *
import time
import operator
import os
#</ImportSpecificModules>
#<DefineLocals>
#</DefineLocals>
#<DefineClass>
class DistanceClass(SYS.ObjectsClass):

    #<DefineHookMethods>
    def initAfter(self):

        #<DefineSpecificDo>
        self.IntsList=[1,4,3]
        self.PowerFloat=0.5
        self.SquaredIntsList=[1,16,9]
        self.UnitsInt=3
        self.DistanceFloat=np.sqrt(sum(self.SquaredIntsList))
        #</DefineSpecificDo>

        #Define the features
        self['App_Model_ParameterizingDict']={
            'ColumningTuplesList':
            [
                #ColumnStr #Col
                ('PowerFloat', Float64Col()),
                ('IntsList', (Int64Col,'UnitsInt'))
            ],
            'IsFeaturingBool':True,
            'ScanningTuplesList':
            [
                ('IntsList',[[1,2,3],[4,5]])
            ]
        }

        #Define the outputs
        self['App_Model_ResultingDict']={
            'ColumningTuplesList':
            [
                #ColumnStr #Col
                ('SquaredIntsList', (Int64Col,'UnitsInt')),
                ('DistanceFloat', Float64Col()),
                ('IntsList', (Int64Col,'UnitsInt'))
            ],
            'JoiningTuple':("","Parameter")
        }

    def outputAfter(self,**_LocalOutputingVariablesDict):

        #set the SquaredIntsList (list comprehension keeps a list under
        #both py2 and py3, unlike a bare map)
        self.SquaredIntsList=[__Int**2 for __Int in self.IntsList]

        #set the DistanceFloat
        self.DistanceFloat=np.power(sum(self.SquaredIntsList),self.PowerFloat)
    #</DefineHookMethods>

    #<DefineTriggeringHookMethods>
    def bindIntsListAfter(self):

        #Bind with UnitsInt setting
        self.UnitsInt=len(self.IntsList)
    #</DefineTriggeringHookMethods>
#</DefineClass>
#<DefineAttestingFunctions>
def attest_insert():

    #Insert the default output
    Distance=SYS.DistanceClass(
        ).update(
            [
                ('IntsList',[4,5]),
                ('PowerFloat',0.5)
            ]
        ).insert('Result'
        ).update(
            [
                ('IntsList',[4,5]),
                ('PowerFloat',1.)
            ]
        ).insert(
        ).update(
            [
                ('IntsList',[4,5]),
                ('PowerFloat',2.)
            ]
        ).insert(
        ).update(
            [
                ('IntsList',[1,2,3]),
                ('PowerFloat',0.5)
            ]
        ).insert(
        ).update(
            [
                ('IntsList',[4,6]),
                ('PowerFloat',1.)
            ]
        ).insert(
        ).update(
            [
                ('IntsList',[1,2,3]),
                ('PowerFloat',1.)
            ]
        ).insert(
        ).update(
            [
                ('IntsList',[0,1]),
                ('PowerFloat',0.5)
            ]
        ).insert(
        ).hdfclose()

    #Return the object and the h5py
    return "\n\n\n\n"+SYS.represent(
        Distance
    )+'\n\n\n'+SYS.represent(
        os.popen('/usr/local/bin/h5ls -dlr '+Distance.HdformatingPathStr).read()
    )

def attest_retrieve():

    Distance=SYS.DistanceClass(
        ).__setitem__('/App_Model_ResultingDict/RetrievingIndexesList',(0,2)
        ).retrieve('Result'
        ).hdfclose()

    #Return the object and the h5py
    return "\n\n\n\n"+SYS.represent(
        Distance
    )

def attest_find():

    Distance=SYS.DistanceClass(
        ).update(
            [
                ('/App_Model_ParameterizingDict/FindingTuplesList',[
                    ('IntsList',(SYS.getIsEqualBool,[4,5])),
                ]),
                ('/App_Model_ResultingDict/update',
                    [
                        ('MergingTuplesList',
                            [
                                ('UnitsInt',(operator.eq,2))
                            ]
                        ),
                        ('FindingTuplesList',[
                            ('DistanceFloat',(operator.gt,30.)),
                            #('__IntsList',(SYS.getIsEqualBool,[4,5])),
                        ])
                    ]
                )
            ]
        ).find('Result'
        ).hdfclose()

    #Return the object and the h5py
    return "\n\n\n\n"+SYS.represent(
        Distance
    )

def attest_recover():

    Distance=SYS.DistanceClass(
        ).update(
            [
                ('/App_Model_ParameterizingDict/FindingTuplesList',[
                    ('IntsList',(SYS.getIsEqualBool,[4,5])),
                    ('PowerFloat',(SYS.getIsEqualBool,1.))
                ]),
                ('/App_Model_ResultingDict/update',
                    [
                        ('MergingTuplesList',
                            [
                                ('UnitsInt',(operator.eq,2))
                            ]
                        ),
                        ('FindingTuplesList',[
                            ('DistanceFloat',(operator.gt,30.)),
                            #('__IntsList',(SYS.getIsEqualBool,[4,5])),
                        ])
                    ]
                )
            ]
        ).recover('Result'
        ).hdfclose()

    #Return the object and the h5py
    return "\n\n\n\n"+SYS.represent(
        Distance
    )

def attest_scan():

    #Scan
    Distance=SYS.DistanceClass(
        ).scan('Result'
        ).hdfclose()

    #Return the object and the h5py
    return "\n\n\n\n"+SYS.represent(
        Distance
    )+'\n\n\n'+SYS.represent(
        os.popen('/usr/local/bin/h5ls -dlr '+Distance.HdformatingPathStr).read()
    )
#</DefineAttestingFunctions>
# ==== slimta/python-slimta :: slimta/util/__init__.py (license: mit) ====
# Copyright (c) 2016 Ian C. Good
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
"""Package containing a variety of useful modules utilities that didn't really
belong anywhere else.
"""
from __future__ import absolute_import
from gevent import socket
__all__ = ['build_ipv4_socket_creator', 'create_connection_ipv4',
'create_listeners']
def build_ipv4_socket_creator(only_ports=None):
"""Returns a function that will act like
:py:func:`socket.create_connection` but only using IPv4 addresses. This
function can be used as the ``socket_creator`` argument to some classes
like :class:`~slimta.relay.smtp.mx.MxSmtpRelay`.
:param only_ports: If given, can be a list to limit which ports are
restricted to IPv4. Connections to all other ports may
be IPv6.
"""
def socket_creator(*args, **kwargs):
return create_connection_ipv4(*args, only_ports=only_ports, **kwargs)
return socket_creator
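
# A hedged usage sketch (not part of the original module): restrict SMTP
# (port 25) connections to IPv4 while leaving other ports dual-stack. The
# host and port values below are placeholders.
def _example_ipv4_socket_creator():  # pragma: no cover
    creator = build_ipv4_socket_creator(only_ports=[25])
    # Same calling convention as socket.create_connection().
    sock = creator(('mail.example.com', 25), timeout=10)
    sock.close()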
def create_connection_ipv4(address, timeout=None, source_address=None,
only_ports=None):
"""Attempts to mimick to :py:func:`socket.create_connection`, but
connections are only made to IPv4 addresses.
:param only_ports: If given, can be a list to limit which ports are
restricted to IPv4. Connections to all other ports may
be IPv6.
"""
host, port = address
if only_ports and port not in only_ports:
return socket.create_connection(address, timeout, source_address)
last_exc = None
for res in socket.getaddrinfo(host, port, socket.AF_INET):
_, _, _, _, sockaddr = res
try:
return socket.create_connection(sockaddr, timeout, source_address)
except socket.error as exc:
last_exc = exc
if last_exc is not None:
raise last_exc
else:
raise socket.error('getaddrinfo returns an empty list')
def create_listeners(address,
family=socket.AF_UNSPEC,
socktype=socket.SOCK_STREAM,
proto=socket.IPPROTO_IP):
"""Uses :func:`socket.getaddrinfo` to create listening sockets for
available socket parameters. For example, giving *address* as
``('localhost', 80)`` on a system with IPv6 would return one socket bound
    to ``127.0.0.1`` and one bound to ``::1``.
May also be used for ``socket.AF_UNIX`` with a file path to produce a
single unix domain socket listening on that path.
:param address: A ``(host, port)`` tuple to listen on.
:param family: the socket family, default ``AF_UNSPEC``.
:param socktype: the socket type, default ``SOCK_STREAM``.
:param proto: the socket protocol, default ``IPPROTO_IP``.
"""
if family == socket.AF_UNIX:
sock = socket.socket(family, socktype, proto)
_init_socket(sock, address)
return [sock]
elif not isinstance(address, tuple) or len(address) != 2:
raise ValueError(address)
flags = socket.AI_PASSIVE
host, port = address
listeners = []
last_exc = None
for res in socket.getaddrinfo(host, port, family, socktype, proto, flags):
fam, typ, prt, _, sockaddr = res
try:
sock = socket.socket(fam, typ, prt)
_init_socket(sock, sockaddr)
except socket.error as exc:
last_exc = exc
else:
listeners.append(sock)
if last_exc and not listeners:
raise last_exc
return listeners
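
# A hedged usage sketch (not part of the original module): bind dual-stack
# listeners on an ephemeral localhost port, then release them. On a host
# with an IPv6 loopback this typically yields two sockets; otherwise one.
def _example_create_listeners():  # pragma: no cover
    listeners = create_listeners(('localhost', 0))
    try:
        for sock in listeners:
            print(sock.getsockname())
    finally:
        for sock in listeners:
            sock.close()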
def _init_socket(sock, sockaddr):
try:
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
except socket.error:
pass
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except socket.error:
pass
sock.setblocking(0)
sock.bind(sockaddr)
if sock.type != socket.SOCK_DGRAM:
sock.listen(socket.SOMAXCONN)
# vim:et:fdm=marker:sts=4:sw=4:ts=4
# ==== Azure/azure-sdk-for-python :: sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/datab/aio/_configuration.py (license: mit) ====
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
VERSION = "unknown"
class DataBoxEdgeManagementClientConfiguration(Configuration):
"""Configuration for DataBoxEdgeManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The subscription ID.
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(DataBoxEdgeManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2020-12-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-databoxedge/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
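
# A hedged construction sketch (not part of the generated file): the
# configuration is normally built internally by the generated service
# client, but it can be instantiated directly, e.g. to inspect the default
# pipeline policies. ``DefaultAzureCredential`` is an assumed import from
# the separate azure-identity package.
#
#     from azure.identity.aio import DefaultAzureCredential
#
#     config = DataBoxEdgeManagementClientConfiguration(
#         credential=DefaultAzureCredential(),
#         subscription_id="00000000-0000-0000-0000-000000000000",
#     )
#     assert config.api_version == "2020-12-01"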
# ==== jeromecc/doctoctocbot :: src/crowdfunding/migrations/0013_tiers.py (license: mpl-2.0) ====
# Generated by Django 2.0.13 on 2019-02-25 05:21
from decimal import Decimal
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('crowdfunding', '0012_auto_20190224_0523'),
]
operations = [
migrations.CreateModel(
name='Tiers',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tag', models.CharField(max_length=191)),
('description', models.CharField(max_length=191)),
('emoji', models.CharField(blank=True, max_length=4)),
('image', models.ImageField(blank=True, upload_to='')),
('min', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=12)),
('max', models.DecimalField(decimal_places=2, default=Decimal('Infinity'), max_digits=12)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crowdfunding.Project')),
],
),
]
# ==== yanni4night/ursa-django :: app/settings.py (license: mit) ====
"""
Django settings for ursa-django project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', 'dev')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'uq==k2a4+j^3i3)wns^+3%9)ww+eysjo0)-sg(hu5q$6=uqg^+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'app.urls'
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'zh-cn'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = False
USE_L10N = False
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(PROJECT_ROOT, 'templates')]
# ==== iotaledger/iota.lib.py :: iota/multisig/commands/prepare_multisig_transfer.py (license: mit) ====
from typing import List, Optional
import filters as f
from iota import Address, ProposedTransaction
from iota.commands import FilterCommand, RequestFilter
from iota.commands.core import GetBalancesCommand
from iota.exceptions import with_context
from iota.filters import Trytes
from iota.multisig.transaction import ProposedMultisigBundle
from iota.multisig.types import MultisigAddress
__all__ = [
'PrepareMultisigTransferCommand',
]
class PrepareMultisigTransferCommand(FilterCommand):
"""
Implements `prepare_multisig_transfer` multisig API command.
References:
- :py:meth:`iota.multisig.api.MultisigIota.prepare_multisig_transfer`
"""
command = 'prepareMultisigTransfer'
def get_request_filter(self) -> 'PrepareMultisigTransferRequestFilter':
return PrepareMultisigTransferRequestFilter()
def get_response_filter(self):
pass
async def _execute(self, request: dict) -> dict:
change_address: Optional[Address] = request['changeAddress']
multisig_input: MultisigAddress = request['multisigInput']
transfers: List[ProposedTransaction] = request['transfers']
bundle = ProposedMultisigBundle(transfers)
want_to_spend = bundle.balance
if want_to_spend > 0:
gb_response = await GetBalancesCommand(self.adapter)(
addresses=[multisig_input],
)
multisig_input.balance = gb_response['balances'][0]
if multisig_input.balance < want_to_spend:
raise with_context(
exc=ValueError(
'Insufficient balance; found {found}, need {need} '
'(``exc.context`` has more info).'.format(
found=multisig_input.balance,
need=want_to_spend,
),
),
# The structure of this context object is intended
# to match the one from ``PrepareTransferCommand``.
context={
'available_to_spend': multisig_input.balance,
'confirmed_inputs': [multisig_input],
'request': request,
'want_to_spend': want_to_spend,
},
)
bundle.add_inputs([multisig_input])
if bundle.balance < 0:
if change_address:
bundle.send_unspent_inputs_to(change_address)
else:
#
# Unlike :py:meth:`iota.api.Iota.prepare_transfer`
# where all of the inputs are owned by the same
# seed, creating a multisig transfer usually
# involves multiple people.
#
# It would be unfair to the participants of the
# transaction if we were to automatically generate a
# change address using the seed of whoever happened
# to invoke the
# :py:meth:`MultisigIota.prepare_multisig_transfer`
# method!
#
raise with_context(
exc=ValueError(
'Bundle has unspent inputs, '
'but no change address specified.',
),
context={
'available_to_spend': multisig_input.balance,
'balance': bundle.balance,
'confirmed_inputs': [multisig_input],
'request': request,
'want_to_spend': want_to_spend,
},
)
else:
raise with_context(
exc=ValueError(
'Use ``prepare_transfer`` '
'to create a bundle without spending IOTAs.',
),
context={
'request': request,
},
)
bundle.finalize()
# Return the bundle with inputs unsigned.
return {
'trytes': bundle.as_tryte_strings(),
}
class PrepareMultisigTransferRequestFilter(RequestFilter):
def __init__(self) -> None:
super(PrepareMultisigTransferRequestFilter, self).__init__(
{
'changeAddress': Trytes(Address),
'multisigInput': f.Required | f.Type(MultisigAddress),
'transfers':
f.Required | f.Array | f.FilterRepeater(
f.Required | f.Type(ProposedTransaction),
),
},
allow_missing_keys={
'changeAddress',
},
)
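
# A hedged usage sketch (not part of the original module): this command is
# normally reached through ``MultisigIota.prepare_multisig_transfer``. The
# adapter URI, addresses, and value below are placeholders.
#
#     from iota import Address, ProposedTransaction
#     from iota.multisig import MultisigIota
#
#     api = MultisigIota(adapter='http://localhost:14265')
#     result = api.prepare_multisig_transfer(
#         transfers=[
#             ProposedTransaction(address=receiver_address, value=42),
#         ],
#         multisig_input=multisig_address,  # a MultisigAddress built earlier
#         change_address=change_address,    # needed when inputs exceed spend
#     )
#     unsigned_bundle_trytes = result['trytes']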
# ==== google-research/google-research :: smu/parser/smu_utils_lib_test.py (license: apache-2.0) ====
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for smu_utils_lib."""
import copy
import os
import tempfile
from absl.testing import absltest
from absl.testing import parameterized
import numpy as np
import pandas as pd
from rdkit import Chem
from google.protobuf import text_format
from smu import dataset_pb2
from smu.parser import smu_parser_lib
from smu.parser import smu_utils_lib
MAIN_DAT_FILE = 'x07_sample.dat'
STAGE1_DAT_FILE = 'x07_stage1.dat'
TESTDATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'testdata')
def str_to_bond_topology(s):
bt = dataset_pb2.BondTopology()
text_format.Parse(s, bt)
return bt
def get_stage1_conformer():
parser = smu_parser_lib.SmuParser(
os.path.join(TESTDATA_PATH, STAGE1_DAT_FILE))
conformer, _ = next(parser.process_stage1())
return conformer
def get_stage2_conformer():
parser = smu_parser_lib.SmuParser(os.path.join(TESTDATA_PATH, MAIN_DAT_FILE))
conformer, _ = next(parser.process_stage2())
return conformer
class SpecialIDTest(absltest.TestCase):
def test_from_dat_id(self):
self.assertIsNone(
smu_utils_lib.special_case_bt_id_from_dat_id(123456, 'CC'))
self.assertEqual(smu_utils_lib.special_case_bt_id_from_dat_id(999998, 'O'),
899650)
self.assertEqual(smu_utils_lib.special_case_bt_id_from_dat_id(0, 'O'),
899650)
with self.assertRaises(ValueError):
smu_utils_lib.special_case_bt_id_from_dat_id(0, 'NotASpecialCaseSmiles')
def test_from_bt_id(self):
self.assertIsNone(smu_utils_lib.special_case_dat_id_from_bt_id(123456))
self.assertEqual(
smu_utils_lib.special_case_dat_id_from_bt_id(899651), 999997)
class GetCompositionTest(absltest.TestCase):
def test_simple(self):
bt = dataset_pb2.BondTopology()
bt.atoms.extend([dataset_pb2.BondTopology.ATOM_C,
dataset_pb2.BondTopology.ATOM_C,
dataset_pb2.BondTopology.ATOM_N,
dataset_pb2.BondTopology.ATOM_H,
dataset_pb2.BondTopology.ATOM_H,
dataset_pb2.BondTopology.ATOM_H])
self.assertEqual('x03_c2nh3', smu_utils_lib.get_composition(bt))
class GetCanonicalStoichiometryWithHydrogensTest(absltest.TestCase):
def test_cyclobutane(self):
bt = smu_utils_lib.create_bond_topology('CCCC', '110011', '2222')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(bt), '(ch2)4')
def test_ethylene(self):
bt = smu_utils_lib.create_bond_topology('CC', '2', '22')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(bt), '(ch2)2')
def test_acrylic_acid(self):
bt = smu_utils_lib.create_bond_topology('CCCOO', '2000100210', '21001')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(bt),
'(c)(ch)(ch2)(o)(oh)')
def test_fluorine(self):
bt = smu_utils_lib.create_bond_topology('OFF', '110', '000')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(bt), '(o)(f)2')
def test_fully_saturated(self):
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(
smu_utils_lib.create_bond_topology('C', '', '4')), '(ch4)')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(
smu_utils_lib.create_bond_topology('N', '', '3')), '(nh3)')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(
smu_utils_lib.create_bond_topology('O', '', '2')), '(oh2)')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(
smu_utils_lib.create_bond_topology('F', '', '1')), '(fh)')
def test_nplus_oneg(self):
bt = smu_utils_lib.create_bond_topology('NO', '1', '30')
self.assertEqual(
smu_utils_lib.get_canonical_stoichiometry_with_hydrogens(bt),
'(nh3)(o)')
class ParseBondTopologyTest(absltest.TestCase):
def test_4_heavy(self):
num_atoms, atoms_str, matrix, hydrogens = smu_utils_lib.parse_bond_topology_line(
' 4 N+O O O- 010110 3000')
self.assertEqual(num_atoms, 4)
self.assertEqual(atoms_str, 'N+O O O-')
self.assertEqual(matrix, '010110')
self.assertEqual(hydrogens, '3000')
def test_7_heavy(self):
num_atoms, atoms_str, matrix, hydrogens = smu_utils_lib.parse_bond_topology_line(
' 7 N+O O O O-F F 001011101001000000000 1000000')
self.assertEqual(num_atoms, 7)
self.assertEqual(atoms_str, 'N+O O O O-F F ') # Note the trailing space
self.assertEqual(matrix, '001011101001000000000')
self.assertEqual(hydrogens, '1000000')
class CreateBondTopologyTest(absltest.TestCase):
def test_no_charged(self):
got = smu_utils_lib.create_bond_topology('CNFF', '111000', '1200')
expected_str = '''
atoms: ATOM_C
atoms: ATOM_N
atoms: ATOM_F
atoms: ATOM_F
atoms: ATOM_H
atoms: ATOM_H
atoms: ATOM_H
bonds {
atom_b: 1
bond_type: BOND_SINGLE
}
bonds {
atom_b: 2
bond_type: BOND_SINGLE
}
bonds {
atom_b: 3
bond_type: BOND_SINGLE
}
bonds {
atom_b: 4
bond_type: BOND_SINGLE
}
bonds {
atom_a: 1
atom_b: 5
bond_type: BOND_SINGLE
}
bonds {
atom_a: 1
atom_b: 6
bond_type: BOND_SINGLE
}
'''
expected = str_to_bond_topology(expected_str)
self.assertEqual(str(expected), str(got))
def test_charged(self):
# This is actually C N N+O-
got = smu_utils_lib.create_bond_topology('CNNO', '200101', '2020')
expected_str = '''
atoms: ATOM_C
atoms: ATOM_N
atoms: ATOM_NPOS
atoms: ATOM_ONEG
atoms: ATOM_H
atoms: ATOM_H
atoms: ATOM_H
atoms: ATOM_H
bonds {
atom_b: 1
bond_type: BOND_DOUBLE
}
bonds {
atom_a: 1
atom_b: 2
bond_type: BOND_SINGLE
}
bonds {
atom_a: 2
atom_b: 3
bond_type: BOND_SINGLE
}
bonds {
atom_b: 4
bond_type: BOND_SINGLE
}
bonds {
atom_b: 5
bond_type: BOND_SINGLE
}
bonds {
atom_a: 2
atom_b: 6
bond_type: BOND_SINGLE
}
bonds {
atom_a: 2
atom_b: 7
bond_type: BOND_SINGLE
}
'''
expected = str_to_bond_topology(expected_str)
self.assertEqual(str(expected), str(got))
def test_one_heavy(self):
got = smu_utils_lib.create_bond_topology('C', '', '4')
expected_str = '''
atoms: ATOM_C
atoms: ATOM_H
atoms: ATOM_H
atoms: ATOM_H
atoms: ATOM_H
bonds {
atom_b: 1
bond_type: BOND_SINGLE
}
bonds {
atom_b: 2
bond_type: BOND_SINGLE
}
bonds {
atom_b: 3
bond_type: BOND_SINGLE
}
bonds {
atom_b: 4
bond_type: BOND_SINGLE
}
'''
expected = str_to_bond_topology(expected_str)
self.assertEqual(str(expected), str(got))
class FromCSVTest(absltest.TestCase):
def test_basic(self):
infile = tempfile.NamedTemporaryFile(mode='w', delete=False)
infile.write(
'id,num_atoms,atoms_str,connectivity_matrix,hydrogens,smiles\n')
infile.write('68,3,C N+O-,310,010,[NH+]#C[O-]\n')
infile.write('134,4,N+O-F F ,111000,1000,[O-][NH+](F)F\n')
infile.close()
out = smu_utils_lib.generate_bond_topologies_from_csv(infile.name)
bt = next(out)
self.assertEqual(68, bt.bond_topology_id)
self.assertLen(bt.atoms, 4)
self.assertEqual(bt.smiles, '[NH+]#C[O-]')
bt = next(out)
self.assertEqual(134, bt.bond_topology_id)
self.assertLen(bt.atoms, 5)
self.assertEqual(bt.smiles, '[O-][NH+](F)F')
class ParseDuplicatesFileTest(absltest.TestCase):
def test_basic(self):
df = smu_utils_lib.parse_duplicates_file(
os.path.join(TESTDATA_PATH, 'small.equivalent_isomers.dat'))
pd.testing.assert_frame_equal(
pd.DataFrame(
columns=['name1', 'stoich1', 'btid1', 'shortconfid1', 'confid1',
'name2', 'stoich2', 'btid2', 'shortconfid2', 'confid2'],
data=[
['x07_c2n2o2fh3.224227.004',
'c2n2o2fh3', 224227, 4, 224227004,
'x07_c2n2o2fh3.224176.005',
'c2n2o2fh3', 224176, 5, 224176005],
['x07_c2n2o2fh3.260543.005',
'c2n2o2fh3', 260543, 5, 260543005,
'x07_c2n2o2fh3.224050.001',
'c2n2o2fh3', 224050, 1, 224050001],
]),
df,
check_like=True)
class BondTopologyToMoleculeTest(absltest.TestCase):
def test_o2(self):
bond_topology = str_to_bond_topology('''
atoms: ATOM_O
atoms: ATOM_O
bonds {
atom_b: 1
bond_type: BOND_DOUBLE
}
''')
got = smu_utils_lib.bond_topology_to_molecule(bond_topology)
self.assertEqual('O=O', Chem.MolToSmiles(got))
def test_methane(self):
bond_topology = str_to_bond_topology('''
atoms: ATOM_C
atoms: ATOM_H
atoms: ATOM_H
atoms: ATOM_H
atoms: ATOM_H
bonds {
atom_b: 1
bond_type: BOND_SINGLE
}
bonds {
atom_b: 2
bond_type: BOND_SINGLE
}
bonds {
atom_b: 3
bond_type: BOND_SINGLE
}
bonds {
atom_b: 4
bond_type: BOND_SINGLE
}
''')
got = smu_utils_lib.bond_topology_to_molecule(bond_topology)
self.assertEqual('[H]C([H])([H])[H]', Chem.MolToSmiles(got))
# This molecule is an N+ central atom, bonded to C (triply), O-, and F
def test_charged_molecule(self):
bond_topology = str_to_bond_topology('''
atoms: ATOM_C
atoms: ATOM_NPOS
atoms: ATOM_ONEG
atoms: ATOM_F
bonds {
atom_b: 1
bond_type: BOND_TRIPLE
}
bonds {
atom_a: 1
atom_b: 2
bond_type: BOND_SINGLE
}
bonds {
atom_a: 1
atom_b: 3
bond_type: BOND_SINGLE
}
''')
got = smu_utils_lib.bond_topology_to_molecule(bond_topology)
self.assertEqual('C#[N+]([O-])F', Chem.MolToSmiles(got))
class ConformerToMoleculeTest(absltest.TestCase):
def setUp(self):
super().setUp()
self.conformer = get_stage2_conformer()
# We'll make a new initial_geometry which is just the current one with all
# coordinates multiplied by 1000
self.conformer.initial_geometries.append(
self.conformer.initial_geometries[0])
new_geom = self.conformer.initial_geometries[1]
for atom_pos in new_geom.atom_positions:
atom_pos.x = atom_pos.x * 1000
atom_pos.y = atom_pos.y * 1000
atom_pos.z = atom_pos.z * 1000
# For the extra bond_topology, we'll just copy the existing one and change
# the id. Through the dumb luck of the molecule we picked there's not a
# simple way to make this a new bond topology and still have it look valid
# to RDKit
self.conformer.bond_topologies.append(self.conformer.bond_topologies[0])
self.conformer.bond_topologies[1].bond_topology_id = 99999
def test_all_outputs(self):
mols = list(smu_utils_lib.conformer_to_molecules(self.conformer))
self.assertLen(mols, 6) # 2 bond topologies * (1 opt geom + 2 init_geom)
self.assertEqual([m.GetProp('_Name') for m in mols], [
'SMU 618451001 bt=618451(0/2) geom=init(0/2)',
'SMU 618451001 bt=618451(0/2) geom=init(1/2)',
'SMU 618451001 bt=618451(0/2) geom=opt',
'SMU 618451001 bt=99999(1/2) geom=init(0/2)',
'SMU 618451001 bt=99999(1/2) geom=init(1/2)',
'SMU 618451001 bt=99999(1/2) geom=opt'
])
self.assertEqual(
'[H]C(F)=C(OC([H])([H])[H])OC([H])([H])[H]',
Chem.MolToSmiles(mols[0], kekuleSmiles=True, isomericSmiles=False))
self.assertEqual(
'[H]C(F)=C(OC([H])([H])[H])OC([H])([H])[H]',
Chem.MolToSmiles(mols[4], kekuleSmiles=True, isomericSmiles=False))
def test_initial_only(self):
mols = list(
smu_utils_lib.conformer_to_molecules(
self.conformer,
include_initial_geometries=True,
include_optimized_geometry=False,
include_all_bond_topologies=False))
self.assertLen(mols, 2)
self.assertEqual([m.GetProp('_Name') for m in mols], [
'SMU 618451001 bt=618451(0/2) geom=init(0/2)',
'SMU 618451001 bt=618451(0/2) geom=init(1/2)',
])
# This is just one random atom I picked from the .dat file and converted to
# angstroms instead of bohr.
self.assertEqual('C', mols[0].GetAtomWithIdx(1).GetSymbol())
np.testing.assert_allclose([0.6643, -3.470301, 3.4766],
list(mols[0].GetConformer().GetAtomPosition(1)),
atol=1e-6)
self.assertEqual('C', mols[1].GetAtomWithIdx(1).GetSymbol())
np.testing.assert_allclose([664.299998, -3470.300473, 3476.600215],
list(mols[1].GetConformer().GetAtomPosition(1)),
atol=1e-6)
def test_optimized_only(self):
mols = list(
smu_utils_lib.conformer_to_molecules(
self.conformer,
include_initial_geometries=False,
include_optimized_geometry=True,
include_all_bond_topologies=False))
self.assertLen(mols, 1)
self.assertEqual(
mols[0].GetProp('_Name'),
'SMU 618451001 bt=618451(0/2) geom=opt',
)
self.assertEqual(
'[H]C(F)=C(OC([H])([H])[H])OC([H])([H])[H]',
Chem.MolToSmiles(mols[0], kekuleSmiles=True, isomericSmiles=False))
    # These are just two random atoms I picked from the .dat file and converted to
# angstroms instead of bohr.
self.assertEqual('C', mols[0].GetAtomWithIdx(1).GetSymbol())
np.testing.assert_allclose([0.540254, -3.465543, 3.456982],
list(mols[0].GetConformer().GetAtomPosition(1)),
atol=1e-6)
self.assertEqual('H', mols[0].GetAtomWithIdx(13).GetSymbol())
np.testing.assert_allclose([2.135153, -1.817366, 0.226376],
list(mols[0].GetConformer().GetAtomPosition(13)),
atol=1e-6)
class SmilesCompareTest(absltest.TestCase):
def test_string_format(self):
# for some simplicity later on, we use shorter names
self.assertEqual('MISSING', str(smu_utils_lib.SmilesCompareResult.MISSING))
self.assertEqual('MISMATCH',
str(smu_utils_lib.SmilesCompareResult.MISMATCH))
self.assertEqual('MATCH', str(smu_utils_lib.SmilesCompareResult.MATCH))
def test_missing(self):
bond_topology = str_to_bond_topology('''
atoms: ATOM_O
atoms: ATOM_O
bonds {
atom_b: 1
bond_type: BOND_DOUBLE
}
''')
result, with_h, without_h = smu_utils_lib.bond_topology_smiles_comparison(
bond_topology)
self.assertEqual(smu_utils_lib.SmilesCompareResult.MISSING, result)
self.assertEqual('O=O', with_h)
self.assertEqual('O=O', without_h)
# Also directly test compute_smiles_for_bond_topology
self.assertEqual(
'O=O',
smu_utils_lib.compute_smiles_for_bond_topology(
bond_topology, include_hs=True))
def test_mismatch(self):
bond_topology = str_to_bond_topology('''
atoms: ATOM_O
atoms: ATOM_O
bonds {
atom_b: 1
bond_type: BOND_DOUBLE
}
smiles: "BlahBlahBlah"
''')
result, with_h, without_h = smu_utils_lib.bond_topology_smiles_comparison(
bond_topology)
self.assertEqual(smu_utils_lib.SmilesCompareResult.MISMATCH, result)
self.assertEqual('O=O', with_h)
self.assertEqual('O=O', without_h)
def test_matched_and_h_stripping(self):
bond_topology = str_to_bond_topology('''
atoms: ATOM_O
atoms: ATOM_H
atoms: ATOM_H
bonds {
atom_b: 1
bond_type: BOND_SINGLE
}
bonds {
atom_b: 2
bond_type: BOND_SINGLE
}
smiles: "O"
''')
result, with_h, without_h = smu_utils_lib.bond_topology_smiles_comparison(
bond_topology)
self.assertEqual(smu_utils_lib.SmilesCompareResult.MATCH, result)
self.assertEqual('[H]O[H]', with_h)
self.assertEqual('O', without_h)
# Also directly test compute_smiles_for_bond_topology
self.assertEqual(
'[H]O[H]',
smu_utils_lib.compute_smiles_for_bond_topology(
bond_topology, include_hs=True))
self.assertEqual(
'O',
smu_utils_lib.compute_smiles_for_bond_topology(
bond_topology, include_hs=False))
def test_compute_smiles_from_molecule_no_hs(self):
mol = Chem.MolFromSmiles('FOC', sanitize=False)
self.assertEqual(
smu_utils_lib.compute_smiles_for_molecule(mol, include_hs=False), 'COF')
# This is expected. Even with include_hs=True, if there were no Hs in the
# molecule, they will not be in the smiles.
self.assertEqual(
smu_utils_lib.compute_smiles_for_molecule(mol, include_hs=True), 'COF')
def test_compute_smiles_from_molecule_with_hs(self):
mol = Chem.MolFromSmiles('FOC', sanitize=False)
Chem.SanitizeMol(mol, Chem.rdmolops.SanitizeFlags.SANITIZE_ADJUSTHS)
mol = Chem.AddHs(mol)
self.assertEqual(
smu_utils_lib.compute_smiles_for_molecule(mol, include_hs=False), 'COF')
self.assertEqual(
smu_utils_lib.compute_smiles_for_molecule(mol, include_hs=True),
'[H]C([H])([H])OF')
def test_compute_smiles_from_molecule_special_case(self):
mol = Chem.MolFromSmiles('C12=C3C4=C1C4=C23', sanitize=False)
# Double check that this really is the special case -- we get back the
# SMILES we put in even though it's not the one we want.
self.assertEqual('C12=C3C4=C1C4=C23',
Chem.MolToSmiles(mol, kekuleSmiles=True))
self.assertEqual(
smu_utils_lib.compute_smiles_for_molecule(mol, include_hs=False),
'C12=C3C1=C1C2=C31')
def test_compute_smiles_from_molecule_labeled_with_h(self):
mol = Chem.MolFromSmiles(
'[O-][N+]([H])([H])N([H])OC([H])([H])F', sanitize=False)
self.assertIsNotNone(mol)
self.assertEqual(
'[O-][N+:1]([H:2])([H:3])[N:4]([H:5])[O:6][C:7]([H:8])([H:9])[F:10]',
smu_utils_lib.compute_smiles_for_molecule(
mol, include_hs=True, labeled_atoms=True))
def test_compute_smiles_from_molecule_labeled_no_h(self):
mol = Chem.MolFromSmiles(
'[O-][N+]([H])([H])N([H])OC([H])([H])F', sanitize=False)
self.assertIsNotNone(mol)
self.assertEqual(
'[O-][NH2+:1][NH:2][O:3][CH2:4][F:5]',
smu_utils_lib.compute_smiles_for_molecule(
mol, include_hs=False, labeled_atoms=True))
class MergeConformersTest(absltest.TestCase):
def setUp(self):
super().setUp()
# We are relying on the fact that the first conformer in both x07_sample.dat
# and x07_stage1.dat are the same.
self.stage1_conformer = get_stage1_conformer()
self.stage2_conformer = get_stage2_conformer()
self.duplicate_conformer = dataset_pb2.Conformer()
self.duplicate_conformer.conformer_id = self.stage1_conformer.conformer_id
# A real duplicate conformer wouldn't have both of these fields filled in,
# but it's fine for the test to make sure everything is copied.
self.duplicate_conformer.duplicated_by = 123
self.duplicate_conformer.duplicate_of.extend([111, 222])
def test_two_stage2(self):
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(self.stage2_conformer,
self.stage2_conformer)
def test_two_stage1(self):
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(self.stage1_conformer,
self.stage1_conformer)
def test_two_duplicates(self):
duplicate_conformer2 = copy.deepcopy(self.duplicate_conformer)
duplicate_conformer2.duplicate_of[:] = [333, 444]
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.duplicate_conformer, duplicate_conformer2)
self.assertIsNone(got_conflict)
self.assertEqual(123, got_conf.duplicated_by)
self.assertCountEqual([111, 222, 333, 444], got_conf.duplicate_of)
def test_stage2_stage1(self):
# Add a duplicate to stage1 to make sure it is copied
self.stage1_conformer.duplicate_of.append(999)
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage2_conformer, self.stage1_conformer)
self.assertIsNone(got_conflict)
self.assertEqual(got_conf.duplicate_of, [999])
# Just check a random field that is in stage2 but not stage1
self.assertNotEmpty(got_conf.properties.normal_modes)
def test_stage2_stage1_conflict_energy(self):
self.stage2_conformer.properties.initial_geometry_energy.value = -1.23
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage2_conformer, self.stage1_conformer)
self.assertEqual(got_conflict, [
618451001,
1, 1, 1, 1, -406.51179, 0.052254, -406.522079, 2.5e-05, True, True,
1, 1, 1, 1, -1.23, 0.052254, -406.522079, 2.5e-05, True, True
])
# Just check a random field that is in stage2 but not stage1
self.assertNotEmpty(got_conf.properties.normal_modes)
# This stage2 values should be returned
self.assertEqual(got_conf.properties.initial_geometry_energy.value, -1.23)
def test_stage2_stage1_conflict_error_codes(self):
self.stage2_conformer.properties.errors.error_nstat1 = 999
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage2_conformer, self.stage1_conformer)
self.assertEqual(got_conflict, [
618451001,
1, 1, 1, 1, -406.51179, 0.052254, -406.522079, 2.5e-05, True, True,
999, 1, 1, 1, -406.51179, 0.052254, -406.522079, 2.5e-05, True, True
])
# Just check a random field that is in stage2 but not stage1
self.assertNotEmpty(got_conf.properties.normal_modes)
def test_stage2_stage1_conflict_missing_geometry(self):
self.stage2_conformer.ClearField('optimized_geometry')
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage2_conformer, self.stage1_conformer)
self.assertEqual(got_conflict, [
618451001,
1, 1, 1, 1, -406.51179, 0.052254, -406.522079, 2.5e-05, True, True,
1, 1, 1, 1, -406.51179, 0.052254, -406.522079, 2.5e-05, True, False
])
# Just check a random field that is in stage2 but not stage1
self.assertNotEmpty(got_conf.properties.normal_modes)
def test_stage2_stage1_no_conflict_minus1(self):
# If stage2 contains a -1, we keep that (stricter error checking later on)
self.stage2_conformer.properties.initial_geometry_energy.value = -1.0
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage2_conformer, self.stage1_conformer)
self.assertIsNone(got_conflict)
self.assertEqual(got_conf.properties.initial_geometry_energy.value, -1.0)
def test_stage2_stage1_no_conflict_approx_equal(self):
self.stage2_conformer.properties.initial_geometry_energy.value += 1e-7
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage2_conformer, self.stage1_conformer)
self.assertIsNone(got_conflict)
# Just check a random field from stage2
self.assertNotEmpty(got_conf.properties.normal_modes)
def test_stage2_duplicate(self):
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage2_conformer, self.duplicate_conformer)
self.assertIsNone(got_conflict)
self.assertEqual(got_conf.duplicate_of, [111, 222])
self.assertEqual(got_conf.duplicated_by, 123)
# Just check a random field from stage2
self.assertNotEmpty(got_conf.properties.normal_modes)
def test_stage1_duplicate(self):
got_conf, got_conflict = smu_utils_lib.merge_conformer(
self.stage1_conformer, self.duplicate_conformer)
self.assertIsNone(got_conflict)
self.assertEqual(got_conf.duplicate_of, [111, 222])
self.assertEqual(got_conf.duplicated_by, 123)
# Just check a random field from stage1
self.assertTrue(got_conf.properties.HasField('initial_geometry_energy'))
def test_multiple_initial_geometries(self):
bad_conformer = copy.deepcopy(self.stage1_conformer)
bad_conformer.initial_geometries.append(bad_conformer.initial_geometries[0])
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(bad_conformer, self.stage2_conformer)
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(self.stage2_conformer, bad_conformer)
def test_multiple_bond_topologies(self):
bad_conformer = copy.deepcopy(self.stage1_conformer)
bad_conformer.bond_topologies.append(bad_conformer.bond_topologies[0])
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(bad_conformer, self.stage2_conformer)
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(self.stage2_conformer, bad_conformer)
def test_different_bond_topologies(self):
self.stage1_conformer.bond_topologies[0].atoms[0] = (
dataset_pb2.BondTopology.ATOM_H)
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(self.stage1_conformer,
self.stage2_conformer)
with self.assertRaises(ValueError):
smu_utils_lib.merge_conformer(self.stage2_conformer,
self.stage1_conformer)
class ConformerErrorTest(absltest.TestCase):
def test_stage1_no_error(self):
conformer = get_stage1_conformer()
self.assertFalse(smu_utils_lib.conformer_has_calculation_errors(conformer))
def test_stage1_error(self):
conformer = get_stage2_conformer()
conformer.properties.errors.error_frequencies = 123
self.assertTrue(smu_utils_lib.conformer_has_calculation_errors(conformer))
def test_stage2_no_error(self):
conformer = get_stage2_conformer()
self.assertFalse(smu_utils_lib.conformer_has_calculation_errors(conformer))
def test_stage2_error_in_1_expected_field(self):
conformer = get_stage2_conformer()
conformer.properties.errors.error_rotational_modes = 123
self.assertTrue(smu_utils_lib.conformer_has_calculation_errors(conformer))
def test_stage2_error_in_0_expected_field(self):
conformer = get_stage2_conformer()
# This field is 0 to indicate no error. Why the discrepancy? Who knows!
conformer.properties.errors.error_nsvg09 = 1
self.assertTrue(smu_utils_lib.conformer_has_calculation_errors(conformer))
def test_stage2_nstat1_is_3(self):
    # This is the other bizarre case. nstat1 of 3 is still considered success.
conformer = get_stage2_conformer()
conformer.properties.errors.error_nstat1 = 3
self.assertFalse(smu_utils_lib.conformer_has_calculation_errors(conformer))
class FilterConformerByAvailabilityTest(absltest.TestCase):
def setUp(self):
super().setUp()
self.conformer = dataset_pb2.Conformer()
properties = self.conformer.properties
# A STANDARD field
properties.single_point_energy_pbe0d3_6_311gd.value = 1.23
# A COMPLETE field
properties.homo_pbe0_aug_pc_1.value = 1.23
# An INTERNAL_ONLY field
properties.nuclear_repulsion_energy.value = 1.23
def test_standard(self):
smu_utils_lib.filter_conformer_by_availability(self.conformer,
[dataset_pb2.STANDARD])
self.assertTrue(
self.conformer.properties.HasField(
'single_point_energy_pbe0d3_6_311gd'))
self.assertFalse(self.conformer.properties.HasField('homo_pbe0_aug_pc_1'))
self.assertFalse(
self.conformer.properties.HasField('nuclear_repulsion_energy'))
def test_complete_and_internal_only(self):
smu_utils_lib.filter_conformer_by_availability(
self.conformer, [dataset_pb2.COMPLETE, dataset_pb2.INTERNAL_ONLY])
self.assertFalse(
self.conformer.properties.HasField(
'single_point_energy_pbe0d3_6_311gd'))
self.assertTrue(self.conformer.properties.HasField('homo_pbe0_aug_pc_1'))
self.assertTrue(
self.conformer.properties.HasField('nuclear_repulsion_energy'))
class ConformerToStandardTest(absltest.TestCase):
def setUp(self):
super().setUp()
self.conformer = get_stage2_conformer()
def test_field_filtering(self):
# Check that the field which should be filtered starts out set
self.assertTrue(self.conformer.properties.HasField(
'single_point_energy_hf_6_31gd'))
got = smu_utils_lib.conformer_to_standard(self.conformer)
# Check for a field that was originally in self.conformer and should be
# filtered and a field which should still be present.
self.assertTrue(got.properties.HasField(
'single_point_energy_pbe0d3_6_311gd'))
self.assertFalse(
got.properties.HasField('single_point_energy_hf_6_31gd'))
def test_remove_error_conformer(self):
self.conformer.properties.errors.error_frequencies = 123
self.assertIsNone(smu_utils_lib.conformer_to_standard(self.conformer))
def test_remove_duplicate(self):
self.conformer.duplicated_by = 123
self.assertIsNone(smu_utils_lib.conformer_to_standard(self.conformer))
class DetermineFateTest(parameterized.TestCase):
def test_duplicate_same_topology(self):
conformer = get_stage1_conformer()
# bond topology is conformer_id // 1000
conformer.duplicated_by = conformer.conformer_id + 1
self.assertEqual(dataset_pb2.Conformer.FATE_DUPLICATE_SAME_TOPOLOGY,
smu_utils_lib.determine_fate(conformer))
def test_duplicate_different_topology(self):
conformer = get_stage1_conformer()
# bond topology is conformer_id // 1000
conformer.duplicated_by = conformer.conformer_id + 1000
self.assertEqual(dataset_pb2.Conformer.FATE_DUPLICATE_DIFFERENT_TOPOLOGY,
smu_utils_lib.determine_fate(conformer))
@parameterized.parameters(
(2, dataset_pb2.Conformer.FATE_GEOMETRY_OPTIMIZATION_PROBLEM),
(5, dataset_pb2.Conformer.FATE_DISASSOCIATED),
(4, dataset_pb2.Conformer.FATE_FORCE_CONSTANT_FAILURE),
(6, dataset_pb2.Conformer.FATE_DISCARDED_OTHER))
def test_geometry_failures(self, nstat1, expected_fate):
conformer = get_stage1_conformer()
conformer.properties.errors.error_nstat1 = nstat1
self.assertEqual(expected_fate, smu_utils_lib.determine_fate(conformer))
def test_no_result(self):
conformer = get_stage1_conformer()
self.assertEqual(dataset_pb2.Conformer.FATE_NO_CALCULATION_RESULTS,
smu_utils_lib.determine_fate(conformer))
def test_calculation_errors(self):
conformer = get_stage2_conformer()
# This is a random choice of an error to set. I just need some error.
conformer.properties.errors.error_atomic_analysis = 999
self.assertEqual(dataset_pb2.Conformer.FATE_CALCULATION_WITH_ERROR,
smu_utils_lib.determine_fate(conformer))
def test_success(self):
conformer = get_stage2_conformer()
self.assertEqual(dataset_pb2.Conformer.FATE_SUCCESS,
smu_utils_lib.determine_fate(conformer))
class ToBondTopologySummaryTest(absltest.TestCase):
def setUp(self):
super().setUp()
self.conformer = get_stage2_conformer()
def test_dup_same(self):
self.conformer.fate = dataset_pb2.Conformer.FATE_DUPLICATE_SAME_TOPOLOGY
got = list(
smu_utils_lib.conformer_to_bond_topology_summaries(self.conformer))
self.assertLen(got, 1)
self.assertEqual(got[0].bond_topology.bond_topology_id,
self.conformer.bond_topologies[0].bond_topology_id)
self.assertEqual(got[0].count_attempted_conformers, 1)
self.assertEqual(got[0].count_duplicates_same_topology, 1)
def test_dup_diff(self):
self.conformer.fate = (
dataset_pb2.Conformer.FATE_DUPLICATE_DIFFERENT_TOPOLOGY)
got = list(
smu_utils_lib.conformer_to_bond_topology_summaries(self.conformer))
self.assertLen(got, 1)
self.assertEqual(got[0].count_attempted_conformers, 1)
self.assertEqual(got[0].count_duplicates_different_topology, 1)
def test_geometry_failed(self):
self.conformer.fate = (dataset_pb2.Conformer.FATE_DISCARDED_OTHER)
got = list(
smu_utils_lib.conformer_to_bond_topology_summaries(self.conformer))
self.assertLen(got, 1)
self.assertEqual(got[0].count_attempted_conformers, 1)
self.assertEqual(got[0].count_failed_geometry_optimization, 1)
def test_missing_calculation(self):
self.conformer.fate = dataset_pb2.Conformer.FATE_NO_CALCULATION_RESULTS
got = list(
smu_utils_lib.conformer_to_bond_topology_summaries(self.conformer))
self.assertLen(got, 1)
self.assertEqual(got[0].count_attempted_conformers, 1)
self.assertEqual(got[0].count_kept_geometry, 1)
self.assertEqual(got[0].count_missing_calculation, 1)
def test_calculation_with_error(self):
self.conformer.fate = dataset_pb2.Conformer.FATE_CALCULATION_WITH_ERROR
self.conformer.bond_topologies.append(self.conformer.bond_topologies[0])
self.conformer.bond_topologies[-1].bond_topology_id = 123
got = list(
smu_utils_lib.conformer_to_bond_topology_summaries(self.conformer))
self.assertLen(got, 2)
# We don't actually care about the order, but this is what comes out right
# now.
self.assertEqual(got[0].bond_topology.bond_topology_id, 123)
self.assertEqual(got[0].count_attempted_conformers, 0)
self.assertEqual(got[0].count_kept_geometry, 0)
self.assertEqual(got[0].count_calculation_with_error, 0)
self.assertEqual(got[0].count_detected_match_with_error, 1)
self.assertEqual(got[1].bond_topology.bond_topology_id,
self.conformer.bond_topologies[0].bond_topology_id)
self.assertEqual(got[1].count_attempted_conformers, 1)
self.assertEqual(got[1].count_kept_geometry, 1)
self.assertEqual(got[1].count_calculation_with_error, 1)
self.assertEqual(got[1].count_detected_match_with_error, 0)
def test_calculation_success(self):
self.conformer.fate = dataset_pb2.Conformer.FATE_SUCCESS
self.conformer.bond_topologies.append(self.conformer.bond_topologies[0])
self.conformer.bond_topologies[-1].bond_topology_id = 123
got = list(
smu_utils_lib.conformer_to_bond_topology_summaries(self.conformer))
self.assertLen(got, 2)
# We don't actually care about the order, but this is what comes out right
# now.
self.assertEqual(got[0].bond_topology.bond_topology_id, 123)
self.assertEqual(got[0].count_attempted_conformers, 0)
self.assertEqual(got[0].count_kept_geometry, 0)
self.assertEqual(got[0].count_calculation_success, 0)
self.assertEqual(got[0].count_detected_match_success, 1)
self.assertEqual(got[1].bond_topology.bond_topology_id,
self.conformer.bond_topologies[0].bond_topology_id)
self.assertEqual(got[1].count_attempted_conformers, 1)
self.assertEqual(got[1].count_kept_geometry, 1)
self.assertEqual(got[1].count_calculation_success, 1)
self.assertEqual(got[1].count_detected_match_success, 0)
class LabeledSmilesTester(absltest.TestCase):
def test_atom_labels(self):
mol = Chem.MolFromSmiles('FCON[NH2+][O-]', sanitize=False)
self.assertIsNotNone(mol)
smiles_before = Chem.MolToSmiles(mol)
self.assertEqual(
smu_utils_lib.labeled_smiles(mol), 'F[CH2:1][O:2][NH:3][NH2+:4][O-:5]')
# Testing both the atom numbers and the smiles is redundant,
# but guards against possible future changes.
for atom in mol.GetAtoms():
self.assertEqual(atom.GetAtomMapNum(), 0)
self.assertEqual(Chem.MolToSmiles(mol), smiles_before)
if __name__ == '__main__':
absltest.main()
# ==== alexis-roche/nipy :: nipy/testing/__init__.py (license: bsd-3-clause) ====
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""The testing directory contains a small set of imaging files to be
used for doctests only. More thorough tests and example data will be
stored in a nipy data packages that you can download separately.
.. note::

    We use the ``nose`` testing framework for tests.

    Nose is a dependency for the tests, but should not be a dependency
    for running the algorithms in the NIPY library. This file should
    import without nose being present on the python path.
Examples
--------
>>> from nipy.testing import funcfile
>>> from nipy.io.api import load_image
>>> img = load_image(funcfile)
>>> img.shape
(17, 21, 3, 20)
"""
from __future__ import absolute_import
import os
#__all__ = ['funcfile', 'anatfile']
# Discover directory path
filepath = os.path.abspath(__file__)
basedir = os.path.dirname(filepath)
funcfile = os.path.join(basedir, 'functional.nii.gz')
anatfile = os.path.join(basedir, 'anatomical.nii.gz')
from numpy.testing import *
# Overwrites numpy.testing.Tester
from .nosetester import NipyNoseTester as Tester
test = Tester().test
bench = Tester().bench
from . import decorators as dec
# Allow failed import of nose if not now running tests
try:
from nose.tools import assert_true, assert_false
except ImportError:
pass
# ==== openstack/mistral :: mistral/api/controllers/v2/execution.py (license: apache-2.0) ====
# Copyright 2013 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
# Copyright 2015 Huawei Technologies Co., Ltd.
# Copyright 2016 - Brocade Communications Systems, Inc.
# Copyright 2018 - Extreme Networks, Inc.
# Copyright 2019 - NetCracker Technology Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from oslo_utils import uuidutils
from pecan import rest
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from mistral.api import access_control as acl
from mistral.api.controllers.v2 import execution_report
from mistral.api.controllers.v2 import resources
from mistral.api.controllers.v2 import sub_execution
from mistral.api.controllers.v2 import task
from mistral.api.controllers.v2 import types
from mistral import context
from mistral.db.v2 import api as db_api
from mistral.db.v2.sqlalchemy import models as db_models
from mistral import exceptions as exc
from mistral.rpc import clients as rpc
from mistral.services import workflows as wf_service
from mistral.utils import filter_utils
from mistral.utils import rest_utils
from mistral.workflow import data_flow
from mistral.workflow import states
from mistral_lib.utils import merge_dicts
LOG = logging.getLogger(__name__)
STATE_TYPES = wtypes.Enum(
str,
states.IDLE,
states.RUNNING,
states.SUCCESS,
states.ERROR,
states.PAUSED,
states.CANCELLED
)
def _get_workflow_execution_resource_with_output(wf_ex):
rest_utils.load_deferred_fields(wf_ex, ['params', 'input', 'output'])
return resources.Execution.from_db_model(wf_ex)
def _get_workflow_execution_resource(wf_ex):
rest_utils.load_deferred_fields(wf_ex, ['params', 'input'])
return resources.Execution.from_db_model(wf_ex)
# Use retries to prevent possible failures.
@rest_utils.rest_retry_on_db_error
def _get_workflow_execution(id, must_exist=True):
with db_api.transaction():
if must_exist:
wf_ex = db_api.get_workflow_execution(id)
else:
wf_ex = db_api.load_workflow_execution(id)
return rest_utils.load_deferred_fields(
wf_ex,
['params', 'input', 'output', 'context', 'spec']
)
# TODO(rakhmerov): Make sure to make all needed renaming on public API.
class ExecutionsController(rest.RestController):
tasks = task.ExecutionTasksController()
report = execution_report.ExecutionReportController()
executions = sub_execution.SubExecutionsController()
@rest_utils.wrap_wsme_controller_exception
@wsme_pecan.wsexpose(resources.Execution, wtypes.text)
def get(self, id):
"""Return the specified Execution.
:param id: UUID of execution to retrieve.
"""
acl.enforce("executions:get", context.ctx())
LOG.debug("Fetch execution [id=%s]", id)
wf_ex = _get_workflow_execution(id)
resource = resources.Execution.from_db_model(wf_ex)
resource.published_global = (
data_flow.get_workflow_execution_published_global(wf_ex)
)
return resource
@rest_utils.wrap_wsme_controller_exception
@wsme_pecan.wsexpose(
resources.Execution,
wtypes.text,
body=resources.Execution
)
def put(self, id, wf_ex):
"""Update the specified workflow execution.
:param id: UUID of execution to update.
:param wf_ex: Execution object.
"""
acl.enforce('executions:update', context.ctx())
LOG.debug('Update execution [id=%s, execution=%s]', id, wf_ex)
@rest_utils.rest_retry_on_db_error
def _compute_delta(wf_ex):
with db_api.transaction():
# ensure that workflow execution exists
db_api.get_workflow_execution(
id,
fields=(db_models.WorkflowExecution.id,)
)
delta = {}
if wf_ex.state:
delta['state'] = wf_ex.state
if wf_ex.description:
delta['description'] = wf_ex.description
if wf_ex.params and wf_ex.params.get('env'):
delta['env'] = wf_ex.params.get('env')
# Currently we can change only state, description, or env.
if len(delta.values()) <= 0:
raise exc.InputException(
'The property state, description, or env '
'is not provided for update.'
)
# Description cannot be updated together with state.
if delta.get('description') and delta.get('state'):
raise exc.InputException(
'The property description must be updated '
'separately from state.'
)
# If state change, environment cannot be updated
# if not RUNNING.
if (delta.get('env') and
delta.get('state') and
delta['state'] != states.RUNNING):
raise exc.InputException(
'The property env can only be updated when workflow '
'execution is not running or on resume from pause.'
)
if delta.get('description'):
wf_ex = db_api.update_workflow_execution(
id,
{'description': delta['description']}
)
if not delta.get('state') and delta.get('env'):
wf_ex = db_api.get_workflow_execution(id)
wf_ex = wf_service.update_workflow_execution_env(
wf_ex,
delta.get('env')
)
return delta, wf_ex
delta, wf_ex = _compute_delta(wf_ex)
if delta.get('state'):
if states.is_paused(delta.get('state')):
wf_ex = rpc.get_engine_client().pause_workflow(id)
elif delta.get('state') == states.RUNNING:
wf_ex = rpc.get_engine_client().resume_workflow(
id,
env=delta.get('env')
)
elif states.is_completed(delta.get('state')):
msg = wf_ex.state_info if wf_ex.state_info else None
wf_ex = rpc.get_engine_client().stop_workflow(
id,
delta.get('state'),
msg
)
else:
# To prevent changing state in other cases throw a message.
raise exc.InputException(
"Cannot change state to %s. Allowed states are: '%s" % (
wf_ex.state,
', '.join([
states.RUNNING,
states.PAUSED,
states.SUCCESS,
states.ERROR,
states.CANCELLED
])
)
)
return resources.Execution.from_dict(
wf_ex if isinstance(wf_ex, dict) else wf_ex.to_dict()
)
@rest_utils.wrap_wsme_controller_exception
@wsme_pecan.wsexpose(
resources.Execution,
body=resources.Execution,
status_code=201
)
def post(self, wf_ex):
"""Create a new Execution.
:param wf_ex: Execution object with input content.
"""
acl.enforce('executions:create', context.ctx())
LOG.debug("Create execution [execution=%s]", wf_ex)
exec_dict = wf_ex.to_dict()
exec_id = exec_dict.get('id')
if not exec_id:
exec_id = uuidutils.generate_uuid()
LOG.debug("Generated execution id [exec_id=%s]", exec_id)
exec_dict.update({'id': exec_id})
wf_ex = None
else:
# If ID is present we need to check if such execution exists.
# If yes, the method just returns the object. If not, the ID
# will be used to create a new execution.
wf_ex = _get_workflow_execution(exec_id, must_exist=False)
if wf_ex:
return resources.Execution.from_db_model(wf_ex)
source_execution_id = exec_dict.get('source_execution_id')
source_exec_dict = None
if source_execution_id:
# If source execution is present we will perform a lookup for
# previous workflow execution model and the information to start
# a new workflow based on that information.
source_exec_dict = db_api.get_workflow_execution(
source_execution_id).to_dict()
exec_dict['description'] = "{} Based on the execution '{}'".format(
exec_dict['description'],
source_execution_id
)
exec_dict['description'] = exec_dict['description'].strip()
result_exec_dict = merge_dicts(source_exec_dict, exec_dict)
if not (result_exec_dict.get('workflow_id') or
result_exec_dict.get('workflow_name')):
raise exc.WorkflowException(
"Workflow ID or workflow name must be provided. Workflow ID is"
" recommended."
)
engine = rpc.get_engine_client()
result = engine.start_workflow(
result_exec_dict.get(
'workflow_id',
result_exec_dict.get('workflow_name')
),
result_exec_dict.get('workflow_namespace', ''),
result_exec_dict.get('id'),
result_exec_dict.get('input'),
description=result_exec_dict.get('description', ''),
**result_exec_dict.get('params') or {}
)
return resources.Execution.from_dict(result)
@rest_utils.wrap_wsme_controller_exception
@wsme_pecan.wsexpose(None, wtypes.text, bool, status_code=204)
def delete(self, id, force=False):
"""Delete the specified Execution.
:param id: UUID of execution to delete.
:param force: Optional. Force the deletion of unfinished executions.
Default: false. While the api is backward compatible
the behaviour is not the same. The new default is the
safer option
"""
acl.enforce('executions:delete', context.ctx())
LOG.debug("Delete execution [id=%s]", id)
if not force:
state = db_api.get_workflow_execution(
id,
fields=(db_models.WorkflowExecution.state,)
)[0]
if not states.is_completed(state):
raise exc.NotAllowedException(
"Only completed executions can be deleted. "
"Use --force to override this. "
"Execution {} is in {} state".format(id, state)
)
return rest_utils.rest_retry_on_db_error(
db_api.delete_workflow_execution
)(id)
@rest_utils.wrap_wsme_controller_exception
@wsme_pecan.wsexpose(resources.Executions, types.uuid, int,
types.uniquelist, types.list, types.uniquelist,
wtypes.text, types.uuid, wtypes.text,
types.uniquelist, types.jsontype, types.uuid,
types.uuid, STATE_TYPES, wtypes.text,
types.jsontype, types.jsontype, wtypes.text,
wtypes.text, bool, types.uuid,
bool, types.list)
def get_all(self, marker=None, limit=None,
sort_keys='created_at', sort_dirs='asc', fields='',
workflow_name=None, workflow_id=None, description=None,
tags=None, params=None, task_execution_id=None,
root_execution_id=None, state=None, state_info=None,
input=None, output=None, created_at=None,
updated_at=None, include_output=None, project_id=None,
all_projects=False, nulls=''):
"""Return all Executions.
:param marker: Optional. Pagination marker for large data sets.
:param limit: Optional. Maximum number of resources to return in a
single result. Default value is None for backward
compatibility.
:param sort_keys: Optional. Columns to sort results by.
Default: created_at, which is backward compatible.
:param sort_dirs: Optional. Directions to sort corresponding to
sort_keys, "asc" or "desc" can be chosen.
Default: desc. The length of sort_dirs can be equal
or less than that of sort_keys.
:param fields: Optional. A specified list of fields of the resource to
be returned. 'id' will be included automatically in
fields if it's provided, since it will be used when
constructing 'next' link.
:param workflow_name: Optional. Keep only resources with a specific
workflow name.
:param workflow_id: Optional. Keep only resources with a specific
workflow ID.
:param description: Optional. Keep only resources with a specific
description.
:param tags: Optional. Keep only resources containing specific tags.
:param params: Optional. Keep only resources with specific parameters.
:param task_execution_id: Optional. Keep only resources with a
specific task execution ID.
:param root_execution_id: Optional. Keep only resources with a
specific root execution ID.
:param state: Optional. Keep only resources with a specific state.
:param state_info: Optional. Keep only resources with specific
state information.
:param input: Optional. Keep only resources with a specific input.
:param output: Optional. Keep only resources with a specific output.
:param created_at: Optional. Keep only resources created at a specific
time and date.
:param updated_at: Optional. Keep only resources with specific latest
update time and date.
:param include_output: Optional. Include the output for all executions
in the list.
:param project_id: Optional. Only get executions belong to the project.
Admin required.
:param all_projects: Optional. Get resources of all projects. Admin
required.
:param nulls: Optional. The names of the columns with null value in
the query.
"""
acl.enforce('executions:list', context.ctx())
db_models.WorkflowExecution.check_allowed_none_values(nulls)
if all_projects or project_id:
acl.enforce('executions:list:all_projects', context.ctx())
filters = filter_utils.create_filters_from_request_params(
none_values=nulls,
created_at=created_at,
workflow_name=workflow_name,
workflow_id=workflow_id,
tags=tags,
params=params,
task_execution_id=task_execution_id,
state=state,
state_info=state_info,
input=input,
output=output,
updated_at=updated_at,
description=description,
project_id=project_id,
root_execution_id=root_execution_id,
)
LOG.debug(
"Fetch executions. marker=%s, limit=%s, sort_keys=%s, "
"sort_dirs=%s, filters=%s, all_projects=%s", marker, limit,
sort_keys, sort_dirs, filters, all_projects
)
if include_output:
resource_function = _get_workflow_execution_resource_with_output
else:
resource_function = _get_workflow_execution_resource
return rest_utils.get_all(
resources.Executions,
resources.Execution,
db_api.get_workflow_executions,
db_api.get_workflow_execution,
resource_function=resource_function,
marker=marker,
limit=limit,
sort_keys=sort_keys,
sort_dirs=sort_dirs,
fields=fields,
all_projects=all_projects,
**filters
)
| apache-2.0 | -1,259,276,599,923,299,800 | 37.436242 | 79 | 0.569117 | false |
kgullikson88/TS23-Scripts | CheckSyntheticTemperature.py | 1 | 14868 | import os
import re
from collections import defaultdict
from operator import itemgetter
import logging
import pandas
from scipy.interpolate import InterpolatedUnivariateSpline as spline
from george import kernels
import matplotlib.pyplot as plt
import numpy as np
import george
import emcee
import StarData
import SpectralTypeRelations
def classify_filename(fname, type='bright'):
"""
Given a CCF filename, it classifies the star combination, temperature, metallicity, and vsini
:param fname:
:return:
"""
# First, remove any leading directories
fname = fname.split('/')[-1]
# Star combination
m1 = re.search('\.[0-9]+kps', fname)
stars = fname[:m1.start()]
star1 = stars.split('+')[0].replace('_', ' ')
star2 = stars.split('+')[1].split('_{}'.format(type))[0].replace('_', ' ')
# secondary star vsini
vsini = float(fname[m1.start() + 1:].split('kps')[0])
# Temperature
m2 = re.search('[0-9]+\.0K', fname)
temp = float(m2.group()[:-1])
# logg
m3 = re.search('K\+[0-9]\.[0-9]', fname)
logg = float(m3.group()[1:])
# metallicity
metal = float(fname.split(str(logg))[-1])
return star1, star2, vsini, temp, logg, metal
def get_ccf_data(basedir, primary_name=None, secondary_name=None, vel_arr=np.arange(-900.0, 900.0, 0.1), type='bright'):
"""
Searches the given directory for CCF files, and classifies
by star, temperature, metallicity, and vsini
:param basedir: The directory to search for CCF files
:keyword primary_name: Optional keyword. If given, it will only get the requested primary star data
:keyword secondary_name: Same as primary_name, but only reads ccfs for the given secondary
:keyword vel_arr: The velocities to interpolate each ccf at
:return: pandas DataFrame
"""
if not basedir.endswith('/'):
basedir += '/'
all_files = ['{}{}'.format(basedir, f) for f in os.listdir(basedir) if type in f.lower()]
primary = []
secondary = []
vsini_values = []
temperature = []
gravity = []
metallicity = []
ccf = []
for fname in all_files:
star1, star2, vsini, temp, logg, metal = classify_filename(fname, type=type)
if primary_name is not None and star1.lower() != primary_name.lower():
continue
if secondary_name is not None and star2.lower() != secondary_name.lower():
continue
vel, corr = np.loadtxt(fname, unpack=True)
fcn = spline(vel, corr)
ccf.append(fcn(vel_arr))
primary.append(star1)
secondary.append(star2)
vsini_values.append(vsini)
temperature.append(temp)
gravity.append(logg)
metallicity.append(metal)
# Make a pandas dataframe with all this data
df = pandas.DataFrame(data={'Primary': primary, 'Secondary': secondary, 'Temperature': temperature,
'vsini': vsini_values, 'logg': gravity, '[Fe/H]': metallicity, 'CCF': ccf})
return df
def get_ccf_summary(basedir, vel_arr=np.arange(-900.0, 900.0, 0.1), velocity='highest', type='bright'):
"""
Very similar to get_ccf_data, but does it in a way that is more memory efficient
:param basedir: The directory to search for CCF files
:keyword velocity: The velocity to measure the CCF at. The default is 'highest', and uses the maximum of the ccf
:keyword vel_arr: The velocities to interpolate each ccf at
:return: pandas DataFrame
"""
if not basedir.endswith('/'):
basedir += '/'
all_files = ['{}{}'.format(basedir, f) for f in os.listdir(basedir) if type in f.lower()]
file_dict = defaultdict(lambda: defaultdict(list))
for fname in all_files:
star1, star2, vsini, temp, logg, metal = classify_filename(fname, type=type)
file_dict[star1][star2].append(fname)
# Now, read the ccfs for each primary/secondary combo, and find the best combination
summary_dfs = []
for primary in file_dict.keys():
for secondary in file_dict[primary].keys():
data = get_ccf_data(basedir, primary_name=primary, secondary_name=secondary,
vel_arr=vel_arr, type=type)
summary_dfs.append(find_best_pars(data, velocity=velocity, vel_arr=vel_arr))
return pandas.concat(summary_dfs, ignore_index=True)
def find_best_pars(df, velocity='highest', vel_arr=np.arange(-900.0, 900.0, 0.1)):
"""
Find the 'best-fit' parameters for each combination of primary and secondary star
:param df: the dataframe to search in
:keyword velocity: The velocity to measure the CCF at. The default is 'highest', and uses the maximum of the ccf
:keyword vel_arr: The velocities to interpolate each ccf at
:return: a dataframe with keys of primary, secondary, and the parameters
"""
# Get the names of the primary and secondary stars
primary_names = pandas.unique(df.Primary)
secondary_names = pandas.unique(df.Secondary)
# Find the ccf value at the given velocity
if velocity == 'highest':
fcn = lambda row: (np.max(row), vel_arr[np.argmax(row)])
vals = df['CCF'].map(fcn)
df['ccf_max'] = vals.map(lambda l: l[0])
df['rv'] = vals.map(lambda l: l[1])
# df['ccf_max'] = df['CCF'].map(np.max)
    else:
        df['ccf_max'] = df['CCF'].map(lambda arr: arr[np.argmin(np.abs(vel_arr - velocity))])
        # Record the fixed measurement velocity so the 'rv' lookup below works.
        df['rv'] = velocity
# Find the best parameter for each combination
d = defaultdict(list)
for primary in primary_names:
for secondary in secondary_names:
good = df.loc[(df.Primary == primary) & (df.Secondary == secondary)]
best = good.loc[good.ccf_max == good.ccf_max.max()]
d['Primary'].append(primary)
d['Secondary'].append(secondary)
d['Temperature'].append(best['Temperature'].item())
d['vsini'].append(best['vsini'].item())
d['logg'].append(best['logg'].item())
d['[Fe/H]'].append(best['[Fe/H]'].item())
d['rv'].append(best['rv'].item())
return pandas.DataFrame(data=d)
def get_detected_objects(df, tol=1.0):
"""
Takes a summary dataframe with RV information. Finds the median rv for each star,
and removes objects that are 'tol' km/s from the median value
:param df: A summary dataframe, such as created by find_best_pars
:param tol: The tolerance, in km/s, to accept an observation as detected
:return: a dataframe containing only detected companions
"""
secondary_names = pandas.unique(df.Secondary)
secondary_to_rv = defaultdict(float)
for secondary in secondary_names:
rv = df.loc[df.Secondary == secondary]['rv'].median()
secondary_to_rv[secondary] = rv
print secondary, rv
keys = df.Secondary.values
good = df.loc[abs(df.rv.values - np.array(itemgetter(*keys)(secondary_to_rv))) < tol]
return good
def add_actual_temperature(df, method='spt'):
"""
Add the actual temperature to a given summary dataframe
:param df: The dataframe to which we will add the actual secondary star temperature
:param method: How to get the actual temperature. Options are:
- 'spt': Use main-sequence relationships to go from spectral type --> temperature
- 'excel': Use tabulated data, available in the file 'SecondaryStar_Temperatures.xls'
    :return: None. The dataframe is modified in place, gaining 'Tactual' and 'Tact_err' columns
"""
# First, get a list of the secondary stars in the data
secondary_names = pandas.unique(df.Secondary)
secondary_to_temperature = defaultdict(float)
secondary_to_error = defaultdict(float)
if method.lower() == 'spt':
MS = SpectralTypeRelations.MainSequence()
for secondary in secondary_names:
star_data = StarData.GetData(secondary)
spt = star_data.spectype[0] + re.search('[0-9]\.*[0-9]*', star_data.spectype).group()
T_sec = MS.Interpolate(MS.Temperature, spt)
secondary_to_temperature[secondary] = T_sec
elif method.lower() == 'excel':
table = pandas.read_excel('SecondaryStar_Temperatures.xls', 0)
for secondary in secondary_names:
T_sec = table.loc[table.Star.str.lower().str.contains(secondary.strip().lower())]['Literature_Temp'].item()
T_error = table.loc[table.Star.str.lower().str.contains(secondary.strip().lower())][
'Literature_error'].item()
secondary_to_temperature[secondary] = T_sec
secondary_to_error[secondary] = T_error
df['Tactual'] = df['Secondary'].map(lambda s: secondary_to_temperature[s])
df['Tact_err'] = df['Secondary'].map(lambda s: secondary_to_error[s])
return
def make_gaussian_process_samples(df):
"""
Make a gaussian process fitting the Tactual-Tmeasured relationship
:param df: pandas DataFrame with columns 'Temperature' (with the measured temperature)
and 'Tactual' (for the actual temperature)
:return: emcee sampler instance
"""
# First, find the uncertainties at each actual temperature
# Tactual = df['Tactual'].values
#Tmeasured = df['Temperature'].values
#error = df['Tact_err'].values
temp = df.groupby('Temperature').mean()['Tactual']
Tmeasured = temp.keys().values
Tactual = temp.values
error = np.nan_to_num(df.groupby('Temperature').std(ddof=1)['Tactual'].values)
default = np.median(error[error > 1])
error = np.maximum(error, np.ones(error.size) * default)
for Tm, Ta, e in zip(Tmeasured, Tactual, error):
print Tm, Ta, e
plt.figure(1)
plt.errorbar(Tmeasured, Tactual, yerr=error, fmt='.k', capsize=0)
plt.plot(Tmeasured, Tmeasured, 'r--')
plt.xlim((min(Tmeasured) - 100, max(Tmeasured) + 100))
plt.xlabel('Measured Temperature')
plt.ylabel('Actual Temperature')
plt.show(block=False)
# Define some functions to use in the GP fit
def model(pars, T):
#polypars = pars[2:]
#return np.poly1d(polypars)(T)
return T
def lnlike(pars, Tact, Tmeas, Terr):
a, tau = np.exp(pars[:2])
gp = george.GP(a * kernels.ExpSquaredKernel(tau))
gp.compute(Tmeas, Terr)
return gp.lnlikelihood(Tact - model(pars, Tmeas))
def lnprior(pars):
lna, lntau = pars[:2]
polypars = pars[2:]
if -20 < lna < 20 and 4 < lntau < 20:
return 0.0
return -np.inf
def lnprob(pars, x, y, yerr):
lp = lnprior(pars)
return lp + lnlike(pars, x, y, yerr) if np.isfinite(lp) else -np.inf
# Set up the emcee fitter
initial = np.array([0, 6])#, 1.0, 0.0])
ndim = len(initial)
nwalkers = 100
p0 = [np.array(initial) + 1e-8 * np.random.randn(ndim) for i in xrange(nwalkers)]
sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=(Tactual, Tmeasured, error))
print 'Running first burn-in'
p1, lnp, _ = sampler.run_mcmc(p0, 500)
sampler.reset()
print "Running second burn-in..."
p_best = p1[np.argmax(lnp)]
p2 = [p_best + 1e-8 * np.random.randn(ndim) for i in xrange(nwalkers)]
p3, _, _ = sampler.run_mcmc(p2, 250)
sampler.reset()
print "Running production..."
sampler.run_mcmc(p3, 1000)
# Plot a bunch of the fits
print "Plotting..."
N = 100
Tvalues = np.arange(3300, 7000, 20)
idx = np.argsort(-sampler.lnprobability.flatten())[:N] # Get N 'best' curves
par_vals = sampler.flatchain[idx]
for i, pars in enumerate(par_vals):
a, tau = np.exp(pars[:2])
gp = george.GP(a * kernels.ExpSquaredKernel(tau))
gp.compute(Tmeasured, error)
s = gp.sample_conditional(Tactual - model(pars, Tmeasured), Tvalues) + model(pars, Tvalues)
plt.plot(Tvalues, s, 'b-', alpha=0.1)
plt.draw()
# Finally, get posterior samples at all the possibly measured temperatures
print 'Generating posterior samples at all temperatures...'
N = 10000 # This is 1/10th of the total number of samples!
idx = np.argsort(-sampler.lnprobability.flatten())[:N] # Get N 'best' curves
par_vals = sampler.flatchain[idx]
Tvalues = np.arange(3000, 6900, 100)
gp_posterior = []
for pars in par_vals:
a, tau = np.exp(pars[:2])
gp = george.GP(a * kernels.ExpSquaredKernel(tau))
gp.compute(Tmeasured, error)
s = gp.sample_conditional(Tactual - model(pars, Tmeasured), Tvalues) + model(pars, Tvalues)
gp_posterior.append(s)
# Finally, make confidence intervals for the actual temperatures
gp_posterior = np.array(gp_posterior)
l, m, h = np.percentile(gp_posterior, [16.0, 50.0, 84.0], axis=0)
conf = pandas.DataFrame(data={'Measured Temperature': Tvalues, 'Actual Temperature': m,
'Lower Bound': l, 'Upper bound': h})
conf.to_csv('Confidence_Intervals.csv', index=False)
return sampler, np.array(gp_posterior)
def check_posterior(df, posterior, Tvalues):
"""
Checks the posterior samples: Are 95% of the measurements within 2-sigma of the prediction?
:param df: The summary dataframe
:param posterior: The MCMC predicted values
:param Tvalues: The measured temperatures the posterior was made with
:return: boolean, as well as some warning messages if applicable
"""
# First, make 2-sigma confidence intervals
l, m, h = np.percentile(posterior, [5.0, 50.0, 95.0], axis=0)
# Save the confidence intervals
# conf = pandas.DataFrame(data={'Measured Temperature': Tvalues, 'Actual Temperature': m,
# 'Lower Bound': l, 'Upper bound': h})
#conf.to_csv('Confidence_Intervals.csv', index=False)
Ntot = [] # The total number of observations with the given measured temperature
Nacc = [] # The number that have actual temperatures within the confidence interval
g = df.groupby('Temperature')
for i, T in enumerate(Tvalues):
if T in g.groups.keys():
Ta = g.get_group(T)['Tactual']
low, high = l[i], h[i]
Ntot.append(len(Ta))
Nacc.append(len(Ta.loc[(Ta >= low) & (Ta <= high)]))
p = float(Nacc[-1]) / float(Ntot[-1])
if p < 0.95:
logging.warn(
'Only {}/{} of the samples ({:.2f}%) were accepted for T = {} K'.format(Nacc[-1], Ntot[-1], p * 100,
T))
print low, high
print sorted(Ta)
else:
Ntot.append(0)
Nacc.append(0)
p = float(sum(Nacc)) / float(sum(Ntot))
if p < 0.95:
logging.warn('Only {:.2f}% of the total samples were accepted!'.format(p * 100))
return False
return True
if __name__ == '__main__':
pass
| gpl-3.0 | -920,756,524,181,589,000 | 39.402174 | 120 | 0.625572 | false |
MattDevo/edk2 | BaseTools/Source/Python/Workspace/MetaFileTable.py | 1 | 16975 | ## @file
# This file is used to create/update/query/erase a meta file table
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
from __future__ import absolute_import
import uuid
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import FORMAT_INVALID
from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \
MODEL_FILE_OTHERS
from Common.DataType import *
class MetaFileTable():
# TRICK: use file ID as the part before '.'
_ID_STEP_ = 1
_ID_MAX_ = 99999999
## Constructor
def __init__(self, DB, MetaFile, FileType, Temporary, FromItem=None):
self.MetaFile = MetaFile
self.TableName = ""
self.DB = DB
self._NumpyTab = None
self.CurrentContent = []
DB.TblFile.append([MetaFile.Name,
MetaFile.Ext,
MetaFile.Dir,
MetaFile.Path,
FileType,
MetaFile.TimeStamp,
FromItem])
self.FileId = len(DB.TblFile)
self.ID = self.FileId * 10**8
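        # Record IDs become FileId * 10**8 + n, so every file's rows occupy a
        # distinct numeric range (see the "TRICK" note above).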
if Temporary:
self.TableName = "_%s_%s_%s" % (FileType, len(DB.TblFile), uuid.uuid4().hex)
else:
self.TableName = "_%s_%s" % (FileType, len(DB.TblFile))
def IsIntegrity(self):
try:
TimeStamp = self.MetaFile.TimeStamp
if not self.CurrentContent:
Result = False
else:
Result = self.CurrentContent[-1][0] < 0
if not Result:
# update the timestamp in database
self.DB.SetFileTimeStamp(self.FileId, TimeStamp)
return False
if TimeStamp != self.DB.GetFileTimeStamp(self.FileId):
# update the timestamp in database
self.DB.SetFileTimeStamp(self.FileId, TimeStamp)
return False
except Exception as Exc:
EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc))
return False
return True
def SetEndFlag(self):
self.CurrentContent.append(self._DUMMY_)
def GetAll(self):
return [item for item in self.CurrentContent if item[0] >= 0 ]
## Python class representation of table storing module data
class ModuleTable(MetaFileTable):
_COLUMN_ = '''
ID REAL PRIMARY KEY,
Model INTEGER NOT NULL,
Value1 TEXT NOT NULL,
Value2 TEXT,
Value3 TEXT,
Scope1 TEXT,
Scope2 TEXT,
BelongsToItem REAL NOT NULL,
StartLine INTEGER NOT NULL,
StartColumn INTEGER NOT NULL,
EndLine INTEGER NOT NULL,
EndColumn INTEGER NOT NULL,
Enabled INTEGER DEFAULT 0
'''
# used as table end flag, in case the changes to database is not committed to db file
_DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1]
## Constructor
def __init__(self, Db, MetaFile, Temporary):
MetaFileTable.__init__(self, Db, MetaFile, MODEL_FILE_INF, Temporary)
## Insert a record into table Inf
#
# @param Model: Model of a Inf item
# @param Value1: Value1 of a Inf item
# @param Value2: Value2 of a Inf item
# @param Value3: Value3 of a Inf item
# @param Scope1: Arch of a Inf item
# @param Scope2 Platform os a Inf item
# @param BelongsToItem: The item belongs to which another item
# @param StartLine: StartLine of a Inf item
# @param StartColumn: StartColumn of a Inf item
# @param EndLine: EndLine of a Inf item
# @param EndColumn: EndColumn of a Inf item
# @param Enabled: If this item enabled
#
def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON,
BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
(Value1, Value2, Value3, Scope1, Scope2) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip())
self.ID = self.ID + self._ID_STEP_
if self.ID >= (MODEL_FILE_INF + self._ID_MAX_):
self.ID = MODEL_FILE_INF + self._ID_STEP_
row = [ self.ID,
Model,
Value1,
Value2,
Value3,
Scope1,
Scope2,
BelongsToItem,
StartLine,
StartColumn,
EndLine,
EndColumn,
Enabled
]
self.CurrentContent.append(row)
return self.ID
## Query table
#
# @param Model: The Model of Record
# @param Arch: The Arch attribute of Record
# @param Platform The Platform attribute of Record
#
# @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None, Platform=None, BelongsToItem=None):
QueryTab = self.CurrentContent
result = [item for item in QueryTab if item[1] == Model and item[-1]>=0 ]
if Arch is not None and Arch != TAB_ARCH_COMMON:
ArchList = set(['COMMON'])
ArchList.add(Arch)
result = [item for item in result if item[5] in ArchList]
if Platform is not None and Platform != TAB_COMMON:
Platformlist = set( ['COMMON','DEFAULT'])
Platformlist.add(Platform)
result = [item for item in result if item[6] in Platformlist]
if BelongsToItem is not None:
result = [item for item in result if item[7] == BelongsToItem]
result = [ [r[2],r[3],r[4],r[5],r[6],r[0],r[9]] for r in result ]
return result
## Python class representation of table storing package data
class PackageTable(MetaFileTable):
_COLUMN_ = '''
ID REAL PRIMARY KEY,
Model INTEGER NOT NULL,
Value1 TEXT NOT NULL,
Value2 TEXT,
Value3 TEXT,
Scope1 TEXT,
Scope2 TEXT,
BelongsToItem REAL NOT NULL,
StartLine INTEGER NOT NULL,
StartColumn INTEGER NOT NULL,
EndLine INTEGER NOT NULL,
EndColumn INTEGER NOT NULL,
Enabled INTEGER DEFAULT 0
'''
# used as table end flag, in case the changes to database is not committed to db file
_DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1]
## Constructor
def __init__(self, Cursor, MetaFile, Temporary):
MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DEC, Temporary)
## Insert table
#
# Insert a record into table Dec
#
# @param Model: Model of a Dec item
# @param Value1: Value1 of a Dec item
# @param Value2: Value2 of a Dec item
# @param Value3: Value3 of a Dec item
# @param Scope1: Arch of a Dec item
# @param Scope2: Module type of a Dec item
# @param BelongsToItem: The item belongs to which another item
# @param StartLine: StartLine of a Dec item
# @param StartColumn: StartColumn of a Dec item
# @param EndLine: EndLine of a Dec item
# @param EndColumn: EndColumn of a Dec item
# @param Enabled: If this item enabled
#
def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON,
BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
(Value1, Value2, Value3, Scope1, Scope2) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip())
self.ID = self.ID + self._ID_STEP_
row = [ self.ID,
Model,
Value1,
Value2,
Value3,
Scope1,
Scope2,
BelongsToItem,
StartLine,
StartColumn,
EndLine,
EndColumn,
Enabled
]
self.CurrentContent.append(row)
return self.ID
## Query table
#
# @param Model: The Model of Record
# @param Arch: The Arch attribute of Record
#
# @retval: A recordSet of all found records
#
def Query(self, Model, Arch=None):
QueryTab = self.CurrentContent
result = [item for item in QueryTab if item[1] == Model and item[-1]>=0 ]
if Arch is not None and Arch != TAB_ARCH_COMMON:
ArchList = set(['COMMON'])
ArchList.add(Arch)
result = [item for item in result if item[5] in ArchList]
return [[r[2], r[3], r[4], r[5], r[6], r[0], r[8]] for r in result]
def GetValidExpression(self, TokenSpaceGuid, PcdCName):
QueryTab = self.CurrentContent
result = [[item[2], item[8]] for item in QueryTab if item[3] == TokenSpaceGuid and item[4] == PcdCName]
validateranges = []
validlists = []
expressions = []
try:
for row in result:
comment = row[0]
LineNum = row[1]
comment = comment.strip("#")
comment = comment.strip()
oricomment = comment
if comment.startswith("@ValidRange"):
comment = comment.replace("@ValidRange", "", 1)
validateranges.append(comment.split("|")[1].strip())
if comment.startswith("@ValidList"):
comment = comment.replace("@ValidList", "", 1)
validlists.append(comment.split("|")[1].strip())
if comment.startswith("@Expression"):
comment = comment.replace("@Expression", "", 1)
expressions.append(comment.split("|")[1].strip())
except Exception as Exc:
ValidType = ""
if oricomment.startswith("@ValidRange"):
ValidType = "@ValidRange"
if oricomment.startswith("@ValidList"):
ValidType = "@ValidList"
if oricomment.startswith("@Expression"):
ValidType = "@Expression"
EdkLogger.error('Parser', FORMAT_INVALID, "The syntax for %s of PCD %s.%s is incorrect" % (ValidType, TokenSpaceGuid, PcdCName),
ExtraData=oricomment, File=self.MetaFile, Line=LineNum)
return set(), set(), set()
return set(validateranges), set(validlists), set(expressions)
## Python class representation of table storing platform data
class PlatformTable(MetaFileTable):
_COLUMN_ = '''
ID REAL PRIMARY KEY,
Model INTEGER NOT NULL,
Value1 TEXT NOT NULL,
Value2 TEXT,
Value3 TEXT,
Scope1 TEXT,
Scope2 TEXT,
Scope3 TEXT,
BelongsToItem REAL NOT NULL,
FromItem REAL NOT NULL,
StartLine INTEGER NOT NULL,
StartColumn INTEGER NOT NULL,
EndLine INTEGER NOT NULL,
EndColumn INTEGER NOT NULL,
Enabled INTEGER DEFAULT 0
'''
# used as table end flag, in case the changes to database is not committed to db file
_DUMMY_ = [-1, -1, '====', '====', '====', '====', '====','====', -1, -1, -1, -1, -1, -1, -1]
## Constructor
def __init__(self, Cursor, MetaFile, Temporary, FromItem=0):
MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DSC, Temporary, FromItem)
## Insert table
#
# Insert a record into table Dsc
#
# @param Model: Model of a Dsc item
# @param Value1: Value1 of a Dsc item
# @param Value2: Value2 of a Dsc item
# @param Value3: Value3 of a Dsc item
# @param Scope1: Arch of a Dsc item
# @param Scope2: Module type of a Dsc item
# @param BelongsToItem: The item belongs to which another item
# @param FromItem: The item belongs to which dsc file
# @param StartLine: StartLine of a Dsc item
# @param StartColumn: StartColumn of a Dsc item
# @param EndLine: EndLine of a Dsc item
# @param EndColumn: EndColumn of a Dsc item
# @param Enabled: If this item enabled
#
def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON, Scope3=TAB_DEFAULT_STORES_DEFAULT,BelongsToItem=-1,
FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
(Value1, Value2, Value3, Scope1, Scope2, Scope3) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip(), Scope3.strip())
self.ID = self.ID + self._ID_STEP_
row = [ self.ID,
Model,
Value1,
Value2,
Value3,
Scope1,
Scope2,
Scope3,
BelongsToItem,
FromItem,
StartLine,
StartColumn,
EndLine,
EndColumn,
Enabled
]
self.CurrentContent.append(row)
return self.ID
## Query table
#
# @param Model: The Model of Record
# @param Scope1: Arch of a Dsc item
# @param Scope2: Module type of a Dsc item
# @param BelongsToItem: The item belongs to which another item
# @param FromItem: The item belongs to which dsc file
#
# @retval: A recordSet of all found records
#
def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
QueryTab = self.CurrentContent
result = [item for item in QueryTab if item[1] == Model and item[-1]>0 ]
if Scope1 is not None and Scope1 != TAB_ARCH_COMMON:
Sc1 = set(['COMMON'])
Sc1.add(Scope1)
result = [item for item in result if item[5] in Sc1]
Sc2 = set( ['COMMON','DEFAULT'])
if Scope2 and Scope2 != TAB_COMMON:
if '.' in Scope2:
Index = Scope2.index('.')
NewScope = TAB_COMMON + Scope2[Index:]
Sc2.add(NewScope)
Sc2.add(Scope2)
result = [item for item in result if item[6] in Sc2]
if BelongsToItem is not None:
result = [item for item in result if item[8] == BelongsToItem]
else:
result = [item for item in result if item[8] < 0]
if FromItem is not None:
result = [item for item in result if item[9] == FromItem]
result = [ [r[2],r[3],r[4],r[5],r[6],r[7],r[0],r[9]] for r in result ]
return result
## Factory class to produce different storage for different type of meta-file
class MetaFileStorage(object):
_FILE_TABLE_ = {
MODEL_FILE_INF : ModuleTable,
MODEL_FILE_DEC : PackageTable,
MODEL_FILE_DSC : PlatformTable,
MODEL_FILE_OTHERS : MetaFileTable,
}
_FILE_TYPE_ = {
".inf" : MODEL_FILE_INF,
".dec" : MODEL_FILE_DEC,
".dsc" : MODEL_FILE_DSC,
}
_ObjectCache = {}
## Constructor
def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False, FromItem=None):
# no type given, try to find one
key = (MetaFile.Path, FileType,Temporary,FromItem)
if key in Class._ObjectCache:
return Class._ObjectCache[key]
if not FileType:
            if MetaFile.Type in Class._FILE_TYPE_:  # __new__ has no 'self'; use the class
FileType = Class._FILE_TYPE_[MetaFile.Type]
else:
FileType = MODEL_FILE_OTHERS
# don't pass the type around if it's well known
if FileType == MODEL_FILE_OTHERS:
Args = (Cursor, MetaFile, FileType, Temporary)
else:
Args = (Cursor, MetaFile, Temporary)
if FromItem:
Args = Args + (FromItem,)
# create the storage object and return it to caller
reval = Class._FILE_TABLE_[FileType](*Args)
if not Temporary:
Class._ObjectCache[key] = reval
return reval
| bsd-2-clause | 8,104,280,331,112,390,000 | 36.492063 | 155 | 0.544035 | false |
ssharpjr/taskbuster-boilerplate | taskbuster/apps/taskmanager/models.py | 1 | 2262 | # -*- coding: utf-8 -*-
from django.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.core.validators import RegexValidator
from . import managers
class Profile(models.Model):
# Relations
user = models.OneToOneField(
settings.AUTH_USER_MODEL,
related_name="profile",
verbose_name=_("user")
)
# Attributes - Mandatory
interaction = models.PositiveIntegerField(
default=0,
verbose_name=_("interaction")
)
# Attributes - Optional
# Object Manager
objects = managers.ProfileManager()
# Custom Properties
@property
def username(self):
return self.user.username
# Methods
# Meta and String
class Meta:
verbose_name = _("Profile")
verbose_name_plural = _("Profiles")
ordering = ("user",)
def __str__(self):
return self.user.username
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_profile_for_new_user(sender, created, instance, **kwargs):
if created:
profile = Profile(user=instance)
profile.save()
class Project(models.Model):
# Relations
user = models.ForeignKey(
Profile,
related_name="projects",
verbose_name=_("user")
)
# Attributes - Mandatory
name = models.CharField(
max_length=100,
verbose_name=_("name"),
help_text=_("Enter the project name")
)
color = models.CharField(
max_length=7,
default="#fff",
validators=[RegexValidator(
"(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)")],
verbose_name=_("color"),
help_text=_("Enter the hex color code, like #ccc or #cccccc")
)
# Attributes - Optional
# Object Manager
objects = managers.ProjectManager()
# Custom Properties
# Methods
# Meta and String
class Meta:
verbose_name = _("Project")
verbose_name_plural = _("Projects")
ordering = ("user", "name")
unique_together = ("user", "name")
def __str__(self):
return "%s - %s" % (self.user, self.name)
| mit | 2,343,749,543,026,243,000 | 25 | 69 | 0.599912 | false |
jeffstaley/cyflash | cyflash/cyacd_test.py | 1 | 1628 | from cStringIO import StringIO
import unittest
import cyacd
class BootloaderRowTest(unittest.TestCase):
def testParseRow(self):
rowdata = ":000018008000100020110C0000E92D0000E92D000008B5024B83F3088802F0E8F800100020F8B572B6002406236343704D0134EE187279707831793778B3781202F67800020A4338431904084337063843002103F09FF8032CE7D1291C12316548802203F08EF80023191C634AFF25141C143418593C32061CAE434F00C4B2351CD219002CB8"
blrow = cyacd.BootloaderRow.read(rowdata)
self.assertEquals(blrow.array_id, 0)
self.assertEquals(blrow.row_number, 0x18)
self.assertEquals(len(blrow.data), 0x80)
self.assertEquals(blrow.data.encode('hex').upper(), rowdata[11:-2])
def testParseFile(self):
filedata = """04A611931101
:000018008000100020110C0000E92D0000E92D000008B5024B83F3088802F0E8F800100020F8B572B6002406236343704D0134EE187279707831793778B3781202F67800020A4338431904084337063843002103F09FF8032CE7D1291C12316548802203F08EF80023191C634AFF25141C143418593C32061CAE434F00C4B2351CD219002CB8
:000019008007D0167857787619013C3770E4B20232F5E7C0B204330918282BE4D1564A574B574C584D584F59491A6099262C604F20574A584B584D3E600F240860574F58491A6003262C608720564B574C3E603C220860564DD82756491A6038012260554A2E60554B0860554C554D56481660C0270E2655491E6002222C6093260760534BB3"""
bldata = cyacd.BootloaderData.read(StringIO(filedata))
self.assertEquals(bldata.silicon_id, 0x04A61193)
self.assertEquals(bldata.silicon_rev, 0x11)
self.assertEquals(bldata.checksum_type, 0x01)
self.assertEquals(len(bldata.rows), 2)
self.assertTrue(all(isinstance(row, cyacd.BootloaderRow) for row in bldata.rows))
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | 3,166,059,054,195,194,000 | 55.137931 | 283 | 0.869779 | false |
CitoEngine/cito_engine | app/cito_engine/actions/json_formatter.py | 1 | 1266 | """Copyright 2014 Cyrus Dasadia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import simplejson
def create_json_parameters(event_action, incident, message=None):
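    # Substitutes the "__EVENTID__", "__INCIDENTID__", "__ELEMENT__" and
    # "__MESSAGE__" placeholders in the stored plugin parameters with this
    # incident's values, then wraps the result in the payload expected by
    # the plugin ({"plugin": ..., "parameters": ...}).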
plugin_parameters = event_action.pluginParameters
plugin_parameters = re.sub('"__EVENTID__"', simplejson.dumps(unicode(incident.event.id)), plugin_parameters)
plugin_parameters = re.sub('"__INCIDENTID__"', simplejson.dumps(unicode(incident.id)), plugin_parameters)
plugin_parameters = re.sub('"__ELEMENT__"', simplejson.dumps(unicode(incident.element)), plugin_parameters)
plugin_parameters = re.sub('"__MESSAGE__"', simplejson.dumps(unicode(message)), plugin_parameters)
return '{"plugin": %s, "parameters": %s}' % (simplejson.dumps(unicode(event_action.plugin.name)), plugin_parameters) | apache-2.0 | 1,861,107,108,582,375,000 | 47.730769 | 120 | 0.756714 | false |
quantumlib/Cirq | dev_tools/profiling/benchmark_serializers.py | 1 | 4296 | # Copyright 2020 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for benchmarking serialization of large circuits.
This tool was originally introduced to enable comparison of the two JSON
serialization protocols (gzip and non-gzip):
https://github.com/quantumlib/Cirq/pull/3662
This is part of the "efficient serialization" effort:
https://github.com/quantumlib/Cirq/issues/3438
Run this benchmark with the following command (make sure to install cirq-dev):
python3 dev_tools/profiling/benchmark_serializers.py \
--num_gates=<int> --nesting_depth=<int> --num_repetitions=<int>
WARNING: runtime increases exponentially with nesting_depth. Values much
higher than nesting_depth=10 are not recommended.
"""
import argparse
import sys
import timeit
import numpy as np
import cirq
_JSON_GZIP = 'json_gzip'
_JSON = 'json'
NUM_QUBITS = 8
SUFFIXES = ['B', 'kB', 'MB', 'GB', 'TB']
def serialize(serializer: str, num_gates: int, nesting_depth: int) -> int:
""""Runs a round-trip of the serializer."""
circuit = cirq.Circuit()
for _ in range(num_gates):
which = np.random.choice(['expz', 'expw', 'exp11'])
if which == 'expw':
q1 = cirq.GridQubit(0, np.random.randint(NUM_QUBITS))
circuit.append(
cirq.PhasedXPowGate(
phase_exponent=np.random.random(), exponent=np.random.random()
).on(q1)
)
elif which == 'expz':
q1 = cirq.GridQubit(0, np.random.randint(NUM_QUBITS))
circuit.append(cirq.Z(q1) ** np.random.random())
elif which == 'exp11':
q1 = cirq.GridQubit(0, np.random.randint(NUM_QUBITS - 1))
q2 = cirq.GridQubit(0, q1.col + 1)
circuit.append(cirq.CZ(q1, q2) ** np.random.random())
cs = [circuit]
for _ in range(1, nesting_depth):
fc = cs[-1].freeze()
cs.append(cirq.Circuit(fc.to_op(), fc.to_op()))
test_circuit = cs[-1]
if serializer == _JSON:
json_data = cirq.to_json(test_circuit)
assert json_data is not None
data_size = len(json_data)
cirq.read_json(json_text=json_data)
elif serializer == _JSON_GZIP:
gzip_data = cirq.to_json_gzip(test_circuit)
assert gzip_data is not None
data_size = len(gzip_data)
cirq.read_json_gzip(gzip_raw=gzip_data)
return data_size
def main(
num_gates: int,
nesting_depth: int,
num_repetitions: int,
setup: str = 'from __main__ import serialize',
):
for serializer in [_JSON_GZIP, _JSON]:
print()
print(f'Using serializer "{serializer}":')
command = f'serialize(\'{serializer}\', {num_gates}, {nesting_depth})'
time = timeit.timeit(command, setup, number=num_repetitions)
print(f'Round-trip serializer time: {time / num_repetitions}s')
data_size = float(serialize(serializer, num_gates, nesting_depth))
suffix_idx = 0
        while data_size >= 1024:
data_size /= 1024
suffix_idx += 1
print(f'Serialized data size: {data_size} {SUFFIXES[suffix_idx]}.')
def parse_arguments(args):
parser = argparse.ArgumentParser('Benchmark a serializer.')
parser.add_argument(
'--num_gates', default=100, type=int, help='Number of gates at the bottom nesting layer.'
)
parser.add_argument(
'--nesting_depth',
default=1,
type=int,
help='Depth of nested subcircuits. Total gate count will be 2^nesting_depth * num_gates.',
)
parser.add_argument(
'--num_repetitions', default=10, type=int, help='Number of times to repeat serialization.'
)
return vars(parser.parse_args(args))
if __name__ == '__main__':
main(**parse_arguments(sys.argv[1:]))
| apache-2.0 | 1,166,759,302,246,157,000 | 33.368 | 98 | 0.64176 | false |
Chetox/RCode | Cannon_Avanzado/client.py | 1 | 2002 | #!/usr/bin/python
# -*- coding:utf-8; tab-width:4; mode:python -*-
import sys
import Ice
Ice.loadSlice('-I {} cannon.ice'.format(Ice.getSliceDir()))
import Cannon
import time
from matrix_utils import matrix_multiply
def load_matrix_from_file(filename):
with file(filename) as f:
rows = f.readlines()
order = len(rows[0].split())
retval = Cannon.Matrix(order, [])
for row in rows:
rowdata = row.split()
assert len(rowdata) == order
for n in rowdata:
retval.data.append(float(n))
assert len(retval.data) == order ** 2
return retval
class Client(Ice.Application):
def run(self, argv):
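        # argv[1] is the stringified Ice proxy of the remote Operations
        # servant; argv[2] is the test-case prefix used to load the
        # m/<example>A, m/<example>B and m/<example>C matrix files.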
        t_dist = 0
        t_secu = 0
loader = self.string_to_proxy(argv[1], Cannon.OperationsPrx)
example = argv[2]
A = load_matrix_from_file('m/{}A'.format(example))
B = load_matrix_from_file('m/{}B'.format(example))
t_dist = time.time()
C = loader.matrixMultiply(A, B)
t_dist = time.time() - t_dist
t_secu = time.time()
c = matrix_multiply(A,B)
t_secu = time.time() - t_secu
expected = load_matrix_from_file('m/{}C'.format(example))
retval = (C == expected)
print("OK" if retval else "FAIL")
print("El tiempo que ha tardado en distribuido ha sido {}".format(t_dist))
print("El tiempo que ha tardado en secuencial ha sido {}".format(t_secu))
if(C == None): print("Timeout expired")
return not retval
def string_to_proxy(self, str_proxy, iface):
proxy = self.communicator().stringToProxy(str_proxy)
retval = iface.checkedCast(proxy)
if not retval:
raise RuntimeError('Invalid proxy %s' % str_proxy)
return retval
def print_matrix(self, M):
ncols = M.ncols
nrows = len(M.data) / ncols
for r in range(nrows):
            print(M.data[r * ncols:(r + 1) * ncols])
if __name__ == '__main__':
sys.exit(Client().main(sys.argv))
| apache-2.0 | -3,968,228,681,157,256,000 | 25.342105 | 82 | 0.586414 | false |
bitcraze/crazyflie-lib-python | test/crtp/test_crtpstack.py | 1 | 2875 | # -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import unittest
from cflib.crtp.crtpstack import CRTPPacket
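# Header layout exercised by these tests (inferred from the expected values,
# e.g. port=2, channel=1 -> 0x2d): bits 7-4 carry the port, bits 3-2 are
# fixed link bits (0b11), and bits 1-0 carry the channel.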
class CRTPPacketTest(unittest.TestCase):
def setUp(self):
self.callback_count = 0
self.sut = CRTPPacket()
def test_that_port_and_channle_is_encoded_in_header(self):
# Fixture
self.sut.set_header(2, 1)
# Test
actual = self.sut.get_header()
# Assert
expected = 0x2d
self.assertEqual(expected, actual)
def test_that_port_is_truncated_in_header(self):
# Fixture
port = 0xff
self.sut.set_header(port, 0)
# Test
actual = self.sut.get_header()
# Assert
expected = 0xfc
self.assertEqual(expected, actual)
def test_that_channel_is_truncated_in_header(self):
# Fixture
channel = 0xff
self.sut.set_header(0, channel)
# Test
actual = self.sut.get_header()
# Assert
expected = 0x0f
self.assertEqual(expected, actual)
def test_that_port_and_channel_is_encoded_in_header_when_set_separat(self):
# Fixture
self.sut.port = 2
self.sut.channel = 1
# Test
actual = self.sut.get_header()
# Assert
expected = 0x2d
self.assertEqual(expected, actual)
def test_that_default_header_is_set_when_constructed(self):
# Fixture
# Test
actual = self.sut.get_header()
# Assert
expected = 0x0c
self.assertEqual(expected, actual)
def test_that_header_is_set_when_constructed(self):
# Fixture
sut = CRTPPacket(header=0x21)
# Test
actual = sut.get_header()
# Assert
self.assertEqual(0x2d, actual)
self.assertEqual(2, sut.port)
self.assertEqual(1, sut.channel)
| gpl-2.0 | 3,989,698,133,982,647 | 26.644231 | 79 | 0.575652 | false |
csixteen/HackerRank_Python | Algorithms/magic_square.py | 1 | 1071 | class Solution(object):
MAGIC_SQUARES = [
[4, 9, 2, 3, 5, 7, 8, 1, 6],
[2, 9, 4, 7, 5, 3, 6, 1, 8],
[8, 3, 4, 1, 5, 9, 6, 7, 2],
[4, 3, 8, 9, 5, 1, 2, 7, 6],
[6, 1, 8, 7, 5, 3, 2, 9, 4],
[8, 1, 6, 3, 5, 7, 4, 9, 2],
[6, 7, 2, 1, 5, 9, 8, 3, 4],
[2, 7, 6, 9, 5, 1, 4, 3, 8]
]
def magic_square(self, s):
totals = []
for ms in self.MAGIC_SQUARES:
totals.append(sum([abs(ms_e - s_e) for ms_e, s_e in zip(ms, s)]))
return min(totals)
import unittest
class SolutionTest(unittest.TestCase):
def test_magic_square(self):
s = Solution()
self.assertEqual(0, s.magic_square([6, 1, 8, 7, 5, 3, 2, 9, 4]))
self.assertEqual(1, s.magic_square([4, 9, 2, 3, 5, 7, 8, 1, 5]))
self.assertEqual(4, s.magic_square([4, 8, 2, 4, 5, 7, 6, 1, 6]))
self.assertEqual(45, s.magic_square([0, 0, 0, 0, 0, 0, 0, 0, 0]))
self.assertEqual(36, s.magic_square([9, 9, 9, 9, 9, 9, 9, 9, 9]))
if __name__ == "__main__":
unittest.main()
| mit | -3,675,657,614,335,557,600 | 32.46875 | 77 | 0.459384 | false |
alexpap/exareme | exareme-tools/madis/src/functionslocal/aggregate/approximatedmedian.py | 1 | 2110 | import inspect
import math
import random
import numpy
from fractions import Fraction
import sys
import json
from array import *
class approximatedmedian:
registered = True #Value to define db operator
def __init__(self):
self.n = 0
self.totalnums = 0
self.numberofcolumns = 5
self.colname = []
self.buckets = []
self.minvalues = []
self.maxvalues = []
self.nums = []
def step(self, *args):
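        # Each step consumes one histogram bucket:
        # args = (column name, bucket id, bucket min, bucket max, count).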
try:
self.colname.append(args[0])
self.buckets.append(int(args[1]))
self.minvalues.append(float(args[2]))
self.maxvalues.append(float(args[3]))
self.nums.append(int(args[4]))
self.totalnums += int(args[4])
self.n += 1
except (ValueError, TypeError):
raise
def final(self):
# print self.nums
# print self.totalnums / 2.0
yield ('colname0', 'val', 'bucket', 'numsBeforeMedian', 'numsAfterMedian')
# yield ('attr1', 'attr2', 'val', 'reccount')
currentsum = 0
for i in xrange(0,self.n):
# print i,self.totalnums / 2.0,self.nums[i],currentsum
currentsum += self.nums[i]
if currentsum >= (self.totalnums / 2.0):
break
        # Interpolate linearly inside the bucket that contains the median:
        # (currentsum - nums[i]) values lie below this bucket, so the median
        # sits a fraction (totalnums/2 - values below) / nums[i] into it,
        # consistent with numsBeforeMedian computed below.
        fraction = (self.totalnums / 2.0 - (currentsum - self.nums[i])) / self.nums[i]
        median = self.minvalues[i] + fraction * (self.maxvalues[i] - self.minvalues[i])
# print (self.totalnums / 2.0), currentsum, currentsum -self.nums[i]
numsBeforeMedian = (self.totalnums / 2.0) - (currentsum - self.nums[i])
numsAfterMedian = currentsum - (self.totalnums / 2.0)
yield self.colname[0], median, i, numsBeforeMedian,numsAfterMedian
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
import setpath
#from functions import *
#testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.testmod()
| mit | -1,993,861,639,533,290,000 | 23.823529 | 123 | 0.572512 | false |
kunaltyagi/nsiqcppstyle | rules/RULE_4_1_B_locate_each_enum_item_in_seperate_line.py | 1 | 3034 | """
Locate each enum item on a separate line.
== Violation ==
enum A {
A_A, A_B <== Violation
}
== Good ==
enum A {
A_A, <== Good
A_B
}
"""
from nsiqunittest.nsiqcppstyle_unittestbase import *
from nsiqcppstyle_rulehelper import *
from nsiqcppstyle_reporter import *
from nsiqcppstyle_rulemanager import *
def RunRule(lexer, typeName, typeFullName, decl, contextStack, typeContext):
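    # Walk the tokens of an enum body and report any two enum items that
    # share a line: stop at each COMMA/RBRACE in this enum's context and
    # compare the line numbers of the surrounding tokens.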
if not decl and typeContext is not None:
# column = GetRealColumn(lexer.GetCurToken())
if typeName == "ENUM":
lexer._MoveToToken(typeContext.startToken)
while(True):
nt = lexer.GetNextTokenInTypeList(
["COMMA", "RBRACE"], False, True)
if nt is None or nt == typeContext.endToken:
break
if typeContext != nt.contextStack.Peek():
continue
nt2 = lexer.PeekNextTokenSkipWhiteSpaceAndCommentAndPreprocess()
nt3 = lexer.PeekPrevTokenSkipWhiteSpaceAndCommentAndPreprocess()
# print nt, nt2,nt3
if nt.lineno == nt2.lineno and nt3.lineno == nt.lineno:
nsiqcppstyle_reporter.Error(
                        nt2, __name__, "Each enum item (%s) should be located on a separate line" % nt2.value)
ruleManager.AddTypeNameRule(RunRule)
##########################################################################
# Unit Test
##########################################################################
class testRule(nct):
def setUpRule(self):
ruleManager.AddTypeNameRule(RunRule)
def test1(self):
self.Analyze("test/thisFile.c",
"""
enum A {
}
""")
self.ExpectSuccess(__name__)
def test2(self):
self.Analyze("test/thisFile.c",
"""
enum C {
AA, BB
}
""")
self.ExpectError(__name__)
def test3(self):
self.Analyze("test/thisFile.c",
"""
enum C {
AA = 4,
BB
}
""")
self.ExpectSuccess(__name__)
def test4(self):
self.Analyze("test/thisFile.c",
"""
enum C {
AA = 4
,BB
}
""")
self.ExpectSuccess(__name__)
def test5(self):
self.Analyze("test/thisFile.c",
"""
enum C
{
AA = 4
,BB
} TT;
""")
self.ExpectSuccess(__name__)
def test6(self):
self.Analyze("test/thisFile.c",
"""
enum COLOR
{
COLOR_TRANSPARENT = RGB(0, 0, 255),
COLOR_ROOM_IN_OUT = 0xffff00,
COLOR_CHAT_ITEM = 0xff9419,
COLOR_CHAT_MY = 0x00b4ff,
COLOR_CHAT_YOUR = 0xa3d5ff,
COLOR_ROOM_INFO = 0x00ffff,
COLOR_RESULT_SCORE = 0xffcc00,
COLOR_RESULT_RATING = 0x00fcff,
COLOR_RESULT_POINT = 0x33ff00
}; """)
self.ExpectSuccess(__name__)
| gpl-2.0 | -3,059,917,030,099,528,000 | 23.711864 | 112 | 0.49176 | false |
fishtown-analytics/dbt | test/integration/041_presto_test/test_simple_presto_view.py | 1 | 2230 | from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile
import random
import time
class TestBasePrestoRun(DBTIntegrationTest):
@property
def schema(self):
return "presto_test_41"
@property
def models(self):
return "models"
@property
def project_config(self):
return {
'config-version': 2,
'data-paths': ['data'],
'macro-paths': ['macros'],
'seeds': {
'quote_columns': False,
},
}
@property
def profile_config(self):
return self.presto_profile()
def assert_nondupes_pass(self):
# The 'dupe' model should fail, but all others should pass
test_results = self.run_dbt(['test'], expect_pass=False)
for result in test_results:
if 'dupe' in result.node.name:
self.assertIsNone(result.error)
self.assertFalse(result.skipped)
self.assertTrue(result.status > 0)
# assert that actual tests pass
else:
self.assertIsNone(result.error)
self.assertFalse(result.skipped)
# status = # of failing rows
self.assertEqual(result.status, 0)
class TestSimplePrestoRun(TestBasePrestoRun):
def setUp(self):
super().setUp()
for conn in self.adapter.connections.in_use.values():
conn.transaction_open
@use_profile('presto')
def test__presto_simple_run(self):
# make sure seed works twice. Full-refresh is a no-op
self.run_dbt(['seed'])
self.run_dbt(['seed', '--full-refresh'])
results = self.run_dbt()
self.assertEqual(len(results), 2)
self.assert_nondupes_pass()
class TestUnderscorePrestoRun(TestBasePrestoRun):
prefix = "_test{}{:04}".format(int(time.time()), random.randint(0, 9999))
@use_profile('presto')
def test_presto_run_twice(self):
self.run_dbt(['seed'])
results = self.run_dbt()
self.assertEqual(len(results), 2)
self.assert_nondupes_pass()
results = self.run_dbt()
self.assertEqual(len(results), 2)
self.assert_nondupes_pass()
| apache-2.0 | 3,608,262,232,624,162,000 | 27.961039 | 77 | 0.583857 | false |
nihlaeth/Nagios_check_slackpkg | check_slackpkg_nonpriv.py | 1 | 1673 | #!/usr/bin/env python
"""Nagios module for monitoring available updates via slackpkg."""
import subprocess
import sys
import os
# pylint: disable=invalid-name
# run check-updates to poll mirror for changes
result = []
try:
result = subprocess.check_output("myslackpkg check-updates", shell=True).split("\n")
except (OSError, subprocess.CalledProcessError) as error:
print "Failed to check for updates: %s" % error
sys.exit(3)
updates = "idk"
for line in result:
if "good news" in line:
updates = "no"
elif "News on" in line:
updates = "yes"
if updates == "idk":
print "Error parsing slackpkg check-updates status"
sys.exit(3)
elif updates == "yes":
# fetch updated package list
try:
_ = subprocess.check_output("myslackpkg update &> /dev/null", shell=True)
except (OSError, subprocess.CalledProcessError) as error:
print "Failed to update package list: %s" % error
sys.exit(3)
# Now the packages list is up to date, check if we need to upgrade anything
result = []
devnull = open(os.devnull, 'w')
try:
result = subprocess.check_output([
"myslackpkg",
"upgrade-all"], stderr=devnull).split("\n")
except (OSError, subprocess.CalledProcessError) as error:
print "Failed to check for upgrades: %s" % error
sys.exit(3)
packages = []
for line in result:
if ".txz" in line:
packages.append(line.strip())
if "update gpg" in line:
print "Error: need up-to-date gpg key!"
sys.exit(3)
if len(packages) == 0:
print "OK: everything up-to-date"
sys.exit(0)
else:
print "Updates available: " + " ".join(packages)
sys.exit(2)
| gpl-3.0 | 4,253,797,037,185,081,000 | 27.355932 | 88 | 0.654513 | false |
haphaeu/yoshimi | sql/data_analysis/database.py | 1 | 3122 | from os import path
from sqlalchemy import (create_engine,
Column,
String,
Integer,
Boolean,
Table,
ForeignKey)
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base
database_filename = 'twitter.sqlite3'
directory = path.abspath(path.dirname(__file__))
database_filepath = path.join(directory, database_filename)
engine_url = 'sqlite:///{}'.format(database_filepath)
engine = create_engine(engine_url)
# Our database class objects are going to inherit from
# this class
Base = declarative_base(bind=engine)
# create a configured “Session” class
Session = sessionmaker(bind=engine, autoflush=False)
# Create a Session
session = Session()
hashtag_tweet = Table('hashtag_tweet', Base.metadata,
Column('hashtag_id', Integer, ForeignKey('hashtags.id'), nullable=False),
Column('tweet_id', Integer, ForeignKey('tweets.id'), nullable=False))
class Tweet(Base):
__tablename__ = 'tweets'
id = Column(Integer, primary_key=True)
tid = Column(String(100), nullable=False)
tweet = Column(String(300), nullable=False)
user_id = Column(Integer, ForeignKey('users.id'), nullable=False)
coordinates = Column(String(50), nullable=True)
user = relationship('User', backref='tweets')
created_at = Column(String(100), nullable=False)
favorite_count = Column(Integer)
in_reply_to_screen_name = Column(String)
in_reply_to_status_id = Column(Integer)
in_reply_to_user_id = Column(Integer)
lang = Column(String)
quoted_status_id = Column(Integer)
retweet_count = Column(Integer)
source = Column(String)
is_retweet = Column(Boolean)
hashtags = relationship('Hashtag',
secondary='hashtag_tweet',
back_populates='tweets')
def __repr__(self):
return '<Tweet {}>'.format(self.id)
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
uid = Column(String(50), nullable=False)
name = Column(String(100), nullable=False)
screen_name = Column(String)
created_at = Column(String)
# Nullable
description = Column(String)
followers_count = Column(Integer)
friends_count = Column(Integer)
statuses_count = Column(Integer)
favourites_count = Column(Integer)
listed_count = Column(Integer)
geo_enabled = Column(Boolean)
lang = Column(String)
def __repr__(self):
return '<User {}>'.format(self.id)
class Hashtag(Base):
__tablename__ = 'hashtags'
id = Column(Integer, primary_key=True)
text = Column(String(200), nullable=False)
tweets = relationship('Tweet',
secondary='hashtag_tweet',
back_populates='hashtags')
def __repr__(self):
return '<Hashtag {}>'.format(self.text)
def init_db():
Base.metadata.create_all()
if not path.isfile(database_filepath):
init_db()
| lgpl-3.0 | 5,123,633,034,681,498,000 | 29.271845 | 95 | 0.626042 | false |
EnceladOnline/interfaX | icon.py | 1 | 1967 | from tkinter import *
from tkinter import ttk
import func
class Icon:
def __init__(self, main, icon):
# Affiche les icon sur le tab
self.main = main
self.master = self.main.cache["CurrentTabID"]
self.icon = icon
if self.icon[1][1] == None:
self.icon_label()
else:
self.icon_image()
def icon_label(self):
self.cadre = ttk.Button(self.main.cache["CurrentTabID"],
text = self.icon[0], command = self.launch,
style = "STYLE_B.TButton", takefocus = 0, cursor = "hand2")
self.icon_tagorid = self.main.cache["CurrentTabID"].create_window(self.icon[2][0],
self.icon[2][1], window = self.cadre, anchor = "se")
self.main.cache["CurrentIconID"] = self.cadre
self.main.cache["CurrentIcon"] = self.icon
# Bind
self.cadre.bind("<Button-3>", self.icon_menu_eventhandler)
# Utilisé dans InterfaX 1
# self.cadre.bind("<Motion>", self.icon_title_eventhandler)
def icon_image(self):
try:
self.main.cache[self.icon[0]] = PhotoImage(file = self.icon[1][1])
except:
self.main.cache[self.icon[0]] = None
self.cadre = ttk.Button(self.main.cache["CurrentTabID"],
image = self.main.cache[self.icon[0]], takefocus = 0,
command = self.launch, cursor = "hand2")
self.icon_tagorid = self.main.cache["CurrentTabID"].create_window(self.icon[2][0],
self.icon[2][1], window = self.cadre, anchor = "se")
# Bind
self.cadre.bind("<Button-3>", self.icon_menu_eventhandler)
self.cadre.bind("<Motion>", self.icon_title_eventhandler)
def launch(self):
path_list = self.icon[3]
func.launcher(path_list)
def icon_menu_eventhandler(self, event):
self.main.cache["CurrentIconID"] = self.cadre
self.main.cache["CurrentIcon"] = self.icon
self.main.cache["CurrentIconTAGORID"] = self.icon_tagorid
self.main.icon_menu_eventhandler()
def icon_title_eventhandler(self, event):
self.main.strvar_icon_title.set(self.icon[0])
| gpl-2.0 | -8,296,808,383,988,564,000 | 22.987805 | 84 | 0.654629 | false |
wilkinsg/piweb | watched.py | 1 | 2615 | #!/usr/bin/python
import hash
import os
import config
import video_info
watched_cache = {}
def prepwatched( conn ):
global watched_cache
result = conn.execute( "SELECT * FROM history" )
queueitem = result.fetchone()
while( queueitem ):
watched_cache[ queueitem[ 0 ] ] = True
queueitem = result.fetchone()
# def is_list_watched( hashlist, conn ):
# orlist = ( '?,' * len( hashlist ) ).rstrip( ',' )
# result = conn.execute( "SELECT * FROM history WHERE hash in ({})".format( orlist ), tuple( hashlist ) )
# if( result.rowcount() == len( hashlist ) ):
# return( True )
# else:
# return( False )
def is_watched( hash, conn ):
global watched_cache
try:
return( watched_cache[ hash ] )
except KeyError:
result = conn.execute( "SELECT * FROM history WHERE hash = ?", ( hash, ) )
if( result.fetchone() ):
watched_cache[ hash ] = True
return( True )
else:
watched_cache[ hash ] = False
return( False )
def is_directory_watched( dir, conn ):
dir = os.path.join( config.get_media_dir(), dir.lstrip( '/' ) )
for root, dirs, files in os.walk( dir ):
for filename in files:
if( video_info.is_video( filename ) ):
file = os.path.join( root, filename )
if( False == is_watched( hash.hash_name( file ), conn ) ):
return( False )
return( True )
def mark_all_watched( list, conn ):
global watched_cache
for filename in list:
input = hash.hash_name( filename )
if( input and len( input ) == 32 and not is_watched( input, conn ) ):
conn.execute( "INSERT INTO history VALUES( ? )", ( input, ) )
watched_cache[ input ] = True
conn.commit()
def mark_hash_watched( input, conn, docommit=True ):
global watched_cache
if( input and len( input ) == 32 and not is_watched( input, conn ) ):
conn.execute( "INSERT INTO history VALUES( ? )", ( input, ) )
watched_cache[ input ] = True
if( docommit ):
conn.commit()
return True
return( False )
def mark_hash_unwatched( input, conn ):
global watched_cache
if( input and len( input ) == 32 ):
conn.execute( "DELETE FROM history WHERE hash=?", ( input, ) )
watched_cache[ input ] = False
conn.commit()
return True
return( False )
def mark_watched( filename, conn ):
input = hash.hash_name( filename )
mark_hash_watched( input, conn )
| mit | 3,103,893,710,333,927,000 | 30.130952 | 109 | 0.559465 | false |
yangl1996/libpagure | tests/test_api.py | 1 | 12568 | import pytest
from libpagure import Pagure
@pytest.fixture(scope='module')
def simple_pg():
""" Create a simple Pagure object
to be used in test
"""
pg = Pagure(pagure_repository="testrepo")
return pg
def test_pagure_object():
""" Test the pagure object creation """
pg = Pagure(pagure_token="a token",
pagure_repository="test_repo")
assert pg.token == "a token"
assert pg.repo == "test_repo"
assert pg.namespace is None
assert pg.username is None
assert pg.instance == "https://pagure.io"
assert pg.insecure is False
assert pg.header == {"Authorization": "token a token"}
basic_url_data = [
(None, None, 'testrepo', 'https://pagure.io/api/0/testrepo/'),
(None, 'testnamespace', 'testrepo',
'https://pagure.io/api/0/testnamespace/testrepo/'),
('testfork', None, 'testrepo',
'https://pagure.io/api/0/fork/testfork/testrepo/'),
('testfork', 'testnamespace', 'testrepo',
'https://pagure.io/api/0/fork/testfork/testnamespace/testrepo/'),
]
@pytest.mark.parametrize("user, namespace, repo, expected",
basic_url_data)
def test_create_basic_url(user, namespace, repo, expected):
""" Test creation of url in function of argument
passed to the Pagure class.
"""
pg = Pagure(pagure_repository=repo,
fork_username=user,
namespace=namespace)
url = pg.create_basic_url()
assert url == expected
def test_api_version(mocker, simple_pg):
""" Test the call to the version API """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.api_version()
Pagure._call_api.assert_called_once_with('https://pagure.io/api/0/version')
def test_list_users(mocker, simple_pg):
""" Test the call to the users API """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_users(pattern='c')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/users', params={'pattern': 'c'})
def test_list_tags(mocker, simple_pg):
""" Test the call to the tags API """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_tags(pattern='easy')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/tags', params={'pattern': 'easy'})
def test_list_groups(mocker, simple_pg):
""" Test the call to the groups API """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_groups()
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/groups', params=None)
def test_error_codes(mocker, simple_pg):
""" Test the call to the error codes API """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.error_codes()
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/error_codes')
pr_data = [
('teststatus', 'testassignee', 'testauthor',
{'status': 'teststatus', 'assignee': 'testassignee', 'author': 'testauthor'}),
(None, None, None, {})
]
@pytest.mark.parametrize("status, assignee, author, expected", pr_data)
def test_list_requests(mocker, simple_pg, status, assignee, author, expected):
""" Test the API call to the pull-requests endpoint """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_requests(status, assignee, author)
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/pull-requests', params=expected)
def test_request_info(mocker, simple_pg):
""" Test the API call to get pull-request info """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.request_info('123')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/pull-request/123')
def test_merge_request(mocker, simple_pg):
""" Test the API call to merge a pull-request """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.merge_request('123')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/pull-request/123/merge', method='POST')
def test_close_request(mocker, simple_pg):
""" Test the API call to close a pull-request """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.close_request('123')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/pull-request/123/close', method='POST')
comment_data = [
("test body", None, None, None, {'comment': 'test body'}),
("test body", "testcommit", "testfilename", "testrow",
{'comment': 'test body', 'commit': 'testcommit', 'filename': 'testfilename',
'row': 'testrow'})
]
@pytest.mark.parametrize("body, commit, filename, row, expected", comment_data)
def test_comment_request(mocker, simple_pg, body, commit, filename, row, expected):
""" Test the API call to comment on a pull-request """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.comment_request('123', body, commit, filename, row)
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/pull-request/123/comment', method='POST',
data=expected)
flag_data = [
('testuser', 'testpercent', 'testcomment', 'testurl', None, None,
{'username': 'testuser', 'percent': 'testpercent', 'comment': 'testcomment',
'url': 'testurl'}),
('testuser', 'testpercent', 'testcomment', 'testurl', 'testuid', 'testcommit',
{'username': 'testuser', 'percent': 'testpercent', 'comment': 'testcomment',
'url': 'testurl', 'uid': 'testuid', 'commit': 'testcommit'})
]
@pytest.mark.parametrize("username, percent, comment, url, uid, commit, expected",
flag_data)
def test_flag_request(mocker, simple_pg, username, percent, comment, url, uid,
commit, expected):
""" Test the API call to flag a pull-request """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.flag_request('123', username, percent, comment, url, uid, commit)
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/pull-request/123/flag', method='POST',
data=expected)
def test_create_issue(mocker, simple_pg):
""" Test the API call to create an issue """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.create_issue('A test issue', 'Some issue content', True)
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/new_issue', method='POST',
data={'title': 'A test issue', 'issue_content': 'Some issue content',
'priority': True})
def test_list_issues(mocker, simple_pg):
""" Test the API call to list all issues of a project """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_issues('status', 'tags', 'assignee', 'author',
'milestones', 'priority', 'no_stones', 'since')
expected = {'status': 'status', 'tags': 'tags', 'assignee': 'assignee',
'author': 'author', 'milestones': 'milestones', 'priority': 'priority',
'no_stones': 'no_stones', 'since': 'since'}
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/issues', params=expected)
def test_issue_info(mocker, simple_pg):
""" Test the API call to info about a project issue """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.issue_info('123')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/issue/123')
def test_list_comment(mocker, simple_pg):
""" Test the API call to info about a project issue """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.get_list_comment('123', '001')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/issue/123/comment/001')
def test_change_issue_status(mocker, simple_pg):
""" Test the API call to change the status of a project issue """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.change_issue_status('123', 'Closed', 'wontfix')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/issue/123/status', method='POST',
data={'status': 'Closed', 'close_status': 'wontfix'})
def test_change_issue_milestone(mocker, simple_pg):
""" Test the API call to change the milestone of a project issue """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.change_issue_milestone('123', 'Tomorrow')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/issue/123/milestone', method='POST',
data={'milestone': 'Tomorrow'})
def test_comment_issue(mocker, simple_pg):
""" Test the API call to change the milestone of a project issue """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.comment_issue('123', 'A comment')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/issue/123/comment', method='POST',
data={'comment': 'A comment'})
def test_project_tags(mocker, simple_pg):
""" Test the API call to get a project tags """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.project_tags()
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/git/tags')
def test_list_projects(mocker, simple_pg):
""" Test the API call to list all projects on a pagure instance """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_projects('tags', 'pattern', 'username', 'owner',
'namespace', 'fork', 'short', 1, 100)
expected = {'tags': 'tags', 'pattern': 'pattern', 'username': 'username',
'owner': 'owner', 'namespace': 'namespace', 'fork': 'fork',
'short': 'short', 'page': '1', 'per_page': '100'}
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/projects', params=expected)
def test_user_info(mocker, simple_pg):
""" Test the API call to get info about a user """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.user_info('auser')
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/user/auser')
def test_new_project(mocker, simple_pg):
""" Test the API call to list all projects on a pagure instance """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.new_project('name', 'description', 'namespace', 'url',
'avatar_email', True, True)
expected = {'name': 'name', 'description': 'description', 'namespace': 'namespace',
'url': 'url', 'avatar_email': 'avatar_email',
'create_readme': True, 'private': True}
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/new', data=expected, method='POST')
def test_project_branches(mocker, simple_pg):
""" Test the API call to get info about a user """
mocker.patch('libpagure.Pagure._call_api')
simple_pg.project_branches()
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/testrepo/git/branches')
def test_user_activity_stats(mocker, simple_pg):
""" Test the API call to get stats about a user activity"""
mocker.patch('libpagure.Pagure._call_api')
simple_pg.user_activity_stats('auser')
expected = {'username': 'auser'}
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/user/auser/activity/stats', params=expected)
def test_user_activity_stats_by_date(mocker, simple_pg):
""" Test the API call to get stats about a user activity by specific date"""
mocker.patch('libpagure.Pagure._call_api')
simple_pg.user_activity_stats_by_date('auser',"2017-12-30")
expected = {'username': 'auser', 'date' : '2017-12-30'}
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/user/auser/activity/2017-12-30', params=expected)
def test_list_pull_requests(mocker, simple_pg):
""" Test the API call to get stats about a user's pull requests"""
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_pull_requests('auser', 1)
expected = {'username': 'auser', 'page': 1}
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/user/auser/requests/filed', params=expected)
def test_list_prs_actionable_by_user(mocker, simple_pg):
""" Test the API call to list PR's actionable for a given user"""
mocker.patch('libpagure.Pagure._call_api')
simple_pg.list_prs_actionable_by_user('auser', 1)
expected = {'username': 'auser', 'page': 1}
Pagure._call_api.assert_called_once_with(
'https://pagure.io/api/0/user/auser/requests/actionable', params=expected)
| gpl-2.0 | 1,007,721,609,211,840,800 | 38.898413 | 87 | 0.647279 | false |
nixingyang/Kaggle-Competitions | TalkingData AdTracking Fraud Detection/perform_ensembling.py | 1 | 2489 | import os
import glob
import shutil
import datetime
import numpy as np
import pandas as pd
# Dataset
PROJECT_NAME = "TalkingData AdTracking Fraud Detection"
PROJECT_FOLDER_PATH = os.path.join(os.path.expanduser("~"), "Documents/Dataset",
PROJECT_NAME)
# Submission
TEAM_NAME = "Aurora"
SUBMISSION_FOLDER_PATH = os.path.join(PROJECT_FOLDER_PATH, "submission")
os.makedirs(SUBMISSION_FOLDER_PATH, exist_ok=True)
# Ensembling
WORKSPACE_FOLDER_PATH = os.path.join(PROJECT_FOLDER_PATH, "script/Mar_25_3")
KEYWORD = "DL"
# Generate a zip archive for a file
create_zip_archive = lambda file_path: shutil.make_archive(
file_path[:file_path.rindex(".")], "zip",
os.path.abspath(os.path.join(file_path, "..")), os.path.basename(file_path))
def run():
print("Searching for submissions with keyword {} at {} ...".format(
KEYWORD, WORKSPACE_FOLDER_PATH))
submission_file_path_list = sorted(
glob.glob(os.path.join(WORKSPACE_FOLDER_PATH, "*{}*".format(KEYWORD))))
assert len(submission_file_path_list) != 0
ranking_array_list = []
for submission_file_path in submission_file_path_list:
print("Loading {} ...".format(submission_file_path))
submission_df = pd.read_csv(submission_file_path)
print("Ranking the entries ...")
index_series = submission_df["is_attributed"].argsort()
ranking_array = np.zeros(index_series.shape, dtype=np.uint32)
ranking_array[index_series] = np.arange(len(index_series))
ranking_array_list.append(ranking_array)
ensemble_df = submission_df.copy()
ensemble_prediction_array = np.mean(ranking_array_list, axis=0)
apply_normalization = lambda data_array: 1.0 * (data_array - np.min(
data_array)) / (np.max(data_array) - np.min(data_array))
ensemble_df["is_attributed"] = apply_normalization(
ensemble_prediction_array)
ensemble_file_path = os.path.join(
SUBMISSION_FOLDER_PATH, "{} {} {}.csv".format(
TEAM_NAME, KEYWORD,
str(datetime.datetime.now()).split(".")[0]).replace(" ", "_"))
print("Saving submission to {} ...".format(ensemble_file_path))
ensemble_df.to_csv(ensemble_file_path, float_format="%.6f", index=False)
compressed_ensemble_file_path = create_zip_archive(ensemble_file_path)
print("Saving compressed submission to {} ...".format(
compressed_ensemble_file_path))
print("All done!")
if __name__ == "__main__":
run()
| mit | -8,510,683,944,066,425,000 | 36.712121 | 80 | 0.659703 | false |
flennerhag/mlens | mlens/externals/sklearn/validation.py | 1 | 27114 | """
Scikit-learn utilities for input validation.
"""
# Authors: Olivier Grisel
# Gael Varoquaux
# Andreas Mueller
# Lars Buitinck
# Alexandre Gramfort
# Nicolas Tresegnie
# License: BSD 3 clause
import warnings
import numbers
import numpy as np
import scipy.sparse as sp
from .. import six
from ...utils.exceptions import NotFittedError, NonBLASDotWarning, \
DataConversionWarning
try:
from inspect import signature
except ImportError:
from mlens.externals.funcsigs import signature
FLOAT_DTYPES = (np.float64, np.float32, np.float16)
# Silenced by default to reduce verbosity. Turn on at runtime for
# performance profiling.
warnings.simplefilter('ignore', NonBLASDotWarning)
def _assert_all_finite(X):
"""Like assert_all_finite, but only for ndarray."""
X = np.asanyarray(X)
# First try an O(n) time, O(1) space solution for the common case that
# everything is finite; fall back to O(n) space np.isfinite to prevent
# false positives from overflow in sum method.
if (X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum())
and not np.isfinite(X).all()):
raise ValueError("Input contains NaN, infinity"
" or a value too large for %r." % X.dtype)
def assert_all_finite(X):
"""Throw a ValueError if X contains NaN or infinity.
Parameters
----------
X : array or sparse matrix
"""
_assert_all_finite(X.data if sp.issparse(X) else X)
def as_float_array(X, copy=True, force_all_finite=True):
"""Converts an array-like to an array of floats.
The new dtype will be np.float32 or np.float64, depending on the original
type. The function can create a copy or modify the argument depending
on the argument copy.
Parameters
----------
X : {array-like, sparse matrix}
copy : bool, optional
If True, a copy of X will be created. If False, a copy may still be
returned if X's dtype is not a floating point type.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
Returns
-------
XT : {array, sparse matrix}
An array of type np.float
"""
if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray)
and not sp.issparse(X)):
return check_array(X, ['csr', 'csc', 'coo'], dtype=np.float64,
copy=copy, force_all_finite=force_all_finite,
ensure_2d=False)
elif sp.issparse(X) and X.dtype in [np.float32, np.float64]:
return X.copy() if copy else X
elif X.dtype in [np.float32, np.float64]: # is numpy array
return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X
else:
if X.dtype.kind in 'uib' and X.dtype.itemsize <= 4:
return_dtype = np.float32
else:
return_dtype = np.float64
return X.astype(return_dtype)
def _is_arraylike(x):
"""Returns whether the input is array-like"""
return (hasattr(x, '__len__') or
hasattr(x, 'shape') or
hasattr(x, '__array__'))
def _num_samples(x):
"""Return number of samples in array-like x."""
if hasattr(x, 'fit') and callable(x.fit):
# Don't get num_samples from an ensembles length!
raise TypeError('Expected sequence or array-like, got '
'estimator %s' % x)
if not hasattr(x, '__len__') and not hasattr(x, 'shape'):
if hasattr(x, '__array__'):
x = np.asarray(x)
else:
raise TypeError("Expected sequence or array-like, got %s" %
type(x))
if hasattr(x, 'shape'):
if len(x.shape) == 0:
raise TypeError("Singleton array %r cannot be considered"
" a valid collection." % x)
return x.shape[0]
else:
return len(x)
def _shape_repr(shape):
"""Return a platform independent representation of an array shape
Under Python 2, the `long` type introduces an 'L' suffix when using the
default %r format for tuples of integers (typically used to store the shape
of an array).
Under Windows 64 bit (and Python 2), the `long` type is used by default
in numpy shapes even when the integer dimensions are well below 32 bit.
The platform specific type causes string messages or doctests to change
from one platform to another which is not desirable.
Under Python 3, there is no more `long` type so the `L` suffix is never
introduced in string representation.
>>> _shape_repr((1, 2))
'(1, 2)'
>>> one = 2 ** 64 / 2 ** 64 # force an upcast to `long` under Python 2
>>> _shape_repr((one, 2 * one))
'(1, 2)'
>>> _shape_repr((1,))
'(1,)'
>>> _shape_repr(())
'()'
"""
if len(shape) == 0:
return "()"
joined = ", ".join("%d" % e for e in shape)
if len(shape) == 1:
# special notation for singleton tuples
joined += ','
return "(%s)" % joined
def check_consistent_length(*arrays):
"""Check that all arrays have consistent first dimensions.
Checks whether all objects in arrays have the same shape or length.
Parameters
----------
*arrays : list or tuple of input objects.
Objects that will be checked for consistent length.
"""
lengths = [_num_samples(X) for X in arrays if X is not None]
uniques = np.unique(lengths)
if len(uniques) > 1:
raise ValueError("Found input variables with inconsistent numbers of"
" samples: %r" % [int(l) for l in lengths])
def indexable(*iterables):
"""Make arrays indexable for cross-validation.
Checks consistent length, passes through None, and ensures that everything
can be indexed by converting sparse matrices to csr and converting
non-interable objects to arrays.
Parameters
----------
*iterables : lists, dataframes, arrays, sparse matrices
List of objects to ensure sliceability.
"""
result = []
for X in iterables:
if sp.issparse(X):
result.append(X.tocsr())
elif hasattr(X, "__getitem__") or hasattr(X, "iloc"):
result.append(X)
elif X is None:
result.append(X)
else:
result.append(np.array(X))
check_consistent_length(*result)
return result
def _ensure_sparse_format(spmatrix, accept_sparse, dtype, copy,
force_all_finite):
"""Convert a sparse matrix to a given format.
Checks the sparse format of spmatrix and converts if necessary.
Parameters
----------
spmatrix : scipy sparse matrix
Input to validate and convert.
accept_sparse : string, boolean or list/tuple of strings
String[s] representing allowed sparse matrix formats ('csc',
'csr', 'coo', 'dok', 'bsr', 'lil', 'dia'). If the input is sparse but
not in the allowed format, it will be converted to the first listed
format. True allows the input to be any format. False means
that a sparse matrix input will raise an error.
dtype : string, type or None
Data type of result. If None, the dtype of the input is preserved.
copy : boolean
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean
Whether to raise an error on np.inf and np.nan in X.
Returns
-------
spmatrix_converted : scipy sparse matrix.
Matrix that is ensured to have an allowed type.
"""
if dtype is None:
dtype = spmatrix.dtype
changed_format = False
if isinstance(accept_sparse, six.string_types):
accept_sparse = [accept_sparse]
if accept_sparse is False:
raise TypeError('A sparse matrix was passed, but dense '
'data is required. Use X.toarray() to '
'convert to a dense numpy array.')
elif isinstance(accept_sparse, (list, tuple)):
if len(accept_sparse) == 0:
raise ValueError("When providing 'accept_sparse' "
"as a tuple or list, it must contain at "
"least one string value.")
# ensure correct sparse format
if spmatrix.format not in accept_sparse:
# create new with correct sparse
spmatrix = spmatrix.asformat(accept_sparse[0])
changed_format = True
elif accept_sparse is not True:
# any other type
raise ValueError("Parameter 'accept_sparse' should be a string, "
"boolean or list of strings. You provided "
"'accept_sparse={}'.".format(accept_sparse))
if dtype != spmatrix.dtype:
# convert dtype
spmatrix = spmatrix.astype(dtype)
elif copy and not changed_format:
# force copy
spmatrix = spmatrix.copy()
if force_all_finite:
if not hasattr(spmatrix, "data"):
warnings.warn("Can't check %s sparse matrix for nan or inf."
% spmatrix.format)
else:
_assert_all_finite(spmatrix.data)
return spmatrix
def check_array(array, accept_sparse=False, dtype="numeric", order=None,
copy=False, force_all_finite=True, ensure_2d=True,
allow_nd=False, ensure_min_samples=1, ensure_min_features=1,
warn_on_dtype=False, estimator=None):
"""Input validation on an array, list, sparse matrix or similar.
By default, the input is converted to an at least 2D numpy array.
If the dtype of the array is object, attempt converting to float,
raising on failure.
Parameters
----------
array : object
Input object to check / convert.
accept_sparse : string, boolean or list/tuple of strings (default=False)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. If the input is sparse but not in the allowed format,
it will be converted to the first listed format. True allows the input
to be any format. False means that a sparse matrix input will
raise an error.
.. deprecated:: 0.19
Passing 'None' to parameter ``accept_sparse`` in methods is
deprecated in version 0.19 "and will be removed in 0.21. Use
``accept_sparse=False`` instead.
dtype : string, type, list of types or None (default="numeric")
Data type of result. If None, the dtype of the input is preserved.
If "numeric", dtype is preserved unless array.dtype is object.
If dtype is a list of types, conversion on the first type is only
performed if the dtype of the input is not in the list.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
When order is None (default), then if copy=False, nothing is ensured
about the memory layout of the output array; otherwise (copy=True)
the memory layout of the returned array is kept as close as possible
to the original array.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X.
ensure_2d : boolean (default=True)
Whether to raise a value error if X is not 2d.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
ensure_min_samples : int (default=1)
Make sure that the array has a minimum number of samples in its first
axis (rows for a 2D array). Setting to 0 disables this check.
ensure_min_features : int (default=1)
Make sure that the 2D array has some minimum number of features
(columns). The default value of 1 rejects empty datasets.
This check is only enforced when the input data has effectively 2
dimensions or is originally 1D and ``ensure_2d`` is True. Setting to 0
disables this check.
warn_on_dtype : boolean (default=False)
Raise DataConversionWarning if the dtype of the input data structure
does not match the requested dtype, causing a memory copy.
estimator : str or estimator instance (default=None)
If passed, include the name of the estimator in warning messages.
Returns
-------
X_converted : object
The converted and validated X.
"""
# accept_sparse 'None' deprecation check
if accept_sparse is None:
warnings.warn(
"Passing 'None' to parameter 'accept_sparse' in methods "
"check_array and check_X_y is deprecated in version 0.19 "
"and will be removed in 0.21. Use 'accept_sparse=False' "
" instead.", DeprecationWarning)
accept_sparse = False
# store whether originally we wanted numeric dtype
dtype_numeric = isinstance(dtype, six.string_types) and dtype == "numeric"
dtype_orig = getattr(array, "dtype", None)
if not hasattr(dtype_orig, 'kind'):
# not a data type (e.g. a column named dtype in a pandas DataFrame)
dtype_orig = None
if dtype_numeric:
if dtype_orig is not None and dtype_orig.kind == "O":
# if input is object, convert to float.
dtype = np.float64
else:
dtype = None
if isinstance(dtype, (list, tuple)):
if dtype_orig is not None and dtype_orig in dtype:
# no dtype conversion required
dtype = None
else:
# dtype conversion required. Let's select the first element of the
# list of accepted types.
dtype = dtype[0]
if estimator is not None:
if isinstance(estimator, six.string_types):
estimator_name = estimator
else:
estimator_name = estimator.__class__.__name__
else:
estimator_name = "Estimator"
context = " by %s" % estimator_name if estimator is not None else ""
if sp.issparse(array):
array = _ensure_sparse_format(array, accept_sparse, dtype, copy,
force_all_finite)
else:
array = np.array(array, dtype=dtype, order=order, copy=copy)
if ensure_2d:
if array.ndim == 1:
raise ValueError(
"Expected 2D array, got 1D array instead:\narray={}.\n"
"Reshape your data either using array.reshape(-1, 1) if "
"your data has a single feature or array.reshape(1, -1) "
"if it contains a single sample.".format(array))
array = np.atleast_2d(array)
# To ensure that array flags are maintained
array = np.array(array, dtype=dtype, order=order, copy=copy)
# make sure we actually converted to numeric:
if dtype_numeric and array.dtype.kind == "O":
array = array.astype(np.float64)
if not allow_nd and array.ndim >= 3:
raise ValueError("Found array with dim %d. %s expected <= 2."
% (array.ndim, estimator_name))
if force_all_finite:
_assert_all_finite(array)
shape_repr = _shape_repr(array.shape)
if ensure_min_samples > 0:
n_samples = _num_samples(array)
if n_samples < ensure_min_samples:
raise ValueError("Found array with %d sample(s) (shape=%s) while a"
" minimum of %d is required%s."
% (n_samples, shape_repr, ensure_min_samples,
context))
if ensure_min_features > 0 and array.ndim == 2:
n_features = array.shape[1]
if n_features < ensure_min_features:
raise ValueError("Found array with %d feature(s) (shape=%s) while"
" a minimum of %d is required%s."
% (n_features, shape_repr, ensure_min_features,
context))
if warn_on_dtype and dtype_orig is not None and array.dtype != dtype_orig:
msg = ("Data with input dtype %s was converted to %s%s."
% (dtype_orig, array.dtype, context))
warnings.warn(msg, DataConversionWarning)
return array
def check_X_y(X, y, accept_sparse=False, dtype="numeric", order=None,
copy=False, force_all_finite=True, ensure_2d=True,
allow_nd=False, multi_output=False, ensure_min_samples=1,
ensure_min_features=1, y_numeric=False,
warn_on_dtype=False, estimator=None):
"""Input validation for standard estimators.
Checks X and y for consistent length, enforces X 2d and y 1d.
Standard input checks are only applied to y, such as checking that y
does not have np.nan or np.inf targets. For multi-label y, set
multi_output=True to allow 2d and sparse y. If the dtype of X is
object, attempt converting to float, raising on failure.
Parameters
----------
X : nd-array, list or sparse matrix
Input data.
y : nd-array, list or sparse matrix
Labels.
accept_sparse : string, boolean or list of string (default=False)
String[s] representing allowed sparse matrix formats, such as 'csc',
'csr', etc. If the input is sparse but not in the allowed format,
it will be converted to the first listed format. True allows the input
to be any format. False means that a sparse matrix input will
raise an error.
.. deprecated:: 0.19
Passing 'None' to parameter ``accept_sparse`` in methods is
deprecated in version 0.19 "and will be removed in 0.21. Use
``accept_sparse=False`` instead.
dtype : string, type, list of types or None (default="numeric")
Data type of result. If None, the dtype of the input is preserved.
If "numeric", dtype is preserved unless array.dtype is object.
If dtype is a list of types, conversion on the first type is only
performed if the dtype of the input is not in the list.
order : 'F', 'C' or None (default=None)
Whether an array will be forced to be fortran or c-style.
copy : boolean (default=False)
Whether a forced copy will be triggered. If copy=False, a copy might
be triggered by a conversion.
force_all_finite : boolean (default=True)
Whether to raise an error on np.inf and np.nan in X. This parameter
does not influence whether y can have np.inf or np.nan values.
ensure_2d : boolean (default=True)
Whether to make X at least 2d.
allow_nd : boolean (default=False)
Whether to allow X.ndim > 2.
multi_output : boolean (default=False)
Whether to allow 2-d y (array or sparse matrix). If false, y will be
validated as a vector. y cannot have np.nan or np.inf values if
multi_output=True.
ensure_min_samples : int (default=1)
Make sure that X has a minimum number of samples in its first
axis (rows for a 2D array).
ensure_min_features : int (default=1)
Make sure that the 2D array has some minimum number of features
(columns). The default value of 1 rejects empty datasets.
This check is only enforced when X has effectively 2 dimensions or
is originally 1D and ``ensure_2d`` is True. Setting to 0 disables
this check.
y_numeric : boolean (default=False)
Whether to ensure that y has a numeric type. If dtype of y is object,
it is converted to float64. Should only be used for regression
algorithms.
warn_on_dtype : boolean (default=False)
Raise DataConversionWarning if the dtype of the input data structure
does not match the requested dtype, causing a memory copy.
estimator : str or estimator instance (default=None)
If passed, include the name of the estimator in warning messages.
Returns
-------
X_converted : object
The converted and validated X.
y_converted : object
The converted and validated y.
"""
X = check_array(X, accept_sparse, dtype, order, copy, force_all_finite,
ensure_2d, allow_nd, ensure_min_samples,
ensure_min_features, warn_on_dtype, estimator)
if multi_output:
y = check_array(y, 'csr', force_all_finite=True, ensure_2d=False,
dtype=None)
else:
y = column_or_1d(y, warn=True)
_assert_all_finite(y)
if y_numeric and y.dtype.kind == 'O':
y = y.astype(np.float64)
check_consistent_length(X, y)
return X, y
def column_or_1d(y, warn=False):
""" Ravel column or 1d numpy array, else raises an error
Parameters
----------
y : array-like
warn : boolean, default False
To control display of warnings.
Returns
-------
y : array
"""
shape = np.shape(y)
if len(shape) == 1:
return np.ravel(y)
if len(shape) == 2 and shape[1] == 1:
if warn:
warnings.warn("A column-vector y was passed when a 1d array was"
" expected. Please change the shape of y to "
"(n_samples, ), for example using ravel().",
DataConversionWarning, stacklevel=2)
return np.ravel(y)
raise ValueError("bad input shape {0}".format(shape))
def check_random_state(seed):
"""Turn seed into a np.random.RandomState instance
Parameters
----------
seed : None | int | instance of RandomState
If seed is None, return the RandomState singleton used by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
Otherwise raise ValueError.
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, (numbers.Integral, np.integer)):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
def has_fit_parameter(estimator, parameter):
"""Checks whether the estimator's fit method supports the given parameter.
Parameters
----------
estimator : object
An estimator to inspect.
parameter: str
The searched parameter.
Returns
-------
is_parameter: bool
Whether the parameter was found to be a named parameter of the
estimator's fit method.
Examples
--------
>>> from sklearn.svm import SVC
>>> has_fit_parameter(SVC(), "sample_weight")
True
"""
return parameter in signature(estimator.fit).parameters
def check_symmetric(array, tol=1E-10, raise_warning=True,
raise_exception=False):
"""Make sure that array is 2D, square and symmetric.
If the array is not symmetric, then a symmetrized version is returned.
Optionally, a warning or exception is raised if the matrix is not
symmetric.
Parameters
----------
array : nd-array or sparse matrix
Input object to check / convert. Must be two-dimensional and square,
otherwise a ValueError will be raised.
tol : float
Absolute tolerance for equivalence of arrays. Default = 1E-10.
raise_warning : boolean (default=True)
If True then raise a warning if conversion is required.
raise_exception : boolean (default=False)
If True then raise an exception if array is not symmetric.
Returns
-------
array_sym : ndarray or sparse matrix
Symmetrized version of the input array, i.e. the average of array
and array.transpose(). If sparse, then duplicate entries are first
summed and zeros are eliminated.
"""
if (array.ndim != 2) or (array.shape[0] != array.shape[1]):
raise ValueError("array must be 2-dimensional and square. "
"shape = {0}".format(array.shape))
if sp.issparse(array):
diff = array - array.T
# only csr, csc, and coo have `data` attribute
if diff.format not in ['csr', 'csc', 'coo']:
diff = diff.tocsr()
symmetric = np.all(abs(diff.data) < tol)
else:
symmetric = np.allclose(array, array.T, atol=tol)
if not symmetric:
if raise_exception:
raise ValueError("Array must be symmetric")
if raise_warning:
warnings.warn("Array is not symmetric, and will be converted "
"to symmetric by average with its transpose.")
if sp.issparse(array):
conversion = 'to' + array.format
array = getattr(0.5 * (array + array.T), conversion)()
else:
array = 0.5 * (array + array.T)
return array
def check_is_fitted(estimator, attributes, msg=None, all_or_any=all):
"""Perform is_fitted validation for estimator.
Checks if the estimator is fitted by verifying the presence of
"all_or_any" of the passed attributes and raises a NotFittedError with the
given message.
Parameters
----------
estimator : estimator instance.
estimator instance for which the check is performed.
attributes : attribute name(s) given as string or a list/tuple of strings
Eg.:
``["coef_", "estimator_", ...], "coef_"``
msg : string
The default error message is, "This %(name)s instance is not fitted
yet. Call 'fit' with appropriate arguments before using this method."
For custom messages if "%(name)s" is present in the message string,
it is substituted for the estimator name.
Eg. : "Estimator, %(name)s, must be fitted before sparsifying".
all_or_any : callable, {all, any}, default all
Specify whether all or any of the given attributes must exist.
Returns
-------
None
Raises
------
NotFittedError
If the attributes are not found.
"""
if msg is None:
msg = ("This %(name)s instance is not fitted yet. Call 'fit' with "
"appropriate arguments before using this method.")
if not hasattr(estimator, 'fit'):
raise TypeError("%s is not an estimator instance." % (estimator))
if not isinstance(attributes, (list, tuple)):
attributes = [attributes]
if not all_or_any([hasattr(estimator, attr) for attr in attributes]):
raise NotFittedError(msg % {'name': type(estimator).__name__})
def check_non_negative(X, whom):
"""
Check if there is any negative value in an array.
Parameters
----------
X : array-like or sparse matrix
Input data.
whom : string
Who passed X to this function.
"""
X = X.data if sp.issparse(X) else X
if (X < 0).any():
raise ValueError("Negative values in data passed to %s" % whom)
| mit | 4,532,605,231,950,397,000 | 39.050222 | 79 | 0.615328 | false |
gregelin/python-ideascaleapi | setup.py | 1 | 1086 | from distutils.core import setup
from ideascaleapi import __version__,__license__,__doc__
license_text = open('LICENSE').read()
long_description = open('README.rst').read()
setup(name="python-ideascaleapi",
version=__version__,
py_modules=["ideascaleapi"],
description="Libraries for interacting with the Ideascale API",
author="Greg Elin (forking James Turk)",
author_email = "[email protected]",
license=license_text,
url="http://github.com/gregelin/python-ideascaleapi/tree/master",
long_description=long_description,
platforms=["any"],
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
install_requires=["simplejson >= 1.8"]
)
| bsd-3-clause | 1,815,907,163,226,902,500 | 39.222222 | 82 | 0.593923 | false |
uwosh/COBAInternship | config.py | 1 | 1615 | # -*- coding: utf-8 -*-
#
# File: COBAInternship.py
#
# Copyright (c) 2008 by []
# Generator: ArchGenXML Version 2.1
# http://plone.org/products/archgenxml
#
# GNU General Public License (GPL)
#
__author__ = """Andrew Schultz and Josh Klotz"""
__docformat__ = 'plaintext'
# Product configuration.
#
# The contents of this module will be imported into __init__.py, the
# workflow configuration and every content type module.
#
# If you wish to perform custom configuration, you may put a file
# AppConfig.py in your product's root directory. The items in there
# will be included (by importing) in this file if found.
from Products.CMFCore.permissions import setDefaultRoles
##code-section config-head #fill in your manual code here
##/code-section config-head
PROJECTNAME = "COBAInternship"
# Permissions
DEFAULT_ADD_CONTENT_PERMISSION = "Add portal content"
setDefaultRoles(DEFAULT_ADD_CONTENT_PERMISSION, ('Manager', 'Owner'))
ADD_CONTENT_PERMISSIONS = {
'COBAInternship': 'COBAInternship: Add COBAInternship',
}
setDefaultRoles('COBAInternship: Add COBAInternship', ('Manager','Owner'))
product_globals = globals()
# Dependencies of Products to be installed by quick-installer
# override in custom configuration
DEPENDENCIES = []
# Dependend products - not quick-installed - used in testcase
# override in custom configuration
PRODUCT_DEPENDENCIES = []
##code-section config-bottom #fill in your manual code here
##/code-section config-bottom
# Load custom configuration not managed by archgenxml
try:
from Products.COBAInternship.AppConfig import *
except ImportError:
pass
| gpl-2.0 | 5,393,316,480,363,080,000 | 26.372881 | 74 | 0.745511 | false |
marekjm/diaspy | diaspy/models.py | 1 | 23071 | #!/usr/bin/env python3
"""This module is only imported in other diaspy modules and
MUST NOT import anything.
"""
import json
import copy
import re
BS4_SUPPORT = False
try:
    from bs4 import BeautifulSoup
except ImportError:
    print("[diaspy] BeautifulSoup not found, falling back on regex.")
else:
    BS4_SUPPORT = True
from diaspy import errors
class Aspect():
"""This class represents an aspect.
Class can be initialized by passing either an id and/or name as
parameters.
If both are missing, an exception will be raised.
"""
def __init__(self, connection, id, name=None):
self._connection = connection
self.id, self.name = id, name
self._cached = []
    def getUsers(self, fetch=True):
"""Returns list of GUIDs of users who are listed in this aspect.
"""
if fetch:
request = self._connection.get('contacts.json?a_id={}'.format(self.id))
self._cached = request.json()
return self._cached
def removeAspect(self):
"""
--> POST /aspects/{id} HTTP/1.1
--> _method=delete&authenticity_token={token}
<-- HTTP/1.1 302 Found
Removes whole aspect.
:returns: None
"""
request = self._connection.tokenFrom('contacts').delete('aspects/{}'.format(self.id))
if request.status_code != 302:
raise errors.AspectError('wrong status code: {0}'.format(request.status_code))
def addUser(self, user_id):
"""Add user to current aspect.
:param user_id: user to add to aspect
:type user_id: int
:returns: JSON from request
--> POST /aspect_memberships HTTP/1.1
--> Accept: application/json, text/javascript, */*; q=0.01
--> Content-Type: application/json; charset=UTF-8
--> {"aspect_id":123,"person_id":123}
<-- HTTP/1.1 200 OK
"""
data = {'aspect_id': self.id,
'person_id': user_id}
headers = {'content-type': 'application/json',
'accept': 'application/json'}
request = self._connection.tokenFrom('contacts').post('aspect_memberships', data=json.dumps(data), headers=headers)
if request.status_code == 400:
raise errors.AspectError('duplicate record, user already exists in aspect: {0}'.format(request.status_code))
elif request.status_code == 404:
raise errors.AspectError('user not found from this pod: {0}'.format(request.status_code))
elif request.status_code != 200:
raise errors.AspectError('wrong status code: {0}'.format(request.status_code))
response = None
try:
response = request.json()
except json.decoder.JSONDecodeError:
""" Should be OK now, but I'll leave this commentary here
at first to see if anything comes up """
# FIXME For some (?) reason removing users from aspects works, but
# adding them is a no-go and Diaspora* kicks us out with CSRF errors.
# Weird.
pass
if response is None:
raise errors.CSRFProtectionKickedIn()
        # The caller should now call fetchguid(fetch_stream=False) on the User
        # to refresh its aspect membership ids, or update them locally from
        # the response.
return response
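    # Example (a sketch; `aspect` is an Aspect and `user_id` the numeric
    # person id of the contact to add):
    #
    #   response = aspect.addUser(user_id)
    #   # then refresh membership ids, e.g. user.fetchguid(fetch_stream=False)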
def removeUser(self, user):
"""Remove user from current aspect.
:param user: user to remove from aspect
:type user: diaspy.people.User object
"""
membership_id = None
to_remove = None
for each in user.aspectMemberships():
if each.get('aspect', {}).get('id') == self.id:
membership_id = each.get('id')
to_remove = each
break # no need to continue
if membership_id is None:
raise errors.UserIsNotMemberOfAspect(user, self)
request = self._connection.delete('aspect_memberships/{0}'.format(membership_id))
if request.status_code == 404:
raise errors.AspectError('cannot remove user from aspect, probably tried too fast after adding: {0}'.format(request.status_code))
elif request.status_code != 200:
raise errors.AspectError('cannot remove user from aspect: {0}'.format(request.status_code))
        if 'contact' in user.data:  # User object
            if to_remove: user.data['contact']['aspect_memberships'].remove(to_remove)  # remove local aspect membership id
        else:  # User object from Contacts()
            if to_remove: user.data['aspect_memberships'].remove(to_remove)  # remove local aspect membership id
return request.json()
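    # Example (a sketch; `user` must be a diaspy.people.User whose aspect
    # memberships include this aspect, otherwise UserIsNotMemberOfAspect
    # is raised):
    #
    #   aspect.removeUser(user)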
class Notification():
"""This class represents single notification.
"""
_who_regexp = re.compile(r'/people/([0-9a-f]+)["\']{1} class=["\']{1}hovercardable')
_aboutid_regexp = re.compile(r'/posts/[0-9a-f]+')
_htmltag_regexp = re.compile('</?[a-z]+( *[a-z_-]+=["\'].*?["\'])* */?>')
def __init__(self, connection, data):
self._connection = connection
self.type = data['type']
self._data = data[self.type]
self.id = self._data['id']
self.unread = self._data['unread']
def __getitem__(self, key):
"""Returns a key from notification data.
"""
return self._data[key]
def __str__(self):
"""Returns notification note.
"""
if BS4_SUPPORT:
soup = BeautifulSoup(self._data['note_html'], 'lxml')
media_body = soup.find('div', {"class": "media-body"})
div = media_body.find('div')
if div: div.decompose()
return media_body.getText().strip()
else:
string = re.sub(self._htmltag_regexp, '', self._data['note_html'])
string = string.strip().split('\n')[0]
            while '  ' in string: string = string.replace('  ', ' ')
return string
def __repr__(self):
"""Returns notification note with more details.
"""
return '{0}: {1}'.format(self.when(), str(self))
def about(self):
"""Returns id of post about which the notification is informing OR:
If the id is None it means that it's about user so .who() is called.
"""
if BS4_SUPPORT:
soup = BeautifulSoup(self._data['note_html'], 'lxml')
id = soup.find('a', {"data-ref": True})
if id: return id['data-ref']
about = self._aboutid_regexp.search(self._data['note_html'])
if about is None: about = self.who()[0]
else: about = int(about.group(0)[7:])
return about
def who(self):
"""Returns list of guids of the users who caused you to get the notification.
"""
if BS4_SUPPORT: # Parse the HTML with BS4
soup = BeautifulSoup(self._data['note_html'], 'lxml')
hovercardable_soup = soup.findAll('a', {"class": "hovercardable"})
return list(set([soup['href'][8:] for soup in hovercardable_soup]))
else:
return list(set([who for who in self._who_regexp.findall(self._data['note_html'])]))
def when(self):
"""Returns UTC time as found in note_html.
"""
return self._data['created_at']
def mark(self, unread=False):
"""Marks notification to read/unread.
Marks notification to read if `unread` is False.
Marks notification to unread if `unread` is True.
:param unread: which state set for notification
:type unread: bool
"""
headers = {'x-csrf-token': repr(self._connection)}
params = {'set_unread': json.dumps(unread)}
self._connection.put('notifications/{0}'.format(self['id']), params=params, headers=headers)
self._data['unread'] = unread
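    # Example (a sketch; `n` is a Notification from a notifications stream):
    #
    #   if n.unread: n.mark(unread=False)  # mark as read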
class Conversation():
"""This class represents a conversation.
.. note::
Remember that you need to have access to the conversation.
"""
if not BS4_SUPPORT:
_message_stream_regexp = re.compile(r'<div class=["\']{1}stream["\']{1}>(.*?)<div class=["\']{1}stream-element new-message["\']{1}>', re.DOTALL)
_message_guid_regexp = re.compile(r'data-guid=["\']{1}([0-9]+)["\']{1}')
_message_created_at_regexp = re.compile(r'<time datetime=["\']{1}([0-9]{4}-[0-9]{2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}Z)["\']{1}')
_message_body_regexp = re.compile(r'<div class=["\']{1}message-content["\']{1}>\s+<p>(.*?)</p>\s+</div>', re.DOTALL)
_message_author_guid_regexp = re.compile(r'<a href=["\']{1}/people/([a-f0-9]+)["\']{1} class=["\']{1}img')
_message_author_name_regexp = re.compile(r'<img alt=["\']{1}(.*?)["\']{1}.*')
_message_author_avatar_regexp = re.compile(r'src=["\']{1}(.*?)["\']{1}')
def __init__(self, connection, id, fetch=True):
"""
        :param id: id of the conversation (not the guid!)
        :type id: str
:param connection: connection object used to authenticate
:type connection: connection.Connection
"""
self._connection = connection
self.id = id
self._data = {}
self._messages = []
if fetch: self._fetch()
def __len__(self): return len(self._messages)
def __iter__(self): return iter(self._messages)
def __getitem__(self, n): return self._messages[n]
def _fetch(self):
"""Fetches JSON data representing conversation.
"""
request = self._connection.get('conversations/{}.json'.format(self.id))
if request.status_code == 200:
self._data = request.json()['conversation']
else:
raise errors.ConversationError('cannot download conversation data: {0}'.format(request.status_code))
def _fetch_messages(self):
"""Fetches HTML data we will use to parse message data.
This is a workaround until Diaspora* has it's API plans implemented.
"""
request = self._connection.get('conversations/{}'.format(self.id))
if request.status_code == 200:
# Clear potential old messages
self._messages = []
message_template = {
'guid' : None,
'created_at' : None,
'body' : None,
'author' : {
'guid' : None,
'diaspora_id' : None, # TODO? Not able to get from this page.
'name' : None,
'avatar' : None
}
}
if BS4_SUPPORT: # Parse the HTML with BS4
soup = BeautifulSoup(request.content, 'lxml')
messages_soup = soup.findAll('div', {"class": "stream-element message"})
for message_soup in messages_soup:
message = copy.deepcopy(message_template)
# guid
if message_soup and message_soup.has_attr('data-guid'):
message['guid'] = message_soup['data-guid']
# created_at
time_soup = message_soup.find('time', {"class": "timeago"})
if time_soup and time_soup.has_attr('datetime'):
message['created_at'] = time_soup['datetime']
# body
body_soup = message_soup.find('div', {"class": "message-content"})
if body_soup: message['body'] = body_soup.get_text().strip()
# author
author_a_soup = message_soup.find('a', {"class": "img"})
if author_a_soup:
# author guid
message['author']['guid'] = author_a_soup['href'][8:]
# name and avatar
author_img_soup = author_a_soup.find('img', {"class": "avatar"})
if author_img_soup:
message['author']['name'] = author_img_soup['title']
message['author']['avatar'] = author_img_soup['src']
self._messages.append(message.copy())
else: # Regex fallback
messages_stream_html = self._message_stream_regexp.search(request.content.decode('utf-8'))
if messages_stream_html:
messages_html = messages_stream_html.group(1).split("<div class='stream-element message'")
for message_html in messages_html:
message = copy.deepcopy(message_template)
# Guid
guid = self._message_guid_regexp.search(message_html)
if guid: message['guid'] = guid.group(1)
else: continue
# Created at
created_at = self._message_created_at_regexp.search(message_html)
if created_at: message['created_at'] = created_at.group(1)
# Body
body = self._message_body_regexp.search(message_html)
if body: message['body'] = body.group(1)
# Author
author_guid = self._message_author_guid_regexp.search(message_html)
if author_guid: message['author']['guid'] = author_guid.group(1)
author_name = self._message_author_name_regexp.search(message_html)
if author_name:
message['author']['name'] = author_name.group(1)
author_avatar = self._message_author_avatar_regexp.search(author_name.group(0))
if author_avatar: message['author']['avatar'] = author_avatar.group(1)
self._messages.append(message.copy())
else:
raise errors.ConversationError('cannot download message data from conversation: {0}'.format(request.status_code))
def messages(self): return self._messages
def update_messages(self):
"""(Re-)fetches messages in this conversation.
"""
self._fetch_messages()
def answer(self, text):
"""Answer that conversation
:param text: text to answer.
:type text: str
"""
data = {'message[text]': text,
'utf8': '✓',
'authenticity_token': repr(self._connection)}
request = self._connection.post('conversations/{}/messages'.format(self.id),
data=data,
headers={'accept': 'application/json'})
if request.status_code != 200:
raise errors.ConversationError('{0}: Answer could not be posted.'
.format(request.status_code))
return request.json()
def delete(self):
"""Delete this conversation.
Has to be implemented.
"""
data = {'authenticity_token': repr(self._connection)}
request = self._connection.delete('conversations/{0}/visibility/'
.format(self.id),
data=data,
headers={'accept': 'application/json'})
if request.status_code != 404:
raise errors.ConversationError('{0}: Conversation could not be deleted.'
.format(request.status_code))
def get_subject(self):
"""Returns the subject of this conversation
"""
return self._data['subject']
class Comment():
"""Represents comment on post.
    Does not require a Connection() object. Note that you should not manually
    create `Comment()` objects -- they are designed to be created automatically
    by `Comments()` objects, which are in turn created by `Post()` objects.
"""
def __init__(self, data):
self._data = data
self.id = data['id']
self.guid = data['guid']
def __str__(self):
"""Returns comment's text.
"""
return self._data['text']
def __repr__(self):
"""Returns comments text and author.
Format: AUTHOR (AUTHOR'S GUID): COMMENT
"""
return '{0} ({1}): {2}'.format(self.author(), self.author('guid'), str(self))
def when(self):
"""Returns time when the comment had been created.
"""
return self._data['created_at']
def author(self, key='name'):
"""Returns author of the comment.
"""
return self._data['author'][key]
class Comments():
def __init__(self, comments=[]):
self._comments = comments
def __iter__(self):
for comment in self._comments: yield comment
def __len__(self):
return len(self._comments)
def __getitem__(self, index):
if self._comments: return self._comments[index]
def __bool__(self):
if self._comments: return True
return False
def ids(self):
return [c.id for c in self._comments]
def add(self, comment):
""" Expects Comment() object
:param comment: Comment() object to add.
:type comment: Comment() object."""
if comment and type(comment) == Comment: self._comments.append(comment)
def set(self, comments):
"""Sets comments wich already have a Comment() obj
:param comments: list with Comment() objects to set.
:type comments: list.
"""
if comments: self._comments = comments
def set_json(self, json_comments):
"""Sets comments for this post from post data."""
if json_comments:
self._comments = [Comment(c) for c in json_comments]
class Post():
"""This class represents a post.
.. note::
Remember that you need to have access to the post.
"""
def __init__(self, connection, id=0, guid='', fetch=True, comments=True, post_data=None):
"""
:param id: id of the post (GUID is recommended)
:type id: int
:param guid: GUID of the post
:type guid: str
:param connection: connection object used to authenticate
:type connection: connection.Connection
:param fetch: defines whether to fetch post's data or not
:type fetch: bool
:param comments: defines whether to fetch post's comments or not (if True also data will be fetched)
:type comments: bool
:param post_data: contains post data so no need to fetch the post if this is set, until you want to update post data
:type: json
"""
if not (guid or id): raise TypeError('neither guid nor id was provided')
self._connection = connection
self.id = id
self.guid = guid
self._data = {}
self.comments = Comments()
if post_data:
self._data = post_data
if fetch: self._fetchdata()
if comments:
if not self._data: self._fetchdata()
self._fetchcomments()
else:
if not self._data: self._fetchdata()
self.comments.set_json( self.data()['interactions']['comments'] )
def __repr__(self):
"""Returns string containing more information then str().
"""
return '{0} ({1}): {2}'.format(self._data['author']['name'], self._data['author']['guid'], self._data['text'])
def __str__(self):
"""Returns text of a post.
"""
return self._data['text']
def _fetchdata(self):
"""This function retrieves data of the post.
:returns: guid of post whose data was fetched
"""
if self.id: id = self.id
if self.guid: id = self.guid
request = self._connection.get('posts/{0}.json'.format(id))
if request.status_code != 200:
raise errors.PostError('{0}: could not fetch data for post: {1}'.format(request.status_code, id))
elif request:
self._data = request.json()
return self.data()['guid']
def _fetchcomments(self):
"""Retreives comments for this post.
Retrieving comments via GUID will result in 404 error.
DIASPORA* does not supply comments through /posts/:guid/ endpoint.
"""
id = self.data()['id']
if self.data()['interactions']['comments_count']:
request = self._connection.get('posts/{0}/comments.json'.format(id))
if request.status_code != 200:
raise errors.PostError('{0}: could not fetch comments for post: {1}'.format(request.status_code, id))
else:
self.comments.set([Comment(c) for c in request.json()])
def fetch(self, comments = False):
"""Fetches post data.
"""
self._fetchdata()
if comments:
self._fetchcomments()
return self
def data(self, data = None):
if data is not None:
self._data = data
return self._data
def like(self):
"""This function likes a post.
It abstracts the 'Like' functionality.
:returns: dict -- json formatted like object.
"""
data = {'authenticity_token': repr(self._connection)}
request = self._connection.post('posts/{0}/likes'.format(self.id),
data=data,
headers={'accept': 'application/json'})
if request.status_code != 201:
raise errors.PostError('{0}: Post could not be liked.'
.format(request.status_code))
likes_json = request.json()
if likes_json:
self._data['interactions']['likes'] = [likes_json]
return likes_json
def reshare(self):
"""This function reshares a post
"""
data = {'root_guid': self._data['guid'],
'authenticity_token': repr(self._connection)}
request = self._connection.post('reshares',
data=data,
headers={'accept': 'application/json'})
if request.status_code != 201:
raise Exception('{0}: Post could not be reshared'.format(request.status_code))
return request.json()
def comment(self, text):
"""This function comments on a post
:param text: text to comment.
:type text: str
"""
data = {'text': text,
'authenticity_token': repr(self._connection)}
request = self._connection.post('posts/{0}/comments'.format(self.id),
data=data,
headers={'accept': 'application/json'})
if request.status_code != 201:
raise Exception('{0}: Comment could not be posted.'
.format(request.status_code))
return Comment(request.json())
def vote_poll(self, poll_answer_id):
"""This function votes on a post's poll
:param poll_answer_id: id to poll vote.
:type poll_answer_id: int
"""
poll_id = self._data['poll']['poll_id']
data = {'poll_answer_id': poll_answer_id,
'poll_id': poll_id,
'post_id': self.id,
'authenticity_token': repr(self._connection)}
request = self._connection.post('posts/{0}/poll_participations'.format(self.id),
data=data,
headers={'accept': 'application/json'})
if request.status_code != 201:
raise Exception('{0}: Vote on poll failed.'
.format(request.status_code))
return request.json()
def hide(self):
"""
-> PUT /share_visibilities/42 HTTP/1.1
post_id=123
<- HTTP/1.1 200 OK
"""
headers = {'x-csrf-token': repr(self._connection)}
params = {'post_id': json.dumps(self.id)}
request = self._connection.put('share_visibilities/42', params=params, headers=headers)
if request.status_code != 200:
raise Exception('{0}: Failed to hide post.'
.format(request.status_code))
def mute(self):
"""
-> POST /blocks HTTP/1.1
{"block":{"person_id":123}}
<- HTTP/1.1 204 No Content
"""
headers = {'content-type':'application/json', 'x-csrf-token': repr(self._connection)}
data = json.dumps({ 'block': { 'person_id' : self._data['author']['id'] } })
request = self._connection.post('blocks', data=data, headers=headers)
if request.status_code != 204:
raise Exception('{0}: Failed to block person'
.format(request.status_code))
def subscribe(self):
"""
-> POST /posts/123/participation HTTP/1.1
<- HTTP/1.1 201 Created
"""
headers = {'x-csrf-token': repr(self._connection)}
data = {}
request = self._connection.post('posts/{}/participation'
.format( self.id ), data=data, headers=headers)
if request.status_code != 201:
raise Exception('{0}: Failed to subscribe to post'
.format(request.status_code))
def unsubscribe(self):
"""
-> POST /posts/123/participation HTTP/1.1
_method=delete
<- HTTP/1.1 200 OK
"""
headers = {'x-csrf-token': repr(self._connection)}
data = { "_method": "delete" }
request = self._connection.post('posts/{}/participation'
.format( self.id ), headers=headers, data=data)
if request.status_code != 200:
raise Exception('{0}: Failed to unsubscribe to post'
.format(request.status_code))
def report(self):
"""
TODO
"""
pass
def delete(self):
""" This function deletes this post
"""
data = {'authenticity_token': repr(self._connection)}
request = self._connection.delete('posts/{0}'.format(self.id),
data=data,
headers={'accept': 'application/json'})
if request.status_code != 204:
raise errors.PostError('{0}: Post could not be deleted'.format(request.status_code))
def delete_comment(self, comment_id):
"""This function removes a comment from a post
:param comment_id: id of the comment to remove.
:type comment_id: str
"""
data = {'authenticity_token': repr(self._connection)}
request = self._connection.delete('posts/{0}/comments/{1}'
.format(self.id, comment_id),
data=data,
headers={'accept': 'application/json'})
if request.status_code != 204:
raise errors.PostError('{0}: Comment could not be deleted'
.format(request.status_code))
def delete_like(self):
"""This function removes a like from a post
"""
data = {'authenticity_token': repr(self._connection)}
url = 'posts/{0}/likes/{1}'.format(self.id, self._data['interactions']['likes'][0]['id'])
request = self._connection.delete(url, data=data)
if request.status_code != 204:
raise errors.PostError('{0}: Like could not be removed.'
.format(request.status_code))
def author(self, key='name'):
"""Returns author of the post.
:param key: all keys available in data['author']
"""
return self._data['author'][key]
| mit | 5,489,278,063,691,003,000 | 30.954294 | 146 | 0.653158 | false |
matousc89/padasip | padasip/filters/nlmf.py | 1 | 5444 | """
.. versionadded:: 1.1.0
The normalized least-mean-fourth (NLMF) adaptive filter is implemented
according to the paper :cite:`zerguine2000convergence`. The NLMF is an
extension of the LMF adaptive filter (:ref:`filter-lmf`).
The NLMF filter can be created as follows
>>> import padasip as pa
>>> pa.filters.FilterNLMF(n)
where `n` is the size (number of taps) of the filter.
Content of this page:
.. contents::
:local:
:depth: 1
.. seealso:: :ref:`filters`
Algorithm Explanation
======================================
The NLMF is an extension of the LMF filter. See :ref:`filter-lmf`
for an explanation of the underlying algorithm.
The extension is based on normalization of the learning rate.
The learning rate :math:`\mu` is replaced by the learning rate :math:`\eta(k)`,
normalized with every new sample according to the input power as follows
:math:`\eta (k) = \\frac{\mu}{\epsilon + || \\textbf{x}(k) ||^2}`,
where :math:`|| \\textbf{x}(k) ||^2` is the norm of the input vector and
:math:`\epsilon` is a small positive constant (regularization term).
This constant is introduced to preserve stability in cases where
the input is close to zero.
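A minimal NumPy sketch of this normalized update (illustrative only; the
actual implementation lives in the class below):

.. code-block:: python

    nu = mu / (eps + np.dot(x, x))  # normalized learning rate
    w += nu * x * e**3              # least-mean-fourth weight update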
Minimal Working Examples
======================================
If you have measured data you may filter it as follows
.. code-block:: python
import numpy as np
import matplotlib.pylab as plt
import padasip as pa
# creation of data
N = 500
x = np.random.normal(0, 1, (N, 4)) # input matrix
v = np.random.normal(0, 0.1, N) # noise
d = 2*x[:,0] + 0.1*x[:,1] - 0.3*x[:,2] + 0.5*x[:,3] + v # target
# identification
f = pa.filters.FilterNLMF(n=4, mu=0.005, w="random")
y, e, w = f.run(d, x)
# show results
plt.figure(figsize=(15,9))
plt.subplot(211);plt.title("Adaptation");plt.xlabel("samples - k")
plt.plot(d,"b", label="d - target")
plt.plot(y,"g", label="y - output");plt.legend()
plt.subplot(212);plt.title("Filter error");plt.xlabel("samples - k")
plt.plot(10*np.log10(e**2),"r", label="e - error [dB]");plt.legend()
plt.tight_layout()
plt.show()
References
======================================
.. bibliography:: lmf.bib
:style: plain
Code Explanation
======================================
"""
import numpy as np
from padasip.filters.base_filter import AdaptiveFilter
class FilterNLMF(AdaptiveFilter):
"""
Adaptive NLMF filter.
**Args:**
* `n` : length of filter (integer) - how many input is input array
(row of input matrix)
**Kwargs:**
* `mu` : learning rate (float). Also known as step size.
      If it is too small,
      the filter may converge slowly. If it is too high,
the filter will be unstable. The default value can be unstable
for ill-conditioned input data.
* `eps` : regularization term (float). It is introduced to preserve
stability for close-to-zero input vectors
* `w` : initial weights of filter. Possible values are:
* array with initial weights (1 dimensional array) of filter size
* "random" : create random weights
* "zeros" : create zero value weights
"""
def __init__(self, n, mu=0.1, eps=1., w="random"):
self.kind = "NLMF filter"
if type(n) == int:
self.n = n
else:
raise ValueError('The size of filter must be an integer')
self.mu = self.check_float_param(mu, 0, 1000, "mu")
self.eps = self.check_float_param(eps, 0, 1000, "eps")
self.init_weights(w, self.n)
self.w_history = False
def adapt(self, d, x):
"""
Adapt weights according one desired value and its input.
**Args:**
* `d` : desired value (float)
* `x` : input array (1-dimensional array)
"""
y = np.dot(self.w, x)
e = d - y
nu = self.mu / (self.eps + np.dot(x, x))
self.w += nu * x * e**3
def run(self, d, x):
"""
This function filters multiple samples in a row.
**Args:**
* `d` : desired value (1 dimensional array)
* `x` : input matrix (2-dimensional array). Rows are samples,
columns are input arrays.
**Returns:**
* `y` : output value (1 dimensional array).
The size corresponds with the desired value.
* `e` : filter error for every sample (1 dimensional array).
The size corresponds with the desired value.
* `w` : history of all weights (2 dimensional array).
Every row is set of the weights for given sample.
"""
        # measure the data and check if the dimensions agree
N = len(x)
if not len(d) == N:
raise ValueError('The length of vector d and matrix x must agree.')
self.n = len(x[0])
# prepare data
try:
x = np.array(x)
d = np.array(d)
        except Exception:
            raise ValueError('Impossible to convert x or d to a numpy array')
# create empty arrays
y = np.zeros(N)
e = np.zeros(N)
self.w_history = np.zeros((N,self.n))
# adaptation loop
for k in range(N):
self.w_history[k,:] = self.w
y[k] = np.dot(self.w, x[k])
e[k] = d[k] - y[k]
nu = self.mu / (self.eps + np.dot(x[k], x[k]))
dw = nu * x[k] * e[k]**3
self.w += dw
return y, e, self.w_history
| mit | -1,379,382,755,198,736,400 | 28.586957 | 81 | 0.568883 | false |
tachijuan/python | myscripts/imap.py | 1 | 1470 | import os, sys, imaplib, rfc822, re, StringIO
import RPi.GPIO as GPIO
import time
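# Poll an IMAP mailbox for unseen messages whose subject contains
# "PIFI MESSAGE" and switch two Raspberry Pi LEDs according to the
# RED/GREEN ON/OFF commands found in the message body.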
server ='mail.xxx.us'
username='[email protected]'
password='xxx'
GPIO.setmode(GPIO.BOARD)
GREEN_LED = 22
RED_LED = 7
GPIO.setup(GREEN_LED, GPIO.OUT)
GPIO.setup(RED_LED, GPIO.OUT)
M = imaplib.IMAP4_SSL(server)
M.login(username, password)
M.select()
try:
while 1:
print "checking email"
typ, data = M.search(None, '(UNSEEN SUBJECT "PIFI MESSAGE")')
for num in data[0].split():
typ, data = M.fetch(num, '(RFC822)')
#print 'Message %s\n%s\n' % (num, data[0][1])
redon = re.search( "RED ON",
data[0][1],
re.MULTILINE|re.DOTALL )
greenon = re.search( "GREEN ON",
data[0][1],
re.MULTILINE|re.DOTALL )
redoff = re.search( "RED OFF",
data[0][1],
re.MULTILINE|re.DOTALL )
greenoff = re.search( "GREEN OFF",
data[0][1],
re.MULTILINE|re.DOTALL )
if redon:
GPIO.output(RED_LED, True)
print "red on"
if greenon:
GPIO.output(GREEN_LED, True)
print "green on"
if redoff:
GPIO.output(RED_LED, False)
print "red off"
if greenoff:
GPIO.output(GREEN_LED, False)
print "green off"
time.sleep(120)
except KeyboardInterrupt:
GPIO.cleanup()
pass
M.close()
M.logout()
| mit | -6,482,561,999,222,598,000 | 21.96875 | 65 | 0.535374 | false |
LevinJ/Supply-demand-forecasting | implement/xgboostmodel.py | 1 | 4070 | import sys
import os
sys.path.insert(0, os.path.abspath('..'))
from preprocess.preparedata import PrepareData
import numpy as np
from utility.runtype import RunType
from utility.datafilepath import g_singletonDataFilePath
from preprocess.splittrainvalidation import HoldoutSplitMethod
import xgboost as xgb
from evaluation.sklearnmape import mean_absolute_percentage_error_xgboost
from evaluation.sklearnmape import mean_absolute_percentage_error
from utility.modelframework import ModelFramework
from utility.xgbbasemodel import XGBoostGridSearch
from evaluation.sklearnmape import mean_absolute_percentage_error_xgboost_cv
from utility.xgbbasemodel import XGBoostBase
import logging
import sys
class DidiXGBoostModel(XGBoostBase, PrepareData, XGBoostGridSearch):
def __init__(self):
PrepareData.__init__(self)
XGBoostGridSearch.__init__(self)
XGBoostBase.__init__(self)
self.best_score_colname_in_cv = 'test-mape-mean'
self.do_cross_val = False
self.train_validation_foldid = -2
if self.do_cross_val is None:
root = logging.getLogger()
root.setLevel(logging.DEBUG)
root.addHandler(logging.StreamHandler(sys.stdout))
root.addHandler(logging.FileHandler('logs/finetune_parameters.log', mode='w'))
return
def set_xgb_parameters(self):
early_stopping_rounds = 3
        self.xgb_params = {'silent': 1, 'colsample_bytree': 0.8, 'lambda': 1, 'min_child_weight': 1, 'subsample': 0.8, 'eta': 0.01, 'objective': 'reg:linear', 'max_depth': 7}
# self.xgb_params = {'silent':1 }
self.xgb_learning_params = {
'num_boost_round': 200,
'callbacks':[xgb.callback.print_evaluation(show_stdv=True),xgb.callback.early_stop(early_stopping_rounds)],
'feval':mean_absolute_percentage_error_xgboost_cv}
if self.do_cross_val == False:
self.xgb_learning_params['feval'] = mean_absolute_percentage_error_xgboost
return
def get_paramgrid_1(self):
"""
        This method must be overridden by a derived class when its objective is not reg:linear
"""
        param_grid = {'max_depth': [6], 'eta': [0.1], 'min_child_weight': [1], 'silent': [1],
                      'objective': ['reg:linear'], 'colsample_bytree': [0.8], 'subsample': [0.8], 'lambda': [1]}
return param_grid
def get_paramgrid_2(self, param_grid):
"""
        This method must be overridden by a derived class if it intends to fine-tune parameters
"""
self.ramdonized_search_enable = False
self.randomized_search_n_iter = 150
self.grid_search_display_result = True
param_grid['eta'] = [0.01] #train-mape:-0.448062+0.00334926 test-mape:-0.448402+0.00601761
# param_grid['max_depth'] = [7] #train-mape:-0.363007+0.00454276 test-mape:-0.452832+0.00321641
# param_grid['colsample_bytree'] = [0.8]
param_grid['max_depth'] = range(5,8) #train-mape:-0.363007+0.00454276 test-mape:-0.452832+0.00321641
param_grid['colsample_bytree'] = [0.6,0.8,1.0]
# param_grid['lambda'] = range(1,15)
# param_grid['max_depth'] = [3,4]
# param_grid['eta'] = [0.01,0.1] # 0.459426+0.00518875
# param_grid['subsample'] = [0.5] #0.458935+0.00522205
# param_grid['eta'] = [0.005] #0.457677+0.00526401
return param_grid
def get_learning_params(self):
"""e
This method must be overriden by derived class if it intends to fine tune parameters
"""
num_boost_round = 100
early_stopping_rounds = 5
kwargs = {'num_boost_round':num_boost_round, 'feval':mean_absolute_percentage_error_xgboost_cv,
'callbacks':[xgb.callback.print_evaluation(show_stdv=True),xgb.callback.early_stop(early_stopping_rounds)]}
return kwargs
if __name__ == "__main__":
obj= DidiXGBoostModel()
obj.run() | mit | 3,712,276,902,335,813,600 | 43.736264 | 187 | 0.629975 | false |
Censio/filterpy | filterpy/common/tests/test_discretization.py | 1 | 2566 | # -*- coding: utf-8 -*-
"""Copyright 2015 Roger R Labbe Jr.
FilterPy library.
http://github.com/rlabbe/filterpy
Documentation at:
https://filterpy.readthedocs.org
Supporting book at:
https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python
This is licensed under an MIT license. See the readme.MD file
for more information.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from filterpy.common import linear_ode_discretation, Q_discrete_white_noise
from numpy import array
def near_eq(x,y):
return abs(x-y) < 1.e-17
def test_Q_discrete_white_noise():
Q = Q_discrete_white_noise (2)
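    # With the default dt=1 and var=1 this should be the standard
    # piecewise-white-noise matrix [[dt**4/4, dt**3/2], [dt**3/2, dt**2]],
    # i.e. [[0.25, 0.5], [0.5, 1.0]], which the assertions below verify.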
assert Q[0,0] == .25
assert Q[1,0] == .5
assert Q[0,1] == .5
assert Q[1,1] == 1
assert Q.shape == (2,2)
def test_linear_ode():
F = array([[0,0,1,0,0,0],
[0,0,0,1,0,0],
[0,0,0,0,1,0],
[0,0,0,0,0,1],
[0,0,0,0,0,0],
[0,0,0,0,0,0]], dtype=float)
L = array ([[0,0],
[0,0],
[0,0],
[0,0],
[1,0],
[0,1]], dtype=float)
q = .2
Q = array([[q, 0],[0, q]])
dt = 0.5
A,Q = linear_ode_discretation(F, L, Q, dt)
val = [1, 0, dt, 0, 0.5*dt**2, 0]
for i in range(6):
assert val[i] == A[0,i]
for i in range(6):
assert val[i-1] == A[1,i] if i > 0 else A[1,i] == 0
for i in range(6):
assert val[i-2] == A[2,i] if i > 1 else A[2,i] == 0
for i in range(6):
assert val[i-3] == A[3,i] if i > 2 else A[3,i] == 0
for i in range(6):
assert val[i-4] == A[4,i] if i > 3 else A[4,i] == 0
for i in range(6):
assert val[i-5] == A[5,i] if i > 4 else A[5,i] == 0
assert near_eq(Q[0,0], (1./20)*(dt**5)*q)
assert near_eq(Q[0,1], 0)
assert near_eq(Q[0,2], (1/8)*(dt**4)*q)
assert near_eq(Q[0,3], 0)
assert near_eq(Q[0,4], (1./6)*(dt**3)*q)
assert near_eq(Q[0,5], 0)
if __name__ == "__main__":
test_linear_ode()
test_Q_discrete_white_noise()
F = array([[0,0,1,0,0,0],
[0,0,0,1,0,0],
[0,0,0,0,1,0],
[0,0,0,0,0,1],
[0,0,0,0,0,0],
[0,0,0,0,0,0]], dtype=float)
L = array ([[0,0],
[0,0],
[0,0],
[0,0],
[1,0],
[0,1]], dtype=float)
q = .2
Q = array([[q, 0],[0, q]])
dt = 1/30
A,Q = linear_ode_discretation(F, L, Q, dt)
print(Q) | mit | -1,838,436,546,280,924,000 | 21.716814 | 75 | 0.464147 | false |
boada/planckClusters | MOSAICpipe/bpz-1.99.3/prior_full.py | 1 | 3446 | from __future__ import print_function
from __future__ import division
from past.utils import old_div
from useful import match_resol
import numpy
import sys
# Hacked to use numpy and avoid import * commands
# FM
Float = numpy.float
less = numpy.less
def function(z, m, nt):
"""HDFN prior for the main six types of Benitez 2000
Returns an array pi[z[:],:6]
The input magnitude is F814W AB
"""
if nt != 6:
print("Wrong number of template spectra!")
sys.exit()
global zt_at_a
    global zt_2
    global zt_1p5
nz = len(z)
momin_hdf = 20.
if m <= 20.:
xm = numpy.arange(12., 18.0)
ft = numpy.array((0.55, 0.21, 0.21, .01, .01, .01))
zm0 = numpy.array([0.021, 0.034, 0.056, 0.0845, 0.1155, 0.127]) * (
old_div(2., 3.))
if len(ft) != nt:
print("Wrong number of templates!")
sys.exit()
nz = len(z)
m = numpy.array([m]) # match_resol works with arrays
m = numpy.clip(m, xm[0], xm[-1])
zm = match_resol(xm, zm0, m)
try:
zt_2.shape
except NameError:
t2 = [2.] * nt
zt_2 = numpy.power.outer(z, t2)
try:
zt_1p5.shape
except NameError:
t1p5 = [1.5] * nt
zt_1p5 = numpy.power.outer(z, t1p5)
zm_3 = numpy.power.outer(zm, 3)
zm_1p5 = numpy.power.outer(zm, 1.5)
p_i = 3. / 2. / zm_3 * zt_2[:, :] * numpy.exp(-numpy.clip(
old_div(zt_1p5[:, :], zm_1p5), 0., 700.))
norm = numpy.add.reduce(p_i[:nz, :], 0)
#Get rid of very low probability levels
p_i[:nz, :] = numpy.where(
numpy.less(
old_div(p_i[:nz, :], norm[:]), old_div(1e-5, float(nz))), 0.,
old_div(p_i[:nz, :], norm[:]))
norm = numpy.add.reduce(p_i[:nz, :], 0)
return p_i[:nz, :] / norm[:] * ft[:]
else:
m = numpy.minimum(numpy.maximum(20., m), 32)
a = numpy.array((2.465, 1.806, 1.806, 0.906, 0.906, 0.906))
zo = numpy.array((0.431, 0.390, 0.390, 0.0626, 0.0626, 0.0626))
km = numpy.array((0.0913, 0.0636, 0.0636, 0.123, 0.123, 0.123))
fo_t = numpy.array((0.35, 0.25, 0.25))
k_t = numpy.array((0.450, 0.147, 0.147))
dm = m - momin_hdf
zmt = numpy.clip(zo + km * dm, 0.01, 15.)
zmt_at_a = zmt**(a)
#We define z**a as global to keep it
#between function calls. That way it is
# estimated only once
try:
zt_at_a.shape
except NameError:
zt_at_a = numpy.power.outer(z, a)
#Morphological fractions
f_t = numpy.zeros((len(a), ), Float)
f_t[:3] = fo_t * numpy.exp(-k_t * dm)
f_t[3:] = old_div((1. - numpy.add.reduce(f_t[:3])), 3.)
#Formula:
#zm=zo+km*(m_m_min)
#p(z|T,m)=(z**a)*numpy.exp(-(z/zm)**a)
p_i = zt_at_a[:nz, :6] * numpy.exp(-numpy.clip(
old_div(zt_at_a[:nz, :6], zmt_at_a[:6]), 0., 700.))
#This eliminates the very low level tails of the priors
norm = numpy.add.reduce(p_i[:nz, :6], 0)
p_i[:nz, :6] = numpy.where(
less(
old_div(p_i[:nz, :6], norm[:6]), old_div(1e-2, float(nz))), 0.,
old_div(p_i[:nz, :6], norm[:6]))
norm = numpy.add.reduce(p_i[:nz, :6], 0)
p_i[:nz, :6] = p_i[:nz, :6] / norm[:6] * f_t[:6]
return p_i
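# Minimal usage sketch (illustrative magnitude and grid; assumes the imports
# above are available). Evaluates the prior for a single F814W AB magnitude.
if __name__ == '__main__':
    z_grid = numpy.arange(0.01, 6.01, 0.01)
    prior = function(z_grid, 24.0, 6)
    print(prior.shape)  # (len(z_grid), 6): one column per template type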
| mit | -3,085,104,049,551,145,000 | 31.819048 | 79 | 0.492455 | false |
luzheqi1987/nova-annotation | nova/tests/unit/integrated/v3/test_remote_consoles.py | 1 | 3052 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.unit.integrated.v3 import test_servers
class ConsolesSampleJsonTests(test_servers.ServersSampleBase):
extension_name = "os-remote-consoles"
def setUp(self):
super(ConsolesSampleJsonTests, self).setUp()
self.flags(vnc_enabled=True)
self.flags(enabled=True, group='spice')
self.flags(enabled=True, group='rdp')
self.flags(enabled=True, group='serial_console')
def test_get_vnc_console(self):
uuid = self._post_server()
response = self._do_post('servers/%s/action' % uuid,
'get-vnc-console-post-req',
{'action': 'os-getVNCConsole'})
subs = self._get_regexes()
subs["url"] = \
"((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
self._verify_response('get-vnc-console-post-resp', subs, response, 200)
def test_get_spice_console(self):
uuid = self._post_server()
response = self._do_post('servers/%s/action' % uuid,
'get-spice-console-post-req',
{'action': 'os-getSPICEConsole'})
subs = self._get_regexes()
subs["url"] = \
"((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
self._verify_response('get-spice-console-post-resp', subs,
response, 200)
def test_get_rdp_console(self):
uuid = self._post_server()
response = self._do_post('servers/%s/action' % uuid,
'get-rdp-console-post-req',
{'action': 'os-getRDPConsole'})
subs = self._get_regexes()
subs["url"] = \
"((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
self._verify_response('get-rdp-console-post-resp', subs,
response, 200)
def test_get_serial_console(self):
uuid = self._post_server()
response = self._do_post('servers/%s/action' % uuid,
'get-serial-console-post-req',
{'action': 'os-getSerialConsole'})
subs = self._get_regexes()
subs["url"] = \
"((ws?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
self._verify_response('get-serial-console-post-resp', subs,
response, 200)
| apache-2.0 | -1,114,911,595,904,322,600 | 42.6 | 79 | 0.52228 | false |
selboo/starl-mangle | webvirtmgr/dashboard/views.py | 1 | 5187 | from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.utils.datastructures import SortedDict
from instance.models import Host
from webvirtmgr.server import ConnServer
from dashboard.forms import HostAddTcpForm, HostAddSshForm
def sort_host(hosts):
"""
Sorts dictionary of hosts by key
"""
if hosts:
sorted_hosts = []
for host in sorted(hosts.iterkeys()):
sorted_hosts.append((host, hosts[host]))
return SortedDict(sorted_hosts)
def index(request):
"""
Index page.
"""
if not request.user.is_authenticated():
return HttpResponseRedirect('/login')
else:
return HttpResponseRedirect('/dashboard')
def dashboard(request):
"""
Dashboard page.
"""
if not request.user.is_authenticated():
return HttpResponseRedirect('/login')
def get_hosts_status(hosts):
"""
        Return a dict mapping host.id to (name, hostname, status) for all hosts.
"""
all_hosts = {}
for host in hosts:
try:
import socket
socket_host = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket_host.settimeout(1)
if host.type == 'ssh':
socket_host.connect((host.hostname, host.port))
else:
socket_host.connect((host.hostname, 16509))
socket_host.close()
status = 1
except Exception as err:
status = err
all_hosts[host.id] = (host.name, host.hostname, status)
return all_hosts
hosts = Host.objects.filter()
hosts_info = get_hosts_status(hosts)
form = None
if request.method == 'POST':
if 'host_del' in request.POST:
del_host = Host.objects.get(id=request.POST.get('host_id', ''))
del_host.delete()
return HttpResponseRedirect(request.get_full_path())
if 'host_tcp_add' in request.POST:
form = HostAddTcpForm(request.POST)
if form.is_valid():
data = form.cleaned_data
new_host = Host(name=data['name'],
hostname=data['hostname'],
type='tcp',
login=data['login'],
password=data['password1']
)
new_host.save()
return HttpResponseRedirect(request.get_full_path())
if 'host_ssh_add' in request.POST:
form = HostAddSshForm(request.POST)
if form.is_valid():
data = form.cleaned_data
new_host = Host(name=data['name'],
hostname=data['hostname'],
type='ssh',
port=data['port'],
login=data['login']
)
new_host.save()
return HttpResponseRedirect(request.get_full_path())
hosts_info = sort_host(hosts_info)
return render_to_response('dashboard.html', {'hosts_info': hosts_info,
'form': form,
},
context_instance=RequestContext(request))
def infrastructure(request):
"""
Infrastructure page.
"""
if not request.user.is_authenticated():
return HttpResponseRedirect('/login')
hosts = Host.objects.filter().order_by('id')
hosts_vms = {}
host_info = None
host_mem = None
for host in hosts:
try:
import socket
socket_host = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket_host.settimeout(1)
if host.type == 'ssh':
socket_host.connect((host.hostname, host.port))
else:
socket_host.connect((host.hostname, 16509))
socket_host.close()
status = 1
        except Exception:
status = 2
if status == 1:
conn = ConnServer(host)
host_info = conn.node_get_info()
host_mem = conn.memory_get_usage()
hosts_vms[host.id, host.name, status, host_info[2], host_mem[0], host_mem[2]] = conn.vds_on_cluster()
else:
hosts_vms[host.id, host.name, status, None, None, None] = None
for host in hosts_vms:
hosts_vms[host] = sort_host(hosts_vms[host])
hosts_vms = sort_host(hosts_vms)
return render_to_response('infrastructure.html', {'hosts_info': host_info,
'host_mem': host_mem,
'hosts_vms': hosts_vms,
'hosts': hosts
},
context_instance=RequestContext(request))
def page_setup(request):
return render_to_response('setup.html', {}, context_instance=RequestContext(request))
| apache-2.0 | 1,696,039,395,362,939,000 | 31.829114 | 113 | 0.508965 | false |
anntzer/scikit-learn | sklearn/linear_model/_passive_aggressive.py | 2 | 17363 | # Authors: Rob Zinkov, Mathieu Blondel
# License: BSD 3 clause
from ..utils.validation import _deprecate_positional_args
from ._stochastic_gradient import BaseSGDClassifier
from ._stochastic_gradient import BaseSGDRegressor
from ._stochastic_gradient import DEFAULT_EPSILON
class PassiveAggressiveClassifier(BaseSGDClassifier):
"""Passive Aggressive Classifier
Read more in the :ref:`User Guide <passive_aggressive>`.
Parameters
----------
C : float, default=1.0
Maximum step size (regularization). Defaults to 1.0.
fit_intercept : bool, default=True
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered.
max_iter : int, default=1000
The maximum number of passes over the training data (aka epochs).
It only impacts the behavior in the ``fit`` method, and not the
:meth:`partial_fit` method.
.. versionadded:: 0.19
tol : float or None, default=1e-3
The stopping criterion. If it is not None, the iterations will stop
when (loss > previous_loss - tol).
.. versionadded:: 0.19
early_stopping : bool, default=False
Whether to use early stopping to terminate training when validation.
score is not improving. If set to True, it will automatically set aside
a stratified fraction of training data as validation and terminate
training when validation score is not improving by at least tol for
n_iter_no_change consecutive epochs.
.. versionadded:: 0.20
validation_fraction : float, default=0.1
The proportion of training data to set aside as validation set for
early stopping. Must be between 0 and 1.
Only used if early_stopping is True.
.. versionadded:: 0.20
n_iter_no_change : int, default=5
Number of iterations with no improvement to wait before early stopping.
.. versionadded:: 0.20
shuffle : bool, default=True
Whether or not the training data should be shuffled after each epoch.
verbose : integer, default=0
The verbosity level
loss : string, default="hinge"
The loss function to be used:
hinge: equivalent to PA-I in the reference paper.
squared_hinge: equivalent to PA-II in the reference paper.
n_jobs : int or None, default=None
The number of CPUs to use to do the OVA (One Versus All, for
multi-class problems) computation.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
random_state : int, RandomState instance, default=None
Used to shuffle the training data, when ``shuffle`` is set to
``True``. Pass an int for reproducible output across multiple
function calls.
See :term:`Glossary <random_state>`.
warm_start : bool, default=False
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
See :term:`the Glossary <warm_start>`.
Repeatedly calling fit or partial_fit when warm_start is True can
result in a different solution than when calling fit a single time
because of the way the data is shuffled.
class_weight : dict, {class_label: weight} or "balanced" or None, \
default=None
Preset for the class_weight fit parameter.
Weights associated with classes. If not given, all classes
are supposed to have weight one.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``
.. versionadded:: 0.17
parameter *class_weight* to automatically weight samples.
average : bool or int, default=False
When set to True, computes the averaged SGD weights and stores the
result in the ``coef_`` attribute. If set to an int greater than 1,
averaging will begin once the total number of samples seen reaches
average. So average=10 will begin averaging after seeing 10 samples.
.. versionadded:: 0.19
parameter *average* to use weights averaging in SGD
Attributes
----------
coef_ : array, shape = [1, n_features] if n_classes == 2 else [n_classes,\
n_features]
Weights assigned to the features.
intercept_ : array, shape = [1] if n_classes == 2 else [n_classes]
Constants in decision function.
n_iter_ : int
The actual number of iterations to reach the stopping criterion.
For multiclass fits, it is the maximum over every binary fit.
classes_ : array of shape (n_classes,)
The unique classes labels.
t_ : int
Number of weight updates performed during training.
Same as ``(n_iter_ * n_samples)``.
loss_function_ : callable
Loss function used by the algorithm.
Examples
--------
>>> from sklearn.linear_model import PassiveAggressiveClassifier
>>> from sklearn.datasets import make_classification
>>> X, y = make_classification(n_features=4, random_state=0)
>>> clf = PassiveAggressiveClassifier(max_iter=1000, random_state=0,
... tol=1e-3)
>>> clf.fit(X, y)
PassiveAggressiveClassifier(random_state=0)
>>> print(clf.coef_)
[[0.26642044 0.45070924 0.67251877 0.64185414]]
>>> print(clf.intercept_)
[1.84127814]
>>> print(clf.predict([[0, 0, 0, 0]]))
[1]
See Also
--------
SGDClassifier
Perceptron
References
----------
Online Passive-Aggressive Algorithms
<http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
"""
@_deprecate_positional_args
def __init__(self, *, C=1.0, fit_intercept=True, max_iter=1000, tol=1e-3,
early_stopping=False, validation_fraction=0.1,
n_iter_no_change=5, shuffle=True, verbose=0, loss="hinge",
n_jobs=None, random_state=None, warm_start=False,
class_weight=None, average=False):
super().__init__(
penalty=None,
fit_intercept=fit_intercept,
max_iter=max_iter,
tol=tol,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
n_iter_no_change=n_iter_no_change,
shuffle=shuffle,
verbose=verbose,
random_state=random_state,
eta0=1.0,
warm_start=warm_start,
class_weight=class_weight,
average=average,
n_jobs=n_jobs)
self.C = C
self.loss = loss
def partial_fit(self, X, y, classes=None):
"""Fit linear model with Passive Aggressive algorithm.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Subset of the training data
y : numpy array of shape [n_samples]
Subset of the target values
classes : array, shape = [n_classes]
Classes across all calls to partial_fit.
Can be obtained by via `np.unique(y_all)`, where y_all is the
target vector of the entire dataset.
This argument is required for the first call to partial_fit
and can be omitted in the subsequent calls.
Note that y doesn't need to contain all labels in `classes`.
Returns
-------
self : returns an instance of self.
"""
self._validate_params(for_partial_fit=True)
if self.class_weight == 'balanced':
raise ValueError("class_weight 'balanced' is not supported for "
"partial_fit. For 'balanced' weights, use "
"`sklearn.utils.compute_class_weight` with "
"`class_weight='balanced'`. In place of y you "
"can use a large enough subset of the full "
"training set target to properly estimate the "
"class frequency distributions. Pass the "
"resulting weights as the class_weight "
"parameter.")
lr = "pa1" if self.loss == "hinge" else "pa2"
return self._partial_fit(X, y, alpha=1.0, C=self.C,
loss="hinge", learning_rate=lr, max_iter=1,
classes=classes, sample_weight=None,
coef_init=None, intercept_init=None)
def fit(self, X, y, coef_init=None, intercept_init=None):
"""Fit linear model with Passive Aggressive algorithm.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Training data
y : numpy array of shape [n_samples]
Target values
coef_init : array, shape = [n_classes,n_features]
The initial coefficients to warm-start the optimization.
intercept_init : array, shape = [n_classes]
The initial intercept to warm-start the optimization.
Returns
-------
self : returns an instance of self.
"""
self._validate_params()
lr = "pa1" if self.loss == "hinge" else "pa2"
return self._fit(X, y, alpha=1.0, C=self.C,
loss="hinge", learning_rate=lr,
coef_init=coef_init, intercept_init=intercept_init)
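# Illustrative streaming usage of the classifier above (a sketch, not part of
# the estimator API):
#
#     clf = PassiveAggressiveClassifier()
#     for X_batch, y_batch in stream:  # user-supplied mini-batches
#         clf.partial_fit(X_batch, y_batch, classes=all_classes)
#
# `classes` is required on the first call and may be omitted afterwards.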
class PassiveAggressiveRegressor(BaseSGDRegressor):
"""Passive Aggressive Regressor
Read more in the :ref:`User Guide <passive_aggressive>`.
Parameters
----------
C : float, default=1.0
Maximum step size (regularization). Defaults to 1.0.
fit_intercept : bool, default=True
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered. Defaults to True.
max_iter : int, default=1000
The maximum number of passes over the training data (aka epochs).
It only impacts the behavior in the ``fit`` method, and not the
:meth:`partial_fit` method.
.. versionadded:: 0.19
tol : float or None, default=1e-3
The stopping criterion. If it is not None, the iterations will stop
when (loss > previous_loss - tol).
.. versionadded:: 0.19
early_stopping : bool, default=False
Whether to use early stopping to terminate training when validation.
score is not improving. If set to True, it will automatically set aside
a fraction of training data as validation and terminate
training when validation score is not improving by at least tol for
n_iter_no_change consecutive epochs.
.. versionadded:: 0.20
validation_fraction : float, default=0.1
The proportion of training data to set aside as validation set for
early stopping. Must be between 0 and 1.
Only used if early_stopping is True.
.. versionadded:: 0.20
n_iter_no_change : int, default=5
Number of iterations with no improvement to wait before early stopping.
.. versionadded:: 0.20
shuffle : bool, default=True
Whether or not the training data should be shuffled after each epoch.
verbose : integer, default=0
The verbosity level
loss : string, default="epsilon_insensitive"
The loss function to be used:
epsilon_insensitive: equivalent to PA-I in the reference paper.
squared_epsilon_insensitive: equivalent to PA-II in the reference
paper.
epsilon : float, default=0.1
If the difference between the current prediction and the correct label
is below this threshold, the model is not updated.
random_state : int, RandomState instance, default=None
Used to shuffle the training data, when ``shuffle`` is set to
``True``. Pass an int for reproducible output across multiple
function calls.
See :term:`Glossary <random_state>`.
warm_start : bool, default=False
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
See :term:`the Glossary <warm_start>`.
Repeatedly calling fit or partial_fit when warm_start is True can
result in a different solution than when calling fit a single time
because of the way the data is shuffled.
average : bool or int, default=False
When set to True, computes the averaged SGD weights and stores the
result in the ``coef_`` attribute. If set to an int greater than 1,
averaging will begin once the total number of samples seen reaches
average. So average=10 will begin averaging after seeing 10 samples.
.. versionadded:: 0.19
parameter *average* to use weights averaging in SGD
Attributes
----------
coef_ : array, shape = [1, n_features] if n_classes == 2 else [n_classes,\
n_features]
Weights assigned to the features.
intercept_ : array, shape = [1] if n_classes == 2 else [n_classes]
Constants in decision function.
n_iter_ : int
The actual number of iterations to reach the stopping criterion.
t_ : int
Number of weight updates performed during training.
Same as ``(n_iter_ * n_samples)``.
Examples
--------
>>> from sklearn.linear_model import PassiveAggressiveRegressor
>>> from sklearn.datasets import make_regression
>>> X, y = make_regression(n_features=4, random_state=0)
>>> regr = PassiveAggressiveRegressor(max_iter=100, random_state=0,
... tol=1e-3)
>>> regr.fit(X, y)
PassiveAggressiveRegressor(max_iter=100, random_state=0)
>>> print(regr.coef_)
[20.48736655 34.18818427 67.59122734 87.94731329]
>>> print(regr.intercept_)
[-0.02306214]
>>> print(regr.predict([[0, 0, 0, 0]]))
[-0.02306214]
See Also
--------
SGDRegressor
References
----------
Online Passive-Aggressive Algorithms
<http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
"""
@_deprecate_positional_args
def __init__(self, *, C=1.0, fit_intercept=True, max_iter=1000, tol=1e-3,
early_stopping=False, validation_fraction=0.1,
n_iter_no_change=5, shuffle=True, verbose=0,
loss="epsilon_insensitive", epsilon=DEFAULT_EPSILON,
random_state=None, warm_start=False,
average=False):
super().__init__(
penalty=None,
l1_ratio=0,
epsilon=epsilon,
eta0=1.0,
fit_intercept=fit_intercept,
max_iter=max_iter,
tol=tol,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
n_iter_no_change=n_iter_no_change,
shuffle=shuffle,
verbose=verbose,
random_state=random_state,
warm_start=warm_start,
average=average)
self.C = C
self.loss = loss
def partial_fit(self, X, y):
"""Fit linear model with Passive Aggressive algorithm.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Subset of training data
y : numpy array of shape [n_samples]
Subset of target values
Returns
-------
self : returns an instance of self.
"""
self._validate_params(for_partial_fit=True)
lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"
return self._partial_fit(X, y, alpha=1.0, C=self.C,
loss="epsilon_insensitive",
learning_rate=lr, max_iter=1,
sample_weight=None,
coef_init=None, intercept_init=None)
def fit(self, X, y, coef_init=None, intercept_init=None):
"""Fit linear model with Passive Aggressive algorithm.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Training data
y : numpy array of shape [n_samples]
Target values
coef_init : array, shape = [n_features]
The initial coefficients to warm-start the optimization.
intercept_init : array, shape = [1]
The initial intercept to warm-start the optimization.
Returns
-------
self : returns an instance of self.
"""
self._validate_params()
lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"
return self._fit(X, y, alpha=1.0, C=self.C,
loss="epsilon_insensitive",
learning_rate=lr,
coef_init=coef_init,
intercept_init=intercept_init)
| bsd-3-clause | -4,979,293,226,387,037,000 | 35.942553 | 79 | 0.607153 | false |
vit-/telegram-uz-bot | uz/tests/interface/telegram/test_bot.py | 1 | 5489 | import time
from datetime import datetime
import mock
import pytest
from uz.tests import Awaitable
from uz.interface.telegram import bot
from uz.scanner import UknkownScanID
CHAT_ID = 'chat_id'
def tg_message(text):
return {
'chat': {
'id': CHAT_ID,
'type': 'private',
},
'from': {'first_name': 'n/a', 'id': 'user_id'},
'message_id': int(time.time()),
'text': text
}
def get_reply(send_message_mock):
args, kwargs = send_message_mock.call_args_list[0]
return args[1]
@pytest.mark.asyncio
async def test_list_trains(source_station, destination_station, train):
bot.send_message = send_message = mock.MagicMock(return_value=Awaitable())
date = datetime(2016, 7, 21)
command = '/trains {} {} {}'.format(
date.strftime('%Y-%m-%d'), source_station.title, destination_station.title)
with mock.patch('uz.interface.serializer.Deserializer.load',
return_value=Awaitable((date, source_station, destination_station))) as load, \
mock.patch('uz.client.client.UZClient.list_trains',
return_value=Awaitable([train])) as list_trains:
await bot._process_message(tg_message(command))
load.assert_called_once_with({
'date': date.strftime('%Y-%m-%d'),
'source': source_station.title,
'destination': destination_station.title})
list_trains.assert_called_once_with(date, source_station, destination_station)
msg = get_reply(send_message)
title = 'Trains from %s to %s on %s:' % (
source_station, destination_station, date.date())
assert msg.startswith(title)
assert train.info() in msg
@pytest.mark.asyncio
@pytest.mark.parametrize('is_ok', [True, False])
async def test_status(is_ok):
scan_id = 'id1234'
scanner = mock.MagicMock()
if is_ok:
scanner.status.return_value = (attempts, error) = (10, 'i am error')
else:
scanner.status.side_effect = UknkownScanID()
bot.send_message = send_message = mock.MagicMock(return_value=Awaitable())
bot.set_scanner(scanner)
await bot._process_message(tg_message('/status_{}'.format(scan_id)))
scanner.status.assert_called_once_with(scan_id)
if is_ok:
send_message.assert_called_once_with(
CHAT_ID, 'No attempts: {}\nLast error message: {}'.format(attempts, error))
else:
send_message.assert_called_once_with(
CHAT_ID, 'Unknown scan id: {}'.format(scan_id))
@pytest.mark.asyncio
@pytest.mark.parametrize('is_ok', [True, False])
async def test_abort_scan(is_ok):
scan_id = 'id4321'
scanner = mock.MagicMock()
if is_ok:
scanner.abort.return_value = True
else:
scanner.abort.side_effect = UknkownScanID()
bot.send_message = send_message = mock.MagicMock(return_value=Awaitable())
bot.set_scanner(scanner)
await bot._process_message(tg_message('/abort_{}'.format(scan_id)))
scanner.abort.assert_called_once_with(scan_id)
if is_ok:
send_message.assert_called_once_with(
CHAT_ID, 'OK')
else:
send_message.assert_called_once_with(
CHAT_ID, 'Unknown scan id: {}'.format(scan_id))
@pytest.mark.asyncio
@pytest.mark.parametrize('ct_letter', [None, 'C2'])
async def test_scan(source_station, destination_station, ct_letter):
scan_id = 'id1234'
date = datetime(2016, 10, 7)
train_num = '744K'
firstname = 'username'
lastname = 'surname'
parts = [
'/scan',
firstname,
lastname,
date.strftime('%Y-%m-%d'),
source_station,
destination_station,
train_num]
if ct_letter:
parts.append(ct_letter)
command = ' '.join(str(i) for i in parts)
scanner = mock.MagicMock()
scanner.add_item.return_value = scan_id
bot.send_message = send_message = mock.MagicMock(return_value=Awaitable())
bot.set_scanner(scanner)
with mock.patch('uz.interface.serializer.Deserializer.load',
return_value=Awaitable((date, source_station, destination_station))) as load:
await bot._process_message(tg_message(command))
load.assert_called_once_with({
'firstname': firstname,
'lastname': lastname,
'date': date.strftime('%Y-%m-%d'),
'source': source_station.title,
'destination': destination_station.title,
'train_num': train_num,
'ct_letter': ct_letter})
scanner.add_item.assert_called_once_with(
mock.ANY, firstname, lastname, date, source_station, destination_station,
train_num, ct_letter)
expected = ('Scanning tickets for train {train} from {src} to {dst} on {date}.\n'
'To monitor scan status: /status_{sid}\n'
'To abort scan: /abort_{sid}').format(
train=train_num,
src=source_station,
dst=destination_station,
date=date.date(),
sid=scan_id)
send_message.assert_called_once_with(CHAT_ID, expected)
@pytest.mark.asyncio
async def test_hello():
bot.send_message = send_message = mock.MagicMock(return_value=Awaitable())
await bot._process_message(tg_message('hi'))
send_message.assert_called_once_with(CHAT_ID, mock.ANY)
@pytest.mark.asyncio
async def test_help_msg():
bot.send_message = send_message = mock.MagicMock(return_value=Awaitable())
await bot._process_message(tg_message('/help'))
send_message.assert_called_once_with(CHAT_ID, mock.ANY)
| mit | 5,860,053,250,814,065,000 | 33.522013 | 99 | 0.635817 | false |
CityofPittsburgh/pittsburgh-purchasing-suite | migrations/versions/31d29fbffe44_add_passwords_for_users.py | 1 | 1988 | """add passwords for users
Revision ID: 31d29fbffe44
Revises: 48c578b852fa
Create Date: 2016-01-20 23:33:36.893832
"""
# revision identifiers, used by Alembic.
revision = '31d29fbffe44'
down_revision = '48c578b852fa'
import random
from flask_security.utils import encrypt_password
from alembic import op
import sqlalchemy as sa
ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
def rand_alphabet():
return encrypt_password(''.join(random.choice(ALPHABET) for i in range(16)))
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column(u'roles', sa.Column('description', sa.String(length=255), nullable=True))
op.add_column(u'users', sa.Column('confirmed_at', sa.DateTime(), nullable=True))
op.add_column(u'users', sa.Column('current_login_at', sa.DateTime(), nullable=True))
op.add_column(u'users', sa.Column('current_login_ip', sa.String(length=255), nullable=True))
op.add_column(u'users', sa.Column('last_login_at', sa.DateTime(), nullable=True))
op.add_column(u'users', sa.Column('last_login_ip', sa.String(length=255), nullable=True))
op.add_column(u'users', sa.Column('login_count', sa.Integer(), nullable=True))
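    # Note: rand_alphabet() is evaluated once when this migration runs, so all
    # pre-existing rows receive the same random placeholder hash; it only
    # needs to be unguessable, not unique per user.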
op.add_column(u'users', sa.Column(
'password', sa.String(length=255), nullable=False,
default=rand_alphabet(), server_default=rand_alphabet()
))
### end Alembic commands ###
op.execute(sa.sql.text('''
UPDATE users SET confirmed_at = now()
'''))
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column(u'users', 'password')
op.drop_column(u'users', 'login_count')
op.drop_column(u'users', 'last_login_ip')
op.drop_column(u'users', 'last_login_at')
op.drop_column(u'users', 'current_login_ip')
op.drop_column(u'users', 'current_login_at')
op.drop_column(u'users', 'confirmed_at')
op.drop_column(u'roles', 'description')
### end Alembic commands ###
| bsd-3-clause | -1,728,439,714,103,357,000 | 36.509434 | 96 | 0.686117 | false |
miracle2k/stgit | stgit/commands/delete.py | 1 | 3073 |
__copyright__ = """
Copyright (C) 2005, Catalin Marinas <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
from stgit.argparse import opt
from stgit.commands import common
from stgit.lib import transaction
from stgit import argparse
help = 'Delete patches'
kind = 'patch'
usage = ['[options] <patch1> [<patch2>] [<patch3>..<patch4>]']
description = """
Delete the patches passed as arguments."""
args = [argparse.patch_range(argparse.applied_patches,
argparse.unapplied_patches)]
options = [
opt('--spill', action = 'store_true',
short = 'Spill patch contents to worktree and index', long = """
Delete the patches, but do not touch the index and worktree.
This only works with applied patches at the top of the stack.
The effect is to "spill" the patch contents into the index and
worktree. This can be useful e.g. if you want to split a patch
into several smaller pieces."""),
opt('-b', '--branch', args = [argparse.stg_branches],
short = 'Use BRANCH instead of the default branch')]
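# Illustrative invocations (assuming an initialized StGit branch):
#
#   stg delete patch1 patch3..patch5    # delete patches and their changes
#   stg delete --spill $(stg top)       # drop the topmost patch but keep its
#                                       # changes in the index and worktree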
directory = common.DirectoryHasRepositoryLib()
def func(parser, options, args):
"""Delete one or more patches."""
stack = directory.repository.get_stack(options.branch)
if options.branch:
iw = None # can't use index/workdir to manipulate another branch
else:
iw = stack.repository.default_iw
if args:
patches = set(common.parse_patches(args, list(stack.patchorder.all),
len(stack.patchorder.applied)))
else:
parser.error('No patches specified')
if options.spill:
if set(stack.patchorder.applied[-len(patches):]) != patches:
parser.error('Can only spill topmost applied patches')
iw = None # don't touch index+worktree
def allow_conflicts(trans):
# Allow conflicts if the topmost patch stays the same.
if stack.patchorder.applied:
return (trans.applied
and trans.applied[-1] == stack.patchorder.applied[-1])
else:
return not trans.applied
trans = transaction.StackTransaction(stack, 'delete',
allow_conflicts = allow_conflicts)
try:
to_push = trans.delete_patches(lambda pn: pn in patches)
for pn in to_push:
trans.push_patch(pn, iw)
except transaction.TransactionHalted:
pass
return trans.run(iw)
| gpl-2.0 | -8,761,710,056,651,811,000 | 38.397436 | 76 | 0.664497 | false |
FedoraScientific/salome-smesh | src/Tools/blocFissure/CasTests/fissure_Coude_4.py | 1 | 3081 | # -*- coding: utf-8 -*-
from fissure_Coude import fissure_Coude
class fissure_Coude_4(fissure_Coude):
"""
probleme de fissure du Coude : ASCOU09A
adaptation maillage
"""
# ---------------------------------------------------------------------------
def setParamGeometrieSaine(self):
"""
Paramètres géométriques du tuyau coudé sain:
angleCoude
r_cintr
l_tube_p1
l_tube_p2
epais
de
"""
self.geomParams = dict(angleCoude = 40,
r_cintr = 654,
l_tube_p1 = 1700,
l_tube_p2 = 1700,
epais = 62.5,
de = 912.4)
# ---------------------------------------------------------------------------
def setParamMaillageSain(self):
self.meshParams = dict(n_long_p1 = 16,
n_ep = 5,
n_long_coude = 30,
n_circ_g = 50,
n_circ_d = 20,
n_long_p2 = 12)
# ---------------------------------------------------------------------------
def setParamShapeFissure(self):
"""
    crack parameters
    profondeur : crack depth, 0 < profondeur <= wall thickness (epais)
    azimut : between 0 and 360°
    alpha : 0 < alpha < angleCoude
    longueur : <=2*profondeur ==> ellipse, >2*profondeur = long crack
    orientation : 0°: longitudinal, 90°: circumferential, other values: elliptical cracks only
    externe : True: crack on the outer face, False: crack on the inner face
"""
print "setParamShapeFissure", self.nomCas
self.shapeFissureParams = dict(nomRep = '.',
nomFicSain = self.nomCas,
nomFicFissure = 'fissure_' + self.nomCas,
profondeur = 10,
azimut = 90,
alpha = 20,
longueur = 240,
orientation = 90,
lgInfluence = 30,
elliptique = False,
convexe = True,
externe = True)
# ---------------------------------------------------------------------------
def setReferencesMaillageFissure(self):
self.referencesMaillageFissure = dict(Entity_Quad_Pyramid = 948,
Entity_Quad_Triangle = 1562,
Entity_Quad_Edge = 1192,
Entity_Quad_Penta = 732,
Entity_Quad_Hexa = 22208,
Entity_Node = 133418,
Entity_Quad_Tetra = 18759,
Entity_Quad_Quadrangle = 11852)
| lgpl-2.1 | -4,575,732,381,039,616,000 | 41.068493 | 102 | 0.370563 | false |
davidwilson-85/easymap | graphic_output/Pillow-4.2.1/Tests/test_file_wmf.py | 1 | 1215 | from helper import unittest, PillowTestCase, hopper
from PIL import Image
class TestFileWmf(PillowTestCase):
def test_load_raw(self):
# Test basic EMF open and rendering
im = Image.open('Tests/images/drawing.emf')
if hasattr(Image.core, "drawwmf"):
# Currently, support for WMF/EMF is Windows-only
im.load()
# Compare to reference rendering
imref = Image.open('Tests/images/drawing_emf_ref.png')
imref.load()
self.assert_image_similar(im, imref, 0)
# Test basic WMF open and rendering
im = Image.open('Tests/images/drawing.wmf')
if hasattr(Image.core, "drawwmf"):
# Currently, support for WMF/EMF is Windows-only
im.load()
# Compare to reference rendering
imref = Image.open('Tests/images/drawing_wmf_ref.png')
imref.load()
self.assert_image_similar(im, imref, 2.0)
def test_save(self):
im = hopper()
for ext in [".wmf", ".emf"]:
tmpfile = self.tempfile("temp"+ext)
self.assertRaises(IOError, lambda: im.save(tmpfile))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -6,634,027,561,541,816,000 | 30.973684 | 66 | 0.584362 | false |
abrt/faf | src/pyfaf/storage/migrations/versions/a2b6d12819f9_drop_yum_type.py | 1 | 2158 | # Copyright (C) 2019 ABRT Team
# Copyright (C) 2019 Red Hat, Inc.
#
# This file is part of faf.
#
# faf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# faf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with faf. If not, see <http://www.gnu.org/licenses/>.
"""
drop_yum_type
Revision ID: a2b6d12819f9
Revises: e5d5cefb8ca4
Create Date: 2019-02-08 11:41:56.967881
"""
from alembic.op import execute, get_bind
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a2b6d12819f9'
down_revision = 'e5d5cefb8ca4'
new_values = ['dnf', 'koji', 'rpmmetadata']
old_values = new_values + ['yum']
old_type = sa.Enum(*old_values, name='repo_type')
new_type = sa.Enum(*new_values, name='repo_type')
tmp_type = sa.Enum(*new_values, name='_repo_type')
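# PostgreSQL cannot remove a value from an existing enum type, so the
# migration below routes the column through a temporary enum: existing
# 'yum' rows are rewritten to 'dnf' first, the column is cast to
# _repo_type, repo_type is dropped and recreated without 'yum', the
# column is cast back, and the temporary type is dropped. downgrade()
# mirrors the same dance in reverse.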
def upgrade() -> None:
bind = get_bind()
execute('UPDATE repo SET type=\'dnf\' WHERE type=\'yum\'')
tmp_type.create(bind, checkfirst=False)
execute('ALTER TABLE repo ALTER COLUMN type TYPE _repo_type USING '
'type::text::_repo_type')
old_type.drop(bind, checkfirst=False)
new_type.create(bind, checkfirst=False)
execute('ALTER TABLE repo ALTER COLUMN type TYPE repo_type USING '
'type::text::repo_type')
tmp_type.drop(bind, checkfirst=False)
def downgrade() -> None:
bind = get_bind()
tmp_type.create(bind, checkfirst=False)
execute('ALTER TABLE repo ALTER COLUMN type TYPE _repo_type USING '
'type::text::_repo_type')
new_type.drop(bind, checkfirst=False)
old_type.create(bind, checkfirst=False)
execute('ALTER TABLE repo ALTER COLUMN type TYPE repo_type USING '
'type::text::repo_type')
tmp_type.drop(bind, checkfirst=False)
| gpl-3.0 | 894,444,310,800,265,600 | 30.735294 | 71 | 0.696015 | false |
lucius-feng/tg2 | tests/test_middlewares.py | 2 | 3091 | from webtest import TestApp
from tg.support.middlewares import StatusCodeRedirect
from tg.support.middlewares import DBSessionRemoverMiddleware
from tg.support.middlewares import MingSessionRemoverMiddleware
def FakeApp(environ, start_response):
if environ['PATH_INFO'].startswith('/error'):
start_response('403 Forbidden', [])
else:
start_response('200 Success', [])
if environ['PATH_INFO'] == '/error/document':
yield b'ERROR!!!'
else:
yield b'HI'
yield b'MORE'
class TestStatusCodeRedirectMiddleware(object):
def setup(self):
self.app = TestApp(StatusCodeRedirect(FakeApp, [403]))
def test_error_redirection(self):
r = self.app.get('/error_test', status=403)
assert 'ERROR!!!' in r, r
def test_success_passthrough(self):
r = self.app.get('/success_test')
assert 'HI' in r, r
class FakeDBSession(object):
removed = False
def remove(self):
self.removed = True
def close_all(self):
self.remove()
class FakeAppWithClose(object):
closed = False
step = 0
def __call__(self, environ, start_response):
start_response('200 Success', [])
if environ['PATH_INFO'] == '/crash':
raise Exception('crashed')
return self
def __iter__(self):
return self
def next(self):
self.step += 1
if self.step > 3:
raise StopIteration()
return str(self.step)
def close(self):
self.closed = True
def __repr__(self):
return '%s - %s' % (self.step, self.closed)
class TestDBSessionRemoverMiddleware(object):
def setup(self):
self.app_with_close = FakeAppWithClose()
self.session = FakeDBSession()
self.app = TestApp(DBSessionRemoverMiddleware(self.session, self.app_with_close))
def test_close_is_called(self):
r = self.app.get('/nonerror')
assert self.app_with_close.closed == True, self.app_with_close
def test_session_is_removed(self):
r = self.app.get('/nonerror')
assert self.session.removed == True, self.app_with_close
def test_session_is_removed_on_crash(self):
try:
r = self.app.get('/crash')
except:
pass
assert self.session.removed == True, self.app_with_close
class TestMingSessionRemoverMiddlewaree(object):
def setup(self):
self.app_with_close = FakeAppWithClose()
self.session = FakeDBSession()
self.app = TestApp(MingSessionRemoverMiddleware(self.session, self.app_with_close))
def test_close_is_called(self):
r = self.app.get('/nonerror')
assert self.app_with_close.closed == True, self.app_with_close
def test_session_is_removed(self):
r = self.app.get('/nonerror')
assert self.session.removed == True, self.app_with_close
def test_session_is_removed_on_crash(self):
try:
r = self.app.get('/crash')
except:
pass
assert self.session.removed == True, self.app_with_close
| mit | -5,575,966,756,473,490,000 | 25.646552 | 91 | 0.619864 | false |
cloud-fan/spark | python/pyspark/pandas/data_type_ops/base.py | 1 | 12265 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numbers
from abc import ABCMeta
from itertools import chain
from typing import Any, Optional, TYPE_CHECKING, Union
import numpy as np
import pandas as pd
from pandas.api.types import CategoricalDtype
from pyspark.sql import functions as F
from pyspark.sql.types import (
ArrayType,
BinaryType,
BooleanType,
DataType,
DateType,
FractionalType,
IntegralType,
MapType,
NullType,
NumericType,
StringType,
StructType,
TimestampType,
UserDefinedType,
)
from pyspark.pandas.typedef import Dtype, extension_dtypes
from pyspark.pandas.typedef.typehints import extension_object_dtypes_available
if extension_object_dtypes_available:
from pandas import BooleanDtype
if TYPE_CHECKING:
from pyspark.pandas.indexes import Index # noqa: F401 (SPARK-34943)
from pyspark.pandas.series import Series # noqa: F401 (SPARK-34943)
def is_valid_operand_for_numeric_arithmetic(operand: Any, *, allow_bool: bool = True) -> bool:
"""Check whether the `operand` is valid for arithmetic operations against numerics."""
from pyspark.pandas.base import IndexOpsMixin
if isinstance(operand, numbers.Number):
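        # bool is a subclass of int (and of numbers.Number), so it has to be
        # filtered out explicitly when allow_bool is False.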
return not isinstance(operand, bool) or allow_bool
elif isinstance(operand, IndexOpsMixin):
if isinstance(operand.dtype, CategoricalDtype):
return False
else:
return isinstance(operand.spark.data_type, NumericType) or (
allow_bool and isinstance(operand.spark.data_type, BooleanType)
)
else:
return False
def transform_boolean_operand_to_numeric(
operand: Any, spark_type: Optional[DataType] = None
) -> Any:
"""Transform boolean operand to numeric.
If the `operand` is:
- a boolean IndexOpsMixin, transform the `operand` to the `spark_type`.
- a boolean literal, transform to the int value.
Otherwise, return the operand as it is.
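    Illustrative examples (not part of the original docstring):
    transform_boolean_operand_to_numeric(True) returns 1, while a
    non-boolean operand such as 2.5 is returned unchanged.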
"""
from pyspark.pandas.base import IndexOpsMixin
if isinstance(operand, IndexOpsMixin) and isinstance(operand.spark.data_type, BooleanType):
assert spark_type, "spark_type must be provided if the operand is a boolean IndexOpsMixin"
return operand.spark.transform(lambda scol: scol.cast(spark_type))
elif isinstance(operand, bool):
return int(operand)
else:
return operand
def _as_categorical_type(
index_ops: Union["Series", "Index"], dtype: CategoricalDtype, spark_type: DataType
) -> Union["Index", "Series"]:
"""Cast `index_ops` to categorical dtype, given `dtype` and `spark_type`."""
assert isinstance(dtype, CategoricalDtype)
if dtype.categories is None:
codes, uniques = index_ops.factorize()
return codes._with_new_scol(
codes.spark.column,
field=codes._internal.data_fields[0].copy(dtype=CategoricalDtype(categories=uniques)),
)
else:
categories = dtype.categories
if len(categories) == 0:
scol = F.lit(-1)
else:
kvs = chain(
*[(F.lit(category), F.lit(code)) for code, category in enumerate(categories)]
)
map_scol = F.create_map(*kvs)
scol = F.coalesce(map_scol.getItem(index_ops.spark.column), F.lit(-1))
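            # Values absent from the declared categories map to code -1,
            # mirroring pandas' encoding for out-of-category values.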
return index_ops._with_new_scol(
scol.cast(spark_type).alias(index_ops._internal.data_fields[0].name),
field=index_ops._internal.data_fields[0].copy(
dtype=dtype, spark_type=spark_type, nullable=False
),
)
def _as_bool_type(
index_ops: Union["Series", "Index"], dtype: Union[str, type, Dtype]
) -> Union["Index", "Series"]:
"""Cast `index_ops` to BooleanType Spark type, given `dtype`."""
from pyspark.pandas.internal import InternalField
if isinstance(dtype, extension_dtypes):
scol = index_ops.spark.column.cast(BooleanType())
else:
scol = F.when(index_ops.spark.column.isNull(), F.lit(False)).otherwise(
index_ops.spark.column.cast(BooleanType())
)
return index_ops._with_new_scol(
scol.alias(index_ops._internal.data_spark_column_names[0]),
field=InternalField(dtype=dtype),
)
def _as_string_type(
index_ops: Union["Series", "Index"],
dtype: Union[str, type, Dtype],
*,
null_str: str = str(None)
) -> Union["Index", "Series"]:
"""Cast `index_ops` to StringType Spark type, given `dtype` and `null_str`,
    the string used to represent nulls in the resulting Spark column.
"""
from pyspark.pandas.internal import InternalField
if isinstance(dtype, extension_dtypes):
scol = index_ops.spark.column.cast(StringType())
else:
casted = index_ops.spark.column.cast(StringType())
scol = F.when(index_ops.spark.column.isNull(), null_str).otherwise(casted)
return index_ops._with_new_scol(
scol.alias(index_ops._internal.data_spark_column_names[0]),
field=InternalField(dtype=dtype),
)
def _as_other_type(
index_ops: Union["Series", "Index"], dtype: Union[str, type, Dtype], spark_type: DataType
) -> Union["Index", "Series"]:
"""Cast `index_ops` to a `dtype` (`spark_type`) that needs no pre-processing.
Destination types that need pre-processing: CategoricalDtype, BooleanType, and StringType.
"""
from pyspark.pandas.internal import InternalField
need_pre_process = (
isinstance(dtype, CategoricalDtype)
or isinstance(spark_type, BooleanType)
or isinstance(spark_type, StringType)
)
assert not need_pre_process, "Pre-processing is needed before the type casting."
scol = index_ops.spark.column.cast(spark_type)
return index_ops._with_new_scol(
scol.alias(index_ops._internal.data_spark_column_names[0]),
field=InternalField(dtype=dtype),
)
class DataTypeOps(object, metaclass=ABCMeta):
"""The base class for binary operations of pandas-on-Spark objects (of different data types)."""
def __new__(cls, dtype: Dtype, spark_type: DataType):
from pyspark.pandas.data_type_ops.binary_ops import BinaryOps
from pyspark.pandas.data_type_ops.boolean_ops import BooleanOps, BooleanExtensionOps
from pyspark.pandas.data_type_ops.categorical_ops import CategoricalOps
from pyspark.pandas.data_type_ops.complex_ops import ArrayOps, MapOps, StructOps
from pyspark.pandas.data_type_ops.date_ops import DateOps
from pyspark.pandas.data_type_ops.datetime_ops import DatetimeOps
from pyspark.pandas.data_type_ops.null_ops import NullOps
from pyspark.pandas.data_type_ops.num_ops import IntegralOps, FractionalOps
from pyspark.pandas.data_type_ops.string_ops import StringOps
from pyspark.pandas.data_type_ops.udt_ops import UDTOps
if isinstance(dtype, CategoricalDtype):
return object.__new__(CategoricalOps)
elif isinstance(spark_type, FractionalType):
return object.__new__(FractionalOps)
elif isinstance(spark_type, IntegralType):
return object.__new__(IntegralOps)
elif isinstance(spark_type, StringType):
return object.__new__(StringOps)
elif isinstance(spark_type, BooleanType):
if extension_object_dtypes_available and isinstance(dtype, BooleanDtype):
return object.__new__(BooleanExtensionOps)
else:
return object.__new__(BooleanOps)
elif isinstance(spark_type, TimestampType):
return object.__new__(DatetimeOps)
elif isinstance(spark_type, DateType):
return object.__new__(DateOps)
elif isinstance(spark_type, BinaryType):
return object.__new__(BinaryOps)
elif isinstance(spark_type, ArrayType):
return object.__new__(ArrayOps)
elif isinstance(spark_type, MapType):
return object.__new__(MapOps)
elif isinstance(spark_type, StructType):
return object.__new__(StructOps)
elif isinstance(spark_type, NullType):
return object.__new__(NullOps)
elif isinstance(spark_type, UserDefinedType):
return object.__new__(UDTOps)
else:
raise TypeError("Type %s was not understood." % dtype)
def __init__(self, dtype: Dtype, spark_type: DataType):
self.dtype = dtype
self.spark_type = spark_type
@property
def pretty_name(self) -> str:
raise NotImplementedError()
def add(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Addition can not be applied to %s." % self.pretty_name)
def sub(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Subtraction can not be applied to %s." % self.pretty_name)
def mul(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Multiplication can not be applied to %s." % self.pretty_name)
def truediv(self, left, right) -> Union["Series", "Index"]:
raise TypeError("True division can not be applied to %s." % self.pretty_name)
def floordiv(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Floor division can not be applied to %s." % self.pretty_name)
def mod(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Modulo can not be applied to %s." % self.pretty_name)
def pow(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Exponentiation can not be applied to %s." % self.pretty_name)
def radd(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Addition can not be applied to %s." % self.pretty_name)
def rsub(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Subtraction can not be applied to %s." % self.pretty_name)
def rmul(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Multiplication can not be applied to %s." % self.pretty_name)
def rtruediv(self, left, right) -> Union["Series", "Index"]:
raise TypeError("True division can not be applied to %s." % self.pretty_name)
def rfloordiv(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Floor division can not be applied to %s." % self.pretty_name)
def rmod(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Modulo can not be applied to %s." % self.pretty_name)
def rpow(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Exponentiation can not be applied to %s." % self.pretty_name)
def __and__(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Bitwise and can not be applied to %s." % self.pretty_name)
def __or__(self, left, right) -> Union["Series", "Index"]:
raise TypeError("Bitwise or can not be applied to %s." % self.pretty_name)
def rand(self, left, right) -> Union["Series", "Index"]:
return left.__and__(right)
def ror(self, left, right) -> Union["Series", "Index"]:
return left.__or__(right)
def restore(self, col: pd.Series) -> pd.Series:
"""Restore column when to_pandas."""
return col
def prepare(self, col: pd.Series) -> pd.Series:
"""Prepare column when from_pandas."""
return col.replace({np.nan: None})
def astype(
self, index_ops: Union["Index", "Series"], dtype: Union[str, type, Dtype]
) -> Union["Index", "Series"]:
raise TypeError("astype can not be applied to %s." % self.pretty_name)
| apache-2.0 | 8,822,871,887,076,614,000 | 39.081699 | 100 | 0.658296 | false |
rnirmal/savanna | savanna/tests/unit/db/models/test_clusters.py | 1 | 1607 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from savanna import context as ctx
from savanna.db import models as m
from savanna.tests.unit import base as models_test_base
class ClusterModelTest(models_test_base.DbTestCase):
def testCreateCluster(self):
session = ctx.current().session
with session.begin():
c = m.Cluster('c-1', 't-1', 'p-1', 'hv-1')
session.add(c)
with session.begin():
res = session.query(m.Cluster).filter_by().first()
self.assertIsValidModelObject(res)
def testCreateClusterFromDict(self):
c = m.Cluster('c-1', 't-1', 'p-1', 'hv-1')
c_dict = c.dict
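        # Strip the fields that are generated server-side so the remaining
        # dict can be fed back into the Cluster constructor below.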
del c_dict['created']
del c_dict['updated']
del c_dict['id']
del c_dict['node_groups']
del c_dict['status']
del c_dict['status_description']
del c_dict['info']
c_dict.update({
'tenant_id': 't-1'
})
self.assertEqual(self.get_clean_dict(c),
self.get_clean_dict(m.Cluster(**c_dict)))
| apache-2.0 | -3,469,991,657,179,127,300 | 32.479167 | 69 | 0.632234 | false |
MTK6580/walkie-talkie | ALPS.L1.MP6.V2_HEXING6580_WE_L/alps/build/tools/releasetools/img_from_target_files.py | 1 | 4926 | #!/usr/bin/env python
#
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Given a target-files zipfile, produces an image zipfile suitable for
use with 'fastboot update'.
Usage: img_from_target_files [flags] input_target_files output_image_zip
-z (--bootable_zip)
Include only the bootable images (eg 'boot' and 'recovery') in
the output.
"""
import sys
if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import errno
import os
import re
import shutil
import subprocess
import tempfile
import zipfile
# missing in Python 2.4 and before
if not hasattr(os, "SEEK_SET"):
os.SEEK_SET = 0
import common
OPTIONS = common.OPTIONS
def CopyInfo(output_zip):
"""Copy the android-info.txt file from the input to the output."""
output_zip.write(os.path.join(OPTIONS.input_tmp, "OTA", "android-info.txt"),
"android-info.txt")
def main(argv):
bootable_only = [False]
def option_handler(o, a):
if o in ("-z", "--bootable_zip"):
bootable_only[0] = True
else:
return False
return True
args = common.ParseOptions(argv, __doc__,
extra_opts="z",
extra_long_opts=["bootable_zip"],
extra_option_handler=option_handler)
bootable_only = bootable_only[0]
if len(args) != 2:
common.Usage(__doc__)
sys.exit(1)
OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
CopyInfo(output_zip)
try:
done = False
images_path = os.path.join(OPTIONS.input_tmp, "IMAGES")
if os.path.exists(images_path):
# If this is a new target-files, it already contains the images,
# and all we have to do is copy them to the output zip.
images = os.listdir(images_path)
if images:
for i in images:
if bootable_only and i not in ("boot.img", "recovery.img"): continue
if not i.endswith(".img"): continue
with open(os.path.join(images_path, i), "r") as f:
common.ZipWriteStr(output_zip, i, f.read())
done = True
if not done:
# We have an old target-files that doesn't already contain the
# images, so build them.
import add_img_to_target_files
OPTIONS.info_dict = common.LoadInfoDict(input_zip)
# If this image was originally labelled with SELinux contexts,
# make sure we also apply the labels in our new image. During
# building, the "file_contexts" is in the out/ directory tree,
# but for repacking from target-files.zip it's in the root
# directory of the ramdisk.
if "selinux_fc" in OPTIONS.info_dict:
OPTIONS.info_dict["selinux_fc"] = os.path.join(
OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts")
boot_image = common.GetBootableImage(
"boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
if boot_image:
boot_image.AddToZip(output_zip)
recovery_image = common.GetBootableImage(
"recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
if recovery_image:
recovery_image.AddToZip(output_zip)
def banner(s):
print "\n\n++++ " + s + " ++++\n\n"
if not bootable_only:
banner("AddSystem")
add_img_to_target_files.AddSystem(output_zip, prefix="")
try:
input_zip.getinfo("VENDOR/")
banner("AddVendor")
add_img_to_target_files.AddVendor(output_zip, prefix="")
except KeyError:
pass # no vendor partition for this device
try:
input_zip.getinfo("CUSTOM/")
banner("AddCustom")
add_img_to_target_files.AddCustom(output_zip, prefix="")
except KeyError:
pass # no custom partition for this device
banner("AddUserdata")
add_img_to_target_files.AddUserdata(output_zip, prefix="")
banner("AddCache")
add_img_to_target_files.AddCache(output_zip, prefix="")
finally:
print "cleaning up..."
output_zip.close()
shutil.rmtree(OPTIONS.input_tmp)
print "done."
if __name__ == '__main__':
try:
common.CloseInheritedPipes()
main(sys.argv[1:])
except common.ExternalError, e:
print
print " ERROR: %s" % (e,)
print
sys.exit(1)
| gpl-3.0 | 3,824,091,379,033,798,000 | 29.407407 | 78 | 0.634592 | false |
JKarathiya/Lean | Algorithm.Python/FutureOptionShortPutOTMExpiryRegressionAlgorithm.py | 1 | 5416 | # QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
from datetime import datetime, timedelta
import clr
from System import *
from System.Reflection import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from QuantConnect.Data import *
from QuantConnect.Data.Market import *
from QuantConnect.Orders import *
from QuantConnect.Securities import *
from QuantConnect.Securities.Future import *
from QuantConnect import Market
### <summary>
### This regression algorithm tests Out of The Money (OTM) future option expiry for short puts.
### We expect 2 orders from the algorithm, which are:
###
### * Initial entry, sell ES Put Option (expiring OTM)
### - Profit the option premium, since the option was not assigned.
###
### * Liquidation of ES put OTM contract on the last trade date
###
### Additionally, we test delistings for future options and assert that our
### portfolio holdings reflect the orders the algorithm has submitted.
### </summary>
class FutureOptionShortPutOTMExpiryRegressionAlgorithm(QCAlgorithm):
def Initialize(self):
self.SetStartDate(2020, 1, 5)
self.SetEndDate(2020, 6, 30)
self.es19m20 = self.AddFutureContract(
Symbol.CreateFuture(
Futures.Indices.SP500EMini,
Market.CME,
datetime(2020, 6, 19)),
Resolution.Minute).Symbol
        # Select a future option expiring OTM and add it to the algorithm.
self.esOption = self.AddFutureOptionContract(
list(
sorted(
[x for x in self.OptionChainProvider.GetOptionContractList(self.es19m20, self.Time) if x.ID.StrikePrice <= 3000.0 and x.ID.OptionRight == OptionRight.Put],
key=lambda x: x.ID.StrikePrice,
reverse=True
)
)[0], Resolution.Minute).Symbol
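        # The chain was filtered to puts struck at or below 3000 and sorted
        # by strike descending, so element [0] is the 3000 strike checked below.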
self.expectedContract = Symbol.CreateOption(self.es19m20, Market.CME, OptionStyle.American, OptionRight.Put, 3000.0, datetime(2020, 6, 19))
if self.esOption != self.expectedContract:
raise AssertionError(f"Contract {self.expectedContract} was not found in the chain");
self.Schedule.On(self.DateRules.Tomorrow, self.TimeRules.AfterMarketOpen(self.es19m20, 1), self.ScheduledMarketOrder)
def ScheduledMarketOrder(self):
self.MarketOrder(self.esOption, -1)
def OnData(self, data: Slice):
# Assert delistings, so that we can make sure that we receive the delisting warnings at
# the expected time. These assertions detect bug #4872
for delisting in data.Delistings.Values:
if delisting.Type == DelistingType.Warning:
if delisting.Time != datetime(2020, 6, 19):
raise AssertionError(f"Delisting warning issued at unexpected date: {delisting.Time}");
if delisting.Type == DelistingType.Delisted:
if delisting.Time != datetime(2020, 6, 20):
raise AssertionError(f"Delisting happened at unexpected date: {delisting.Time}");
def OnOrderEvent(self, orderEvent: OrderEvent):
if orderEvent.Status != OrderStatus.Filled:
# There's lots of noise with OnOrderEvent, but we're only interested in fills.
return
if not self.Securities.ContainsKey(orderEvent.Symbol):
raise AssertionError(f"Order event Symbol not found in Securities collection: {orderEvent.Symbol}")
security = self.Securities[orderEvent.Symbol]
if security.Symbol == self.es19m20:
raise AssertionError(f"Expected no order events for underlying Symbol {security.Symbol}")
if security.Symbol == self.expectedContract:
self.AssertFutureOptionContractOrder(orderEvent, security)
else:
raise AssertionError(f"Received order event for unknown Symbol: {orderEvent.Symbol}")
self.Log(f"{orderEvent}");
def AssertFutureOptionContractOrder(self, orderEvent: OrderEvent, optionContract: Security):
if orderEvent.Direction == OrderDirection.Sell and optionContract.Holdings.Quantity != -1:
raise AssertionError(f"No holdings were created for option contract {optionContract.Symbol}")
if orderEvent.Direction == OrderDirection.Buy and optionContract.Holdings.Quantity != 0:
raise AssertionError("Expected no options holdings after closing position")
if orderEvent.IsAssignment:
raise AssertionError(f"Assignment was not expected for {orderEvent.Symbol}")
def OnEndOfAlgorithm(self):
if self.Portfolio.Invested:
raise AssertionError(f"Expected no holdings at end of algorithm, but are invested in: {', '.join([str(i.ID) for i in self.Portfolio.Keys])}") | apache-2.0 | 1,143,300,330,185,993,500 | 45.299145 | 175 | 0.694978 | false |
Johnzero/erp | openerp/addons/clivia_analysis/report/analysis_report.py | 1 | 2399 | # -*- encoding: utf-8 -*-
import tools
from osv import fields, osv
class common_report(osv.osv):
_name = "clivia_analysis.production_report"
_description = "报表视图"
_auto = False
_rec_name = 'date'
_columns = {
'year': fields.char('年份', size=4, readonly=True),
'month': fields.selection([('01', '一月'), ('02', '二月'), ('03', '三月'), ('04', '四月'),
('05', '五月'), ('06', '六月'), ('07', '七月'), ('08', '八月'), ('09', '九月'), ('10', '十月'),
('11', '十一月'), ('12', '十二月')], '月份', readonly=True),
'date': fields.date('上报时间', required=True, readonly=True),
'product_id': fields.many2one('clivia_analysis.stocked_product', '产品', readonly=True),
'produced': fields.integer('生产', readonly=True),
'sent': fields.float('发出', readonly=True),
'sold': fields.integer('销售', readonly=True),
        'hefei_today_inventory': fields.integer('君子兰结存', readonly=True),
        'sanhe_last_inventory': fields.integer('三河实际库存', readonly=True),
}
_order = 'date desc'
def init(self, cr):
tools.drop_view_if_exists(cr, 'clivia_analysis_production_report')
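        # DISTINCT ON (product.id) together with ORDER BY product.id,
        # dr.date_created DESC keeps only the most recent reviewed report
        # line per product.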
cr.execute("""
CREATE OR REPLACE VIEW clivia_analysis_production_report AS
SELECT DISTINCT ON (product.id) product.id, product.id AS product_id,
mpl.production AS produced,
mpl.hefei_warning_level,
mpl.sanhe_warning_level,
drl.hefei_today_inventory AS hefei_today_inventory,
                    drl.sanhe_real_inventory AS sanhe_last_inventory,
dr.date_created date,
to_char(dr.date_created::timestamp with time zone, 'YYYY'::text) AS year,
to_char(dr.date_created::timestamp with time zone, 'MM'::text) AS month,
drl.sent,
drl.sold
FROM clivia_analysis_stocked_product product
JOIN clivia_analysis_daily_report_line drl ON product.id = drl.product_id
JOIN clivia_analysis_daily_report dr ON dr.id = drl.report_id
JOIN clivia_analysis_monthly_plan_line mpl ON mpl.product_id = product.id
WHERE dr.state::text = 'review'::text
ORDER BY product.id, dr.date_created DESC;
""")
| agpl-3.0 | -2,405,913,437,995,879,000 | 43.803922 | 96 | 0.570241 | false |
saun4app/python_lib_sphinx | tests/test_utils.py | 1 | 1148 | """
python_lib_sphinx tests.
"""
import unittest
from python_lib_sphinx.utils import Constants, orbital_speed, circumference, orbital_period
class TestUtils(unittest.TestCase):
"""
Test python_lib_sphinx's utils.
"""
def setUp(self):
pass
def test_orbital_speed(self):
"""
Calculate the orbital speed of an object.
"""
answer = orbital_speed(
Constants.Earth,
600000,
70
)
answer = round(answer, 3)
self.assertEqual(
answer,
2425.552
)
def test_circumference(self):
"""
2*pi*r
"""
answer = circumference(600000)
answer = round(answer, 3)
self.assertEqual(
answer,
3769911.184
)
def test_orbital_period(self):
"""
Calculate the orbital period of an object.
"""
answer = orbital_period(
Constants.Earth,
600000,
70
)
answer = round(answer, 3)
self.assertEqual(
answer,
1554.43
)
| mit | -412,183,571,264,602,000 | 19.5 | 91 | 0.498258 | false |
CN-UPB/OpenBarista | components/decaf-masta/decaf_masta/components/database/datacenter.py | 1 | 1976 | ##
# Copyright 2016 DECaF Project Group, University of Paderborn
# This file is part of the decaf orchestration framework
# All Rights Reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
##
__author__ = 'Kristian Hinnenthal'
__date__ = '$13-okt-2015 14:15:27$'
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship
from .mastadatabase import Base
from .keystone import Keystone
import json
class Datacenter(Base):
__tablename__ = 'datacenters'
datacenter_id = Column(Integer, primary_key=True,autoincrement=True)
datacenter_name = Column(String(250), nullable=False)
keystone_id = Column(Integer, ForeignKey('keystone_credentials.keystone_id'), nullable=False)
keystone_region = Column(String(250), nullable=False)
flavors = relationship('Flavor', backref='datacenters')
images = relationship('Image', backref='datacenters')
monitoring_alarms = relationship('MonitoringAlarm', backref='datacenters')
management_networks = relationship('ManagementNetwork', backref='datacenters')
public_networks = relationship('PublicNetwork', backref='datacenters')
vm_instances = relationship('VMInstance', backref='datacenters')
internal_edges = relationship('InternalEdge', backref='datacenters')
public_ports = relationship('PublicPort', backref='datacenters')
keypairs = relationship('KeyPair', backref='datacenter')
def to_json(self):
return json.dumps(self.to_dict())
def to_dict(self):
return_dict = {
"datacenter" : {
"datacenter_id": self.datacenter_id,
"datacenter_name": self.datacenter_name,
"keystone_id": self.keystone_id,
"keystone_region": self.keystone_region
}
}
return return_dict | mpl-2.0 | -113,731,610,094,640,820 | 41.06383 | 97 | 0.695344 | false |
google/fuzzbench | fuzzbench/test_e2e/test_e2e_run.py | 1 | 3103 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Checks the result of a test experiment run. Note that this is not a
standalone unit test module, but used as part of our end-to-end integration
test."""
import os
import pytest
import redis
import rq
from common import config_utils, yaml_utils
from experiment.build import docker_images
@pytest.fixture(scope='class')
def experiment_config():
"""Returns the default configuration for end-to-end testing."""
return config_utils.validate_and_expand(
yaml_utils.read('fuzzbench/test_e2e/end-to-end-test-config.yaml'))
@pytest.fixture(scope='class')
def redis_connection():
"""Returns the default redis server connection."""
return redis.Redis(host='queue-server')
# pylint: disable=no-self-use,redefined-outer-name
@pytest.mark.skipif('E2E_INTEGRATION_TEST' not in os.environ,
reason='Not running end-to-end test.')
@pytest.mark.usefixtures('redis_connection', 'experiment_config')
class TestEndToEndRunResults:
"""Checks the result of a test experiment run."""
def test_jobs_dependency(self, experiment_config, redis_connection):
"""Tests that jobs dependency preserves during working."""
all_images = docker_images.get_images_to_build(
experiment_config['fuzzers'], experiment_config['benchmarks'])
jobs = {
name: rq.job.Job.fetch(name, connection=redis_connection)
for name in all_images
}
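        # rq records started_at/ended_at on each job, so a dependency must
        # have finished before any job depending on it started.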
for name, image in all_images.items():
if 'depends_on' in image:
for dep in image['depends_on']:
assert jobs[dep].ended_at <= jobs[name].started_at
def test_all_jobs_finished_successfully(self, experiment_config,
redis_connection):
"""Tests all jobs finished successully."""
all_images = docker_images.get_images_to_build(
experiment_config['fuzzers'], experiment_config['benchmarks'])
jobs = rq.job.Job.fetch_many(all_images.keys(),
connection=redis_connection)
for job in jobs:
assert job.get_status() == 'finished'
def test_measurement_jobs_were_started_before_trial_jobs_finished(self):
"""Fake test to be implemented later."""
assert True
def test_db_contains_experiment_results(self):
"""Fake test to be implemented later."""
assert True
def test_experiment_report_is_generated(self):
"""Fake test to be implemented later."""
assert True
| apache-2.0 | 4,631,983,200,264,544,000 | 37.308642 | 76 | 0.66903 | false |
airodactyl/qutebrowser | tests/helpers/stubs.py | 1 | 16414 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2018 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=invalid-name,abstract-method
"""Fake objects/stubs."""
from unittest import mock
import contextlib
import shutil
import attr
from PyQt5.QtCore import pyqtSignal, QPoint, QProcess, QObject, QUrl
from PyQt5.QtGui import QIcon
from PyQt5.QtNetwork import (QNetworkRequest, QAbstractNetworkCache,
QNetworkCacheMetaData)
from PyQt5.QtWidgets import QCommonStyle, QLineEdit, QWidget, QTabBar
from qutebrowser.browser import browsertab, downloads
from qutebrowser.utils import usertypes
from qutebrowser.mainwindow import mainwindow
class FakeNetworkCache(QAbstractNetworkCache):
"""Fake cache with no data."""
def cacheSize(self):
return 0
def data(self, _url):
return None
def insert(self, _dev):
pass
def metaData(self, _url):
return QNetworkCacheMetaData()
def prepare(self, _metadata):
return None
def remove(self, _url):
return False
def updateMetaData(self, _url):
pass
class FakeKeyEvent:
"""Fake QKeyPressEvent stub."""
def __init__(self, key, modifiers=0, text=''):
self.key = mock.Mock(return_value=key)
self.text = mock.Mock(return_value=text)
self.modifiers = mock.Mock(return_value=modifiers)
class FakeWebFrame:
"""A stub for QWebFrame."""
def __init__(self, geometry=None, *, scroll=None, plaintext=None,
html=None, parent=None, zoom=1.0):
"""Constructor.
Args:
geometry: The geometry of the frame as QRect.
scroll: The scroll position as QPoint.
plaintext: Return value of toPlainText
html: Return value of tohtml.
zoom: The zoom factor.
parent: The parent frame.
"""
if scroll is None:
scroll = QPoint(0, 0)
self.geometry = mock.Mock(return_value=geometry)
self.scrollPosition = mock.Mock(return_value=scroll)
self.parentFrame = mock.Mock(return_value=parent)
self.toPlainText = mock.Mock(return_value=plaintext)
self.toHtml = mock.Mock(return_value=html)
self.zoomFactor = mock.Mock(return_value=zoom)
class FakeChildrenFrame:
"""A stub for QWebFrame to test get_child_frames."""
def __init__(self, children=None):
if children is None:
children = []
self.childFrames = mock.Mock(return_value=children)
class FakeQApplication:
"""Stub to insert as QApplication module."""
UNSET = object()
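    # Sentinel so an explicit instance=None can be told apart from "no
    # instance argument given".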
def __init__(self, style=None, all_widgets=None, active_window=None,
instance=UNSET):
if instance is self.UNSET:
self.instance = mock.Mock(return_value=self)
else:
self.instance = mock.Mock(return_value=instance)
self.style = mock.Mock(spec=QCommonStyle)
self.style().metaObject().className.return_value = style
self.allWidgets = lambda: all_widgets
self.activeWindow = lambda: active_window
class FakeNetworkReply:
"""QNetworkReply stub which provides a Content-Disposition header."""
KNOWN_HEADERS = {
QNetworkRequest.ContentTypeHeader: 'Content-Type',
}
def __init__(self, headers=None, url=None):
if url is None:
url = QUrl()
if headers is None:
self.headers = {}
else:
self.headers = headers
self.url = mock.Mock(return_value=url)
def hasRawHeader(self, name):
"""Check if the reply has a certain header.
Args:
name: The name of the header as ISO-8859-1 encoded bytes object.
Return:
True if the header is present, False if not.
"""
return name.decode('iso-8859-1') in self.headers
def rawHeader(self, name):
"""Get the raw header data of a header.
Args:
name: The name of the header as ISO-8859-1 encoded bytes object.
Return:
The header data, as ISO-8859-1 encoded bytes() object.
"""
name = name.decode('iso-8859-1')
return self.headers[name].encode('iso-8859-1')
def header(self, known_header):
"""Get a known header.
Args:
known_header: A QNetworkRequest::KnownHeaders member.
"""
key = self.KNOWN_HEADERS[known_header]
try:
return self.headers[key]
except KeyError:
return None
def setHeader(self, known_header, value):
"""Set a known header.
Args:
known_header: A QNetworkRequest::KnownHeaders member.
value: The value to set.
"""
key = self.KNOWN_HEADERS[known_header]
self.headers[key] = value
def fake_qprocess():
"""Factory for a QProcess mock which has the QProcess enum values."""
m = mock.Mock(spec=QProcess)
for name in ['NormalExit', 'CrashExit', 'FailedToStart', 'Crashed',
'Timedout', 'WriteError', 'ReadError', 'UnknownError']:
setattr(m, name, getattr(QProcess, name))
return m
class FakeWebTabScroller(browsertab.AbstractScroller):
"""Fake AbstractScroller to use in tests."""
def __init__(self, tab, pos_perc):
super().__init__(tab)
self._pos_perc = pos_perc
def pos_perc(self):
return self._pos_perc
class FakeWebTabHistory(browsertab.AbstractHistory):
"""Fake for Web{Kit,Engine}History."""
def __init__(self, tab, *, can_go_back, can_go_forward):
super().__init__(tab)
self._can_go_back = can_go_back
self._can_go_forward = can_go_forward
def can_go_back(self):
assert self._can_go_back is not None
return self._can_go_back
def can_go_forward(self):
assert self._can_go_forward is not None
return self._can_go_forward
class FakeWebTabAudio(browsertab.AbstractAudio):
def is_muted(self):
return False
def is_recently_audible(self):
return False
class FakeWebTab(browsertab.AbstractTab):
"""Fake AbstractTab to use in tests."""
def __init__(self, url=QUrl(), title='', tab_id=0, *,
scroll_pos_perc=(0, 0),
load_status=usertypes.LoadStatus.success,
progress=0, can_go_back=None, can_go_forward=None):
super().__init__(win_id=0, mode_manager=None, private=False)
self._load_status = load_status
self._title = title
self._url = url
self._progress = progress
self.history = FakeWebTabHistory(self, can_go_back=can_go_back,
can_go_forward=can_go_forward)
self.scroller = FakeWebTabScroller(self, scroll_pos_perc)
self.audio = FakeWebTabAudio()
wrapped = QWidget()
self._layout.wrap(self, wrapped)
def url(self, requested=False):
assert not requested
return self._url
def title(self):
return self._title
def progress(self):
return self._progress
def load_status(self):
return self._load_status
def shutdown(self):
pass
def icon(self):
return QIcon()
class FakeSignal:
"""Fake pyqtSignal stub which does nothing.
Attributes:
signal: The name of the signal, like pyqtSignal.
_func: The function to be invoked when the signal gets called.
"""
def __init__(self, name='fake', func=None):
self.signal = '2{}(int, int)'.format(name)
self._func = func
def __call__(self):
if self._func is None:
raise TypeError("'FakeSignal' object is not callable")
else:
return self._func()
def connect(self, slot):
"""Connect the signal to a slot.
Currently does nothing, but could be improved to do some sanity
checking on the slot.
"""
pass
def disconnect(self, slot=None):
"""Disconnect the signal from a slot.
Currently does nothing, but could be improved to do some sanity
checking on the slot and see if it actually got connected.
"""
pass
def emit(self, *args):
"""Emit the signal.
Currently does nothing, but could be improved to do type checking based
on a signature given to __init__.
"""
pass
@attr.s
class FakeCmdUtils:
"""Stub for cmdutils which provides a cmd_dict."""
cmd_dict = attr.ib()
@attr.s(frozen=True)
class FakeCommand:
"""A simple command stub which has a description."""
name = attr.ib('')
desc = attr.ib('')
hide = attr.ib(False)
debug = attr.ib(False)
deprecated = attr.ib(False)
completion = attr.ib(None)
maxsplit = attr.ib(None)
takes_count = attr.ib(lambda: False)
modes = attr.ib((usertypes.KeyMode.normal, ))
class FakeTimer(QObject):
"""Stub for a usertypes.Timer."""
timeout_signal = pyqtSignal()
def __init__(self, parent=None, name=None):
super().__init__(parent)
self.timeout = mock.Mock(spec=['connect', 'disconnect', 'emit'])
self.timeout.connect.side_effect = self.timeout_signal.connect
self.timeout.disconnect.side_effect = self.timeout_signal.disconnect
self.timeout.emit.side_effect = self._emit
self._started = False
self._singleshot = False
self._interval = 0
self._name = name
def __repr__(self):
return '<{} name={!r}>'.format(self.__class__.__name__, self._name)
def _emit(self):
"""Called when the timeout "signal" gets emitted."""
if self._singleshot:
self._started = False
self.timeout_signal.emit()
def setInterval(self, interval):
self._interval = interval
def interval(self):
return self._interval
def setSingleShot(self, singleshot):
self._singleshot = singleshot
def isSingleShot(self):
return self._singleshot
def start(self, interval=None):
if interval:
self._interval = interval
self._started = True
def stop(self):
self._started = False
def isActive(self):
return self._started
class InstaTimer(QObject):
"""Stub for a QTimer that fires instantly on start().
Useful to test a time-based event without inserting an artificial delay.
"""
timeout = pyqtSignal()
def start(self, interval=None):
self.timeout.emit()
def setSingleShot(self, yes):
pass
def setInterval(self, interval):
pass
@staticmethod
def singleShot(_interval, fun):
fun()
class StatusBarCommandStub(QLineEdit):
"""Stub for the statusbar command prompt."""
got_cmd = pyqtSignal(str)
clear_completion_selection = pyqtSignal()
hide_completion = pyqtSignal()
update_completion = pyqtSignal()
show_cmd = pyqtSignal()
hide_cmd = pyqtSignal()
def prefix(self):
return self.text()[0]
class UrlMarkManagerStub(QObject):
"""Stub for the quickmark-manager or bookmark-manager object."""
added = pyqtSignal(str, str)
removed = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent)
self.marks = {}
def delete(self, key):
del self.marks[key]
self.removed.emit(key)
class BookmarkManagerStub(UrlMarkManagerStub):
"""Stub for the bookmark-manager object."""
pass
class QuickmarkManagerStub(UrlMarkManagerStub):
"""Stub for the quickmark-manager object."""
def quickmark_del(self, key):
self.delete(key)
class HostBlockerStub:
"""Stub for the host-blocker object."""
def __init__(self):
self.blocked_hosts = set()
class SessionManagerStub:
"""Stub for the session-manager object."""
def __init__(self):
self.sessions = []
def list_sessions(self):
return self.sessions
def save_autosave(self):
pass
class TabbedBrowserStub(QObject):
"""Stub for the tabbed-browser object."""
def __init__(self, parent=None):
super().__init__(parent)
self.widget = TabWidgetStub()
self.shutting_down = False
self.opened_url = None
def on_tab_close_requested(self, idx):
del self.widget.tabs[idx]
def widgets(self):
return self.widget.tabs
def tabopen(self, url):
self.opened_url = url
def openurl(self, url, *, newtab):
self.opened_url = url
class TabWidgetStub(QObject):
"""Stub for the tab-widget object."""
new_tab = pyqtSignal(browsertab.AbstractTab, int)
def __init__(self, parent=None):
super().__init__(parent)
self.tabs = []
self._qtabbar = QTabBar()
self.index_of = None
self.current_index = None
def count(self):
return len(self.tabs)
def widget(self, i):
return self.tabs[i]
def page_title(self, i):
return self.tabs[i].title()
def tabBar(self):
return self._qtabbar
def indexOf(self, _tab):
if self.index_of is None:
raise ValueError("indexOf got called with index_of None!")
elif self.index_of is RuntimeError:
raise RuntimeError
else:
return self.index_of
def currentIndex(self):
if self.current_index is None:
raise ValueError("currentIndex got called with current_index "
"None!")
return self.current_index
def currentWidget(self):
idx = self.currentIndex()
if idx == -1:
return None
return self.tabs[idx - 1]
class ApplicationStub(QObject):
"""Stub to insert as the app object in objreg."""
new_window = pyqtSignal(mainwindow.MainWindow)
class HTTPPostStub(QObject):
"""A stub class for HTTPClient.
Attributes:
url: the last url send by post()
data: the last data send by post()
"""
success = pyqtSignal(str)
error = pyqtSignal(str)
def __init__(self, parent=None):
super().__init__(parent)
self.url = None
self.data = None
def post(self, url, data=None):
self.url = url
self.data = data
class FakeDownloadItem(QObject):
"""Mock browser.downloads.DownloadItem."""
finished = pyqtSignal()
def __init__(self, fileobj, name, parent=None):
super().__init__(parent)
self.fileobj = fileobj
self.name = name
self.successful = False
class FakeDownloadManager:
"""Mock browser.downloads.DownloadManager."""
def __init__(self, tmpdir):
self._tmpdir = tmpdir
self.downloads = []
@contextlib.contextmanager
def _open_fileobj(self, target):
"""Ensure a DownloadTarget's fileobj attribute is available."""
if isinstance(target, downloads.FileDownloadTarget):
target.fileobj = open(target.filename, 'wb')
try:
yield target.fileobj
finally:
target.fileobj.close()
else:
yield target.fileobj
def get(self, url, target, **kwargs):
"""Return a FakeDownloadItem instance with a fileobj.
The content is copied from the file the given url links to.
"""
with self._open_fileobj(target):
download_item = FakeDownloadItem(target.fileobj, name=url.path())
with (self._tmpdir / url.path()).open('rb') as fake_url_file:
shutil.copyfileobj(fake_url_file, download_item.fileobj)
self.downloads.append(download_item)
return download_item
| gpl-3.0 | 5,947,849,121,296,085,000 | 24.88959 | 79 | 0.61277 | false |
googleapis/python-essential-contacts | google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py | 1 | 36542 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.essential_contacts_v1.services.essential_contacts_service import (
pagers,
)
from google.cloud.essential_contacts_v1.types import enums
from google.cloud.essential_contacts_v1.types import service
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import EssentialContactsServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import EssentialContactsServiceGrpcTransport
from .transports.grpc_asyncio import EssentialContactsServiceGrpcAsyncIOTransport
class EssentialContactsServiceClientMeta(type):
"""Metaclass for the EssentialContactsService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[EssentialContactsServiceTransport]]
_transport_registry["grpc"] = EssentialContactsServiceGrpcTransport
_transport_registry["grpc_asyncio"] = EssentialContactsServiceGrpcAsyncIOTransport
def get_transport_class(
cls, label: str = None,
) -> Type[EssentialContactsServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class EssentialContactsServiceClient(metaclass=EssentialContactsServiceClientMeta):
"""Manages contacts for important Google Cloud notifications."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "essentialcontacts.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EssentialContactsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EssentialContactsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> EssentialContactsServiceTransport:
"""Returns the transport used by the client instance.
Returns:
EssentialContactsServiceTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def contact_path(project: str, contact: str,) -> str:
"""Returns a fully-qualified contact string."""
return "projects/{project}/contacts/{contact}".format(
project=project, contact=contact,
)
@staticmethod
def parse_contact_path(path: str) -> Dict[str, str]:
"""Parses a contact path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/contacts/(?P<contact>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, EssentialContactsServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the essential contacts service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, EssentialContactsServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
if is_mtls:
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, EssentialContactsServiceTransport):
            # transport is an EssentialContactsServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
def create_contact(
self,
request: service.CreateContactRequest = None,
*,
parent: str = None,
contact: service.Contact = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> service.Contact:
r"""Adds a new contact for a resource.
Args:
request (google.cloud.essential_contacts_v1.types.CreateContactRequest):
The request object. Request message for the
CreateContact method.
parent (str):
Required. The resource to save this contact for. Format:
organizations/{organization_id}, folders/{folder_id} or
projects/{project_id}
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
contact (google.cloud.essential_contacts_v1.types.Contact):
Required. The contact to create. Must
specify an email address and language
tag.
This corresponds to the ``contact`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.essential_contacts_v1.types.Contact:
A contact that will receive
notifications from Google Cloud.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, contact])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a service.CreateContactRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.CreateContactRequest):
request = service.CreateContactRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if contact is not None:
request.contact = contact
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_contact]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def update_contact(
self,
request: service.UpdateContactRequest = None,
*,
contact: service.Contact = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> service.Contact:
r"""Updates a contact.
Note: A contact's email address cannot be changed.
Args:
request (google.cloud.essential_contacts_v1.types.UpdateContactRequest):
The request object. Request message for the
UpdateContact method.
contact (google.cloud.essential_contacts_v1.types.Contact):
Required. The contact resource to
replace the existing saved contact.
Note: the email address of the contact
cannot be modified.
This corresponds to the ``contact`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. The update mask applied to the resource. For
the ``FieldMask`` definition, see
https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.essential_contacts_v1.types.Contact:
A contact that will receive
notifications from Google Cloud.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([contact, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a service.UpdateContactRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.UpdateContactRequest):
request = service.UpdateContactRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if contact is not None:
request.contact = contact
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.update_contact]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("contact.name", request.contact.name),)
),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def list_contacts(
self,
request: service.ListContactsRequest = None,
*,
parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListContactsPager:
r"""Lists the contacts that have been set on a resource.
Args:
request (google.cloud.essential_contacts_v1.types.ListContactsRequest):
The request object. Request message for the ListContacts
method.
parent (str):
Required. The parent resource name. Format:
organizations/{organization_id}, folders/{folder_id} or
projects/{project_id}
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.essential_contacts_v1.services.essential_contacts_service.pagers.ListContactsPager:
Response message for the ListContacts
method.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a service.ListContactsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.ListContactsRequest):
request = service.ListContactsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_contacts]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListContactsPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
def get_contact(
self,
request: service.GetContactRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> service.Contact:
r"""Gets a single contact.
Args:
request (google.cloud.essential_contacts_v1.types.GetContactRequest):
The request object. Request message for the GetContact
method.
name (str):
Required. The name of the contact to retrieve. Format:
organizations/{organization_id}/contacts/{contact_id},
folders/{folder_id}/contacts/{contact_id} or
projects/{project_id}/contacts/{contact_id}
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.essential_contacts_v1.types.Contact:
A contact that will receive
notifications from Google Cloud.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a service.GetContactRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.GetContactRequest):
request = service.GetContactRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_contact]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def delete_contact(
self,
request: service.DeleteContactRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes a contact.
Args:
request (google.cloud.essential_contacts_v1.types.DeleteContactRequest):
The request object. Request message for the
DeleteContact method.
name (str):
Required. The name of the contact to delete. Format:
organizations/{organization_id}/contacts/{contact_id},
folders/{folder_id}/contacts/{contact_id} or
projects/{project_id}/contacts/{contact_id}
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a service.DeleteContactRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.DeleteContactRequest):
request = service.DeleteContactRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_contact]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
def compute_contacts(
self,
request: service.ComputeContactsRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ComputeContactsPager:
r"""Lists all contacts for the resource that are
subscribed to the specified notification categories,
including contacts inherited from any parent resources.
Args:
request (google.cloud.essential_contacts_v1.types.ComputeContactsRequest):
The request object. Request message for the
ComputeContacts method.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.essential_contacts_v1.services.essential_contacts_service.pagers.ComputeContactsPager:
Response message for the
ComputeContacts method.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.ComputeContactsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.ComputeContactsRequest):
request = service.ComputeContactsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.compute_contacts]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ComputeContactsPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
def send_test_message(
self,
request: service.SendTestMessageRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Allows a contact admin to send a test message to
contact to verify that it has been configured correctly.
Args:
request (google.cloud.essential_contacts_v1.types.SendTestMessageRequest):
The request object. Request message for the
SendTestMessage method.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.SendTestMessageRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.SendTestMessageRequest):
request = service.SendTestMessageRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.send_test_message]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
)
# Send the request.
rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-essential-contacts",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("EssentialContactsServiceClient",)
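# Hedged usage sketch (not part of the generated surface): constructing the
# client and creating a contact with the flattened parameters shown above.
# The credentials path, project id, and contact field values below are
# hypothetical placeholders.
def _example_create_contact():
    client = EssentialContactsServiceClient.from_service_account_file(
        "service-account.json"  # hypothetical credentials file
    )
    contact = service.Contact(
        email="[email protected]",  # hypothetical contact email
        language_tag="en-US",
    )
    return client.create_contact(
        parent="projects/my-project",  # hypothetical project resource
        contact=contact,
    )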
| apache-2.0 | 6,529,256,094,567,324,000 | 41.540163 | 111 | 0.617946 | false |
SoftwareHeritage/swh-web-ui | swh/web/admin/adminurls.py | 1 | 1231 | # Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.common.urlsindex import UrlsIndex
class AdminUrls(UrlsIndex):
"""
Class to manage swh-web admin urls.
"""
scope = 'admin'
class admin_route(object): # noqa: N801
"""
Decorator to ease the registration of a swh-web admin endpoint
Args:
url_patterns: list of url patterns used by Django to identify the admin routes
view_name: the name of the Django view associated to the routes used to
reverse the url
""" # noqa
def __init__(self, *url_patterns, view_name=None):
super().__init__()
self.url_patterns = []
for url_pattern in url_patterns:
self.url_patterns.append('^' + url_pattern + '$')
self.view_name = view_name
def __call__(self, f):
# register the route and its view in the browse endpoints index
for url_pattern in self.url_patterns:
AdminUrls.add_url_pattern(url_pattern, f, self.view_name)
return f
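# Hedged usage sketch: registering a hypothetical admin view through the
# decorator above. The url pattern, view name and view body are
# illustrative assumptions, not actual swh-web routes.
@admin_route(r'example/', view_name='admin-example')
def _example_view(request):
    # A real view would render a template or return an HttpResponse;
    # this stub only shows how a route gets registered.
    return None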
| agpl-3.0 | -2,279,507,196,082,922,000 | 31.394737 | 86 | 0.65394 | false |
Dev-Cloud-Platform/Dev-Cloud | dev_cloud/cc1/src/wi/views/user/user.py | 1 | 7236 | # -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.wi.views.user.user
@author Piotr Wójcik
@date 31.01.2014
"""
from django.contrib import messages
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_protect
from wi.commontags.templatetags.templatetags import filesizeformatmb
from wi.forms.user import CMAuthenticationForm, HelpForm, PasswordChangeForm, \
AccountDataEdit
from wi.utils import get_dict_from_list, messages_ajax
from wi.utils.decorators import django_view, user_permission
from wi.utils.exceptions import RestErrorException
from wi.utils.messages_ajax import ajax_request
from wi.utils.messages_codes import get_message
from wi.utils.states import message_levels_reversed
from wi.utils.views import prep_data
@django_view
@user_permission
def change_cm(request, cm_id, success_url='mai_main'):
"""
    View for changing the Cluster Manager (CM) in use.
"""
request.session['user'].cm_id = int(cm_id)
request.session.modified = True
messages.success(request, _('Cluster Manager changed.'))
return redirect(request.META['HTTP_REFERER'] or success_url)
@django_view
@ajax_request
@user_permission
def get_messages(request):
"""
Ajax view fetching user messages.
"""
if request.method == 'POST':
response = prep_data('user/message/get_list/', request.session)
for item in response:
item['text'] = get_message(item['code'], item['params'])
item['level'] = message_levels_reversed[item['level']]
return messages_ajax.success(response)
@django_view
@ajax_request
@user_permission
def acc_ajax_get_user_data(request):
"""
Ajax view. Returns user account data.
"""
if request.method == 'GET':
rest_data = prep_data({'user': 'user/user/get_my_data/',
'cms': 'guest/cluster/list_names/'
}, request.session)
user_data = rest_data['user']
users_cm = get_dict_from_list(rest_data['cms'], user_data['default_cluster_id'], key='cluster_id')
if users_cm is None:
raise Exception('User\'s default_cluster_id=%d is not a valid CM id.' % user_data['default_cluster_id'])
user_data['default_cluster_id'] = users_cm['name']
return messages_ajax.success(user_data)
@django_view
@ajax_request
@user_permission
@csrf_protect
def acc_ajax_account_data_edit(request, template_name='generic/form.html', form_class=AccountDataEdit):
"""
Ajax view for user account data editing.
"""
rest_data = prep_data({'cms': 'guest/cluster/list_names/'}, request.session)
if request.method == 'POST':
form = form_class(data=request.POST, rest_data=rest_data)
if form.is_valid():
prep_data({'user': ('user/user/edit/', form.cleaned_data)}, request.session)
request.session['user'].email = form.cleaned_data['email']
request.session['user'].default_cluster_id = form.cleaned_data['default_cluster_id']
request.session.modified = True
return messages_ajax.success(_('Account data edited.'))
else:
form = form_class(data={'email': request.session['user'].email,
'default_cluster_id': request.session['user'].default_cluster_id}, rest_data=rest_data)
return messages_ajax.success(render_to_string(template_name, {'form': form,
'text': '',
'confirmation': _('Save')},
context_instance=RequestContext(request)),
status=1)
@django_view
@ajax_request
@user_permission
def acc_ajax_get_user_quotas(request):
"""
Ajax view for fetching users' quotas.
"""
if request.method == 'GET':
quota = prep_data('user/user/check_quota/', request.session)
quota['memory'] = filesizeformatmb(quota['memory'])
quota['used_memory'] = filesizeformatmb(quota['used_memory'])
quota['storage'] = filesizeformatmb(quota['storage'])
quota['used_storage'] = filesizeformatmb(quota['used_storage'])
return messages_ajax.success(quota)
@django_view
@csrf_protect
@user_permission
def acc_password_change(request, template_name='account/password_change_form.html',
password_change_form=PasswordChangeForm):
"""
View for password changing (for logged users).
"""
if request.method == "POST":
form = password_change_form(user=request.session['user'], data=request.POST)
if form.is_valid():
new_password = form.cleaned_data['new_password1']
try:
prep_data(('user/user/set_password/', {'new_password': new_password}), request.session)
except RestErrorException as ex:
messages.error(request, ex.value)
request.session['user'].set_password(new_password)
request.session.modified = True
return redirect('acc_password_change_done')
else:
form = password_change_form(user=request.session['user'])
return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request))
@django_view
@user_permission
def hlp_form(request, form_class=HelpForm, template_name='help/form.html'):
"""
View handling help form.
"""
if request.method == 'POST':
form = form_class(data=request.POST)
if form.is_valid():
topic, issue, email = form.cleaned_data['topic'], form.cleaned_data['issue'], form.cleaned_data['email']
name = str(request.session.get('user', form.cleaned_data['firstlast']))
topic += _(' from user:') + name + ', email: ' + email
dictionary = {'issue': issue,
'topic': topic}
try:
prep_data(('user/user/send_issue/', dictionary), request.session)
except Exception:
return redirect('hlp_issue_error')
return redirect('hlp_issue_sent')
else:
form = form_class()
rest_data = prep_data('guest/user/is_mailer_active/', request.session)
return render_to_response(template_name, dict({'form': form}.items() + rest_data.items()),
context_instance=RequestContext(request))
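# Hedged wiring sketch (illustrative only): one way these views could be
# routed. The patterns and url names below are assumptions, not taken from
# the project's actual urls.py.
from django.conf.urls import url as _url  # noqa: E402
example_urlpatterns = [
    _url(r'^account/cm/(?P<cm_id>\d+)/$', change_cm, name='acc_change_cm'),
    _url(r'^account/password/$', acc_password_change,
         name='acc_password_change'),
    _url(r'^help/$', hlp_form, name='hlp_form'),
]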
| apache-2.0 | 6,534,912,669,814,899,000 | 35.913265 | 119 | 0.634969 | false |
nkmk/python-snippets | notebook/numpy_sin_cos_tan.py | 1 | 2677 | import numpy as np
print(np.__version__)
# 1.19.0
print(np.pi)
# 3.141592653589793
print(np.radians(180))
# 3.141592653589793
print(type(np.radians(180)))
# <class 'numpy.float64'>
a = np.array([0, 90, 180])
print(type(a))
# <class 'numpy.ndarray'>
print(np.radians(a))
# [0. 1.57079633 3.14159265]
print(type(np.radians(a)))
# <class 'numpy.ndarray'>
l = [0, 90, 180]
print(type(l))
# <class 'list'>
print(np.radians(l))
# [0. 1.57079633 3.14159265]
print(type(np.radians(l)))
# <class 'numpy.ndarray'>
print(np.radians(a))
# [0. 1.57079633 3.14159265]
print(np.round(np.radians(a)))
# [0. 2. 3.]
print(np.round(np.radians(a), 2))
# [0. 1.57 3.14]
print(np.sin(np.radians(30)))
# 0.49999999999999994
print(np.round(np.sin(np.radians(30)), 1))
# 0.5
print(np.sin(np.radians([0, 30, 90])))
# [0. 0.5 1. ]
print(np.sin(np.radians([0, 30, 90]))[1])
# 0.49999999999999994
np.set_printoptions(precision=20)
print(np.sin(np.radians([0, 30, 90])))
# [0. 0.49999999999999994 1. ]
np.set_printoptions(precision=8) # reset to default
print(np.radians([0, 90, 180]))
# [0. 1.57079633 3.14159265]
print(np.deg2rad([0, 90, 180]))
# [0. 1.57079633 3.14159265]
print(np.degrees([0, np.pi / 2, np.pi]))
# [ 0. 90. 180.]
print(np.rad2deg([0, np.pi / 2, np.pi]))
# [ 0. 90. 180.]
print(np.sin(np.radians([0, 30, 90])))
# [0. 0.5 1. ]
print(np.degrees(np.arcsin([0, 0.5, 1])))
# [ 0. 30. 90.]
print(np.cos(np.radians([0, 60, 90])))
# [1.000000e+00 5.000000e-01 6.123234e-17]
print(np.round(np.cos(np.radians([0, 60, 90])), 1))
# [1. 0.5 0. ]
print(np.degrees(np.arccos([0, 0.5, 1])))
# [90. 60. 0.]
print(np.tan(np.radians([0, 45, 90])))
# [0.00000000e+00 1.00000000e+00 1.63312394e+16]
print(np.degrees(np.arctan([0, 1, np.inf])))
# [ 0. 45. 90.]
print(np.degrees(np.arctan([-np.inf, -1, 0, 1, np.inf])))
# [-90. -45. 0. 45. 90.]
print(np.degrees(np.arctan2(-1, 1)))
# -45.0
print(np.degrees(np.arctan2(1, -1)))
# 135.0
print(np.degrees(np.arctan2([0, 1, 1, 1, 0],
[0, 1, 0, -1, -1])))
# [ 0. 45. 90. 135. 180.]
print(np.degrees(np.arctan2([0, -1, -1, -1, 0],
[0, 1, 0, -1, -1])))
# [ 0. -45. -90. -135. 180.]
print(np.degrees(np.arctan2(0, -1)))
# 180.0
print(np.degrees(np.arctan2(-0.0, -1.0)))
# -180.0
print(np.degrees(np.arctan2(-0, -1)))
# 180.0
print(np.degrees(np.arctan2([0.0, -0.0, 0.0, -0.0],
[0.0, 0.0, -0.0, -0.0])))
# [ 0. -0. 180. -180.]
print(np.sin(-0.0))
# -0.0
print(np.arcsin(-0.0))
# -0.0
print(np.tan(-0.0))
# -0.0
print(np.arctan(-0.0))
# -0.0
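# Hedged extra example: mapping arctan2 output from (-180, 180] to the
# [0, 360) range with np.mod; the input vectors are arbitrary assumptions.
deg = np.degrees(np.arctan2([1, -1], [-1, -1]))
print(np.mod(deg, 360))
# [135. 225.]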
| mit | -1,797,956,438,561,032,200 | 18.82963 | 63 | 0.552111 | false |
mahim97/zulip | zerver/webhooks/github_webhook/tests.py | 5 | 21928 | from typing import Dict, Optional, Text
import ujson
from mock import MagicMock, patch
from zerver.lib.test_classes import WebhookTestCase
from zerver.lib.webhooks.git import COMMITS_LIMIT
from zerver.models import Message
class GithubWebhookTest(WebhookTestCase):
STREAM_NAME = 'github'
URL_TEMPLATE = "/api/v1/external/github?stream={stream}&api_key={api_key}"
FIXTURE_DIR_NAME = 'github_webhook'
EXPECTED_SUBJECT_REPO_EVENTS = u"public-repo"
EXPECTED_SUBJECT_ISSUE_EVENTS = u"public-repo / Issue #2 Spelling error in the README file"
EXPECTED_SUBJECT_PR_EVENTS = u"public-repo / PR #1 Update the README with new information"
EXPECTED_SUBJECT_DEPLOYMENT_EVENTS = u"public-repo / Deployment on production"
EXPECTED_SUBJECT_ORGANIZATION_EVENTS = u"baxterandthehackers organization"
EXPECTED_SUBJECT_BRANCH_EVENTS = u"public-repo / changes"
EXPECTED_SUBJECT_WIKI_EVENTS = u"public-repo / Wiki Pages"
def test_ping_event(self) -> None:
expected_message = u"GitHub webhook has been successfully configured by TomaszKolek"
self.send_and_test_stream_message('ping', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='ping')
def test_ping_organization_event(self) -> None:
expected_message = u"GitHub webhook has been successfully configured by eeshangarg"
self.send_and_test_stream_message('ping_organization', 'zulip-test-org', expected_message, HTTP_X_GITHUB_EVENT='ping')
def test_push_delete_branch(self) -> None:
expected_message = u"eeshangarg [deleted](https://github.com/eeshangarg/public-repo/compare/2e8cf535fb38...000000000000) the branch feature."
self.send_and_test_stream_message('push_delete_branch', u"public-repo / feature", expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_local_branch_without_commits(self) -> None:
expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/feature) the branch feature."
self.send_and_test_stream_message('push_local_branch_without_commits', u"public-repo / feature", expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_1_commit(self) -> None:
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))"
self.send_and_test_stream_message('push_1_commit', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_1_commit_without_username(self) -> None:
expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/0383613da871...2e8cf535fb38) 1 commit to branch changes. Commits by John Snow (1).\n\n* Update the README ([2e8cf53](https://github.com/eeshangarg/public-repo/commit/2e8cf535fb38a3dab2476cdf856efda904ad4c94))"
self.send_and_test_stream_message('push_1_commit_without_username', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_1_commit_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url('master,changes')
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))"
self.send_and_test_stream_message('push_1_commit', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters(self) -> None:
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5)
self.send_and_test_stream_message('push_multiple_committers', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters_with_others(self) -> None:
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9)
self.send_and_test_stream_message('push_multiple_committers_with_others', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url('master,changes')
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5)
self.send_and_test_stream_message('push_multiple_committers', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_multiple_comitters_with_others_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url('master,changes')
commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n'
expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9)
self.send_and_test_stream_message('push_multiple_committers_with_others', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_50_commits(self) -> None:
commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n"
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format(
commit_info * COMMITS_LIMIT
)
self.send_and_test_stream_message('push_50_commits', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_push_50_commits_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,changes')
commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n"
expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format(
commit_info * COMMITS_LIMIT
)
self.send_and_test_stream_message('push_50_commits', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_commit_comment_msg(self) -> None:
expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b#commitcomment-11056394) on [9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b)\n~~~ quote\nThis is a really good change! :+1:\n~~~"
self.send_and_test_stream_message('commit_comment', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='commit_comment')
def test_create_msg(self) -> None:
expected_message = u"baxterthehacker created tag 0.0.1"
self.send_and_test_stream_message('create', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='create')
def test_delete_msg(self) -> None:
expected_message = u"baxterthehacker deleted tag simple-tag"
self.send_and_test_stream_message('delete', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='delete')
def test_deployment_msg(self) -> None:
expected_message = u"baxterthehacker created new deployment"
self.send_and_test_stream_message('deployment', self.EXPECTED_SUBJECT_DEPLOYMENT_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='deployment')
def test_deployment_status_msg(self) -> None:
expected_message = u"Deployment changed status to success"
self.send_and_test_stream_message('deployment_status', self.EXPECTED_SUBJECT_DEPLOYMENT_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='deployment_status')
def test_fork_msg(self) -> None:
expected_message = u"baxterandthehackers forked [public-repo](https://github.com/baxterandthehackers/public-repo)"
self.send_and_test_stream_message('fork', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='fork')
def test_issue_comment_msg(self) -> None:
expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/issues/2#issuecomment-99262140) on [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2)\n\n~~~ quote\nYou are totally right! I'll get this fixed right away.\n~~~"
self.send_and_test_stream_message('issue_comment', self.EXPECTED_SUBJECT_ISSUE_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='issue_comment')
def test_issue_msg(self) -> None:
expected_message = u"baxterthehacker opened [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2)\n\n~~~ quote\nIt looks like you accidently spelled 'commit' with two 't's.\n~~~"
self.send_and_test_stream_message('issue', self.EXPECTED_SUBJECT_ISSUE_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='issues')
def test_membership_msg(self) -> None:
expected_message = u"baxterthehacker added [kdaigle](https://github.com/kdaigle) to Contractors team"
self.send_and_test_stream_message('membership', self.EXPECTED_SUBJECT_ORGANIZATION_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='membership')
def test_member_msg(self) -> None:
expected_message = u"baxterthehacker added [octocat](https://github.com/octocat) to [public-repo](https://github.com/baxterthehacker/public-repo)"
self.send_and_test_stream_message('member', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='member')
def test_pull_request_opened_msg(self) -> None:
expected_message = u"baxterthehacker opened [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`\n\n~~~ quote\nThis is a pretty simple change that we need to pull into master.\n~~~"
self.send_and_test_stream_message('opened_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_synchronized_msg(self) -> None:
expected_message = u"baxterthehacker updated [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`"
self.send_and_test_stream_message('synchronized_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_closed_msg(self) -> None:
expected_message = u"baxterthehacker closed without merge [PR](https://github.com/baxterthehacker/public-repo/pull/1)"
self.send_and_test_stream_message('closed_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_merged_msg(self) -> None:
expected_message = u"baxterthehacker merged [PR](https://github.com/baxterthehacker/public-repo/pull/1)"
self.send_and_test_stream_message('merged_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request')
def test_public_msg(self) -> None:
expected_message = u"baxterthehacker made [the repository](https://github.com/baxterthehacker/public-repo) public"
self.send_and_test_stream_message('public', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='public')
def test_wiki_pages_msg(self) -> None:
expected_message = u"jasonrudolph:\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)"
self.send_and_test_stream_message('wiki_pages', self.EXPECTED_SUBJECT_WIKI_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='gollum')
def test_watch_msg(self) -> None:
expected_message = u"baxterthehacker starred [the repository](https://github.com/baxterthehacker/public-repo)"
self.send_and_test_stream_message('watch_repository', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='watch')
def test_repository_msg(self) -> None:
expected_message = u"baxterthehacker created [the repository](https://github.com/baxterandthehackers/public-repo)"
self.send_and_test_stream_message('repository', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='repository')
def test_team_add_msg(self) -> None:
expected_message = u"[The repository](https://github.com/baxterandthehackers/public-repo) was added to team github"
self.send_and_test_stream_message('team_add', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='team_add')
def test_release_msg(self) -> None:
expected_message = u"baxterthehacker published [the release](https://github.com/baxterthehacker/public-repo/releases/tag/0.0.1)"
self.send_and_test_stream_message('release', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='release')
def test_page_build_msg(self) -> None:
expected_message = u"Github Pages build, trigerred by baxterthehacker, is built"
self.send_and_test_stream_message('page_build', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='page_build')
def test_status_msg(self) -> None:
expected_message = u"[9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b) changed its status to success"
self.send_and_test_stream_message('status', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='status')
def test_pull_request_review_msg(self) -> None:
expected_message = u"baxterthehacker submitted [PR Review](https://github.com/baxterthehacker/public-repo/pull/1#pullrequestreview-2626884)"
self.send_and_test_stream_message('pull_request_review', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request_review')
def test_pull_request_review_comment_msg(self) -> None:
expected_message = u"baxterthehacker created [PR Review Comment](https://github.com/baxterthehacker/public-repo/pull/1#discussion_r29724692)\n\n~~~ quote\nMaybe you should use more emojji on this line.\n~~~"
self.send_and_test_stream_message('pull_request_review_comment', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request_review_comment')
def test_push_tag_msg(self) -> None:
expected_message = u"baxterthehacker pushed tag abc"
self.send_and_test_stream_message('push_tag', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push')
def test_pull_request_edited_msg(self) -> None:
expected_message = u"baxterthehacker edited [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`"
self.send_and_test_stream_message('edited_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message,
HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_assigned_msg(self) -> None:
expected_message = u"baxterthehacker assigned [PR](https://github.com/baxterthehacker/public-repo/pull/1) to baxterthehacker"
self.send_and_test_stream_message('assigned_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message,
HTTP_X_GITHUB_EVENT='pull_request')
def test_pull_request_unassigned_msg(self) -> None:
expected_message = u"eeshangarg unassigned [PR](https://github.com/zulip-test-org/helloworld/pull/1)"
self.send_and_test_stream_message(
'unassigned_pull_request',
'helloworld / PR #1 Mention that Zulip rocks!',
expected_message,
HTTP_X_GITHUB_EVENT='pull_request'
)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_pull_request_labeled_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
payload = self.get_body('labeled_pull_request')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_pull_request_unlabeled_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
payload = self.get_body('unlabeled_pull_request')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_pull_request_request_review_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
payload = self.get_body('request_review_pull_request')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_pull_request_request_review_remove_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
payload = self.get_body('request_review_removed_pull_request')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_push_1_commit_filtered_by_branches_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
self.url = self.build_webhook_url(branches='master,development')
payload = self.get_body('push_1_commit')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_push_50_commits_filtered_by_branches_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
self.url = self.build_webhook_url(branches='master,development')
payload = self.get_body('push_50_commits')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_push_multiple_comitters_filtered_by_branches_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
self.url = self.build_webhook_url(branches='master,development')
payload = self.get_body('push_multiple_committers')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.github_webhook.view.check_send_stream_message')
def test_push_multiple_comitters_with_others_filtered_by_branches_ignore(
self, check_send_stream_message_mock: MagicMock) -> None:
self.url = self.build_webhook_url(branches='master,development')
payload = self.get_body('push_multiple_committers_with_others')
result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
| apache-2.0 | 8,828,503,967,933,125,000 | 78.162455 | 387 | 0.727107 | false |
atztogo/spglib | python/test/test_collinear_spin.py | 1 | 1335 | import unittest
import numpy as np
from spglib import get_symmetry
class TestGetSymmetry(unittest.TestCase):
def setUp(self):
lattice = [[4, 0, 0], [0, 4, 0], [0, 0, 4]]
positions = [[0, 0, 0], [0.5, 0.5, 0.5]]
numbers = [1, 1]
magmoms = [0, 0]
self._cell = (lattice, positions, numbers, magmoms)
def tearDown(self):
pass
def test_get_symmetry_ferro(self):
self._cell[3][0] = 1
self._cell[3][1] = 1
sym = get_symmetry(self._cell)
self.assertEqual(96, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0, 0])
def test_get_symmetry_anti_ferro(self):
self._cell[3][0] = 1
self._cell[3][1] = -1
sym = get_symmetry(self._cell)
self.assertEqual(96, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0, 0])
def test_get_symmetry_broken_magmoms(self):
self._cell[3][0] = 1
self._cell[3][1] = 2
sym = get_symmetry(self._cell)
self.assertEqual(48, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0, 1])
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestGetSymmetry)
unittest.TextTestRunner(verbosity=2).run(suite)
# unittest.main()
| bsd-3-clause | 1,407,334,135,078,816,500 | 30.046512 | 72 | 0.580524 | false |
aalien/subtitle2spu | parsesrt.py | 1 | 1661 | # Copyright (C) 2008 Antti Laine <[email protected]>
#
# This file is part of subtitle2spu.
#
# subtitle2spu is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# subtitle2spu is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with subtitle2spu. If not, see <http://www.gnu.org/licenses/>.
import sys
# States
READNUMBER = 1
READTIME = 2
READTEXT = 3
def parse( file, writer ):
state = READNUMBER
linecount = 0
lines = ""
for buf in file:
        if not buf.strip() and state != READTEXT:
            # skip stray blank lines between subtitles; a blank line only
            # carries meaning while reading subtitle text
            continue
if state == READNUMBER:
number = buf.split()[0]
state = READTIME
continue
if state == READTIME:
starttime = buf.split()[0]
endtime = buf.split()[2]
state = READTEXT
continue
if state == READTEXT:
if buf[0] not in ("\n", "\r"):
linecount += 1
lines += buf
else:
print "Writing subtitle %s" %(number)
if not writer.write( number, starttime, endtime, lines ):
return False
state = READNUMBER
linecount = 0
lines = ""
    # flush the last subtitle if the file ends without a trailing blank line
    if state == READTEXT and lines:
        print "Writing subtitle %s" %(number)
        if not writer.write( number, starttime, endtime, lines ):
            return False
    return True
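# Hedged usage sketch: a minimal writer object satisfying the interface
# parse() expects. The class and the command-line handling below are
# illustrative assumptions, not part of subtitle2spu itself.
class EchoWriter:
    def write( self, number, starttime, endtime, lines ):
        print "%s: %s --> %s" %(number, starttime, endtime)
        print lines
        return True

if __name__ == "__main__":
    with open( sys.argv[1] ) as srtfile:
        parse( srtfile, EchoWriter() )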
| mit | 7,590,266,957,127,908,000 | 29.759259 | 73 | 0.584588 | false |
soscpd/bee | root/tests/zguide/examples/Python/mdcliapi.py | 1 | 3030 | """Majordomo Protocol Client API, Python version.
Implements the MDP/Worker spec at http:#rfc.zeromq.org/spec:7.
Author: Min RK <[email protected]>
Based on Java example by Arkadiusz Orzechowski
"""
import logging
import zmq
import MDP
from zhelpers import dump
class MajorDomoClient(object):
"""Majordomo Protocol Client API, Python version.
Implements the MDP/Worker spec at http:#rfc.zeromq.org/spec:7.
"""
broker = None
ctx = None
client = None
poller = None
timeout = 2500
retries = 3
verbose = False
def __init__(self, broker, verbose=False):
self.broker = broker
self.verbose = verbose
self.ctx = zmq.Context()
self.poller = zmq.Poller()
logging.basicConfig(format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S",
level=logging.INFO)
self.reconnect_to_broker()
def reconnect_to_broker(self):
"""Connect or reconnect to broker"""
if self.client:
self.poller.unregister(self.client)
self.client.close()
self.client = self.ctx.socket(zmq.REQ)
self.client.linger = 0
self.client.connect(self.broker)
self.poller.register(self.client, zmq.POLLIN)
if self.verbose:
logging.info("I: connecting to broker at %s...", self.broker)
def send(self, service, request):
"""Send request to broker and get reply by hook or crook.
Takes ownership of request message and destroys it when sent.
Returns the reply message or None if there was no reply.
"""
if not isinstance(request, list):
request = [request]
request = [MDP.C_CLIENT, service] + request
if self.verbose:
logging.warn("I: send request to '%s' service: ", service)
dump(request)
reply = None
retries = self.retries
while retries > 0:
self.client.send_multipart(request)
try:
items = self.poller.poll(self.timeout)
except KeyboardInterrupt:
break # interrupted
if items:
msg = self.client.recv_multipart()
if self.verbose:
logging.info("I: received reply:")
dump(msg)
# Don't try to handle errors, just assert noisily
assert len(msg) >= 3
header = msg.pop(0)
assert MDP.C_CLIENT == header
reply_service = msg.pop(0)
assert service == reply_service
reply = msg
break
else:
if retries:
logging.warn("W: no reply, reconnecting...")
self.reconnect_to_broker()
else:
logging.warn("W: permanent error, abandoning")
break
retries -= 1
return reply
def destroy(self):
self.context.destroy()
| mit | 8,947,509,223,582,141,000 | 28.705882 | 90 | 0.553465 | false |
scholer/cadnano2.5 | cadnano/strand/modscmd.py | 2 | 1922 | from cadnano.proxies.cnproxy import UndoCommand
from cadnano.cntypes import (
DocT,
StrandT
)
class AddModsCommand(UndoCommand):
def __init__(self, document: DocT, strand: StrandT, idx: int, mod_id: str):
super(AddModsCommand, self).__init__()
self._strand = strand
self._id_num = strand.idNum()
self._idx = idx
self._mod_id = mod_id
self.document = document
# end def
def redo(self):
strand = self._strand
mid = self._mod_id
part = strand.part()
idx = self._idx
part.addModStrandInstance(strand, idx, mid)
strand.strandModsAddedSignal.emit(strand, self.document, mid, idx)
# end def
def undo(self):
strand = self._strand
mid = self._mod_id
part = strand.part()
idx = self._idx
part.removeModStrandInstance(strand, idx, mid)
strand.strandModsRemovedSignal.emit(strand, self.document, mid, idx)
# end def
# end class
class RemoveModsCommand(UndoCommand):
def __init__(self, document, strand, idx, mod_id):
super(RemoveModsCommand, self).__init__()
self._strand = strand
self._id_num = strand.idNum()
self._idx = idx
self._mod_id = mod_id
self.document = document
# end def
def redo(self):
strand = self._strand
strand.isStaple()
mid = self._mod_id
part = strand.part()
idx = self._idx
part.removeModStrandInstance(strand, idx, mid)
strand.strandModsRemovedSignal.emit(strand, self.document, mid, idx)
# end def
def undo(self):
strand = self._strand
strand.isStaple()
mid = self._mod_id
part = strand.part()
idx = self._idx
part.addModStrandInstance(strand, idx, mid)
strand.strandModsAddedSignal.emit(strand, self.document, mid, idx)
# end def
# end class
| mit | 6,883,323,475,434,581,000 | 28.121212 | 79 | 0.596254 | false |
maxlikely/scikit-learn | sklearn/datasets/svmlight_format.py | 1 | 13250 | """This module implements a loader and dumper for the svmlight format
This format is a text-based format, with one sample per line. It does
not store zero valued features hence is suitable for sparse dataset.
The first element of each line can be used to store a target variable to
predict.
This format is used as the default format for both svmlight and the
libsvm command line programs.
"""
# Authors: Mathieu Blondel <[email protected]>
# Lars Buitinck <[email protected]>
# Olivier Grisel <[email protected]>
# License: Simple BSD.
from bz2 import BZ2File
from contextlib import closing
import gzip
import io
import os.path
import numpy as np
import scipy.sparse as sp
from ._svmlight_format import _load_svmlight_file
from .. import __version__
from ..externals import six
from ..utils import atleast2d_or_csr
def load_svmlight_file(f, n_features=None, dtype=np.float64,
multilabel=False, zero_based="auto", query_id=False):
"""Load datasets in the svmlight / libsvm format into sparse CSR matrix
This format is a text-based format, with one sample per line. It does
not store zero valued features hence is suitable for sparse dataset.
The first element of each line can be used to store a target variable
to predict.
This format is used as the default format for both svmlight and the
libsvm command line programs.
Parsing a text based source can be expensive. When working on
repeatedly on the same dataset, it is recommended to wrap this
loader with joblib.Memory.cache to store a memmapped backup of the
CSR results of the first call and benefit from the near instantaneous
loading of memmapped structures for the subsequent calls.
This implementation is naive: it does allocate too much memory and
is slow since written in python. On large datasets it is recommended
to use an optimized loader such as:
https://github.com/mblondel/svmlight-loader
In case the file contains a pairwise preference constraint (known
as "qid" in the svmlight format) these are ignored unless the
query_id parameter is set to True. These pairwise preference
constraints can be used to contraint the combination of samples
when using pairwise loss functions (as is the case in some
learning to rank problems) so that only pairs with the same
query_id value are considered.
Parameters
----------
f: {str, file-like, int}
(Path to) a file to load. If a path ends in ".gz" or ".bz2", it will
be uncompressed on the fly. If an integer is passed, it is assumed to
be a file descriptor. A file-like or file descriptor will not be closed
by this function. A file-like object must be opened in binary mode.
n_features: int or None
The number of features to use. If None, it will be inferred. This
argument is useful to load several files that are subsets of a
bigger sliced dataset: each subset might not have example of
every feature, hence the inferred shape might vary from one
slice to another.
multilabel: boolean, optional
Samples may have several labels each (see
http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html)
zero_based: boolean or "auto", optional
Whether column indices in f are zero-based (True) or one-based
(False). If column indices are one-based, they are transformed to
zero-based to match Python/NumPy conventions.
If set to "auto", a heuristic check is applied to determine this from
the file contents. Both kinds of files occur "in the wild", but they
are unfortunately not self-identifying. Using "auto" or True should
always be safe.
query_id: boolean, defaults to False
If True, will return the query_id array for each file.
Returns
-------
X: scipy.sparse matrix of shape (n_samples, n_features)
y: ndarray of shape (n_samples,), or, in the multilabel a list of
tuples of length n_samples.
query_id: array of shape (n_samples,)
query_id for each sample. Only returned when query_id is set to
True.
See also
--------
load_svmlight_files: similar function for loading multiple files in this
format, enforcing the same number of features/columns on all of them.
"""
return tuple(load_svmlight_files([f], n_features, dtype, multilabel,
zero_based, query_id))
def _gen_open(f):
if isinstance(f, int): # file descriptor
return io.open(f, "rb", closefd=False)
elif not isinstance(f, six.string_types):
raise TypeError("expected {str, int, file-like}, got %s" % type(f))
_, ext = os.path.splitext(f)
if ext == ".gz":
return gzip.open(f, "rb")
elif ext == ".bz2":
return BZ2File(f, "rb")
else:
return open(f, "rb")
def _open_and_load(f, dtype, multilabel, zero_based, query_id):
if hasattr(f, "read"):
return _load_svmlight_file(f, dtype, multilabel, zero_based, query_id)
# XXX remove closing when Python 2.7+/3.1+ required
with closing(_gen_open(f)) as f:
return _load_svmlight_file(f, dtype, multilabel, zero_based, query_id)
def load_svmlight_files(files, n_features=None, dtype=np.float64,
multilabel=False, zero_based="auto", query_id=False):
"""Load dataset from multiple files in SVMlight format
This function is equivalent to mapping load_svmlight_file over a list of
files, except that the results are concatenated into a single, flat list
and the samples vectors are constrained to all have the same number of
features.
In case the file contains a pairwise preference constraint (known
as "qid" in the svmlight format) these are ignored unless the
query_id parameter is set to True. These pairwise preference
constraints can be used to constraint the combination of samples
when using pairwise loss functions (as is the case in some
learning to rank problems) so that only pairs with the same
query_id value are considered.
Parameters
----------
files : iterable over {str, file-like, int}
(Paths of) files to load. If a path ends in ".gz" or ".bz2", it will
be uncompressed on the fly. If an integer is passed, it is assumed to
be a file descriptor. File-likes and file descriptors will not be
closed by this function. File-like objects must be opened in binary
mode.
n_features: int or None
The number of features to use. If None, it will be inferred from the
maximum column index occurring in any of the files.
multilabel: boolean, optional
Samples may have several labels each (see
http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html)
zero_based: boolean or "auto", optional
Whether column indices in f are zero-based (True) or one-based
(False). If column indices are one-based, they are transformed to
zero-based to match Python/NumPy conventions.
If set to "auto", a heuristic check is applied to determine this from
the file contents. Both kinds of files occur "in the wild", but they
are unfortunately not self-identifying. Using "auto" or True should
always be safe.
query_id: boolean, defaults to False
If True, will return the query_id array for each file.
Returns
-------
[X1, y1, ..., Xn, yn]
where each (Xi, yi) pair is the result from load_svmlight_file(files[i]).
If query_id is set to True, this will return instead [X1, y1, q1,
..., Xn, yn, qn] where (Xi, yi, qi) is the result from
load_svmlight_file(files[i])
Rationale
---------
When fitting a model to a matrix X_train and evaluating it against a
matrix X_test, it is essential that X_train and X_test have the same
number of features (X_train.shape[1] == X_test.shape[1]). This may not
be the case if you load the files individually with load_svmlight_file.
See also
--------
load_svmlight_file
"""
r = [_open_and_load(f, dtype, multilabel, bool(zero_based), bool(query_id))
for f in files]
if (zero_based is False
or zero_based == "auto" and all(np.min(tmp[1]) > 0 for tmp in r)):
for ind in r:
indices = ind[1]
indices -= 1
if n_features is None:
n_features = max(ind[1].max() for ind in r) + 1
result = []
for data, indices, indptr, y, query_values in r:
shape = (indptr.shape[0] - 1, n_features)
X = sp.csr_matrix((data, indices, indptr), shape)
X.sort_indices()
result += X, y
if query_id:
result.append(query_values)
return result
def _dump_svmlight(X, y, f, one_based, comment, query_id):
is_sp = int(hasattr(X, "tocsr"))
if X.dtype == np.float64:
value_pattern = u"%d:%0.16e"
else:
value_pattern = u"%d:%f"
if y.dtype.kind == 'i':
line_pattern = u"%d"
else:
line_pattern = u"%f"
if query_id is not None:
line_pattern += u" qid:%d"
line_pattern += u" %s\n"
if comment:
f.write("# Generated by dump_svmlight_file from scikit-learn %s\n"
% __version__)
f.write("# Column indices are %s-based\n" % ["zero", "one"][one_based])
f.write("#\n")
f.writelines("# %s\n" % line for line in comment.splitlines())
for i in range(X.shape[0]):
s = " ".join([value_pattern % (j + one_based, X[i, j])
for j in X[i].nonzero()[is_sp]])
if query_id is not None:
feat = (y[i], query_id[i], s)
else:
feat = (y[i], s)
f.write((line_pattern % feat).encode('ascii'))
def dump_svmlight_file(X, y, f, zero_based=True, comment=None, query_id=None):
"""Dump the dataset in svmlight / libsvm file format.
This format is a text-based format, with one sample per line. It does
not store zero valued features hence is suitable for sparse dataset.
The first element of each line can be used to store a target variable
to predict.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape = [n_samples]
Target values.
f : string or file-like in binary mode
If string, specifies the path that will contain the data.
If file-like, data will be written to f. f should be opened in binary
mode.
zero_based : boolean, optional
Whether column indices should be written zero-based (True) or one-based
(False).
comment : string, optional
Comment to insert at the top of the file. This should be either a
Unicode string, which will be encoded as UTF-8, or an ASCII byte
string.
If a comment is given, then it will be preceded by one that identifies
the file as having been dumped by scikit-learn. Note that not all
tools grok comments in SVMlight files.
query_id : array-like, shape = [n_samples]
Array containing pairwise preference constraints (qid in svmlight
format).
"""
if comment is not None:
# Convert comment string to list of lines in UTF-8.
# If a byte string is passed, then check whether it's ASCII;
# if a user wants to get fancy, they'll have to decode themselves.
# Avoid mention of str and unicode types for Python 3.x compat.
if isinstance(comment, bytes):
comment.decode("ascii") # just for the exception
else:
comment = comment.encode("utf-8")
if "\0" in comment:
raise ValueError("comment string contains NUL byte")
y = np.asarray(y)
if y.ndim != 1:
raise ValueError("expected y of shape (n_samples,), got %r"
% (y.shape,))
Xval = atleast2d_or_csr(X)
if Xval.shape[0] != y.shape[0]:
raise ValueError("X.shape[0] and y.shape[0] should be the same, got"
" %r and %r instead." % (Xval.shape[0], y.shape[0]))
# We had some issues with CSR matrices with unsorted indices (e.g. #1501),
# so sort them here, but first make sure we don't modify the user's X.
# TODO We can do this cheaper; sorted_indices copies the whole matrix.
if Xval is X and hasattr(Xval, "sorted_indices"):
X = Xval.sorted_indices()
else:
X = Xval
if hasattr(X, "sort_indices"):
X.sort_indices()
if query_id is not None:
query_id = np.asarray(query_id)
if query_id.shape[0] != y.shape[0]:
raise ValueError("expected query_id of shape (n_samples,), got %r"
% (query_id.shape,))
one_based = not zero_based
if hasattr(f, "write"):
_dump_svmlight(X, y, f, one_based, comment, query_id)
else:
with open(f, "wb") as f:
_dump_svmlight(X, y, f, one_based, comment, query_id)
| bsd-3-clause | 8,683,822,834,947,362,000 | 37.184438 | 79 | 0.644604 | false |
jorgenkg/python-neural-network | nimblenet/cost_functions.py | 1 | 1632 | import numpy as np
import math
def sum_squared_error( outputs, targets, derivative=False ):
if derivative:
return outputs - targets
else:
return 0.5 * np.mean(np.sum( np.power(outputs - targets,2), axis = 1 ))
#end cost function
def hellinger_distance( outputs, targets, derivative=False ):
"""
The output signals should be in the range [0, 1]
"""
root_difference = np.sqrt( outputs ) - np.sqrt( targets )
if derivative:
return root_difference/( np.sqrt(2) * np.sqrt( outputs ))
else:
return np.mean(np.sum( np.power(root_difference, 2 ), axis=1) / math.sqrt( 2 ))
#end cost function
def binary_cross_entropy_cost( outputs, targets, derivative=False, epsilon=1e-11 ):
"""
The output signals should be in the range [0, 1]
"""
# Prevent overflow
outputs = np.clip(outputs, epsilon, 1 - epsilon)
divisor = np.maximum(outputs * (1 - outputs), epsilon)
if derivative:
return (outputs - targets) / divisor
else:
return np.mean(-np.sum(targets * np.log( outputs ) + (1 - targets) * np.log(1 - outputs), axis=1))
#end cost function
cross_entropy_cost = binary_cross_entropy_cost
def softmax_categorical_cross_entropy_cost( outputs, targets, derivative=False, epsilon=1e-11 ):
"""
The output signals should be in the range [0, 1]
"""
outputs = np.clip(outputs, epsilon, 1 - epsilon)
if derivative:
return outputs - targets
else:
return np.mean(-np.sum(targets * np.log( outputs ), axis=1))
#end cost function
softmax_neg_loss = softmax_categorical_cross_entropy_cost | bsd-2-clause | -7,239,579,203,085,241,000 | 30.403846 | 106 | 0.645221 | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/enums/types/negative_geo_target_type.py | 1 | 1192 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.enums',
marshal='google.ads.googleads.v7',
manifest={
'NegativeGeoTargetTypeEnum',
},
)
class NegativeGeoTargetTypeEnum(proto.Message):
r"""Container for enum describing possible negative geo target
types.
"""
class NegativeGeoTargetType(proto.Enum):
r"""The possible negative geo target types."""
UNSPECIFIED = 0
UNKNOWN = 1
PRESENCE_OR_INTEREST = 4
PRESENCE = 5
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | -6,885,534,629,699,815,000 | 28.8 | 74 | 0.692953 | false |
mvaled/sentry | src/sentry/south_migrations/0326_auto__add_field_groupsnooze_count__add_field_groupsnooze_window__add_f.py | 1 | 116733 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'GroupSnooze.count'
db.add_column(
'sentry_groupsnooze',
'count',
self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True),
keep_default=False
)
# Adding field 'GroupSnooze.window'
db.add_column(
'sentry_groupsnooze',
'window',
self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True),
keep_default=False
)
# Adding field 'GroupSnooze.user_count'
db.add_column(
'sentry_groupsnooze',
'user_count',
self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True),
keep_default=False
)
# Adding field 'GroupSnooze.user_window'
db.add_column(
'sentry_groupsnooze',
'user_window',
self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True),
keep_default=False
)
# Adding field 'GroupSnooze.state'
db.add_column(
'sentry_groupsnooze',
'state',
self.gf('sentry.db.models.fields.jsonfield.JSONField')(null=True),
keep_default=False
)
# Changing field 'GroupSnooze.until'
db.alter_column(
'sentry_groupsnooze',
'until',
self.gf('django.db.models.fields.DateTimeField')(null=True)
)
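        # For reference, on PostgreSQL the operations above translate to
        # roughly the following DDL. This is a sketch only: the concrete
        # column types, check constraints, and statement batching are
        # backend-dependent assumptions, not captured South output.
        #
        #   ALTER TABLE sentry_groupsnooze ADD COLUMN count integer NULL;
        #   ALTER TABLE sentry_groupsnooze ADD COLUMN window integer NULL;
        #   ALTER TABLE sentry_groupsnooze ADD COLUMN user_count integer NULL;
        #   ALTER TABLE sentry_groupsnooze ADD COLUMN user_window integer NULL;
        #   ALTER TABLE sentry_groupsnooze ADD COLUMN state text NULL;
        #   ALTER TABLE sentry_groupsnooze ALTER COLUMN until DROP NOT NULL;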
def backwards(self, orm):
raise RuntimeError(
"Cannot reverse this migration. 'GroupSnooze.until' and its values cannot be restored."
)
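        # Irreversible by design: altering ``until`` back to NOT NULL would
        # fail (or silently lose data) for any row storing NULL after this
        # migration, so South refuses to generate a reverse step. The five
        # added columns alone could be dropped safely, but the generated
        # backwards step is all-or-nothing.

    # ``models`` below is South's frozen ORM: a snapshot of every model's
    # schema as of this migration, so the operations above run against this
    # historical state rather than against the live model definitions.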
models = {
'sentry.activity': {
'Meta': {
'object_name': 'Activity'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.apiapplication': {
'Meta': {
'object_name': 'ApiApplication'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'client_id': (
'django.db.models.fields.CharField', [], {
'default': "'1fe2246606cd41688e14b95ae1bdc14c6b7652dea035446fa2dc8bcacf21afd6'",
'unique': 'True',
'max_length': '64'
}
),
'client_secret': (
'sentry.db.models.fields.encrypted.EncryptedTextField', [], {
'default': "'7f918820281a421d991389c5fad78a41551739601ae745e8a24e9cb56ee8ffaa'"
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'homepage_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'default': "'Trusting Weasel'",
'max_length': '64',
'blank': 'True'
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'privacy_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'terms_url':
('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.apiauthorization': {
'Meta': {
'unique_together': "(('user', 'application'),)",
'object_name': 'ApiAuthorization'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apigrant': {
'Meta': {
'object_name': 'ApiGrant'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']"
}
),
'code': (
'django.db.models.fields.CharField', [], {
'default': "'d959d133f88c4292a581081e6190b949'",
'max_length': '64',
'db_index': 'True'
}
),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 6, 1, 0, 0)',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'redirect_uri': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.apikey': {
'Meta': {
'object_name': 'ApiKey'
},
'allowed_origins':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32'
}),
'label': (
'django.db.models.fields.CharField', [], {
'default': "'Default'",
'max_length': '64',
'blank': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Organization']"
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.apitoken': {
'Meta': {
'object_name': 'ApiToken'
},
'application': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiApplication']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'expires_at': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 7, 1, 0, 0)',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'refresh_token': (
'django.db.models.fields.CharField', [], {
'default': "'6c4fadd19de34e39ac0859f3f896065cd8c3cd19c56c453287ab9f199c539138'",
'max_length': '64',
'unique': 'True',
'null': 'True'
}
),
'scope_list': (
'sentry.db.models.fields.array.ArrayField', [], {
'of': ('django.db.models.fields.TextField', [], {})
}
),
'scopes': ('django.db.models.fields.BigIntegerField', [], {
'default': 'None'
}),
'token': (
'django.db.models.fields.CharField', [], {
'default': "'94b568466766407cad05e6e2a630f6561a04ecb269c047c381f78c857d84422a'",
'unique': 'True',
'max_length': '64'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.auditlogentry': {
'Meta': {
'object_name': 'AuditLogEntry'
},
'actor': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_actors'",
'null': 'True',
'to': "orm['sentry.User']"
}
),
'actor_key': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ApiKey']",
'null': 'True',
'blank': 'True'
}
),
'actor_label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'target_object':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'target_user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'audit_targets'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.authenticator': {
'Meta': {
'unique_together': "(('user', 'type'),)",
'object_name': 'Authenticator',
'db_table': "'auth_authenticator'"
},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authidentity': {
'Meta': {
'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))",
'object_name': 'AuthIdentity'
},
'auth_provider': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.AuthProvider']"
}
),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_verified':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.authprovider': {
'Meta': {
'object_name': 'AuthProvider'
},
'config':
('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_global_access':
('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'default_role':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50'
}),
'default_teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'unique': 'True'
}
),
'provider': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'sync_time':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.broadcast': {
'Meta': {
'object_name': 'Broadcast'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_expires': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 6, 8, 0, 0)',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active':
('django.db.models.fields.BooleanField', [], {
'default': 'True',
'db_index': 'True'
}),
'link': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.CharField', [], {
'max_length': '256'
}),
'title': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'upstream_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.broadcastseen': {
'Meta': {
'unique_together': "(('broadcast', 'user'),)",
'object_name': 'BroadcastSeen'
},
'broadcast': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Broadcast']"
}
),
'date_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.commit': {
'Meta': {
'unique_together': "(('repository_id', 'key'),)",
'object_name': 'Commit',
'index_together': "(('repository_id', 'date_added'),)"
},
'author': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.CommitAuthor']",
'null': 'True'
}
),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'message': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {
'unique_together':
"(('organization_id', 'email'), ('organization_id', 'external_id'))",
'object_name':
'CommitAuthor'
},
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '164',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.commitfilechange': {
'Meta': {
'unique_together': "(('commit', 'filename'),)",
'object_name': 'CommitFileChange'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'filename': ('django.db.models.fields.CharField', [], {
'max_length': '255'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '1'
})
},
'sentry.counter': {
'Meta': {
'object_name': 'Counter',
'db_table': "'sentry_projectcounter'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'unique': 'True'
}
),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deploy': {
'Meta': {
'object_name': 'Deploy'
},
'date_finished':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'notified': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'db_index': 'True',
'blank': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
)
},
'sentry.distribution': {
'Meta': {
'unique_together': "(('release', 'name'),)",
'object_name': 'Distribution'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.dsymapp': {
'Meta': {
'unique_together': "(('project', 'platform', 'app_id'),)",
'object_name': 'DSymApp'
},
'app_id': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_synced':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'sync_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
})
},
'sentry.dsymbundle': {
'Meta': {
'object_name': 'DSymBundle'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'sdk': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymSDK']"
}
)
},
'sentry.dsymobject': {
'Meta': {
'object_name': 'DSymObject'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_path': ('django.db.models.fields.TextField', [], {
'db_index': 'True'
}),
'uuid':
('django.db.models.fields.CharField', [], {
'max_length': '36',
'db_index': 'True'
}),
'vmaddr':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'vmsize':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
})
},
'sentry.dsymsdk': {
'Meta': {
'object_name':
'DSymSDK',
'index_together':
"[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]"
},
'dsym_type':
('django.db.models.fields.CharField', [], {
'max_length': '20',
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'sdk_name': ('django.db.models.fields.CharField', [], {
'max_length': '20'
}),
'version_build': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'version_major': ('django.db.models.fields.IntegerField', [], {}),
'version_minor': ('django.db.models.fields.IntegerField', [], {}),
'version_patchlevel': ('django.db.models.fields.IntegerField', [], {})
},
'sentry.dsymsymbol': {
'Meta': {
'unique_together': "[('object', 'address')]",
'object_name': 'DSymSymbol'
},
'address':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymObject']"
}
),
'symbol': ('django.db.models.fields.TextField', [], {})
},
'sentry.environment': {
'Meta': {
'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))",
'object_name': 'Environment'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Project']",
'through': "orm['sentry.EnvironmentProject']",
'symmetrical': 'False'
}
)
},
'sentry.environmentproject': {
'Meta': {
'unique_together': "(('project', 'environment'),)",
'object_name': 'EnvironmentProject'
},
'environment': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Environment']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.event': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'Event',
'db_table': "'sentry_message'",
'index_together': "(('group_id', 'datetime'),)"
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True',
'db_column': "'message_id'"
}
),
'group_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'time_spent':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'null': 'True'
})
},
'sentry.eventmapping': {
'Meta': {
'unique_together': "(('project_id', 'event_id'),)",
'object_name': 'EventMapping'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {
'unique_together': "(('raw_event', 'processing_issue'),)",
'object_name': 'EventProcessingIssue'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'processing_issue': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProcessingIssue']"
}
),
'raw_event': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.RawEvent']"
}
)
},
'sentry.eventtag': {
'Meta': {
'unique_together':
"(('event_id', 'key_id', 'value_id'),)",
'object_name':
'EventTag',
'index_together':
"(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {
'unique_together':
"(('project', 'ident'), ('project', 'hash'))",
'object_name':
'EventUser',
'index_together':
"(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'ip_address': (
'django.db.models.fields.GenericIPAddressField', [], {
'max_length': '39',
'null': 'True'
}
),
'name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'username':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'null': 'True'
})
},
'sentry.file': {
'Meta': {
'object_name': 'File'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'legacy_blob'",
'null': 'True',
'to': "orm['sentry.FileBlob']"
}
),
'blobs': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.FileBlob']",
'through': "orm['sentry.FileBlobIndex']",
'symmetrical': 'False'
}
),
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'null': 'True'
}),
'headers': ('sentry.db.models.fields.jsonfield.JSONField', [], {
'default': '{}'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.fileblob': {
'Meta': {
'object_name': 'FileBlob'
},
'checksum':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '40'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'path': ('django.db.models.fields.TextField', [], {
'null': 'True'
}),
'size':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'timestamp': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
)
},
'sentry.fileblobindex': {
'Meta': {
'unique_together': "(('file', 'blob', 'offset'),)",
'object_name': 'FileBlobIndex'
},
'blob': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.FileBlob']"
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.globaldsymfile': {
'Meta': {
'object_name': 'GlobalDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'uuid':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '36'
})
},
'sentry.group': {
'Meta': {
'unique_together': "(('project', 'short_id'),)",
'object_name': 'Group',
'db_table': "'sentry_groupedmessage'",
'index_together': "(('project', 'first_release'),)"
},
'active_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'culprit': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'null': 'True',
'db_column': "'view'",
'blank': 'True'
}
),
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']",
'null': 'True',
'on_delete': 'models.PROTECT'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_public': (
'django.db.models.fields.NullBooleanField', [], {
'default': 'False',
'null': 'True',
'blank': 'True'
}
),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'platform':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'resolved_at':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'db_index': 'True'
}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'short_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'time_spent_count':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'time_spent_total':
('sentry.db.models.fields.bounded.BoundedIntegerField', [], {
'default': '0'
}),
'times_seen': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '1',
'db_index': 'True'
}
)
},
'sentry.groupassignee': {
'Meta': {
'object_name': 'GroupAssignee',
'db_table': "'sentry_groupasignee'"
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'unique': 'True',
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'assignee_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_assignee_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupbookmark': {
'Meta': {
'unique_together': "(('project', 'user', 'group'),)",
'object_name': 'GroupBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'bookmark_set'",
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'sentry_bookmark_set'",
'to': "orm['sentry.User']"
}
)
},
'sentry.groupcommitresolution': {
'Meta': {
'unique_together': "(('group_id', 'commit_id'),)",
'object_name': 'GroupCommitResolution'
},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
})
},
'sentry.groupemailthread': {
'Meta': {
'unique_together': "(('email', 'group'), ('email', 'msgid'))",
'object_name': 'GroupEmailThread'
},
'date': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'msgid': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'groupemail_set'",
'to': "orm['sentry.Project']"
}
)
},
'sentry.grouphash': {
'Meta': {
'unique_together': "(('project', 'hash'),)",
'object_name': 'GroupHash'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
)
},
'sentry.groupmeta': {
'Meta': {
'unique_together': "(('group', 'key'),)",
'object_name': 'GroupMeta'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {
'object_name': 'GroupRedirect'
},
'group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'db_index': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'previous_group_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'unique': 'True'
})
},
'sentry.grouprelease': {
'Meta': {
'unique_together': "(('group_id', 'release_id', 'environment'),)",
'object_name': 'GroupRelease'
},
'environment':
('django.db.models.fields.CharField', [], {
'default': "''",
'max_length': '64'
}),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.groupresolution': {
'Meta': {
'object_name': 'GroupResolution'
},
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouprulestatus': {
'Meta': {
'unique_together': "(('rule', 'group'),)",
'object_name': 'GroupRuleStatus'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_active': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'rule': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Rule']"
}
),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
})
},
'sentry.groupseen': {
'Meta': {
'unique_together': "(('user', 'group'),)",
'object_name': 'GroupSeen'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'db_index': 'False'
}
)
},
'sentry.groupsnooze': {
'Meta': {
'object_name': 'GroupSnooze'
},
'count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'unique': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'state': ('sentry.db.models.fields.jsonfield.JSONField', [], {
'null': 'True'
}),
'until': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'user_count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'user_window':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'window':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
})
},
'sentry.groupsubscription': {
'Meta': {
'unique_together': "(('group', 'user'),)",
'object_name': 'GroupSubscription'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'subscription_set'",
'to': "orm['sentry.Project']"
}
),
'reason':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.grouptagkey': {
'Meta': {
'unique_together': "(('project', 'group', 'key'),)",
'object_name': 'GroupTagKey'
},
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.grouptagvalue': {
'Meta': {
'unique_together': "(('group_id', 'key', 'value'),)",
'object_name': 'GroupTagValue',
'db_table': "'sentry_messagefiltervalue'",
'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"
},
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.lostpasswordhash': {
'Meta': {
'object_name': 'LostPasswordHash'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'hash': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'unique': 'True'
}
)
},
'sentry.option': {
'Meta': {
'object_name': 'Option'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '64'
}),
'last_updated':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {
'object_name': 'Organization'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'default_role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'members': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'org_memberships'",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMember']",
'to': "orm['sentry.User']"
}
),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'slug':
('django.db.models.fields.SlugField', [], {
'unique': 'True',
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.organizationaccessrequest': {
'Meta': {
'unique_together': "(('team', 'member'),)",
'object_name': 'OrganizationAccessRequest'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'member': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationavatar': {
'Meta': {
'object_name': 'OrganizationAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.Organization']"
}
)
},
'sentry.organizationmember': {
'Meta': {
'unique_together': "(('organization', 'user'), ('organization', 'email'))",
'object_name': 'OrganizationMember'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': (
'django.db.models.fields.EmailField', [], {
'max_length': '75',
'null': 'True',
'blank': 'True'
}
),
'flags': ('django.db.models.fields.BigIntegerField', [], {
'default': '0'
}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'member_set'",
'to': "orm['sentry.Organization']"
}
),
'role':
('django.db.models.fields.CharField', [], {
'default': "'member'",
'max_length': '32'
}),
'teams': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': "orm['sentry.Team']",
'symmetrical': 'False',
'through': "orm['sentry.OrganizationMemberTeam']",
'blank': 'True'
}
),
'token': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'unique': 'True',
'null': 'True',
'blank': 'True'
}
),
'type': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '50',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'blank': 'True',
'related_name': "'sentry_orgmember_set'",
'null': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.organizationmemberteam': {
'Meta': {
'unique_together': "(('team', 'organizationmember'),)",
'object_name': 'OrganizationMemberTeam',
'db_table': "'sentry_organizationmember_teams'"
},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'organizationmember': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.OrganizationMember']"
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.organizationonboardingtask': {
'Meta': {
'unique_together': "(('organization', 'task'),)",
'object_name': 'OrganizationOnboardingTask'
},
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {
'default': '{}'
}),
'date_completed':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
)
},
'sentry.organizationoption': {
'Meta': {
'unique_together': "(('organization', 'key'),)",
'object_name': 'OrganizationOption',
'db_table': "'sentry_organizationoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {
'unique_together': "(('project', 'checksum', 'type'),)",
'object_name': 'ProcessingIssue'
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '40',
'db_index': 'True'
}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'type': ('django.db.models.fields.CharField', [], {
'max_length': '30'
})
},
'sentry.project': {
'Meta': {
'unique_together': "(('team', 'slug'), ('organization', 'slug'))",
'object_name': 'Project'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'first_event': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'flags':
('django.db.models.fields.BigIntegerField', [], {
'default': '0',
'null': 'True'
}),
'forced_color': (
'django.db.models.fields.CharField', [], {
'max_length': '6',
'null': 'True',
'blank': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'public': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'team': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Team']"
}
)
},
'sentry.projectbookmark': {
'Meta': {
'unique_together': "(('project_id', 'user'),)",
'object_name': 'ProjectBookmark'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project_id': (
'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True',
'blank': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.projectdsymfile': {
'Meta': {
'unique_together': "(('project', 'uuid'),)",
'object_name': 'ProjectDSymFile'
},
'cpu_name': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'uuid': ('django.db.models.fields.CharField', [], {
'max_length': '36'
})
},
'sentry.projectkey': {
'Meta': {
'object_name': 'ProjectKey'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'key_set'",
'to': "orm['sentry.Project']"
}
),
'public_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'rate_limit_count':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'rate_limit_window':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'roles': ('django.db.models.fields.BigIntegerField', [], {
'default': '1'
}),
'secret_key': (
'django.db.models.fields.CharField', [], {
'max_length': '32',
'unique': 'True',
'null': 'True'
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.projectoption': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'ProjectOption',
'db_table': "'sentry_projectoptions'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {
'unique_together': "(('project_id', 'platform'),)",
'object_name': 'ProjectPlatform'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'platform': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.rawevent': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'RawEvent'
},
'data':
('sentry.db.models.fields.node.NodeField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.release': {
'Meta': {
'unique_together': "(('organization', 'version'),)",
'object_name': 'Release'
},
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_released':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'date_started':
('django.db.models.fields.DateTimeField', [], {
'null': 'True',
'blank': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True',
'blank': 'True'
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'projects': (
'django.db.models.fields.related.ManyToManyField', [], {
'related_name': "'releases'",
'symmetrical': 'False',
'through': "orm['sentry.ReleaseProject']",
'to': "orm['sentry.Project']"
}
),
'ref': (
'django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True',
'blank': 'True'
}
),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '64'
})
},
'sentry.releasecommit': {
'Meta': {
'unique_together': "(('release', 'commit'), ('release', 'order'))",
'object_name': 'ReleaseCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseenvironment': {
'Meta': {
'unique_together':
"(('project_id', 'release_id', 'environment_id'), ('organization_id', 'release_id', 'environment_id'))",
'object_name':
'ReleaseEnvironment',
'db_table':
"'sentry_environmentrelease'"
},
'environment_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'first_seen':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'project_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True',
'db_index': 'True'
}
),
'release_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
)
},
'sentry.releasefile': {
'Meta': {
'unique_together': "(('release', 'ident'),)",
'object_name': 'ReleaseFile'
},
'dist': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Distribution']",
'null': 'True'
}
),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': ('django.db.models.fields.CharField', [], {
'max_length': '40'
}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'project_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'null': 'True'
}),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.releaseheadcommit': {
'Meta': {
'unique_together': "(('repository_id', 'release'),)",
'object_name': 'ReleaseHeadCommit'
},
'commit': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Commit']"
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
),
'repository_id':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.releaseproject': {
'Meta': {
'unique_together': "(('project', 'release'),)",
'object_name': 'ReleaseProject',
'db_table': "'sentry_release_project'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'new_groups': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'release': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Release']"
}
)
},
'sentry.repository': {
'Meta': {
'unique_together':
"(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))",
'object_name':
'Repository'
},
'config': ('sentry.db.models.fields.jsonfield.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'external_id':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '200'
}),
'organization_id': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'db_index': 'True'
}
),
'provider':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
),
'url': ('django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True'
})
},
'sentry.reprocessingreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'ReprocessingReport'
},
'datetime':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'event_id':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.rule': {
'Meta': {
'object_name': 'Rule'
},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status': (
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0',
'db_index': 'True'
}
)
},
'sentry.savedsearch': {
'Meta': {
'unique_together': "(('project', 'name'),)",
'object_name': 'SavedSearch'
},
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_default': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'owner': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']",
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {
'unique_together': "(('project', 'user'),)",
'object_name': 'SavedSearchUserDefault',
'db_table': "'sentry_savedsearch_userdefault'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'savedsearch': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.SavedSearch']"
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
)
},
'sentry.scheduleddeletion': {
'Meta': {
'unique_together': "(('app_label', 'model_name', 'object_id'),)",
'object_name': 'ScheduledDeletion'
},
'aborted': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'actor_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'app_label': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'data': ('sentry.db.models.fields.jsonfield.JSONField', [], {
'default': '{}'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'date_scheduled': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime(2017, 7, 1, 0, 0)'
}
),
'guid': (
'django.db.models.fields.CharField', [], {
'default': "'7dcd5c1ace824812b6cc232360d975f7'",
'unique': 'True',
'max_length': '32'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'in_progress': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'model_name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.tagkey': {
'Meta': {
'unique_together': "(('project', 'key'),)",
'object_name': 'TagKey',
'db_table': "'sentry_filterkey'"
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'label':
('django.db.models.fields.CharField', [], {
'max_length': '64',
'null': 'True'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'values_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.tagvalue': {
'Meta': {
'unique_together': "(('project', 'key', 'value'),)",
'object_name': 'TagValue',
'db_table': "'sentry_filtervalue'"
},
'data': (
'sentry.db.models.fields.gzippeddict.GzippedDictField', [], {
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True',
'db_index': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'times_seen':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
}),
'value': ('django.db.models.fields.CharField', [], {
'max_length': '200'
})
},
'sentry.team': {
'Meta': {
'unique_together': "(('organization', 'slug'),)",
'object_name': 'Team'
},
'date_added': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']"
}
),
'slug': ('django.db.models.fields.SlugField', [], {
'max_length': '50'
}),
'status':
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {
'default': '0'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_managed': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_password_expired':
('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {
'null': 'True'
}),
'name': (
'django.db.models.fields.CharField', [], {
'max_length': '200',
'db_column': "'first_name'",
'blank': 'True'
}
),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'session_nonce':
('django.db.models.fields.CharField', [], {
'max_length': '12',
'null': 'True'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
'sentry.useravatar': {
'Meta': {
'object_name': 'UserAvatar'
},
'avatar_type':
('django.db.models.fields.PositiveSmallIntegerField', [], {
'default': '0'
}),
'file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.File']",
'unique': 'True',
'null': 'True',
'on_delete': 'models.SET_NULL'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'ident': (
'django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '32',
'db_index': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'avatar'",
'unique': 'True',
'to': "orm['sentry.User']"
}
)
},
'sentry.useremail': {
'Meta': {
'unique_together': "(('user', 'email'),)",
'object_name': 'UserEmail'
},
'date_hash_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'is_verified': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'related_name': "'emails'",
'to': "orm['sentry.User']"
}
),
'validation_hash': (
'django.db.models.fields.CharField', [], {
'default': "u'UgLIAnDusbhZ8E66pCx3Af5EoUtzEmSA'",
'max_length': '32'
}
)
},
'sentry.useroption': {
'Meta': {
'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))",
'object_name': 'UserOption'
},
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'key': ('django.db.models.fields.CharField', [], {
'max_length': '64'
}),
'organization': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Organization']",
'null': 'True'
}
),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']",
'null': 'True'
}
),
'user': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.User']"
}
),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {
'unique_together': "(('project', 'event_id'),)",
'object_name': 'UserReport',
'index_together': "(('project', 'event_id'), ('project', 'date_added'))"
},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email': ('django.db.models.fields.EmailField', [], {
'max_length': '75'
}),
'event_id': ('django.db.models.fields.CharField', [], {
'max_length': '32'
}),
'event_user_id':
('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {
'null': 'True'
}),
'group': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Group']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'project': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.Project']"
}
)
},
'sentry.versiondsymfile': {
'Meta': {
'unique_together': "(('dsym_file', 'version', 'build'),)",
'object_name': 'VersionDSymFile'
},
'build':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'null': 'True'
}),
'date_added':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'dsym_app': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.DSymApp']"
}
),
'dsym_file': (
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {
'to': "orm['sentry.ProjectDSymFile']",
'null': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'version': ('django.db.models.fields.CharField', [], {
'max_length': '32'
})
}
}
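    # End of South's frozen ORM snapshot; 'complete_apps' below declares which
    # apps the 'models' dict above describes completely.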
complete_apps = ['sentry']
| bsd-3-clause | -7,866,482,563,248,466,000 | 35.812677 | 120 | 0.398756 | false |
tazo90/lux | setup.py | 1 | 1994 | import os
import json
from setuptools import setup, find_packages
package_name = 'lux'
def read(name):
root_dir = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(root_dir, name), 'r') as f:
return f.read()
def run():
install_requires = []
dependency_links = []
pkg = json.loads(read('package.json'))
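    # requirements.txt may mix plain requirement specifiers with editable
    # entries; an editable line of the shape
    #   -e git+https://github.com/example/pkg.git#egg=pkg
    # (URL illustrative) is recorded as a dependency link, and the name after
    # "egg=" is appended to install_requires. A bare "-e ." is skipped.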
for line in read('requirements.txt').split('\n'):
if line.startswith('-e '):
link = line[3:].strip()
if link == '.':
continue
dependency_links.append(link)
line = link.split('=')[1]
line = line.strip()
if line:
install_requires.append(line)
packages = find_packages(exclude=['tests', 'tests.*'])
setup(name=package_name,
version=pkg['version'],
author=pkg['author']['name'],
author_email=pkg['author']['email'],
url=pkg['homepage'],
license=pkg['licenses'][0]['type'],
description=pkg['description'],
long_description=read('README.rst'),
packages=packages,
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
dependency_links=dependency_links,
scripts=['bin/luxmake.py'],
classifiers=['Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: JavaScript',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Utilities'])
if __name__ == '__main__':
run()
| bsd-3-clause | -8,769,033,512,469,859,000 | 31.16129 | 64 | 0.516048 | false |
pierluigiferrari/fcn8s_tensorflow | cityscapesscripts/annotation/cityscapesLabelTool.py | 1 | 109346 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#################
## Import modules
#################
# pyqt for everything graphical
from PyQt4 import QtGui, QtCore
# get command line parameters
import sys
# walk directories
import glob
# access to OS functionality
import os
# (de)serialize config file
import json
# call processes
import subprocess
# get the user name
import getpass
# xml parsing
import xml.etree.ElementTree as ET
# copy text to clipboard
try:
from Tkinter import Tk
except:
from tkinter import Tk
# copy stuff
import copy
#################
## Helper classes
#################
# annotation helper
sys.path.append( os.path.normpath( os.path.join( os.path.dirname( __file__ ) , '..' , 'helpers' ) ) )
from annotation import Point, Annotation, CsObject
from labels import name2label, assureSingleInstanceName
# Helper class that contains the current configuration of the Gui
# This config is loaded when started and saved when leaving
class configuration:
# Constructor
def __init__(self):
        # The filename of the image we are currently working on
self.currentFile = ""
        # The filename of the labels we are currently working on
self.currentLabelFile = ""
        # The filename of the corrections we are currently working on
self.currentCorrectionFile = ""
# The path where the Cityscapes dataset is located
self.csPath = ""
# The path of the images of the currently loaded city
self.city = ""
# The name of the currently loaded city
self.cityName = ""
# The type of the current annotations
self.gtType = ""
        # The split that the currently loaded city belongs to
        self.split = ""
        # The path of the labels. In this folder we expect a folder for each city.
        # Within these city folders we expect the labels to have filenames matching
        # the images, except for the extension.
self.labelPath = ""
# The path to store correction markings
self.correctionPath = ""
# The transparency of the labels over the image
self.transp = 0.5
# The zoom toggle
self.zoom = False
# The zoom factor
self.zoomFactor = 1.0
# The size of the zoom window. Currently there is no setter or getter for that
self.zoomSize = 400 #px
# The highlight toggle
self.highlight = False
# The highlight label
self.highlightLabelSelection = ""
# Screenshot file
self.screenshotFilename = "%i"
# Correction mode
self.correctionMode = False
# Warn before saving that you are overwriting files
self.showSaveWarning = True
# Load from given filename
def load(self, filename):
if os.path.isfile(filename):
with open(filename, 'r') as f:
jsonText = f.read()
jsonDict = json.loads(jsonText)
for key in jsonDict:
if key in self.__dict__:
self.__dict__[key] = jsonDict[key]
self.fixConsistency()
# Make sure the config is consistent.
# Automatically called after loading
def fixConsistency(self):
if self.currentFile:
self.currentFile = os.path.normpath(self.currentFile)
if self.currentLabelFile:
self.currentLabelFile = os.path.normpath(self.currentLabelFile)
if self.currentCorrectionFile:
self.currentCorrectionFile = os.path.normpath(self.currentCorrectionFile)
if self.csPath:
self.csPath = os.path.normpath(self.csPath)
if not os.path.isdir(self.csPath):
self.csPath = ""
if self.city:
self.city = os.path.normpath(self.city)
if not os.path.isdir(self.city):
self.city = ""
if self.labelPath:
self.labelPath = os.path.normpath(self.labelPath)
if self.correctionPath:
self.correctionPath = os.path.normpath(self.correctionPath)
if self.city:
            self.cityName = os.path.basename(self.city)
if not os.path.isfile(self.currentFile) or os.path.dirname(self.currentFile) != self.city:
self.currentFile = ""
if not os.path.isfile(self.currentLabelFile) or \
not os.path.isdir( os.path.join(self.labelPath,self.cityName) ) or \
os.path.dirname(self.currentLabelFile) != os.path.join(self.labelPath,self.cityName):
self.currentLabelFile = ""
if not os.path.isfile(self.currentCorrectionFile) or \
not os.path.isdir( os.path.join(self.correctionPath,self.cityName) ) or \
os.path.dirname(self.currentCorrectionFile) != os.path.join(self.correctionPath,self.cityName):
self.currentCorrectionFile = ""
    # Save to given filename (as JSON)
def save(self, filename):
with open(filename, 'w') as f:
f.write(json.dumps(self.__dict__, default=lambda o: o.__dict__, sort_keys=True, indent=4))
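    # For reference, the saved file is plain JSON of this object's fields,
    # e.g. (values illustrative):
    #   {"csPath": "/data/cityscapes", "cityName": "aachen",
    #    "transp": 0.5, "zoom": false, "zoomFactor": 1.0, ...}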
def enum(**enums):
return type('Enum', (), enums)
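# e.g. Types = enum(A=1, B=2) yields a class with attributes Types.A == 1, Types.B == 2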
class CorrectionBox:
types = enum(TO_CORRECT=1, TO_REVIEW=2, RESOLVED=3, QUESTION=4)
def __init__(self, rect=None, annotation=""):
self.type = CorrectionBox.types.TO_CORRECT
self.bbox = rect
self.annotation = annotation
self.selected = False
return
def get_colour(self):
if self.type == CorrectionBox.types.TO_CORRECT:
return QtGui.QColor(255,0,0)
elif self.type == CorrectionBox.types.TO_REVIEW:
return QtGui.QColor(255,255,0)
elif self.type == CorrectionBox.types.RESOLVED:
return QtGui.QColor(0,255,0)
elif self.type == CorrectionBox.types.QUESTION:
return QtGui.QColor(0,0,255)
def select(self):
if not self.selected:
self.selected = True
return
def unselect(self):
if self.selected:
self.selected = False
return
    # Read the information from the given correction node in an XML file
    # The node must have the tag 'correction' and contain all expected fields
def readFromXMLNode(self, correctionNode):
if not correctionNode.tag == 'correction':
return
typeNode = correctionNode.find('type')
self.type = int(typeNode.text)
annotationNode = correctionNode.find('annotation')
self.annotation = annotationNode.text
bboxNode = correctionNode.find('bbox')
x = float(bboxNode.find('x').text)
y = float(bboxNode.find('y').text)
width = float(bboxNode.find('width').text)
height = float(bboxNode.find('height').text)
self.bbox = QtCore.QRectF(x,y,width,height)
# Append the information to a node of an XML file
    # Creates a correction node with all children and appends it to the given node
# Usually the given node is the root
def appendToXMLNode(self, node):
        # New correction node
correctionNode = ET.SubElement(node,'correction')
correctionNode.tail = "\n"
correctionNode.text = "\n"
        # Type node
typeNode = ET.SubElement(correctionNode,'type')
typeNode.tail = "\n"
typeNode.text = str(int(self.type))
        # Annotation node
annotationNode = ET.SubElement(correctionNode,'annotation')
annotationNode.tail = "\n"
annotationNode.text = str(self.annotation)
        # Bounding box node
bboxNode = ET.SubElement(correctionNode,'bbox')
bboxNode.text = "\n"
bboxNode.tail = "\n"
xNode = ET.SubElement(bboxNode,'x')
xNode.tail = "\n"
yNode = ET.SubElement(bboxNode,'y')
yNode.tail = "\n"
xNode.text = str(int(round(self.bbox.x())))
yNode.text = str(int(round(self.bbox.y())))
wNode = ET.SubElement(bboxNode,'width')
wNode.tail = "\n"
hNode = ET.SubElement(bboxNode,'height')
hNode.tail = "\n"
wNode.text = str(int(round(self.bbox.width())))
hNode.text = str(int(round(self.bbox.height())))
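    # For reference, a correction serialized by appendToXMLNode looks like
    # (values illustrative):
    #   <correction>
    #     <type>1</type>
    #     <annotation>fix this boundary</annotation>
    #     <bbox><x>10</x><y>20</y><width>30</width><height>40</height></bbox>
    #   </correction>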
#################
## Main GUI class
#################
# The main class which is a QtGui -> Main Window
class CityscapesLabelTool(QtGui.QMainWindow):
#############################
## Construction / Destruction
#############################
# Constructor
def __init__(self):
# Construct base class
super(CityscapesLabelTool, self).__init__()
# The filename of where the config is saved and loaded
configDir = os.path.dirname( __file__ )
self.configFile = os.path.join( configDir , "cityscapesLabelTool.conf" )
# This is the configuration.
self.config = configuration()
self.config.load(self.configFile)
# for copying text to clipboard
self.tk = Tk()
# Other member variables
# The width that we actually use to show the image
self.w = 0
# The height that we actually use to show the image
self.h = 0
# The horizontal offset where we start drawing within the widget
self.xoff = 0
        # The vertical offset where we start drawing within the widget
self.yoff = 0
# A gap that we leave around the image as little border
self.bordergap = 20
# The scale that was used, ie
# self.w = self.scale * self.image.width()
# self.h = self.scale * self.image.height()
self.scale = 1.0
# Filenames of all images in current city
self.images = []
# Image extension
self.imageExt = "_leftImg8bit.png"
# Ground truth extension
self.gtExt = "{}_polygons.json"
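        # e.g. self.gtExt.format("gtFine") -> "gtFine_polygons.json"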
# Current image as QImage
self.image = QtGui.QImage()
# Index of the current image within the city folder
self.idx = 0
# All annotated objects in current image
self.annotation = None
# The XML ElementTree representing the corrections for the current image
self.correctionXML = None
# A list of changes that we did on the current annotation
# Each change is simply a descriptive string
self.changes = []
        # The current object the mouse points to. Its index in self.annotation.objects
        self.mouseObj = -1
        # The currently selected objects. Their indices in self.annotation.objects
self.selObjs = []
# The objects that are highlighted. List of object instances
self.highlightObjs = []
# A label that is selected for highlighting
self.highlightObjLabel = None
# Texture for highlighting
self.highlightTexture = None
# The position of the mouse
self.mousePos = None
# TODO: NEEDS BETTER EXPLANATION/ORGANISATION
self.mousePosOrig = None
# The position of the mouse scaled to label coordinates
self.mousePosScaled = None
# If the mouse is outside of the image
self.mouseOutsideImage = True
# The position of the mouse upon enabling the zoom window
self.mousePosOnZoom = None
# The button state of the mouse
self.mouseButtons = 0
# A list of objects with changed layer
self.changedLayer = []
# A list of objects with changed polygon
self.changedPolygon = []
# A polygon that is drawn by the user
self.drawPoly = QtGui.QPolygonF()
# Treat the polygon as being closed
self.drawPolyClosed = False
# A point of this poly that is dragged
self.draggedPt = -1
# A list of toolbar actions that need an image
self.actImage = []
# A list of toolbar actions that need an image that is not the first
self.actImageNotFirst = []
# A list of toolbar actions that need an image that is not the last
self.actImageNotLast = []
# A list of toolbar actions that need changes
self.actChanges = []
# A list of toolbar actions that need a drawn polygon or selected objects
self.actPolyOrSelObj = []
# A list of toolbar actions that need a closed drawn polygon
self.actClosedPoly = []
# A list of toolbar actions that need selected objects
self.actSelObj = []
# A list of toolbar actions that need a single active selected object
self.singleActSelObj = []
        # Toggle status of automatic screenshots
self.screenshotToggleState = False
# Toggle status of the play icon
self.playState = False
# Temporary zero transparency
self.transpTempZero = False
# Toggle correction mode on and off
self.correctAction = []
self.corrections = []
self.selected_correction = -1
self.in_progress_bbox = None
self.in_progress_correction = None
self.mousePressEvent = []
# Default label
self.defaultLabel = 'static'
        if self.defaultLabel not in name2label:
print( 'The {0} label is missing in the internal label definitions.'.format(self.defaultLabel) )
return
# Last selected label
self.lastLabel = self.defaultLabel
# Setup the GUI
self.initUI()
# Initially clear stuff
self.deselectAllObjects()
self.clearPolygon()
self.clearChanges()
# If we already know a city from the saved config -> load it
self.loadCity()
self.imageChanged()
# Destructor
def __del__(self):
self.config.save(self.configFile)
# Construct everything GUI related. Called by constructor
def initUI(self):
# Create a toolbar
self.toolbar = self.addToolBar('Tools')
# Add the tool buttons
iconDir = os.path.join( os.path.dirname(__file__) , 'icons' )
# Loading a new city
loadAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'open.png' )), '&Tools', self)
loadAction.setShortcuts(['o'])
self.setTip( loadAction, 'Open city' )
loadAction.triggered.connect( self.selectCity )
self.toolbar.addAction(loadAction)
# Open previous image
backAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'back.png')), '&Tools', self)
backAction.setShortcut('left')
backAction.setStatusTip('Previous image')
backAction.triggered.connect( self.prevImage )
self.toolbar.addAction(backAction)
self.actImageNotFirst.append(backAction)
# Open next image
nextAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'next.png')), '&Tools', self)
nextAction.setShortcut('right')
self.setTip( nextAction, 'Next image' )
nextAction.triggered.connect( self.nextImage )
self.toolbar.addAction(nextAction)
self.actImageNotLast.append(nextAction)
# Play
playAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'play.png')), '&Tools', self)
playAction.setShortcut(' ')
playAction.setCheckable(True)
playAction.setChecked(False)
self.setTip( playAction, 'Play all images' )
playAction.triggered.connect( self.playImages )
self.toolbar.addAction(playAction)
self.actImageNotLast.append(playAction)
self.playAction = playAction
# Select image
selImageAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'shuffle.png' )), '&Tools', self)
selImageAction.setShortcut('i')
self.setTip( selImageAction, 'Select image' )
selImageAction.triggered.connect( self.selectImage )
self.toolbar.addAction(selImageAction)
self.actImage.append(selImageAction)
# Save the current image
saveAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'save.png' )), '&Tools', self)
saveAction.setShortcuts('s')
self.setTip( saveAction,'Save changes' )
saveAction.triggered.connect( self.save )
self.toolbar.addAction(saveAction)
self.actChanges.append(saveAction)
# Clear the currently edited polygon
clearPolAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'clearpolygon.png' )), '&Tools', self)
clearPolAction.setShortcuts(['q','Esc'])
self.setTip( clearPolAction, 'Clear polygon' )
clearPolAction.triggered.connect( self.clearPolygonAction )
self.toolbar.addAction(clearPolAction)
self.actPolyOrSelObj.append(clearPolAction)
# Create new object from drawn polygon
newObjAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'newobject.png' )), '&Tools', self)
newObjAction.setShortcuts(['n'])
self.setTip( newObjAction, 'New object' )
newObjAction.triggered.connect( self.newObject )
self.toolbar.addAction(newObjAction)
self.actClosedPoly.append(newObjAction)
# Delete the currently selected object
deleteObjectAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'deleteobject.png' )), '&Tools', self)
deleteObjectAction.setShortcuts(['d','delete'])
self.setTip( deleteObjectAction, 'Delete object' )
deleteObjectAction.triggered.connect( self.deleteObject )
self.toolbar.addAction(deleteObjectAction)
self.actSelObj.append(deleteObjectAction)
# Undo changes in current image, ie. reload labels from file
undoAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'undo.png' )), '&Tools', self)
undoAction.setShortcuts('u')
self.setTip( undoAction,'Undo all unsaved changes' )
undoAction.triggered.connect( self.undo )
self.toolbar.addAction(undoAction)
self.actChanges.append(undoAction)
# Modify the label of a selected object
labelAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'modify.png' )), '&Tools', self)
labelAction.setShortcuts(['m','l'])
self.setTip( labelAction, 'Modify label' )
labelAction.triggered.connect( self.modifyLabel )
self.toolbar.addAction(labelAction)
self.actSelObj.append(labelAction)
# Move selected object a layer up
layerUpAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'layerup.png' )), '&Tools', self)
layerUpAction.setShortcuts(['Up'])
self.setTip( layerUpAction, 'Move object a layer up' )
layerUpAction.triggered.connect( self.layerUp )
self.toolbar.addAction(layerUpAction)
self.singleActSelObj.append(layerUpAction)
# Move selected object a layer down
layerDownAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'layerdown.png' )), '&Tools', self)
layerDownAction.setShortcuts(['Down'])
self.setTip( layerDownAction, 'Move object a layer down' )
layerDownAction.triggered.connect( self.layerDown )
self.toolbar.addAction(layerDownAction)
self.singleActSelObj.append(layerDownAction)
# Enable/disable zoom. Toggle button
zoomAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'zoom.png' )), '&Tools', self)
zoomAction.setShortcuts(['z'])
zoomAction.setCheckable(True)
zoomAction.setChecked(self.config.zoom)
self.setTip( zoomAction, 'Enable/disable permanent zoom' )
zoomAction.toggled.connect( self.zoomToggle )
self.toolbar.addAction(zoomAction)
self.actImage.append(zoomAction)
# Highlight objects of a certain class
highlightAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'highlight.png' )), '&Tools', self)
highlightAction.setShortcuts(['g'])
highlightAction.setCheckable(True)
highlightAction.setChecked(self.config.highlight)
self.setTip( highlightAction, 'Enable/disable highlight of certain object class' )
highlightAction.toggled.connect( self.highlightClassToggle )
self.toolbar.addAction(highlightAction)
self.actImage.append(highlightAction)
# Decrease transparency
minusAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'minus.png' )), '&Tools', self)
minusAction.setShortcut('-')
self.setTip( minusAction, 'Decrease transparency' )
minusAction.triggered.connect( self.minus )
self.toolbar.addAction(minusAction)
# Increase transparency
plusAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'plus.png' )), '&Tools', self)
plusAction.setShortcut('+')
self.setTip( plusAction, 'Increase transparency' )
plusAction.triggered.connect( self.plus )
self.toolbar.addAction(plusAction)
# Take a screenshot
screenshotAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'screenshot.png' )), '&Tools', self)
screenshotAction.setShortcut('t')
self.setTip( screenshotAction, 'Take a screenshot' )
screenshotAction.triggered.connect( self.screenshot )
self.toolbar.addAction(screenshotAction)
self.actImage.append(screenshotAction)
# Take a screenshot in each loaded frame
screenshotToggleAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'screenshotToggle.png' )), '&Tools', self)
screenshotToggleAction.setShortcut('Ctrl+t')
screenshotToggleAction.setCheckable(True)
screenshotToggleAction.setChecked(False)
self.setTip( screenshotToggleAction, 'Take a screenshot in each loaded frame' )
screenshotToggleAction.toggled.connect( self.screenshotToggle )
self.toolbar.addAction(screenshotToggleAction)
self.actImage.append(screenshotToggleAction)
# Display path to current image in message bar
displayFilepathAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'filepath.png' )), '&Tools', self)
displayFilepathAction.setShortcut('f')
self.setTip( displayFilepathAction, 'Show path to current image' )
displayFilepathAction.triggered.connect( self.displayFilepath )
self.toolbar.addAction(displayFilepathAction)
# Open correction mode
self.correctAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'checked6.png' )), '&Tools', self)
self.correctAction.setShortcut('c')
self.correctAction.setCheckable(True)
self.correctAction.setChecked(self.config.correctionMode)
if self.config.correctionMode:
self.correctAction.setIcon(QtGui.QIcon(os.path.join( iconDir , 'checked6_red.png' )))
self.setTip( self.correctAction, 'Toggle correction mode' )
self.correctAction.triggered.connect( self.toggleCorrectionMode )
self.toolbar.addAction(self.correctAction)
# Display help message
helpAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'help19.png' )), '&Tools', self)
helpAction.setShortcut('h')
self.setTip( helpAction, 'Help' )
helpAction.triggered.connect( self.displayHelpMessage )
self.toolbar.addAction(helpAction)
# Close the application
exitAction = QtGui.QAction(QtGui.QIcon( os.path.join( iconDir , 'exit.png' )), '&Tools', self)
#exitAction.setShortcuts(['Esc'])
self.setTip( exitAction, 'Exit' )
exitAction.triggered.connect( self.close )
self.toolbar.addAction(exitAction)
# The default text for the status bar
self.defaultStatusbar = 'Ready'
# Create a statusbar. Init with default
self.statusBar().showMessage( self.defaultStatusbar )
# Enable mouse move events
self.setMouseTracking(True)
self.toolbar.setMouseTracking(True)
# Open in full screen
screenShape = QtGui.QDesktopWidget().screenGeometry()
self.resize(screenShape.width(), screenShape.height())
# Set a title
self.applicationTitle = 'Cityscapes Label Tool v1.0'
self.setWindowTitle(self.applicationTitle)
# And show the application
self.show()
#############################
## Toolbar call-backs
#############################
# The user pressed "select city"
# The purpose of this method is to set these configuration attributes:
# - self.config.city : path to the folder containing the images to annotate
# - self.config.cityName : name of this folder, i.e. the city
# - self.config.labelPath : path to the folder to store the polygons
# - self.config.correctionPath : path to store the correction boxes in
# - self.config.gtType : type of ground truth, e.g. gtFine or gtCoarse
# - self.config.split : type of split, e.g. train, val, test
# The current implementation uses the environment variable 'CITYSCAPES_DATASET'
# to determine the dataset root folder and search available data within.
# Annotation types are required to start with 'gt', e.g. gtFine or gtCoarse.
# To add your own annotations you could create a folder gtCustom with similar structure.
#
# However, this implementation could be easily changed to a completely different folder structure.
# Just make sure to specify all three paths and a descriptive name as 'cityName'.
# The gtType and split can be left empty.
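    # For example, with CITYSCAPES_DATASET=/data/cityscapes the expected layout
    # is (city name illustrative):
    #   /data/cityscapes/leftImg8bit/train/aachen/*_leftImg8bit.png
    #   /data/cityscapes/gtFine/train/aachen/*_gtFine_polygons.json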
def selectCity(self):
# Reset the status bar to this message when leaving
restoreMessage = self.statusBar().currentMessage()
csPath = self.config.csPath
if not csPath or not os.path.isdir(csPath):
if 'CITYSCAPES_DATASET' in os.environ:
csPath = os.environ['CITYSCAPES_DATASET']
else:
csPath = os.path.join(os.path.dirname(os.path.realpath(__file__)),'..','..')
availableCities = []
annotations = sorted( glob.glob( os.path.join( csPath , 'gt*' ) ) )
annotations = [ os.path.basename(a) for a in annotations ]
splits = [ "train_extra" , "train" , "val" , "test" ]
for gt in annotations:
for split in splits:
cities = glob.glob(os.path.join(csPath, gt, split, '*'))
cities.sort()
availableCities.extend( [ (split,gt,os.path.basename(c)) for c in cities if os.path.isdir(c)] )
# List of possible labels
items = [split + ", " + gt + ", " + city for (split,gt,city) in availableCities]
# default
previousItem = self.config.split + ", " + self.config.gtType + ", " + self.config.cityName
default = 0
if previousItem in items:
default = items.index(previousItem)
# Specify title
dlgTitle = "Select city"
        message = "Select city for editing"
        question = "Which city would you like to edit?"
self.statusBar().showMessage(message)
if items:
# Create and wait for dialog
(item, ok) = QtGui.QInputDialog.getItem(self, dlgTitle, question, items, default, False)
# Restore message
self.statusBar().showMessage( restoreMessage )
if ok and item:
(split,gt,city) = [ str(i) for i in item.split(', ') ]
self.config.city = os.path.normpath( os.path.join( csPath, "leftImg8bit" , split , city ) )
self.config.cityName = city
self.config.labelPath = os.path.normpath( os.path.join( csPath, gt , split , city ) )
self.config.correctionPath = os.path.normpath( os.path.join( csPath, gt+'_corrections', split , city ) )
self.config.gtType = gt
self.config.split = split
self.deselectAllObjects()
self.clearPolygon()
self.loadCity()
self.imageChanged()
else:
warning = ""
warning += "The data was not found. Please:\n\n"
warning += " - make sure the scripts folder is in the Cityscapes root folder\n"
warning += "or\n"
warning += " - set CITYSCAPES_DATASET to the Cityscapes root folder\n"
warning += " e.g. 'export CITYSCAPES_DATASET=<root_path>'\n"
reply = QtGui.QMessageBox.information(self, "ERROR!", warning, QtGui.QMessageBox.Ok)
if reply == QtGui.QMessageBox.Ok:
sys.exit()
return
# Switch to previous image in file list
# Load the image
# Load its labels
# Update the mouse selection
# View
def prevImage(self):
if not self.images:
return
if self.idx > 0:
if self.checkAndSave():
self.idx -= 1
self.imageChanged()
return
# Switch to next image in file list
# Load the image
# Load its labels
# Update the mouse selection
# View
def nextImage(self):
if not self.images:
return
if self.idx < len(self.images)-1:
if self.checkAndSave():
self.idx += 1
self.imageChanged()
elif self.playState:
self.playState = False
self.playAction.setChecked(False)
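        # When auto-playing, schedule the next image via a zero-delay timer so
        # the Qt event loop stays responsive instead of recursing directly.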
if self.playState:
QtCore.QTimer.singleShot(0, self.nextImage)
return
# Play images, i.e. auto-switch to next image
def playImages(self, status):
self.playState = status
if self.playState:
QtCore.QTimer.singleShot(0, self.nextImage)
# switch correction mode on and off
def toggleCorrectionMode(self):
if not self.config.correctionMode:
self.config.correctionMode = True
iconDir = os.path.join( os.path.dirname(sys.argv[0]) , 'icons' )
self.correctAction.setIcon(QtGui.QIcon(os.path.join( iconDir , 'checked6_red.png' )))
else:
self.config.correctionMode = False
iconDir = os.path.join( os.path.dirname(sys.argv[0]) , 'icons' )
self.correctAction.setIcon(QtGui.QIcon(os.path.join( iconDir , 'checked6.png' )))
self.update()
return
# Switch to a selected image of the file list
# Ask the user for an image
# Load the image
# Load its labels
# Update the mouse selection
# View
def selectImage(self):
if not self.images:
return
dlgTitle = "Select image to load"
self.statusBar().showMessage(dlgTitle)
items = QtCore.QStringList( [ "{}: {}".format(num,os.path.basename(i)) for (num,i) in enumerate(self.images) ] )
(item, ok) = QtGui.QInputDialog.getItem(self, dlgTitle, "Image", items, self.idx, False)
if (ok and item):
idx = items.indexOf(item)
if idx != self.idx and self.checkAndSave():
self.idx = idx
self.imageChanged()
else:
# Restore the message
self.statusBar().showMessage( self.defaultStatusbar )
# Save labels
def save(self):
# Status
saved = False
# Message to show at the status bar when done
message = ""
# Only save if there are changes, labels, an image filename and an image
if self.changes and (self.annotation or self.corrections) and self.config.currentFile and self.image:
if self.annotation:
# Determine the filename
# If we have a loaded label file, then this is also the filename
filename = self.config.currentLabelFile
# If not, then generate one
if not filename:
filename = self.getLabelFilename(True)
if filename:
proceed = True
                    # warn the user that an existing file will be overwritten
if os.path.isfile(filename) and self.config.showSaveWarning:
msgBox = QtGui.QMessageBox(self)
msgBox.setWindowTitle("Overwriting")
msgBox.setText("Saving overwrites the original file and it cannot be reversed. Do you want to continue?")
msgBox.addButton(QtGui.QMessageBox.Cancel)
okAndNeverAgainButton = msgBox.addButton('OK and never ask again',QtGui.QMessageBox.AcceptRole)
okButton = msgBox.addButton(QtGui.QMessageBox.Ok)
msgBox.setDefaultButton(QtGui.QMessageBox.Ok)
msgBox.setIcon(QtGui.QMessageBox.Warning)
msgBox.exec_()
# User clicked on "OK"
if msgBox.clickedButton() == okButton:
pass
# User clicked on "OK and never ask again"
elif msgBox.clickedButton() == okAndNeverAgainButton:
self.config.showSaveWarning = False
else:
# Do nothing
message += "Nothing saved, no harm has been done. "
proceed = False
# Save JSON file
if proceed:
try:
self.annotation.toJsonFile(filename)
saved = True
message += "Saved labels to {0} ".format(filename)
except IOError as e:
message += "Error writing labels to {0}. Message: {1} ".format( filename, e.strerror )
else:
message += "Error writing labels. Cannot generate a valid filename. "
if self.corrections or self.config.currentCorrectionFile:
# Determine the filename
# If we have a loaded label file, then this is also the filename
filename = self.config.currentCorrectionFile
# If not, then generate one
if not filename:
filename = self.getCorrectionFilename(True)
if filename:
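                    # The resulting XML is roughly (sketch; the per-box nodes are
                    # whatever CorrectionBox.appendToXMLNode emits):
                    #   <correction>
                    #     <filename>...</filename>
                    #     <folder>StereoDataset/<city></folder>
                    #     <source><sourceImage>...</sourceImage><sourceAnnotation>...</sourceAnnotation></source>
                    #     <imagesize><nrows>...</nrows><ncols>...</ncols></imagesize>
                    #     ...one entry per correction box...
                    #   </correction>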
# Prepare the root
root = ET.Element('correction')
root.text = "\n"
root.tail = "\n"
# Add the filename of the image that is annotated
filenameNode = ET.SubElement(root,'filename')
filenameNode.text = os.path.basename(self.config.currentFile)
filenameNode.tail = "\n"
# Add the folder where this image is located in
# For compatibility with the LabelMe Tool, we need to use the folder
# StereoDataset/cityName
folderNode = ET.SubElement(root,'folder')
folderNode.text = "StereoDataset/" + self.config.cityName
folderNode.tail = "\n"
# The name of the tool. Here, we do not follow the output of the LabelMe tool,
# since this is crap anyway
sourceNode = ET.SubElement(root,'source')
sourceNode.text = "\n"
sourceNode.tail = "\n"
sourceImageNode = ET.SubElement(sourceNode,'sourceImage')
sourceImageNode.text = "Label Cities"
sourceImageNode.tail = "\n"
sourceAnnotationNode = ET.SubElement(sourceNode,'sourceAnnotation')
sourceAnnotationNode.text = "mcLabelTool"
sourceAnnotationNode.tail = "\n"
# The image size
imagesizeNode = ET.SubElement(root,'imagesize')
imagesizeNode.text = "\n"
imagesizeNode.tail = "\n"
nrowsNode = ET.SubElement(imagesizeNode,'nrows')
nrowsNode.text = str(self.image.height())
nrowsNode.tail = "\n"
ncolsNode = ET.SubElement(imagesizeNode,'ncols')
                    ncolsNode.text = str(self.image.width())
ncolsNode.tail = "\n"
# Add all objects
for correction in self.corrections:
correction.appendToXMLNode(root)
# Create the actual XML tree
self.correctionXML = ET.ElementTree(root)
# Save XML file
try:
self.correctionXML.write(filename)
saved = True
message += "Saved corrections to {0} ".format(filename)
except IOError as e:
message += "Error writing corrections to {0}. Message: {1} ".format( filename, e.strerror )
else:
message += "Error writing corrections. Cannot generate a valid filename. "
# Clear changes
if saved:
self.clearChanges()
else:
message += "Nothing to save "
saved = True
# Show the status message
self.statusBar().showMessage(message)
return saved
# Undo changes, ie. reload labels
def undo(self):
# check if we really want to do this in case there are multiple changes
if len( self.changes ) > 1:
# Backup of status message
restoreMessage = self.statusBar().currentMessage()
# Create the dialog
dlgTitle = "Undo changes?"
self.statusBar().showMessage(dlgTitle)
text = "Do you want to undo the following changes?\n"
for c in self.changes:
text += "- " + c + '\n'
buttons = QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel
ret = QtGui.QMessageBox.question(self, dlgTitle, text, buttons, QtGui.QMessageBox.Ok )
proceed = False
# If the user selected yes -> undo
if ret == QtGui.QMessageBox.Ok:
proceed = True
self.statusBar().showMessage( restoreMessage )
# If we do not proceed -> return
if not proceed:
return
# Clear labels to force a reload
self.annotation = None
# Reload
self.imageChanged()
# Clear the drawn polygon and update
def clearPolygonAction(self):
self.deselectAllObjects()
self.clearPolygon()
self.update()
# Create a new object from the current polygon
def newObject(self):
# Default label
label = self.lastLabel
# Ask the user for a label
(label, ok) = self.getLabelFromUser( label )
if ok and label:
# Append and create the new object
self.appendObject( label , self.drawPoly )
# Clear the drawn polygon
self.deselectAllObjects()
self.clearPolygon()
# Default message
self.statusBar().showMessage( self.defaultStatusbar )
# Set as default label for next time
self.lastLabel = label
# Redraw
self.update()
# Delete the currently selected object
def deleteObject(self):
# Cannot do anything without a selected object
if not self.selObjs:
return
# Cannot do anything without labels
if not self.annotation:
return
for selObj in self.selObjs:
# The selected object that is deleted
obj = self.annotation.objects[selObj]
# Delete
obj.delete()
# Save changes
self.addChange( "Deleted object {0} with label {1}".format( obj.id, obj.label ) )
# Clear polygon
self.deselectAllObjects()
self.clearPolygon()
# Redraw
self.update()
# Modify the label of a selected object
def modifyLabel(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without a selected object
if not self.selObjs:
return
# The last selected object
obj = self.annotation.objects[self.selObjs[-1]]
# default label
defaultLabel = obj.label
defaultId = -1
# If there is only one object the dialog text can be improved
if len(self.selObjs) == 1:
defaultId = obj.id
(label, ok) = self.getLabelFromUser( defaultLabel , defaultId )
if ok and label:
for selObj in self.selObjs:
# The selected object that is modified
obj = self.annotation.objects[selObj]
# Save changes
if obj.label != label:
self.addChange( "Set label {0} for object {1} with previous label {2}".format( label, obj.id, obj.label ) )
obj.label = label
obj.updateDate()
# Update
self.update()
# Move object a layer up
def layerUp(self):
# Change layer
self.modifyLayer(+1)
# Update
self.update()
# Move object a layer down
def layerDown(self):
# Change layer
self.modifyLayer(-1)
# Update
self.update()
# Toggle zoom
def zoomToggle(self, status):
self.config.zoom = status
if status :
self.mousePosOnZoom = self.mousePos
self.update()
# Toggle highlight
def highlightClassToggle(self, status):
if status :
defaultLabel = ""
if self.config.highlightLabelSelection and self.config.highlightLabelSelection in name2label:
defaultLabel = self.config.highlightLabelSelection
(label, ok) = self.getLabelFromUser( defaultLabel )
if ok and label:
self.config.highlightLabelSelection = label
else:
status = False
self.config.highlight = status
self.update()
# Increase label transparency
def minus(self):
self.config.transp = max(self.config.transp-0.1,0.0)
self.update()
def displayFilepath(self):
self.statusBar().showMessage("Current image: {0}".format( self.config.currentFile ))
self.update()
# Decrease label transparency
def plus(self):
self.config.transp = min(self.config.transp+0.1,1.0)
self.update()
# Take a screenshot
def screenshot(self):
# Get a filename for saving
dlgTitle = "Get screenshot filename"
filter = "Images (*.png *.xpm *.jpg)"
answer = QtGui.QFileDialog.getSaveFileName(self, dlgTitle, self.config.screenshotFilename,filter, options=QtGui.QFileDialog.DontUseNativeDialog)
if answer:
self.config.screenshotFilename = str(answer)
else:
return
# Actually make the screenshot
self.doScreenshot()
# Toggle auto-making of screenshots
def screenshotToggle(self, status):
self.screenshotToggleState = status
if status:
self.screenshot()
def displayHelpMessage(self):
message = self.applicationTitle + "\n\n"
message += "INSTRUCTIONS\n"
message += " - press open (left button) to select a city from drop-down menu\n"
message += " - browse images and edit labels using\n"
message += " the toolbar buttons (check tooltips) and the controls below\n"
message += " - note that the editing happens in-place;\n"
message += " if you want to annotate your own images or edit a custom\n"
message += " set of labels, check (and modify) the code of the method 'loadCity'\n"
message += " - note that this tool modifys the JSON polygon files, but\n"
message += " does not create or update the pngs; for the latter use\n"
message += " the preparation tools that come with this tool box.\n"
message += "\n"
message += "CONTROLS\n"
message += " - highlight objects [move mouse]\n"
message += " - draw new polygon\n"
message += " - start drawing a polygon [left click]\n"
message += " - add point to open polygon [left click]\n"
message += " - delete last added point [Backspace]\n"
message += " - close polygon [left click on first point]\n"
message += " - select closed polygon, existing object [Ctrl + left click]\n"
message += " - move point [left click and hold on point, move mouse]\n"
message += " - add point [click on edge]\n"
message += " - delete point from polygon [Shift + left click on point]\n"
message += " - deselect polygon [Q]\n"
message += " - select multiple polygons [Ctrl + left click]\n"
message += " - intersect/merge two polygons: draw new polygon, then\n"
message += " - intersect [Shift + left click on existing polygon]\n"
message += " - merge [Alt + left click on existing polygon]\n"
message += " - open zoom window [Z or hold down right mouse button]\n"
message += " - zoom in/out [mousewheel]\n"
message += " - enlarge/shrink zoom window [shift+mousewheel]\n"
message += " - start correction mode [C]\n"
message += " - draw a correction box [left click and hold, move, release]\n"
message += " - set box type [1,2,3,4]\n"
message += " - previous/next box [E,R]\n"
message += " - delete box [D]\n"
message += " - modify text, use ascii only [M]\n"
QtGui.QMessageBox.about(self, "HELP!", message)
self.update()
# Close the application
def closeEvent(self,event):
if self.checkAndSave():
event.accept()
else:
event.ignore()
#############################
## Custom events
#############################
def imageChanged(self):
# Clear corrections
self.corrections = []
self.selected_correction = -1
# Clear the polygon
self.deselectAllObjects()
self.clearPolygon()
# Load the first image
self.loadImage()
# Load its labels if available
self.loadLabels()
# Load its corrections if available
self.loadCorrections()
# Update the object the mouse points to
self.updateMouseObject()
# Update the GUI
self.update()
# Save screenshot if set
if self.screenshotToggleState:
self.doScreenshot()
#############################
## File I/O
#############################
# Load the currently selected city if possible
def loadCity(self):
# Search for all *.pngs to get the image list
self.images = []
if os.path.isdir(self.config.city):
self.images = glob.glob( os.path.join( self.config.city , '*' + self.imageExt ) )
self.images.sort()
if self.config.currentFile in self.images:
self.idx = self.images.index(self.config.currentFile)
else:
self.idx = 0
# Load the currently selected image
# Does only load if not previously loaded
# Does not refresh the GUI
def loadImage(self):
success = False
message = self.defaultStatusbar
if self.images:
filename = self.images[self.idx]
filename = os.path.normpath( filename )
if not self.image.isNull() and filename == self.config.currentFile:
success = True
else:
self.image = QtGui.QImage(filename)
if self.image.isNull():
message = "Failed to read image: {0}".format( filename )
else:
message = "Read image: {0}".format( filename )
self.config.currentFile = filename
success = True
# Update toolbar actions that need an image
for act in self.actImage:
act.setEnabled(success)
for act in self.actImageNotFirst:
act.setEnabled(success and self.idx > 0)
for act in self.actImageNotLast:
act.setEnabled(success and self.idx < len(self.images)-1)
self.statusBar().showMessage(message)
# Load the labels from file
# Only loads if they exist
# Otherwise the filename is stored and that's it
def loadLabels(self):
filename = self.getLabelFilename()
if not filename or not os.path.isfile(filename):
self.clearAnnotation()
return
# If we have everything and the filename did not change, then we are good
if self.annotation and filename == self.currentLabelFile:
return
# Clear the current labels first
self.clearAnnotation()
try:
self.annotation = Annotation()
self.annotation.fromJsonFile(filename)
except IOError as e:
# This is the error if the file does not exist
message = "Error parsing labels in {0}. Message: {1}".format( filename, e.strerror )
self.statusBar().showMessage(message)
# Remember the filename loaded
self.currentLabelFile = filename
        # Remember the status bar message to restore it later
restoreMessage = self.statusBar().currentMessage()
# Restore the message
self.statusBar().showMessage( restoreMessage )
# Load the labels from file
# Only loads if they exist
# Otherwise the filename is stored and that's it
def loadCorrections(self): #TODO
filename = self.getCorrectionFilename()
if not filename:
self.clearCorrections()
return
# If we have everything and the filename did not change, then we are good
if self.correctionXML and self.corrections and filename == self.config.currentCorrectionFile:
return
# Clear the current labels first
self.clearCorrections()
        # We do not always expect corrections, therefore do not fail on a missing file
if not os.path.isfile(filename):
return
try:
# Try to parse the XML file
self.correctionXML = ET.parse( filename )
except IOError as e:
# This is the error if the file does not exist
message = "Error parsing corrections in {0}. Message: {1}".format( filename, e.strerror )
self.statusBar().showMessage(message)
self.correctionXML = []
return
except ET.ParseError as e:
# This is the error if the content is no valid XML
message = "Error parsing corrections in {0}. Message: {1}".format( filename, e )
self.statusBar().showMessage(message)
self.correctionXML = []
return
# Remember the filename loaded
self.config.currentCorrectionFile = filename
        # Remember the status bar message to restore it later
restoreMessage = self.statusBar().currentMessage()
# Iterate through all objects in the XML
root = self.correctionXML.getroot()
for i, objNode in enumerate(root.findall('correction')):
# Instantate a new object and read the XML node
obj = CorrectionBox()
obj.readFromXMLNode( objNode )
if i == 0:
self.selected_correction = 0
obj.select()
# Append the object to our list of labels
self.corrections.append(obj)
# Restore the message
self.statusBar().showMessage( restoreMessage )
def modify_correction_type(self, correction_type):
if self.selected_correction >= 0:
self.corrections[self.selected_correction].type = correction_type
self.addChange( "Modified correction type.")
self.update()
return
def delete_selected_annotation(self):
if self.selected_correction >= 0 and self.config.correctionMode:
del self.corrections[self.selected_correction]
if self.selected_correction == len(self.corrections):
self.selected_correction = self.selected_correction - 1
if self.selected_correction >= 0:
self.corrections[self.selected_correction].select()
self.addChange( "Deleted correction.")
self.update()
return
def modify_correction_description(self):
if self.selected_correction >= 0 and self.config.correctionMode:
description = QtGui.QInputDialog.getText(self, "Modify Error Description", "Please describe the labeling error briefly.",
text = self.corrections[self.selected_correction].annotation)
if description[1]:
self.corrections[self.selected_correction].annotation = description[0]
self.addChange( "Changed correction description.")
self.update()
return
def select_next_correction(self):
if self.selected_correction >= 0:
self.corrections[self.selected_correction].unselect()
if self.selected_correction == (len(self.corrections) - 1) :
self.selected_correction = 0
else :
self.selected_correction = self.selected_correction + 1
self.corrections[self.selected_correction].select()
self.update()
return
def select_previous_correction(self):
if self.selected_correction >= 0 :
self.corrections[self.selected_correction].unselect()
if self.selected_correction == 0 :
self.selected_correction = (len(self.corrections) - 1)
else :
self.selected_correction = self.selected_correction - 1
self.corrections[self.selected_correction].select()
self.update()
return
#############################
## Drawing
#############################
# This method is called when redrawing everything
# Can be manually triggered by self.update()
# Note that there must not be any other self.update within this method
# or any methods that are called within
def paintEvent(self, event):
# Create a QPainter that can perform draw actions within a widget or image
qp = QtGui.QPainter()
# Begin drawing in the application widget
qp.begin(self)
# Update scale
self.updateScale(qp)
# Determine the object ID to highlight
self.getHighlightedObject(qp)
# Draw the image first
self.drawImage(qp)
# Draw the labels on top
overlay = self.drawLabels(qp)
# Draw the user drawn polygon
self.drawDrawPoly(qp)
self.drawDrawRect(qp)
# Draw the label name next to the mouse
self.drawLabelAtMouse(qp)
# Draw the zoom
# self.drawZoom(qp, overlay)
self.drawZoom(qp,None)
        # That's all the drawing
qp.end()
# Forward the paint event
QtGui.QMainWindow.paintEvent(self,event)
# Update the scaling
def updateScale(self, qp):
if not self.image.width() or not self.image.height():
return
# Horizontal offset
self.xoff = self.bordergap
# Vertical offset
self.yoff = self.toolbar.height()+self.bordergap
# We want to make sure to keep the image aspect ratio and to make it fit within the widget
# Without keeping the aspect ratio, each side of the image is scaled (multiplied) with
sx = float(qp.device().width() - 2*self.xoff) / self.image.width()
sy = float(qp.device().height() - 2*self.yoff) / self.image.height()
# To keep the aspect ratio while making sure it fits, we use the minimum of both scales
# Remember the scale for later
self.scale = min( sx , sy )
# These are then the actual dimensions used
self.w = self.scale * self.image.width()
self.h = self.scale * self.image.height()
# Determine the highlighted object for drawing
def getHighlightedObject(self, qp):
# These variables we want to fill
self.highlightObjs = []
self.highlightObjLabel = None
# Without labels we cannot do so
if not self.annotation:
return
# If available set the selected objects
highlightObjIds = self.selObjs
        # If not available, but the polygon is empty or closed, it's the mouse object
if not highlightObjIds and (self.drawPoly.isEmpty() or self.drawPolyClosed) and self.mouseObj>=0 and not self.mouseOutsideImage:
highlightObjIds = [self.mouseObj]
# Get the actual object that is highlighted
if highlightObjIds:
self.highlightObjs = [ self.annotation.objects[i] for i in highlightObjIds ]
# Set the highlight object label if appropriate
if self.config.highlight:
self.highlightObjLabel = self.config.highlightLabelSelection
elif len(highlightObjIds) == 1 and self.config.correctionMode:
self.highlightObjLabel = self.annotation.objects[highlightObjIds[-1]].label
# Draw the image in the given QPainter qp
def drawImage(self, qp):
# Return if no image available
if self.image.isNull():
return
# Save the painters current setting to a stack
qp.save()
# Draw the image
qp.drawImage(QtCore.QRect( self.xoff, self.yoff, self.w, self.h ), self.image)
# Restore the saved setting from the stack
qp.restore()
def getPolygon(self, obj):
poly = QtGui.QPolygonF()
for pt in obj.polygon:
point = QtCore.QPointF(pt.x,pt.y)
poly.append( point )
return poly
# Draw the labels in the given QPainter qp
# optionally provide a list of labels to ignore
    def drawLabels(self, qp, ignore = None):
        # Avoid a mutable default argument
        if ignore is None:
            ignore = []
if self.image.isNull() or self.w <= 0 or self.h <= 0:
return
if not self.annotation:
return
if self.transpTempZero:
return
# The overlay is created in the viewing coordinates
# This way, the drawing is more dense and the polygon edges are nicer
# We create an image that is the overlay
# Within this image we draw using another QPainter
# Finally we use the real QPainter to overlay the overlay-image on what is drawn so far
# The image that is used to draw the overlays
overlay = QtGui.QImage( self.w, self.h, QtGui.QImage.Format_ARGB32_Premultiplied )
# Fill the image with the default color
defaultLabel = name2label[self.defaultLabel]
col = QtGui.QColor( *defaultLabel.color )
overlay.fill( col )
# Create a new QPainter that draws in the overlay image
qp2 = QtGui.QPainter()
qp2.begin(overlay)
# The color of the outlines
qp2.setPen(QtGui.QColor('white'))
# Draw all objects
for obj in self.annotation.objects:
# Some are flagged to not be drawn. Skip them
if not obj.draw:
continue
# The label of the object
name = assureSingleInstanceName( obj.label )
# If we do not know a color for this label, warn the user
            if name not in name2label:
                print( "The annotations contain unknown labels. This should not happen. Please inform the dataset's authors. Thank you!" )
print( "Details: label '{}', file '{}'".format(name,self.currentLabelFile) )
continue
# If we ignore this label, skip
if name in ignore:
continue
poly = self.getPolygon(obj)
# Scale the polygon properly
polyToDraw = poly * QtGui.QTransform.fromScale(self.scale,self.scale)
# Default drawing
# Color from color table, solid brush
col = QtGui.QColor( *name2label[name].color )
brush = QtGui.QBrush( col, QtCore.Qt.SolidPattern )
qp2.setBrush(brush)
# Overwrite drawing if this is the highlighted object
if ( obj in self.highlightObjs or name == self.highlightObjLabel ):
# First clear everything below of the polygon
qp2.setCompositionMode( QtGui.QPainter.CompositionMode_Clear )
qp2.drawPolygon( polyToDraw )
qp2.setCompositionMode( QtGui.QPainter.CompositionMode_SourceOver )
# Set the drawing to a special pattern
brush = QtGui.QBrush(col,QtCore.Qt.DiagCrossPattern)
qp2.setBrush(brush)
qp2.drawPolygon( polyToDraw )
# Draw outline of selected object dotted
for obj in self.highlightObjs:
brush = QtGui.QBrush(QtCore.Qt.NoBrush)
qp2.setBrush(brush)
qp2.setPen(QtCore.Qt.DashLine)
polyToDraw = self.getPolygon(obj) * QtGui.QTransform.fromScale(self.scale,self.scale)
qp2.drawPolygon( polyToDraw )
# End the drawing of the overlay
qp2.end()
# Save QPainter settings to stack
qp.save()
# Define transparency
qp.setOpacity(self.config.transp)
# Draw the overlay image
qp.drawImage(self.xoff,self.yoff,overlay)
# Restore settings
qp.restore()
return overlay
def drawDrawRect(self, qp):
qp.save()
qp.setBrush(QtGui.QBrush(QtCore.Qt.NoBrush))
qp.setFont(QtGui.QFont('QFont::AnyStyle', 14))
thickPen = QtGui.QPen()
qp.setPen(thickPen)
for c in self.corrections:
rect = copy.deepcopy(c.bbox)
width = rect.width()
height = rect.height()
rect.setX(c.bbox.x() * self.scale + self.xoff)
rect.setY(c.bbox.y() * self.scale + self.yoff)
rect.setWidth(width * self.scale)
rect.setHeight(height * self.scale)
if c.selected:
thickPen.setColor(QtGui.QColor(0,0,0))
if c.type == CorrectionBox.types.QUESTION:
descr = "QUESTION"
elif c.type == CorrectionBox.types.RESOLVED:
descr = "FIXED"
else:
descr = "ERROR"
qp.setPen(thickPen)
qp.drawText(QtCore.QPoint( self.xoff, self.yoff + self.h + 20 ),
"(%s: %s)" % (descr, c.annotation))
pen_width = 6
else:
pen_width = 3
colour = c.get_colour()
thickPen.setColor(colour)
thickPen.setWidth(pen_width)
qp.setPen(thickPen)
qp.drawRect(rect)
if self.in_progress_bbox is not None:
rect = copy.deepcopy(self.in_progress_bbox)
width = rect.width()
height = rect.height()
rect.setX(self.in_progress_bbox.x() * self.scale + self.xoff)
rect.setY(self.in_progress_bbox.y() * self.scale + self.yoff)
rect.setWidth(width * self.scale)
rect.setHeight(height * self.scale)
thickPen.setColor(QtGui.QColor(255,0,0))
thickPen.setWidth(3)
qp.setPen(thickPen)
qp.drawRect(rect)
qp.restore()
# Draw the polygon that is drawn and edited by the user
    # Usually the polygon must be rescaled properly. However, when drawing
    # the polygon within the zoom, this is not needed; hence the transform option.
def drawDrawPoly(self, qp, transform=None):
# Nothing to do?
if self.drawPoly.isEmpty():
return
if not self.image:
return
# Save QPainter settings to stack
qp.save()
# The polygon - make a copy
poly = QtGui.QPolygonF(self.drawPoly)
# Append the current mouse position
if not self.drawPolyClosed and (self.mousePosScaled is not None):
poly.append( self.mousePosScaled )
# Transform
if not transform:
poly = poly * QtGui.QTransform.fromScale(self.scale,self.scale)
poly.translate(self.xoff,self.yoff)
else:
poly = poly * transform
# Do not fill the polygon
qp.setBrush(QtGui.QBrush(QtCore.Qt.NoBrush))
# Draw the polygon edges
polyColor = QtGui.QColor(255,0,0)
qp.setPen(polyColor)
if not self.drawPolyClosed:
qp.drawPolyline( poly )
else:
qp.drawPolygon( poly )
# Get the ID of the closest point to the mouse
if self.mousePosScaled is not None:
closestPt = self.getClosestPoint( self.drawPoly, self.mousePosScaled )
else:
closestPt = (-1,-1)
# If a polygon edge is selected, draw in bold
if closestPt[0] != closestPt[1]:
thickPen = QtGui.QPen(polyColor)
thickPen.setWidth(3)
qp.setPen(thickPen)
qp.drawLine( poly[closestPt[0]], poly[closestPt[1]] )
# Draw the polygon points
qp.setPen(polyColor)
startDrawingPts = 0
# A bit different if not closed
if not self.drawPolyClosed:
# Draw
self.drawPoint( qp, poly.first(), True, closestPt==(0,0) and self.drawPoly.size()>1 )
# Do not draw again
startDrawingPts = 1
# The next in red
for pt in range(startDrawingPts,poly.size()):
self.drawPoint( qp, poly[pt], False, self.drawPolyClosed and closestPt==(pt,pt) )
# Restore QPainter settings from stack
qp.restore()
# Draw the label name next to the mouse
def drawLabelAtMouse(self, qp):
# Nothing to do without a highlighted object
if not self.highlightObjs:
return
# Also we do not want to draw the label, if we have a drawn polygon
if not self.drawPoly.isEmpty():
return
        # Nothing to do without a mouse position
if not self.mousePos:
return
# Save QPainter settings to stack
qp.save()
        # That is the mouse position
mouse = self.mousePos
# Will show zoom
showZoom = self.config.zoom and not self.image.isNull() and self.w and self.h
# The text that is written next to the mouse
mouseText = self.highlightObjs[-1].label
# Where to write the text
# Depends on the zoom (additional offset to mouse to make space for zoom?)
# The location in the image (if we are at the top we want to write below of the mouse)
off = 36
if showZoom:
off += self.config.zoomSize/2
if mouse.y()-off > self.toolbar.height():
top = mouse.y()-off
btm = mouse.y()
vAlign = QtCore.Qt.AlignTop
else:
# The height of the cursor
if not showZoom:
off += 20
top = mouse.y()
btm = mouse.y()+off
vAlign = QtCore.Qt.AlignBottom
# Here we can draw
rect = QtCore.QRect()
rect.setTopLeft(QtCore.QPoint(mouse.x()-100,top))
rect.setBottomRight(QtCore.QPoint(mouse.x()+100,btm))
# The color
qp.setPen(QtGui.QColor('white'))
# The font to use
font = QtGui.QFont("Helvetica",20,QtGui.QFont.Bold)
qp.setFont(font)
# Non-transparent
qp.setOpacity(1)
# Draw the text, horizontally centered
qp.drawText(rect,QtCore.Qt.AlignHCenter|vAlign,mouseText)
# Restore settings
qp.restore()
# Draw the zoom
def drawZoom(self,qp,overlay):
# Zoom disabled?
if not self.config.zoom:
return
# No image
if self.image.isNull() or not self.w or not self.h:
return
# No mouse
if not self.mousePos:
return
        # Abbreviation for the zoom window size
zoomSize = self.config.zoomSize
        # Abbreviation for the mouse position
mouse = self.mousePos
# The pixel that is the zoom center
pix = self.mousePosScaled
# The size of the part of the image that is drawn in the zoom window
selSize = zoomSize / ( self.config.zoomFactor * self.config.zoomFactor )
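        # Note the effective magnification is zoomFactor^2: a view of zoomSize
        # pixels shows an image patch of zoomSize / zoomFactor^2 pixels.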
# The selection window for the image
sel = QtCore.QRectF(pix.x() -selSize/2 ,pix.y() -selSize/2 ,selSize,selSize )
# The selection window for the widget
view = QtCore.QRectF(mouse.x()-zoomSize/2,mouse.y()-zoomSize/2,zoomSize,zoomSize)
# Show the zoom image
qp.drawImage(view,self.image,sel)
# If we are currently drawing the polygon, we need to draw again in the zoom
if not self.drawPoly.isEmpty():
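            # Build the projective mapping from the selected image patch (quadFrom)
            # onto the zoom window (quadTo), so the in-progress polygon can be
            # re-drawn inside the magnified view.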
transform = QtGui.QTransform()
quadFrom = QtGui.QPolygonF()
quadFrom.append( sel.topLeft() )
quadFrom.append( sel.topRight() )
quadFrom.append( sel.bottomRight() )
quadFrom.append( sel.bottomLeft() )
quadTo = QtGui.QPolygonF()
quadTo.append( view.topLeft() )
quadTo.append( view.topRight() )
quadTo.append( view.bottomRight() )
quadTo.append( view.bottomLeft() )
if QtGui.QTransform.quadToQuad( quadFrom , quadTo , transform ):
qp.setClipRect(view)
#transform.translate(self.xoff,self.yoff)
self.drawDrawPoly(qp,transform)
else:
print( "not possible" )
#############################
## Mouse/keyboard events
#############################
# Mouse moved
# Need to save the mouse position
# Need to drag a polygon point
# Need to update the mouse selected object
def mouseMoveEvent(self,event):
if self.image.isNull() or self.w == 0 or self.h == 0:
return
self.updateMousePos( event.posF() )
if not self.config.correctionMode:
# If we are dragging a point, update
if self.draggedPt >= 0:
# Update the dragged point
self.drawPoly.replace( self.draggedPt , self.mousePosScaled )
# If the polygon is the polygon of the selected object,
# update the object polygon and
# keep track of the changes we do
if self.selObjs:
obj = self.annotation.objects[self.selObjs[-1]]
obj.polygon[self.draggedPt] = Point(self.mousePosScaled.x(),self.mousePosScaled.y())
# Check if we changed the object's polygon the first time
if not obj.id in self.changedPolygon:
self.changedPolygon.append(obj.id)
self.addChange( "Changed polygon of object {0} with label {1}".format( obj.id, obj.label ) )
else:
if self.in_progress_bbox is not None:
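                # Normalize the dragged rectangle so (x,y) is always the top-left
                # corner, regardless of the drag direction.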
p0 = (self.mousePosScaled.x(), self.mousePosScaled.y())
                p1 = (self.mousePosOnPress.x(), self.mousePosOnPress.y())
xy = min(p0[0], p1[0]), min(p0[1], p1[1])
w, h = abs(p0[0] - p1[0]), abs(p0[1] - p1[1])
self.in_progress_bbox = QtCore.QRectF(xy[0], xy[1], w, h)
# Update the object selected by the mouse
self.updateMouseObject()
# Redraw
self.update()
# Mouse left the widget
def leaveEvent(self, event):
self.mousePos = None
self.mousePosScaled = None
self.mouseOutsideImage = True
# Mouse button pressed
# Start dragging of polygon point
# Enable temporary toggling of zoom
def mousePressEvent(self,event):
self.mouseButtons = event.buttons()
shiftPressed = QtGui.QApplication.keyboardModifiers() == QtCore.Qt.ShiftModifier
self.updateMousePos( event.posF() )
        # Remember the press position under a dedicated name; assigning to
        # self.mousePressEvent would shadow this very event handler.
        self.mousePosOnPress = self.mousePosScaled
# Handle left click
if event.button() == QtCore.Qt.LeftButton:
# If the drawn polygon is closed and the mouse clicks a point,
# Then this one is dragged around
if not self.config.correctionMode:
if self.drawPolyClosed and (self.mousePosScaled is not None):
closestPt = self.getClosestPoint( self.drawPoly, self.mousePosScaled )
if shiftPressed :
if closestPt[0] == closestPt[1]:
del self.drawPoly[closestPt[0]]
# If the polygon is the polygon of the selected object,
# update the object
# and keep track of the changes we do
if self.selObjs:
obj = self.annotation.objects[self.selObjs[-1]]
del obj.polygon[closestPt[0]]
# Check if we changed the object's polygon the first time
if not obj.id in self.changedPolygon:
self.changedPolygon.append(obj.id)
self.addChange( "Changed polygon of object {0} with label {1}".format( obj.id, obj.label ) )
self.update()
else :
# If we got a point (or nothing), we make it dragged
if closestPt[0] == closestPt[1]:
self.draggedPt = closestPt[0]
# If we got an edge, we insert a point and make it dragged
else:
self.drawPoly.insert( closestPt[1] , self.mousePosScaled )
self.draggedPt = closestPt[1]
# If the polygon is the polygon of the selected object,
# update the object
# and keep track of the changes we do
if self.selObjs:
obj = self.annotation.objects[self.selObjs[-1]]
obj.polygon.insert( closestPt[1] , Point( self.mousePosScaled.x() , self.mousePosScaled.y() ) )
# Check if we changed the object's polygon the first time
if not obj.id in self.changedPolygon:
self.changedPolygon.append(obj.id)
self.addChange( "Changed polygon of object {0} with label {1}".format( obj.id, obj.label ) )
else:
                assert self.in_progress_bbox is None
self.in_progress_bbox = QtCore.QRectF(self.mousePosScaled.x(), self.mousePosScaled.y(), 0, 0)
# Handle right click
elif event.button() == QtCore.Qt.RightButton:
self.toggleZoom( event.posF() )
# Redraw
self.update()
# Mouse button released
# End dragging of polygon
# Select an object
# Add a point to the polygon
# Disable temporary toggling of zoom
def mouseReleaseEvent(self,event):
self.mouseButtons = event.buttons()
ctrlPressed = event.modifiers() & QtCore.Qt.ControlModifier
shiftPressed = event.modifiers() & QtCore.Qt.ShiftModifier
altPressed = event.modifiers() & QtCore.Qt.AltModifier
# Handle left click
if event.button() == QtCore.Qt.LeftButton:
if not self.config.correctionMode:
# Check if Ctrl is pressed
if ctrlPressed:
# If also Shift is pressed and we have a closed polygon, then we intersect
# the polygon with the mouse object
if shiftPressed and self.drawPolyClosed:
self.intersectPolygon()
# If also Alt is pressed and we have a closed polygon, then we merge
# the polygon with the mouse object
if altPressed and self.drawPolyClosed:
self.mergePolygon()
# Make the current mouse object the selected
# and process the selection
else:
self.selectObject()
# Add the point to the drawn polygon if not already closed
elif not self.drawPolyClosed:
# If the mouse would close the poly make sure to do so
if self.ptClosesPoly( ):
self.closePolygon()
elif self.mousePosScaled is not None:
if not self.drawPolyClosed and self.drawPoly.isEmpty() :
self.mousePosOnZoom = self.mousePos
self.addPtToPoly( self.mousePosScaled )
# Otherwise end a possible dragging
elif self.drawPolyClosed:
self.draggedPt = -1
else:
if self.in_progress_bbox is not None:
if self.in_progress_bbox.width() > 20:
description = QtGui.QInputDialog.getText(self, "Error Description", "Please describe the labeling error briefly.")
if description[1] and description[0]:
self.corrections.append(CorrectionBox(self.in_progress_bbox, annotation=description[0]))
#last_annotation = self.in_progress_annotation #TODO: self?
self.corrections[self.selected_correction].unselect()
self.selected_correction = len(self.corrections)-1
self.corrections[self.selected_correction].select()
self.addChange( "Added correction.")
self.in_progress_annotation = None
self.in_progress_bbox = None
# Handle right click
elif event.button() == QtCore.Qt.RightButton:
self.toggleZoom( event.posF() )
# Redraw
self.update()
# Mouse wheel scrolled
def wheelEvent(self, event):
deltaDegree = event.delta() / 8 # Rotation in degree
deltaSteps = deltaDegree / 15 # Usually one step on the mouse is 15 degrees
if self.config.zoom:
# If shift is pressed, change zoom window size
            if event.modifiers() & QtCore.Qt.ShiftModifier:
self.config.zoomSize += deltaSteps * 10
self.config.zoomSize = max( self.config.zoomSize, 10 )
self.config.zoomSize = min( self.config.zoomSize, 1000 )
# Change zoom factor
else:
self.config.zoomFactor += deltaSteps * 0.05
self.config.zoomFactor = max( self.config.zoomFactor, 0.1 )
self.config.zoomFactor = min( self.config.zoomFactor, 10 )
self.update()
# Key pressed
def keyPressEvent(self,e):
# Ctrl key changes mouse cursor
if e.key() == QtCore.Qt.Key_Control:
QtGui.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
# Backspace deletes last point from polygon
elif e.key() == QtCore.Qt.Key_Backspace:
        if not self.drawPolyClosed and not self.drawPoly.isEmpty():
            del self.drawPoly[-1]
self.update()
# set alpha to temporary zero
elif e.key() == QtCore.Qt.Key_0:
self.transpTempZero = True
self.update()
elif e.key() == QtCore.Qt.Key_E:
self.select_next_correction()
elif e.key() == QtCore.Qt.Key_R:
self.select_previous_correction()
elif e.key() == QtCore.Qt.Key_1:
self.modify_correction_type(CorrectionBox.types.TO_CORRECT)
elif e.key() == QtCore.Qt.Key_2:
self.modify_correction_type(CorrectionBox.types.TO_REVIEW)
elif e.key() == QtCore.Qt.Key_3:
self.modify_correction_type(CorrectionBox.types.RESOLVED)
elif e.key() == QtCore.Qt.Key_4:
self.modify_correction_type(CorrectionBox.types.QUESTION)
elif e.key() == QtCore.Qt.Key_D and self.config.correctionMode:
self.delete_selected_annotation()
elif e.key() == QtCore.Qt.Key_M and self.config.correctionMode:
self.modify_correction_description()
# Key released
def keyReleaseEvent(self,e):
# Ctrl key changes mouse cursor
if e.key() == QtCore.Qt.Key_Control:
QtGui.QApplication.restoreOverrideCursor()
# check for zero to release temporary zero
        # somehow, for the numpad key on some machines, a check on Insert is needed as well
elif e.key() == QtCore.Qt.Key_0 or e.key() == QtCore.Qt.Key_Insert:
self.transpTempZero = False
self.update()
#############################
## Little helper methods
#############################
# Helper method that sets tooltip and statustip
# Provide an QAction and the tip text
    # This text is appended with the hotkeys and then assigned
def setTip( self, action, tip ):
tip += " (Hotkeys: '" + "', '".join([str(s.toString()) for s in action.shortcuts()]) + "')"
action.setStatusTip(tip)
action.setToolTip(tip)
# Set the mouse positions
    # There are the original positions referring to the screen,
    # the scaled positions referring to the image,
    # and a zoom version, where the mouse movement is artificially slowed down
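    # While a polygon is being drawn inside the zoom (zoomFactor > 1 and the
    # polygon still open), mouse movement is damped by 1/zoomFactor^3 so that
    # fine edits remain precise.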
def updateMousePos( self, mousePosOrig ):
if self.config.zoomFactor <= 1 or (self.drawPolyClosed or self.drawPoly.isEmpty()):
sens = 1.0
else :
            sens = 1.0/pow(self.config.zoomFactor, 3)
if self.config.zoom and self.mousePosOnZoom is not None:
mousePos = QtCore.QPointF(round((1-sens)*self.mousePosOnZoom.x() + (sens)*mousePosOrig.x()), round((1-sens)*self.mousePosOnZoom.y() + sens*mousePosOrig.y()))
else :
mousePos = mousePosOrig
mousePosScaled = QtCore.QPointF( float(mousePos.x() - self.xoff) / self.scale , float(mousePos.y() - self.yoff) / self.scale )
mouseOutsideImage = not self.image.rect().contains( mousePosScaled.toPoint() )
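        # Clamp the scaled position to the image rectangle, so dragging slightly
        # outside the image still yields a valid border coordinate.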
mousePosScaled.setX( max( mousePosScaled.x() , 0. ) )
mousePosScaled.setY( max( mousePosScaled.y() , 0. ) )
mousePosScaled.setX( min( mousePosScaled.x() , self.image.rect().right() ) )
mousePosScaled.setY( min( mousePosScaled.y() , self.image.rect().bottom() ) )
if not self.image.rect().contains( mousePosScaled.toPoint() ):
self.mousePos = None
self.mousePosScaled = None
self.mousePosOrig = None
self.updateMouseObject()
self.update()
return
self.mousePos = mousePos
self.mousePosScaled = mousePosScaled
self.mousePosOrig = mousePosOrig
self.mouseOutsideImage = mouseOutsideImage
# Toggle the zoom and update all mouse positions
def toggleZoom(self, mousePosOrig):
self.config.zoom = not self.config.zoom
if self.config.zoom:
self.mousePosOnZoom = self.mousePos
# Update the mouse position afterwards
self.updateMousePos( mousePosOrig )
else:
# Update the mouse position first
self.updateMousePos( mousePosOrig )
# Update the dragged point to the non-zoom point
if not self.config.correctionMode and self.draggedPt >= 0:
self.drawPoly.replace( self.draggedPt , self.mousePosScaled )
# Get the point/edge index within the given polygon that is close to the given point
# Returns (-1,-1) if none is close enough
    # Returns (i,i) if the point with index i is closest
# Returns (i,i+1) if the edge from points i to i+1 is closest
def getClosestPoint( self, poly, pt ):
closest = (-1,-1)
distTh = 4.0
dist = 1e9 # should be enough
for i in range(poly.size()):
curDist = self.ptDist(poly[i],pt)
if curDist < dist:
closest = (i,i)
dist = curDist
# Close enough?
if dist <= distTh:
return closest
# Otherwise see if the polygon is closed, but a line is close enough
if self.drawPolyClosed and poly.size() >= 2:
for i in range(poly.size()):
pt1 = poly[i]
j = i+1
if j == poly.size():
j = 0
pt2 = poly[j]
edge = QtCore.QLineF(pt1,pt2)
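                # Project the query point onto the edge: intersect the edge with
                # its normal through pt; a BoundedIntersection means the
                # perpendicular foot lies within the segment.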
normal = edge.normalVector()
normalThroughMouse = QtCore.QLineF( pt.x(),pt.y(),pt.x()+normal.dx(),pt.y()+normal.dy() )
intersectionPt = QtCore.QPointF()
intersectionType = edge.intersect( normalThroughMouse , intersectionPt )
if intersectionType == QtCore.QLineF.BoundedIntersection:
curDist = self.ptDist(intersectionPt,pt)
if curDist < dist:
closest = (i,j)
dist = curDist
# Close enough?
if dist <= distTh:
return closest
        # If we didn't return yet, we didn't find anything
return (-1,-1)
# Get distance between two points
def ptDist( self, pt1, pt2 ):
# A line between both
line = QtCore.QLineF( pt1 , pt2 )
# Length
lineLength = line.length()
return lineLength
# Determine if the given point closes the drawn polygon (snapping)
def ptClosesPoly(self):
if self.drawPoly.isEmpty():
return False
if self.mousePosScaled is None:
return False
closestPt = self.getClosestPoint( self.drawPoly, self.mousePosScaled )
return closestPt==(0,0)
# Draw a point using the given QPainter qp
# If its the first point in a polygon its drawn in green
# if not in red
# Also the radius might be increased
def drawPoint(self, qp, pt, isFirst, increaseRadius):
# The first in green
if isFirst:
qp.setBrush(QtGui.QBrush(QtGui.QColor(0,255,0),QtCore.Qt.SolidPattern))
# Other in red
else:
qp.setBrush(QtGui.QBrush(QtGui.QColor(255,0,0),QtCore.Qt.SolidPattern))
# Standard radius
r = 3.0
# Increase maybe
if increaseRadius:
r *= 2.5
# Draw
qp.drawEllipse( pt, r, r )
# Determine if the given candidate for a label path makes sense
def isLabelPathValid(self,labelPath):
return os.path.isdir(labelPath)
# Ask the user to select a label
    # If you like, you can give an object ID for more informative dialog text
    # Note that giving an object ID assumes that its current label is the default label
    # If you don't, the message "Select new label" is used
# Return is (label, ok). 'ok' is false if the user pressed Cancel
def getLabelFromUser(self, defaultLabel = "", objID = -1):
# Reset the status bar to this message when leaving
restoreMessage = self.statusBar().currentMessage()
# Update defaultLabel
if not defaultLabel:
defaultLabel = self.defaultLabel
# List of possible labels
items = QtCore.QStringList(name2label.keys())
items.sort()
default = items.indexOf(defaultLabel)
if default < 0:
self.statusBar().showMessage( 'The selected label is missing in the internal color map.' )
return
# Specify title
dlgTitle = "Select label"
message = dlgTitle
question = dlgTitle
if objID >= 0:
message = "Select new label for object {0} with current label {1}".format( objID, defaultLabel )
question = "Label for object {0}".format(objID)
self.statusBar().showMessage(message)
# Create and wait for dialog
(item, ok) = QtGui.QInputDialog.getItem(self, dlgTitle, question, items, default, False)
# Process the answer a bit
item = str(item)
# Restore message
self.statusBar().showMessage( restoreMessage )
# Return
return (item, ok)
# Add a point to the drawn polygon
def addPtToPoly(self, pt):
self.drawPoly.append( pt )
# Enable actions that need a polygon
for act in self.actPolyOrSelObj:
act.setEnabled(True)
# Clear the drawn polygon
def clearPolygon(self):
        # We do not clear, since drawPoly might be a reference to an object's polygon
self.drawPoly = QtGui.QPolygonF()
self.drawPolyClosed = False
# Disable actions that need a polygon
for act in self.actPolyOrSelObj:
act.setEnabled(bool(self.selObjs))
for act in self.actClosedPoly:
act.setEnabled(False)
# We just closed the polygon and need to deal with this situation
def closePolygon(self):
self.drawPolyClosed = True
for act in self.actClosedPoly:
act.setEnabled(True)
message = "What should I do with the polygon? Press n to create a new object, press Ctrl + Left Click to intersect with another object"
self.statusBar().showMessage(message)
# Intersect the drawn polygon with the mouse object
# and create a new object with same label and so on
def intersectPolygon(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without a single selected object
if self.mouseObj < 0:
return
# The selected object that is modified
obj = self.annotation.objects[self.mouseObj]
# The intersection of the polygons
intersection = self.drawPoly.intersected( self.getPolygon(obj) )
if not intersection.isEmpty():
# Ask the user for a label
self.drawPoly = intersection
(label, ok) = self.getLabelFromUser( obj.label )
if ok and label:
# Append and create the new object
self.appendObject( label , intersection )
# Clear the drawn polygon
self.clearPolygon()
# Default message
self.statusBar().showMessage( self.defaultStatusbar )
# Deselect
self.deselectAllObjects()
# Redraw
self.update()
# Merge the drawn polygon with the mouse object
# and create a new object with same label and so on
def mergePolygon(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without a single selected object
if self.mouseObj < 0:
return
# The selected object that is modified
obj = self.annotation.objects[self.mouseObj]
# The union of the polygons
union = self.drawPoly.united( self.getPolygon(obj) )
if not union.isEmpty():
# Ask the user for a label
self.drawPoly = union
(label, ok) = self.getLabelFromUser( obj.label )
if ok and label:
# Append and create the new object
self.appendObject( label , union )
# Clear the drawn polygon
self.clearPolygon()
# Default message
self.statusBar().showMessage( self.defaultStatusbar )
# Deselect
self.deselectAllObjects()
# Redraw
self.update()
# Edit an object's polygon or clear the polygon if multiple objects are selected
def initPolygonFromObject(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without any selected object
if not self.selObjs:
return
# If there are multiple objects selected, we clear the polygon
if len(self.selObjs) > 1:
self.clearPolygon()
self.update()
return
# The selected object that is used for init
obj = self.annotation.objects[self.selObjs[-1]]
# Make a reference to the polygon
self.drawPoly = self.getPolygon(obj)
# Make sure its closed
self.drawPolyClosed = True
# Update toolbar icons
# Enable actions that need a polygon
for act in self.actPolyOrSelObj:
act.setEnabled(True)
# Enable actions that need a closed polygon
for act in self.actClosedPoly:
act.setEnabled(True)
# Redraw
self.update()
# Create new object
def appendObject(self, label, polygon):
# Create empty annotation object
# if first object
if not self.annotation:
self.annotation = Annotation()
# Search the highest ID
newID = 0
for obj in self.annotation.objects:
if obj.id >= newID:
newID = obj.id + 1
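        # newID now equals the highest existing id + 1 (or 0 for an empty list)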
# New object
# Insert the object in the labels list
obj = CsObject()
obj.label = label
obj.polygon = [ Point(p.x(),p.y()) for p in polygon ]
obj.id = newID
obj.deleted = 0
obj.verified = 0
obj.user = getpass.getuser()
obj.updateDate()
self.annotation.objects.append(obj)
# Append to changes
self.addChange( "Created object {0} with label {1}".format( newID, label ) )
# Clear the drawn polygon
self.deselectAllObjects()
self.clearPolygon()
# select the new object
self.mouseObj = 0
self.selectObject()
# Helper for leaving an image
# Returns true if the image can be left, false if not
# Checks for possible changes and asks the user if they should be saved
# If the user says yes, then they are saved and true is returned
def checkAndSave(self):
# Without changes it's ok to leave the image
if not self.changes:
return True
# Backup of status message
restoreMessage = self.statusBar().currentMessage()
# Create the dialog
dlgTitle = "Save changes?"
self.statusBar().showMessage(dlgTitle)
text = "Do you want to save the following changes?\n"
for c in self.changes:
text += "- " + c + '\n'
buttons = QtGui.QMessageBox.Save | QtGui.QMessageBox.Discard | QtGui.QMessageBox.Cancel
ret = QtGui.QMessageBox.question(self, dlgTitle, text, buttons, QtGui.QMessageBox.Save )
proceed = False
# If the user selected yes -> save
if ret == QtGui.QMessageBox.Save:
proceed = self.save()
# If the user selected to discard the changes, clear them
elif ret == QtGui.QMessageBox.Discard:
self.clearChanges( )
proceed = True
# Otherwise prevent leaving the image
else:
proceed = False
self.statusBar().showMessage( restoreMessage )
return proceed
# Actually save a screenshot
def doScreenshot(self):
# For creating the screenshot we re-use the label drawing function
# However, we draw in an image using a QPainter
# Create such an image
img = QtGui.QImage( self.image )
# Create a QPainter that can perform draw actions within a widget or image
qp = QtGui.QPainter()
# Begin drawing in the image
qp.begin(img)
# Remember some settings
xoff = self.xoff
yoff = self.yoff
scale = self.scale
w = self.w
h = self.h
        # Render at native resolution: no offsets, unit scale
self.xoff = 0
self.yoff = 0
self.scale = 1
self.w = self.image.width()
self.h = self.image.height()
        # Deactivate the highlighted object
self.highlightObjs = []
# Blur the license plates
        # make this variable a member and use it as an option if desired
blurLicensePlates = True
if blurLicensePlates:
self.blurLicensePlates(qp)
# Draw the labels on top
ignore = []
if blurLicensePlates:
ignore.append( 'numberplate' )
self.drawLabels(qp,ignore)
# Finish drawing
qp.end()
# Reset scale and stuff
self.xoff = xoff
self.yoff = yoff
self.scale = scale
self.w = w
self.h = h
# Generate the real filename for saving
file = self.config.screenshotFilename
        # Replace the occurrence of %c with the city name (as directory)
# Generate the directory if necessary
cityIdx = file.find('%c')
if cityIdx >= 0:
if self.config.cityName:
                screenshotDir = os.path.join( file[:cityIdx] , self.config.cityName )
                if not os.path.exists(screenshotDir):
                    os.makedirs(screenshotDir)
file = file.replace( '%c',self.config.cityName + '/', 1 )
                if file.find('%c') >= 0:
message = "Found multiple '%c' in screenshot filename. Not allowed"
file = None
else:
message = "Do not have a city name. Cannot replace '%c' in screenshot filename."
file = None
        # Replace occurrences of %i with the image filename (without extension)
if file:
file = file.replace( '%i',os.path.splitext(os.path.basename(self.config.currentFile))[0] )
# Add extension .png if no extension given
if file:
if not os.path.splitext(file)[1]:
file += '.png'
# Save
if file:
success = img.save(file)
if success:
message = "Saved screenshot to " + file
else:
message = "Failed to save screenshot"
self.statusBar().showMessage(message)
# Update to reset everything to the correct state
self.update()
# Blur the license plates
# Argument is a qPainter
# Thus, only use this method for screenshots.
def blurLicensePlates(self,qp):
# license plate name
searchedNames = [ 'license plate' ]
# the image
img = self.image
# Draw all objects
for obj in self.annotation.objects:
# Some are flagged to not be drawn. Skip them
if not obj.draw:
continue
# The label of the object
name = obj.label
# If we do not know a color for this label, skip
            if name not in name2label:
continue
# If we do not blur this label, skip
if not name in searchedNames:
continue
# Scale the polygon properly
polyToDraw = self.getPolygon(obj) * QtGui.QTransform.fromScale(self.scale,self.scale)
bb = polyToDraw.boundingRect()
# Get the mean color within the polygon
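            # Note: the average is computed over the polygon's bounding box,
            # not the exact polygon area, which is sufficient for blurring.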
meanR = 0
meanG = 0
meanB = 0
num = 0
for y in range( max(int(bb.top()),0) , min(int(bb.bottom()+1.5),img.height()) ):
for x in range( max(int(bb.left()),0) , min(int(bb.right()+1.5),img.width()) ):
col = img.pixel(x,y)
meanR += QtGui.QColor(col).red()
meanG += QtGui.QColor(col).green()
meanB += QtGui.QColor(col).blue()
num += 1
            # Guard against degenerate polygons that cover no pixels
            if num == 0:
                continue
            meanR /= float(num)
            meanG /= float(num)
            meanB /= float(num)
col = QtGui.QColor( meanR , meanG , meanB )
qp.setPen(col)
brush = QtGui.QBrush( col, QtCore.Qt.SolidPattern )
qp.setBrush(brush)
# Default drawing
qp.drawPolygon( polyToDraw )
    # Update the object that is selected by the current mouse cursor
def updateMouseObject(self):
self.mouseObj = -1
if self.mousePosScaled is None:
return
if not self.annotation or not self.annotation.objects:
return
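        # Iterate from the top-most (last drawn) object downwards, so the object
        # visually on top wins the hit test.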
for idx in reversed(range(len(self.annotation.objects))):
obj = self.annotation.objects[idx]
if obj.draw and self.getPolygon(obj).containsPoint(self.mousePosScaled, QtCore.Qt.OddEvenFill):
self.mouseObj = idx
break
# Print info about the currently selected object at the status bar
def infoOnSelectedObject(self):
if not self.selObjs:
return
objID = self.selObjs[-1]
if self.annotation and objID >= 0:
obj = self.annotation.objects[objID]
self.statusBar().showMessage("Label of object {0}: {1}".format(obj.id,obj.label))
#else:
# self.statusBar().showMessage(self.defaultStatusbar)
# Make the object selected by the mouse the real selected object
def selectObject(self):
# If there is no mouse selection, we are good
if self.mouseObj < 0:
self.deselectObject()
return
# Append the object to selection if it's not in there
if not self.mouseObj in self.selObjs:
self.selObjs.append( self.mouseObj )
# Otherwise remove the object
else:
self.deselectObject()
# update polygon
self.initPolygonFromObject()
# If we have selected objects make the toolbar actions active
if self.selObjs:
for act in self.actSelObj + self.actPolyOrSelObj:
act.setEnabled(True)
# If we have a single selected object make their toolbar actions active
for act in self.singleActSelObj:
act.setEnabled(len(self.selObjs) == 1)
self.infoOnSelectedObject()
# Deselect object
def deselectObject(self):
# If there is no object to deselect, we are good
if not self.selObjs:
return
        # If the mouse does not select an object, remove the last one
if self.mouseObj < 0:
del self.selObjs[-1]
# Otherwise try to find the mouse obj in the list
if self.mouseObj in self.selObjs:
self.selObjs.remove(self.mouseObj)
# No object left?
if not self.selObjs:
for act in self.actSelObj:
act.setEnabled(False)
for act in self.actPolyOrSelObj:
act.setEnabled(bool(self.drawPoly))
# If we have a single selected object make their toolbar actions active
for act in self.singleActSelObj:
act.setEnabled(len(self.selObjs) == 1)
self.infoOnSelectedObject()
# Deselect all objects
def deselectAllObjects(self):
# If there is no object to deselect, we are good
self.selObjs = []
self.mouseObj = -1
for act in self.actSelObj:
act.setEnabled(False)
# If we have a single selected object make their toolbar actions active
for act in self.singleActSelObj:
act.setEnabled(len(self.selObjs) == 1)
self.infoOnSelectedObject()
# Modify the layer of the selected object
    # Move the layer up (negative offset) or down (positive offset)
def modifyLayer(self, offset):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without a single selected object
if len(self.selObjs) != 1:
return
# The selected object that is modified
obj = self.annotation.objects[self.selObjs[-1]]
# The index in the label list we are right now
oldidx = self.selObjs[-1]
# The index we want to move to
newidx = oldidx + offset
        # Make sure not to go below zero or beyond the end of the list
newidx = max(newidx,0)
newidx = min(newidx,len(self.annotation.objects)-1)
# If new and old idx are equal, there is nothing to do
if oldidx == newidx:
return
# Move the entry in the labels list
self.annotation.objects.insert(newidx, self.annotation.objects.pop(oldidx))
# Update the selected object to the new index
self.selObjs[-1] = newidx
self.statusBar().showMessage("Moved object {0} with label {1} to layer {2}".format(obj.id,obj.label,newidx))
# Check if we moved the object the first time
if not obj.id in self.changedLayer:
self.changedLayer.append(obj.id)
self.addChange( "Changed layer for object {0} with label {1}".format( obj.id, obj.label ) )
# Add a new change
def addChange(self, text):
if not text:
return
self.changes.append( text )
for act in self.actChanges:
act.setEnabled(True)
# Clear list of changes
def clearChanges(self):
self.changes = []
self.changedLayer = []
self.changedPolygon = []
for act in self.actChanges:
act.setEnabled(False)
# Clear the current labels
def clearAnnotation(self):
self.annotation = None
self.clearChanges()
self.deselectAllObjects()
self.clearPolygon()
self.config.currentLabelFile = ""
def clearCorrections(self):
self.correctionXML = None
self.corrections = []
#self.clearChanges() #TODO perhaps?
#self.clearPolygon()
self.config.currentCorrectionFile = ""
# Get the filename where to load/save labels
# Returns empty string if not possible
    # Set createDirs to True if you want needed directories to be created
def getLabelFilename( self , createDirs = False ):
# We need the name of the current city
if not self.config.cityName:
return ""
# And we need to have a directory where labels should be searched
if not self.config.labelPath:
return ""
# Without the name of the current images, there is also nothing we can do
if not self.config.currentFile:
return ""
# Check if the label directory is valid. This folder is selected by the user
# and thus expected to exist
if not self.isLabelPathValid(self.config.labelPath):
return ""
# Dirs are not automatically created in this version of the tool
if not os.path.isdir( self.config.labelPath ):
return ""
labelDir = self.config.labelPath
# extension of ground truth files
if self.config.gtType:
ext = self.gtExt.format('_'+self.config.gtType)
else:
ext = self.gtExt.format('')
# Generate the filename of the label file
filename = os.path.basename( self.config.currentFile )
filename = filename.replace( self.imageExt , ext )
filename = os.path.join( labelDir , filename )
filename = os.path.normpath(filename)
return filename
# Get the filename where to load/save labels
# Returns empty string if not possible
    # Set createDirs to True if you want needed directories to be created
def getCorrectionFilename( self , createDirs = False ):
# And we need to have a directory where corrections are stored
if not self.config.correctionPath:
return ""
# Without the name of the current images, there is also nothing we can do
if not self.config.currentFile:
return ""
# Folder where to store the labels
correctionDir = self.config.correctionPath
# If the folder does not exist, create it if allowed
if not os.path.isdir( correctionDir ):
if createDirs:
os.makedirs( correctionDir )
if not os.path.isdir( correctionDir ):
return ""
else:
return ""
# Generate the filename of the label file
filename = os.path.basename( self.config.currentFile )
filename = filename.replace( self.imageExt ,'.xml')
filename = os.path.join( correctionDir , filename )
filename = os.path.normpath(filename)
return filename
# Disable the popup menu on right click
def createPopupMenu(self):
pass
def main():
app = QtGui.QApplication(sys.argv)
tool = CityscapesLabelTool()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
| gpl-3.0 | 3,308,305,943,006,116,400 | 38.262478 | 175 | 0.585746 | false |
Robbie1977/TGscripts | plJHwarpToTemplate.py | 1 | 8574 | import os, re, shutil, subprocess, datetime, socket
ba = '/groups/sciserv/flyolympiad/vnc_align/toolkit/JBA/brainaligner'
cmtkdir = '/usr/local/cmtk/bin/'
fiji = '/usr/local/Fiji/ImageJ-linux64'
Rawconv = '~/script/raw2nrrdCrop.ijm'
Nrrdconv = '~/script/nrrd2rawUncrop.ijm'
Tfile = '~/template/flyVNCtemplate20xDaC.nrrd'
TfileR = '~/template/flyVNCtemplate20xDa.raw'
TfileM = '~/template/flyVNCtemplate20xDa.marker'
Qual = '~/script/Quality.py'
outdir = os.getcwd() + '/'
fo = open("PLwarp.txt",'r')
filelist = fo.readlines()
fo.close()
hostn = socket.gethostname()
runid = os.getpid()
procid = '[' + hostn + ';' + str(runid) + ']'
for fname in filelist:
fo = open("stop.txt",'r')
    stoplist = fo.readlines()
    fo.close()
if (hostn + '\n') in stoplist:
print 'Stop requested!'
else:
fname = fname.replace('\n','').replace('/disk/data/VFB/IMAGE_DATA/Janelia2012/TG/logs/',outdir)
try:
if os.path.exists(fname):
os.rename(fname,fname.replace('.lsm','~.lsm').replace('.raw','~.raw'))
basename = fname.replace(outdir,'').replace('.lsm','').replace('20130404_s/','').replace('.raw','').replace('Rigid/','').replace('/groups/sciserv/flyolympiad/vnc_align/20130404_lsms/','')
with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking
myfile.write(basename + ', Started JH warp, ' + procid + ', ' + str(datetime.datetime.now()) + '\n')
FloatFile = fname.replace('.lsm','~.lsm').replace('.raw','~.raw')
GxDF = outdir + basename + '-global.raw'
Goutput = basename + '-rigid.raw'
Axform = outdir + basename + '-rigid-affine.xform'
Foutput = Goutput.replace('-rigid.raw', '-rigid_C2.nrrd')
SigFile = Goutput.replace('-rigid.raw', '-rigid_C1.nrrd')
W5xform = outdir + basename + '-rigid-fastwarp.xform'
W5output = outdir + basename + '-rigid-BGwarp.nrrd'
Wsigout = outdir + basename + '-rigid-SGwarp.nrrd'
Routput = basename + '-rigid-warp.raw'
Loutput = basename + '-rigid-warp-local'
print 'Warping file %s...' % fname
#check for complete skip
if os.path.exists(W5xform):
print 'Warp5 output already exists - skipping.'
else:
#Generate the Initial Transform
if os.path.exists(Goutput):
print 'Global alignment already exists - skipping.'
else:
return_code = subprocess.call('nice ' + ba + ' -t %s -s %s -o %s -F %s -w 0 -C 0 -c 1 -B 1024 -Y' % (TfileR, FloatFile, Goutput, GxDF), shell=True)
print 'Brain Aligner Global alignment returned: %d' % return_code
#Convert raw to nrrd
return_code = subprocess.call('nice xvfb-run ' + fiji + ' -macro %s %s' % (Rawconv, Goutput), shell=True)
print 'Fiji/ImageJ conversion returned: %d' % return_code
#Generate the Affine Transform
if os.path.exists(Axform):
print 'Affine xform already exists - skipping.'
else:
FloatFile = Foutput
return_code = subprocess.call('nice ' + cmtkdir + 'registration --dofs 6,9 --auto-multi-levels 4 --match-histograms -o %s %s %s' % (Axform + '_part', Tfile, FloatFile), shell=True)
os.rename(Axform + '_part', Axform)
print 'registration returned: %d' % return_code
#Generate the Warped Transform
if os.path.exists(W5xform):
print 'Warp5 xform already exists - skipping.'
else:
return_code = subprocess.call('nice ' + cmtkdir + 'warp -o %s --grid-spacing 80 --exploration 30 --coarsest 4 --match-histograms --accuracy 0.2 --refine 4 --energy-weight 1e-1 --initial %s %s %s' % (W5xform + '_part', Axform, Tfile, FloatFile), shell=True) #coarsest adjusted from 8 to 4 as per greg sug.
os.rename(W5xform + '_part', W5xform)
print 'warp (5) returned: %d' % return_code
#Output a file to show the Warped Transform
if os.path.exists(W5output):
print 'Warp5 output already exists - skipping.'
else:
return_code = subprocess.call('nice ' + cmtkdir + 'reformatx -o %s --floating %s %s %s' % (W5output, FloatFile, Tfile, W5xform), shell=True)
print 'reformatx returned: %d' % return_code
            print 'Completed background warping for %s.' % basename
if os.path.exists(Wsigout):
print 'Signal warp output already exists - skipping.'
else:
return_code = subprocess.call('nice ' + cmtkdir + 'reformatx -o %s --floating %s %s %s' % (Wsigout, SigFile, Tfile, W5xform), shell=True)
print 'reformatx returned: %d' % return_code
                print 'Completed signal warping for %s.' % basename
if os.path.exists(Routput):
print 'RAW warp output already exists - skipping.'
else:
return_code = subprocess.call('nice xvfb-run ' + fiji + ' -macro %s %s' % (Nrrdconv, Routput), shell=True)
print 'Fiji returned: %d' % return_code
print 'Completed generating RAW warp for %s.' % basename
# if os.path.exists(Loutput + '.raw'):
# print 'Brianaligner local output already exists - skipping.'
# else:
# return_code = subprocess.call('nice ' + ba + ' -t %s -s %s -L %s -o %s -w 10 -C 0 -c 0 -H 2 -B 1024' % (TfileR, Routput, TfileM, Loutput + '.raw'), shell=True) #
# print 'Brainaligner returned: %d' % return_code
# print 'Completed generating RAW warp for %s.' % basename
if os.path.exists(Routput + '_qual.csv'):
print 'Quality measure already exists - skipping.'
else:
return_code = subprocess.call('nice python %s %s %s %s_qual.csv' % (Qual, W5output, Tfile, Routput), shell=True)
print 'Qual returned: %d' % return_code
print 'Completed generating Qual measure for %s.' % basename
if os.path.exists(W5output):
#os.remove(fname.replace('_blue',''))
#shutil.move(fname.replace('_blue',''),fname.replace('logs/','logs/nrrds/'))
#os.remove(Goutput)
#os.remove(Ioutput) Add if used
#shutil.rmtree(Axform, ignore_errors=True)
#os.remove(Aoutput)
#os.remove(W5xform) #Needed for Signal Channel Warp
with open("PLdone.txt", "a") as myfile:
myfile.write(Routput + '\n')
#os.remove(W5output) #Needed for checking only
print 'Clean-up for %s done.' % basename
with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking
myfile.write(basename + ', Finished JH warp, ' + procid + ', ' + str(datetime.datetime.now()) + '\n')
else:
                print 'Failed warping for %s.' % basename
os.rename(fname.replace('_blue',''),fname.replace('_blue','_blue_error'))
with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking
myfile.write(basename + ', Failed JH warp, ' + procid + ', ' + str(datetime.datetime.now()) + '\n')
except OSError as e:
            print 'Skipping file'
with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking
myfile.write(basename + ', Error during JH warp: ' + e.strerror + ', ' + procid + ', ' + str(datetime.datetime.now()) + '\n')
print 'All Done.'
| mit | -6,054,312,207,964,567,000 | 52.265823 | 328 | 0.515512 | false |
tartopum/Lactum | setup.py | 1 | 1422 | import os
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
import lactum
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errcode = pytest.main(self.test_args)
sys.exit(errcode)
with open("README.md", "r") as f:
readme = f.read()
def reqs(*f):
def strip_comments(l):
return l.split("#", 1)[0].strip()
return list(filter(None, [strip_comments(l) for l in open(os.path.join(os.getcwd(), *f)).readlines()]))
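# Hypothetical illustration (added; not part of the original file): given a
# requirements.txt containing the lines "requests  # http client" and
# "# a pure comment", reqs("requirements.txt") returns ["requests"], since
# comments are stripped and empty results are filtered out.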
requirements = reqs("requirements.txt")
test_requirements = reqs("requirements-dev.txt")
test_requirements = requirements + test_requirements[1:]
setup(
name="lactum",
description="",
long_description=readme,
author="Vayel",
author_email="[email protected]",
url="https://github.com/tartopum/Lactum",
packages=["lactum"],
package_dir={"lactum": "lactum"},
include_package_data=True,
install_requires=requirements,
license="MIT",
zip_safe=False,
classifiers=[
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 3.5"
],
cmdclass={"test": PyTest},
tests_require=test_requirements
)
| mit | 268,804,563,701,314,200 | 24.392857 | 107 | 0.631505 | false |
funkring/fdoo | addons-funkring/at_sale_layout_ext/sale.py | 1 | 1573 | # -*- coding: utf-8 -*-
#############################################################################
#
# Copyright (c) 2007 Martin Reisenhofer <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class SaleLayoutCategory(osv.Model):
_inherit = "sale_layout.category"
_columns = {
"order_id" : fields.many2one("sale.order", "Order", ondelete="cascade")
}
class sale_order(osv.Model):
_inherit = "sale.order"
_columns = {
"layout_categ_ids" : fields.one2many("sale_layout.category", "order_id", "Layout Categories")
}
class sale_order_line(osv.Model):
_inherit = "sale.order.line"
_columns = {
"prod_categ_id" : fields.related("product_id", "categ_id", string="Category", type="many2one", relation="product.category", readonly=True)
}
| agpl-3.0 | -4,435,612,682,991,795,700 | 37.365854 | 146 | 0.613477 | false |
kmpf/uap | tools/segemehl_2017_reformatCigar.py | 1 | 4801 | #!/bin/bash
"exec" "`dirname $0`/../python_env/bin/python" "$0" "$@"
#"exec" "python" "$0" "$@"
# ^^^
# the cmd above ensures that the correct python environment is
# selected to execute this script.
# The correct environment is the one belonging to uap, since all
# neccessary python modules are installed there.
# filename: segemehl_2017_reformatCigar.py
# author: Jana Hertel
# date: 2017/06/07
# version: 1.0
# description: Reformat the cigar string such that htseq-count is able to process
# the according SAM files. Consecutive values for 'ix', 'j=' and 'kM'
# are summed up and replaced by nM with n being the sum of i, j and k.
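#              Worked example (added for illustration): the CIGAR string
#              "10=2X5=3I4=" becomes "17M3I4M" -- the consecutive 10=, 2X
#              and 5= merge into 17M (10+2+5), while the insertion 3I is
#              left untouched.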
import argparse
import sys
import re
from multiprocessing import Pool
import itertools
parser = argparse.ArgumentParser(
description='Python script to process a large file '
'using multi-processing.')
parser.add_argument('--version', action='version', version='%(prog)s 1.0')
parser.add_argument(
'--in-file',
dest='my_file_in',
required=True,
type=argparse.FileType('r'),
help='A large file whose lines are independent from each other and '
'can be processed separately.')
parser.add_argument('--threads', dest='my_cores', default=1,
type=int,
help='Number of CPUs 2B used. Default: 1')
parser.add_argument(
'--blocksize',
dest='my_bufsize',
default=2,
type=int,
help='Size of buffer to read the input file (in MB). Default: 2')
args = parser.parse_args()
##########################################################################
# my_range(start, end, step)
#
# This function creates a range with a user defined step to walk through.
# returns: the respective new start values
def my_range(start, end, step):
while start <= end:
yield start
start += step
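# Illustrative example (added): list(my_range(0, 10, 5)) -> [0, 5, 10]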
##########################################################################
##########################################################################
# process_line(lines)
#
# function that reformats each SAM line in the given list:
# - header lines (starting with '@') are passed through unchanged
# - consecutive '=', 'X' and 'M' CIGAR operations are merged into one nM entry
# - returns the list of reformatted lines
def process_line(lines):
newlines = list()
c = 0
for line in lines:
c += 1
columns = line.strip().split('\t')
# don't process header lines
if(columns[0][:1] == "@"):
newlines.append(line.strip())
continue
cigar = columns[5]
x = re.split(r'(\D)', cigar)
# split cigar string and sum up consecutive values
# for '=' and 'X' (match and mismatch)
# leave values as they are for 'I','D' and 'N' (del, insertion, split)
M = 0
cigar_new = ''
for j in range(1, len(x) - 1, 2):
# match or mismatch
if x[j] == '=' or x[j] == 'X' or x[j] == 'M':
M = M + int(x[j - 1])
else: # del or ins
if M > 0:
# print the previous match/mismatch
cigar_new += str(M) + "M"
M = 0
# anything else but '=', 'X' or 'M'
cigar_new += x[j - 1] + x[j]
if M > 0:
cigar_new += str(M) + "M"
if cigar_new == "0M*":
cigar_new = "*"
# print the sam line with the new cigar string to stdout
new_line = ""
for k in range(0, 5):
new_line += "%s\t" % columns[k]
new_line += "%s\t" % cigar_new
for k in range(6, len(columns)):
new_line += "%s" % columns[k]
if(not k == len(columns)):
new_line += "\t"
newlines.append(new_line)
return newlines
# END: process_line(line)
##########################################################################
if __name__ == '__main__':
# create my_cores -1 pools, 1 control + the remaining for processing the
# lines
p = Pool(args.my_cores)
a = list()
eof_reached = False
    # bufsize needs to be provided in bytes;
    # the command-line argument is given in megabytes
bufsize = args.my_bufsize * 1000000
while not eof_reached:
for i in range(args.my_cores - 1):
linelist = args.my_file_in.readlines(bufsize)
if len(linelist) == 0:
eof_reached = True
else:
a.append(linelist) # ~ 2MB chunks
l = p.map(process_line, a)
for j in l:
print('\n'.join(j))
a[:] = [] # delete processed lines from the list
# this works in principle.. too much i/o
# for line in p.imap(process_line, args.my_file_in):
# print line, # the coma prevents printing an additional new line
# idea for mp:
# read file in chunks of the size 1/args.my_cores
# --> each chunk in one process
| gpl-3.0 | 6,380,944,447,513,988,000 | 27.076023 | 82 | 0.531764 | false |
REVLWorld/elasticsearch-dsl-py | test_elasticsearch_dsl/test_integration/test_search.py | 1 | 2708 | from elasticsearch import TransportError
from elasticsearch_dsl import Search, DocType, Date, String, MultiSearch, \
MetaField, Index, Q
from .test_data import DATA
from pytest import raises
class Repository(DocType):
created_at = Date()
description = String(analyzer='snowball')
tags = String(index='not_analyzed')
class Meta:
index = 'git'
doc_type = 'repos'
class Commit(DocType):
class Meta:
doc_type = 'commits'
index = 'git'
parent = MetaField(type='repos')
def test_inner_hits_are_wrapped_in_response(data_client):
i = Index('git')
i.doc_type(Repository)
i.doc_type(Commit)
s = i.search()[0:1].doc_type(Commit).query('has_parent', type='repos', inner_hits={}, query=Q('match_all'))
response = s.execute()
commit = response.hits[0]
assert isinstance(commit.meta.inner_hits.repos, response.__class__)
assert isinstance(commit.meta.inner_hits.repos[0], Repository)
def test_suggest_can_be_run_separately(data_client):
s = Search()
s = s.suggest('simple_suggestion', 'elasticserach', term={'field': 'organization'})
response = s.execute_suggest()
assert response.success()
assert response.simple_suggestion[0].options[0].text == 'elasticsearch'
def test_scan_respects_doc_types(data_client):
repos = list(Repository.search().scan())
assert 1 == len(repos)
assert isinstance(repos[0], Repository)
def test_scan_iterates_through_all_docs(data_client):
s = Search(index='git').filter('term', _type='commits')
commits = list(s.scan())
assert 52 == len(commits)
assert set(d['_id'] for d in DATA if d['_type'] == 'commits') == set(c.meta.id for c in commits)
def test_response_is_cached(data_client):
s = Repository.search()
repos = list(s)
assert hasattr(s, '_response')
assert s._response.hits == repos
def test_multi_search(data_client):
s1 = Repository.search()
s2 = Search(doc_type='commits')
ms = MultiSearch(index='git')
ms = ms.add(s1).add(s2)
r1, r2 = ms.execute()
assert 1 == len(r1)
assert isinstance(r1[0], Repository)
assert r1.search is s1
assert 52 == r2.hits.total
assert r2.search is s2
def test_multi_missing(data_client):
s1 = Repository.search()
s2 = Search(doc_type='commits')
s3 = Search(index='does_not_exist')
ms = MultiSearch()
ms = ms.add(s1).add(s2).add(s3)
with raises(TransportError):
ms.execute()
r1, r2, r3 = ms.execute(raise_on_error=False)
assert 1 == len(r1)
assert isinstance(r1[0], Repository)
assert r1.search is s1
assert 52 == r2.hits.total
assert r2.search is s2
assert r3 is None
| apache-2.0 | -6,427,527,156,314,254,000 | 25.811881 | 111 | 0.649557 | false |
elewis33/doorstop | doorstop/server/utilities.py | 1 | 1176 | """Shared functions for the `doorstop.server` package."""
from doorstop import common
from doorstop import settings
log = common.logger(__name__)
class StripPathMiddleware(object): # pylint: disable=R0903
"""WSGI middleware that strips trailing slashes from all URLs."""
def __init__(self, app):
self.app = app
def __call__(self, e, h): # pragma: no cover (integration test)
e['PATH_INFO'] = e['PATH_INFO'].rstrip('/')
return self.app(e, h)
def build_url(host=None, port=None, path=None):
"""Build the server's URL with optional path."""
host = host or settings.SERVER_HOST
port = port or settings.SERVER_PORT
log.debug("building URL: {} + {} + {}".format(host, port, path))
if not host:
return None
url = 'http://{}'.format(host)
if port != 80:
url += ':{}'.format(port)
if path:
url += path
return url
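# Illustrative examples (added; explicit host/port instead of the configured
# defaults):
#   build_url('example.com', 8080, '/documents') -> 'http://example.com:8080/documents'
#   build_url('example.com', 80) -> 'http://example.com'  (port 80 is omitted)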
def json_response(request): # pragma: no cover (integration test)
"""Determine if the request's response should be JSON."""
if request.query.get('format') == 'json':
return True
else:
return request.content_type == 'application/json'
| lgpl-3.0 | -7,028,435,530,194,350,000 | 27.682927 | 69 | 0.617347 | false |
TGThorax/python-ka2ring | src/leeftijd.py | 1 | 1962 | # Oefening: Vraag de geboortedatum van de gebruiker en zeg de leeftijd.
huidig_jaar = 2017
huidige_maand = 10
huidige_dag = 24
jaar = int(input("In welk jaar ben je geboren? "))
maand = int(input("En in welke maand? (getal) "))
# De dag moeten we pas weten als de geboortemaand deze maand is!
# Je kan het hier natuurlijk ook al vragen als je wilt.
leeftijd = huidig_jaar - jaar
if (maand > huidige_maand): # De gebruiker is nog niet verjaard
leeftijd -= 1 # hetzelfde als "leeftijd = leeftijd - 1"
elif (maand == huidige_maand):
dag = int(input("En welke dag? (getal) "))
if (dag > huidige_dag):
leeftijd -= 1
elif (dag == huidige_dag):
# leeftijd = leeftijd # Dat doet helemaal niets natuurlijk
print("Gelukkige verjaardag!")
# else: # Enkel (dag < huidige_dag) kan nog =>
# # De gebruiker is al verjaard deze maand!
# leeftijd = leeftijd # Maar er hoeft niets veranderd te worden.
# else: # De gebruiker is zeker al verjaard,
# # want enkel maand < huidige_maand kan nog!
# leeftijd = leeftijd # Maar we moeten niets aanpassen!
print("Dan ben je " + str(leeftijd) + " jaar oud.")
## Oefeningen zonder oplossing (mail bij vragen!).
#
# Oefening: start met leeftijd = huidig_jaar - jaar - 1 en verhoog die
# waarde wanneer nodig. Vergelijk de voorwaarden daar met die hier.
# Wat is het verschil in de tekens?
#
# Nog een oefening: kijk de waarden die de gebruiker ingeeft na.
# Zorg ervoor je geen dag kan invoeren die later komt dan vandaag.
# Probeer dat zo onafhankelijk mogelijk te doen van de bovenstaande code.
#
# Nadenkoefening: kan je bovenstaande 2 voorwaarden in de vorige opdracht uitvoeren
# zonder in de herhaling te vallen? (dat is geen uitdaging, maar een vraag!).
# Als je toch iets mag aanpassen aan bovenstaande code, kan het dan?
# Wat denk je dat de beste optie is? | mit | 5,076,325,352,904,479,000 | 43.613636 | 83 | 0.664118 | false |
DTOcean/dtocean-core | dtocean_core/strategies/__init__.py | 1 | 3195 | # -*- coding: utf-8 -*-
# Copyright (C) 2016-2018 Mathew Topper
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import abc
from copy import deepcopy
from polite.abc import abstractclassmethod
from ..menu import ModuleMenu, ThemeMenu
from ..pipeline import Tree
class Strategy(object):
'''The base abstract class for all strategy classes'''
__metaclass__ = abc.ABCMeta
def __init__(self):
'''The init method should never have arguments. Provided for all
subclasses are:
self._module_menu: a ModuleMenu object
self._theme_menu: a ThemeMenu object
self._tree: a Tree object
'''
self._module_menu = ModuleMenu()
self._theme_menu = ThemeMenu()
self._tree = Tree()
# Record the simulation indexes used in the strategy
self._sim_record = []
# Record the configuration dictionary of the strategy (assume this
# is picklable)
self._config = None
# Record any detailed information about the simulation (assume this
# is picklable)
self.sim_details = None
return
@abstractclassmethod
def get_name(cls):
'''A class method for the common name of the strategy.
Returns:
str: A unique string
'''
return cls()
@abc.abstractmethod
def configure(self):
        '''The configure method is used to collect information required for executing
the strategy.
'''
return
@abc.abstractmethod
def get_variables(self):
'''The get_variables method returns the list of any variables that
will be set by the strategy
'''
return
@abc.abstractmethod
def execute(self, core, project):
'''The execute method is used to execute the strategy. It should always
take a Core and a Project class as the only inputs.
'''
return
def get_config(self):
return deepcopy(self._config)
def set_config(self, config_dict):
self._config = config_dict
return
def add_simulation_index(self, sim_index):
self._sim_record.append(sim_index)
return
def get_simulation_record(self):
return self._sim_record[:]
def restart(self):
self._sim_record = []
self.sim_details = None
return
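# Minimal sketch (added for illustration) of a concrete strategy; the names
# and behaviour below are assumptions, not one of DTOcean's real strategies:
#
#   class BasicStrategy(Strategy):
#
#       @classmethod
#       def get_name(cls):
#           return "Basic"
#
#       def configure(self, **config_dict):
#           self.set_config(config_dict)
#
#       def get_variables(self):
#           return []
#
#       def execute(self, core, project):
#           # Run a single simulation and record its index.
#           self.add_simulation_index(0)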
| gpl-3.0 | 3,059,546,488,809,789,400 | 24.56 | 79 | 0.593427 | false |
mtholder/taxalotl | taxalotl/parsing/col.py | 1 | 5000 | from __future__ import print_function
import io
import logging
from peyutil import shorter_fp_form
from taxalotl.resource_wrapper import TaxonomyWrapper
from taxalotl.parsing.darwin_core import normalize_darwin_core_taxonomy
_LOG = logging.getLogger(__name__)
COL_PARTMAP = {
'Archaea': frozenset([52435722]),
'Bacteria': frozenset([52433432]),
'Eukaryota': frozenset([52433499, 52435027, 52433974, 52433370]),
'Archaeplastida': frozenset([52433499]),
'Fungi': frozenset([52433393]),
'Metazoa': frozenset([52433370]),
'Viruses': frozenset([52433426]),
'Glaucophyta': frozenset([52444130]),
'Rhodophyta': frozenset([52444134]),
'Chloroplastida': frozenset([52442327, 52442210, 52442148, 52434330, 52434201, 52433500, ]),
'Annelida': frozenset([52433489]),
'Arthropoda': frozenset([52433375]),
'Malacostraca': frozenset([52433389]),
'Arachnida': frozenset([52433402]),
'Insecta': frozenset([52433376]),
'Diptera': frozenset([52433521]),
'Coleoptera': frozenset([52433486]),
'Lepidoptera': frozenset([52433663]),
'Hymenoptera': frozenset([52433621]),
'Bryozoa': frozenset([52442814]),
'Chordata': frozenset([52433371]),
'Cnidaria': frozenset([52433398]),
'Ctenophora': frozenset([52443092]),
'Mollusca': frozenset([52440786]),
'Nematoda': frozenset([52436787]),
'Platyhelminthes': frozenset([52443117]),
'Porifera': frozenset([52442836]),
}
# noinspection PyUnreachableCode
def partition_col_by_root_id(tax_part):  # type: (TaxonPartition) -> None
"""Reads the serialized taxonomy of the parent, adds the easy lines to their partition element,
and returns dicts needed to finish the assignments.
Signature for partition functions. Takes:
1. abs path of taxonomy file for parent taxon
2. list of PartitionElements whose roots are sets that specify IDs that are the
        roots of the subtrees that are to go in each partition element.
Returns a tuple:
0. par_id ->[child_id] dict,
1. id -> partition_element dict for already assigned IDs,
2. id -> line dict - may only have unassigned IDs in it,
3. synonym id -> [(accepted_id, line), ] for any synonyms
4. roots_set - a frozen set of the union of the partition element roots
5. the rootless partition element ("garbage_bin" for all unassigned IDs)
6. header for taxon file
7. header for synonyms file (or None)
"""
assert False
complete_taxon_fp = tax_part.tax_fp
syn_fp = tax_part.input_synonyms_filepath
assert not syn_fp
syn_by_id = tax_part._syn_by_id
ptp = shorter_fp_form(complete_taxon_fp)
with io.open(complete_taxon_fp, 'rU', encoding='utf-8') as inp:
iinp = iter(inp)
tax_part.taxon_header = next(iinp)
prev_line = None
# vt = unicode('\x0b') # Do some lines have vertical tabs? Of course they do....
# istwo = unicode('\x1e')
for n, line in enumerate(iinp):
if not line.endswith('\n'):
if prev_line:
prev_line = prev_line + line[:-1]
else:
prev_line = line[:-1]
continue
elif prev_line:
line = prev_line + line
prev_line = ''
ls = line.split('\t')
if n % 1000 == 0:
_LOG.info(' read taxon {} from {}'.format(n, ptp))
try:
col_id, accept_id, par_id = ls[0], ls[4], ls[5]
col_id = int(col_id)
if accept_id:
try:
accept_id = int(accept_id)
                    except ValueError:
if n == 0:
continue
syn_by_id.setdefault(accept_id, []).append((col_id, line))
else:
tax_part.read_taxon_line(col_id, par_id, line)
except Exception:
_LOG.exception("Exception parsing line {}:\n{}".format(1 + n, line))
raise
# noinspection PyAbstractClass
class CoLTaxonomyWrapper(TaxonomyWrapper):
taxon_filename = 'taxonomy.tsv'
# synonyms_filename = None
# partition_parsing_fn = staticmethod(partition_col_by_root_id)
schema = {"http://rs.tdwg.org/dwc/"}
def __init__(self, obj, parent=None, refs=None):
TaxonomyWrapper.__init__(self, obj, parent=parent, refs=refs)
@property
def partition_source_dir(self):
return self.normalized_filedir
def get_primary_partition_map(self):
return COL_PARTMAP
def normalize(self):
normalize_darwin_core_taxonomy(self.unpacked_filepath, self.normalized_filedir, self)
def _post_process_tree(self, tree):
self.collapse_incertae_sedis_by_name_prefix(tree, 'not assigned')
def post_process_interim_tax_data(self, interim_tax_data):
self.collapse_as_incertae_sedis_interim_tax_data(interim_tax_data, 'not assigned')
| bsd-2-clause | 700,893,670,152,985,700 | 36.313433 | 99 | 0.61 | false |
roglew/pappy-proxy | pappyproxy/interface/decode.py | 1 | 10668 | import html
import base64
import datetime
import gzip
import shlex
import string
import urllib
from ..util import hexdump, printable_data, copy_to_clipboard, clipboard_contents, encode_basic_auth, parse_basic_auth
from ..console import CommandError
from io import BytesIO
def print_maybe_bin(s):
binary = False
for c in s:
if chr(c) not in string.printable:
binary = True
break
if binary:
print(hexdump(s))
else:
print(s.decode())
def asciihex_encode_helper(s):
    return ''.join('{0:02x}'.format(c) for c in s).encode()
def asciihex_decode_helper(s):
ret = []
try:
for a, b in zip(s[0::2], s[1::2]):
c = chr(a)+chr(b)
ret.append(chr(int(c, 16)))
return ''.join(ret).encode()
except Exception as e:
raise CommandError(e)
def gzip_encode_helper(s):
    out = BytesIO()
    with gzip.GzipFile(fileobj=out, mode="wb") as f:
        f.write(s)
    return out.getvalue()
def gzip_decode_helper(s):
    dec_data = gzip.GzipFile('', 'rb', 9, BytesIO(s))
    dec_data = dec_data.read()
    return dec_data
def base64_decode_helper(s):
try:
return base64.b64decode(s)
    except (TypeError, ValueError):
        for i in range(1, 5):
            try:
                s_padded = base64.b64decode(s + b'=' * i)
                return s_padded
            except Exception:
                pass
raise CommandError("Unable to base64 decode string")
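# Illustrative example (added): base64_decode_helper(b'aGVsbG8') returns
# b'hello' even though the strictly padded input would be b'aGVsbG8='.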
def url_decode_helper(s):
bs = s.decode()
return urllib.parse.unquote(bs).encode()
def url_encode_helper(s):
bs = s.decode()
return urllib.parse.quote_plus(bs).encode()
def html_encode_helper(s):
return ''.join(['&#x{0:x};'.format(c) for c in s]).encode()
def html_decode_helper(s):
return html.unescape(s.decode()).encode()
def _code_helper(args, func, copy=True):
if len(args) == 0:
s = clipboard_contents().encode()
print('Will decode:')
print(printable_data(s))
s = func(s)
if copy:
try:
copy_to_clipboard(s)
except Exception as e:
print('Result cannot be copied to the clipboard. Result not copied.')
raise e
return s
else:
s = func(args[0].encode())
if copy:
try:
copy_to_clipboard(s)
except Exception as e:
print('Result cannot be copied to the clipboard. Result not copied.')
raise e
return s
def base64_decode(client, args):
"""
Base64 decode a string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, base64_decode_helper))
def base64_encode(client, args):
"""
Base64 encode a string.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, base64.b64encode))
def url_decode(client, args):
"""
URL decode a string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, url_decode_helper))
def url_encode(client, args):
"""
URL encode special characters in a string.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, url_encode_helper))
def asciihex_decode(client, args):
"""
Decode an ascii hex string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, asciihex_decode_helper))
def asciihex_encode(client, args):
"""
Convert all the characters in a line to hex and combine them.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, asciihex_encode_helper))
def html_decode(client, args):
"""
Decode an html encoded string.
If no string is given, will decode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, html_decode_helper))
def html_encode(client, args):
"""
Encode a string and escape html control characters.
If no string is given, will encode the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, html_encode_helper))
def gzip_decode(client, args):
"""
Un-gzip a string.
If no string is given, will decompress the contents of the clipboard.
Results are copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, gzip_decode_helper))
def gzip_encode(client, args):
"""
Gzip a string.
If no string is given, will decompress the contents of the clipboard.
Results are NOT copied to the clipboard.
"""
print_maybe_bin(_code_helper(args, gzip_encode_helper, copy=False))
def base64_decode_raw(client, args):
"""
Same as base64_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, base64_decode_helper, copy=False))
def base64_encode_raw(client, args):
"""
Same as base64_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, base64.b64encode, copy=False))
def url_decode_raw(client, args):
"""
Same as url_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, url_decode_helper, copy=False))
def url_encode_raw(client, args):
"""
Same as url_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, url_encode_helper, copy=False))
def asciihex_decode_raw(client, args):
"""
Same as asciihex_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, asciihex_decode_helper, copy=False))
def asciihex_encode_raw(client, args):
"""
Same as asciihex_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, asciihex_encode_helper, copy=False))
def html_decode_raw(client, args):
"""
Same as html_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, html_decode_helper, copy=False))
def html_encode_raw(client, args):
"""
Same as html_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, html_encode_helper, copy=False))
def gzip_decode_raw(client, args):
"""
Same as gzip_decode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, gzip_decode_helper, copy=False))
def gzip_encode_raw(client, args):
"""
Same as gzip_encode but the output will never be printed as a hex dump and
results will not be copied. It is suggested you redirect the output
to a file.
"""
print(_code_helper(args, gzip_encode_helper, copy=False))
def unix_time_decode_helper(line):
unix_time = int(line.strip())
dtime = datetime.datetime.fromtimestamp(unix_time)
return dtime.strftime('%Y-%m-%d %H:%M:%S')
def unix_time_decode(client, args):
print(_code_helper(args, unix_time_decode_helper))
def http_auth_encode(client, args):
if len(args) != 2:
raise CommandError('Usage: http_auth_encode <username> <password>')
username, password = args
print(encode_basic_auth(username, password))
def http_auth_decode(client, args):
    if len(args) != 1:
        raise CommandError('Usage: http_auth_decode <encoded_auth>')
    username, password = parse_basic_auth(args[0])
    print(username)
    print(password)
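# Illustrative round trip (added; assumes the util helpers build/parse a
# standard Basic auth header value):
#   httpauth_encode user pass            -> Basic dXNlcjpwYXNz
#   httpauth_decode "Basic dXNlcjpwYXNz" -> user / pass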
def load_cmds(cmd):
cmd.set_cmds({
'base64_decode': (base64_decode, None),
'base64_encode': (base64_encode, None),
'asciihex_decode': (asciihex_decode, None),
'asciihex_encode': (asciihex_encode, None),
'url_decode': (url_decode, None),
'url_encode': (url_encode, None),
'html_decode': (html_decode, None),
'html_encode': (html_encode, None),
'gzip_decode': (gzip_decode, None),
'gzip_encode': (gzip_encode, None),
'base64_decode_raw': (base64_decode_raw, None),
'base64_encode_raw': (base64_encode_raw, None),
'asciihex_decode_raw': (asciihex_decode_raw, None),
'asciihex_encode_raw': (asciihex_encode_raw, None),
'url_decode_raw': (url_decode_raw, None),
'url_encode_raw': (url_encode_raw, None),
'html_decode_raw': (html_decode_raw, None),
'html_encode_raw': (html_encode_raw, None),
'gzip_decode_raw': (gzip_decode_raw, None),
'gzip_encode_raw': (gzip_encode_raw, None),
'unixtime_decode': (unix_time_decode, None),
'httpauth_encode': (http_auth_encode, None),
'httpauth_decode': (http_auth_decode, None)
})
cmd.add_aliases([
('base64_decode', 'b64d'),
('base64_encode', 'b64e'),
('asciihex_decode', 'ahd'),
('asciihex_encode', 'ahe'),
('url_decode', 'urld'),
('url_encode', 'urle'),
('html_decode', 'htmld'),
('html_encode', 'htmle'),
('gzip_decode', 'gzd'),
('gzip_encode', 'gze'),
('base64_decode_raw', 'b64dr'),
('base64_encode_raw', 'b64er'),
('asciihex_decode_raw', 'ahdr'),
('asciihex_encode_raw', 'aher'),
('url_decode_raw', 'urldr'),
('url_encode_raw', 'urler'),
('html_decode_raw', 'htmldr'),
('html_encode_raw', 'htmler'),
('gzip_decode_raw', 'gzdr'),
('gzip_encode_raw', 'gzer'),
('unixtime_decode', 'uxtd'),
('httpauth_encode', 'hae'),
('httpauth_decode', 'had'),
])
| mit | -4,319,996,795,967,323,600 | 31.723926 | 118 | 0.624953 | false |
pycontw/pycontw2016 | src/proposals/migrations/0038_add_new_conference.py | 1 | 1404 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2019-07-10 07:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('proposals', '0037_auto_20180305_1339'),
]
operations = [
migrations.AlterField(
model_name='additionalspeaker',
name='conference',
field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018'), ('pycontw-2019', 'PyCon Taiwan 2019')], default='pycontw-2019', verbose_name='conference'),
),
migrations.AlterField(
model_name='talkproposal',
name='conference',
field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018'), ('pycontw-2019', 'PyCon Taiwan 2019')], default='pycontw-2019', verbose_name='conference'),
),
migrations.AlterField(
model_name='tutorialproposal',
name='conference',
field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018'), ('pycontw-2019', 'PyCon Taiwan 2019')], default='pycontw-2019', verbose_name='conference'),
),
]
| mit | -1,369,078,986,699,272,400 | 45.8 | 252 | 0.625356 | false |
zzh8829/RevOctane | bstream.py | 1 | 3007 | import io
import struct
little_endian_types = {
'int8': 'b',
'uint8': 'B',
'int16': 'h',
'uint16': 'H',
'int32': 'i',
'uint32': 'I',
'int64': 'q',
'uint64': 'Q',
'float': 'f',
'float32': 'f',
'double': 'd',
'char': 'c',
'bool': '?',
'pad': 'x',
'void*': 'P',
}
big_endian_types = { k:">"+v for k,v in little_endian_types.items()}
special_types = {
'int12': 'read_int12',
'uint12': 'read_int12',
'float16': 'read_float16',
}
class BStream:
def __init__(self, **kwargs):
if "file" in kwargs:
self.stream = open(kwargs["file"], "rb")
elif "stream" in kwargs:
self.stream = kwargs["stream"]
elif "bytes" in kwargs:
self.stream = io.BytesIO(kwargs["bytes"])
else:
raise Exception("unknown stream source")
self.endianness = kwargs.get("endianness","little")
if self.endianness == "little":
self.normal_types = little_endian_types
elif self.endianness == "big":
self.normal_types = big_endian_types
def read(self, type_name='char'):
if isinstance(type_name,int):
return self.unpack('%ds'%type_name)[0]
type_name = type_name.lower()
if type_name.endswith('_t'):
type_name = type_name[:-2]
if type_name in special_types:
return getattr(self, special_types[type_name])()
if type_name in self.normal_types:
return self.unpack(self.normal_types[type_name])[0]
raise Exception("unknown type")
def unpack(self, fmt):
return struct.unpack(fmt, self.stream.read(struct.calcsize(fmt)))
def read_cstring(self):
string = ""
while True:
char = self.read('char')
if ord(char) == 0:
break
string += char.decode("utf-8")
return string
def read_string(self):
return self.unpack('%ds'%self.read('uint32_t'))[0].decode('utf-8')
def read_all(self):
return self.read(self.size() - self.get_position())
def read_int12(self):
return int.from_bytes(self.read(3),byteorder=self.endianness)
def read_float16(self):
data = self.read('uint16_t')
s = int((data >> 15) & 0x00000001) # sign
e = int((data >> 10) & 0x0000001f) # exponent
f = int(data & 0x000003ff) # fraction
if e == 0:
if f == 0:
return int(s << 31)
else:
while not (f & 0x00000400):
f = f << 1
e -= 1
e += 1
f &= ~0x00000400
#print(s,e,f)
elif e == 31:
if f == 0:
return int((s << 31) | 0x7f800000)
else:
return int((s << 31) | 0x7f800000 | (f << 13))
e = e + (127 -15)
f = f << 13
buf = struct.pack('I',int((s << 31) | (e << 23) | f))
return struct.unpack('f',buf)[0]
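    # Worked examples (added for clarity): the half-precision pattern 0x3C00
    # (s=0, e=15, f=0) decodes to 1.0, and 0xC000 (s=1, e=16, f=0) decodes
    # to -2.0.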
def tell(self):
return self.stream.tell()
def seek(self, pos, whence):
return self.stream.seek(pos, whence)
def get_position(self):
return self.tell()
def set_position(self, pos, whence=0):
return self.seek(pos, whence)
def size(self):
pos = self.get_position()
self.set_position(0,2)
end = self.get_position()
self.set_position(pos,0)
return end
def align(self, alignment=4):
self.set_position((self.get_position() + alignment - 1) // alignment * alignment)
| mit | -8,549,833,761,040,858,000 | 21.954198 | 84 | 0.60858 | false |
cjerdonek/open-rcv | openrcv/models.py | 1 | 6692 | #
# Copyright (c) 2014 Chris Jerdonek. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
"""Internal models that do not require JSON serialization.
Ballot Model
------------
For now, the "Ballot" object is not represented by a class. It is
simply a `(weight, choices)` 2-tuple, where `weight` is a number and
choices is a tuple of integer choice ID's.
"""
from contextlib import contextmanager
import logging
import tempfile
# The current module should not depend on any modules in openrcv.formats.
from openrcv import streams, utils
from openrcv.utils import ReprMixin
log = logging.getLogger(__name__)
def make_candidate_numbers(candidate_count):
"""Return an iterable of candidate numbers."""
return range(1, candidate_count + 1)
# TODO: allow ordering and compressing to be done separately.
def normalize_ballots_to(source, target):
"""Normalize ballots by ordering and "compressing" them.
This function orders the ballots lexicographically by the list of
choices on each ballot, and also uses the weight component to "compress"
ballots having identical choices.
Arguments:
source: source ballots resource.
target: target ballots resource.
TODO: incorporate some of the wording below into the above.
    This function takes a StreamInfo of internal ballots and returns a
new StreamInfo that represents an equivalent set of internal ballots,
but both "compressed" (by using the weight component) and ordered
lexicographically for readability by the list of choices on the ballot.
"""
# A dict mapping tuples of choices to the cumulative weight.
choices_dict = {}
with source.reading() as ballots:
for weight, choices in ballots:
try:
choices_dict[choices] += weight
except KeyError:
# Then we are adding the choices for the first time.
choices_dict[choices] = weight
sorted_choices = sorted(choices_dict.keys())
with target.writing() as gen:
for choices in sorted_choices:
weight = choices_dict[choices]
ballot = weight, choices
gen.send(ballot)
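# Illustrative example (added): given source ballots
#   (1, (2, 1)), (2, (1, 3)), (1, (1, 3))
# the target receives the compressed, lexicographically ordered ballots
#   (3, (1, 3)), (1, (2, 1))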
def normalize_ballots(ballots_resource):
"""Normalize the given ballots in place.
Arguments:
ballots_resource: a ballots resource.
"""
with ballots_resource.replacement() as temp_resource:
normalize_ballots_to(ballots_resource, temp_resource)
class BallotsResourceMixin(object):
def count_ballots(self):
with self.resource.reading() as gen:
return sum(weight for weight, choices in gen)
def normalize(self):
normalize_ballots(self)
class BallotsResource(streams.WrapperResource, BallotsResourceMixin):
pass
class CandidatesInfo(object):
"""Represents the collection of candidates."""
def __init__(self, candidates):
"""
Arguments:
candidates: an iterable of the candidate names.
"""
self.candidates = candidates
def from_number(self, number):
return self.candidates[number - 1]
def from_numbers(self, numbers):
return [self.from_number(n) for n in numbers]
class ContestInput(ReprMixin):
"""
Attributes:
ballots_resource: a BallotsResource object.
candidates: an iterable of the names of all candidates, in numeric
order of their ballot ID.
name: contest name.
seat_count: integer number of winners.
"""
# We include an underscore at the end of id_ since id() is a built-in.
# TODO: test defaults -- especially properties of default ballots resource.
# TODO: instead make seat_count part of the "rules".
    def __init__(self, name=None, notes=None, candidates=None, seat_count=None,
                 ballots_resource=None, normalize_ballots=None):
        if ballots_resource is None:
            ballots_resource = streams.NullStreamResource()
        if candidates is None:
            candidates = []
        if seat_count is None:
            seat_count = 1
        self.ballots_resource = ballots_resource
        self.candidates = candidates
        self.name = name
        self.normalize_ballots = normalize_ballots
        self.notes = notes
        self.seat_count = seat_count
def repr_info(self):
return "name=%r" % (self.name, )
def make_candidates_info(self):
"""Return a CandidatesInfo object."""
return CandidatesInfo(self.candidates)
# TODO: remove this.
def get_candidate_numbers(self):
"""Return an iterable of the candidate numbers."""
return make_candidate_numbers(len(self.candidates))
@property
def should_normalize_ballots(self):
# Default to normalizing.
return (self.normalize_ballots is None) or self.normalize_ballots
class ContestOutcome(object):
def __init__(self, interrupted=None):
self.interrupted = interrupted
class RoundResults(object):
"""Represents contest results."""
def __init__(self, candidates_info=None, elected=None, eliminated=None,
tied_last_place=None, totals=None, tie_break=None):
"""
Arguments:
totals: dict of candidate number to vote total.
"""
self.candidates_info = candidates_info
self.elected = elected
self.eliminated = eliminated
self.tie_break = tie_break
self.tied_last_place = tied_last_place
self.totals = totals
class ContestResults(ReprMixin):
"""Represents contest results."""
def __init__(self, outcome=None, rounds=None):
self.outcome = outcome
self.rounds = rounds
def repr_info(self):
return "rounds=%s" % (len(self.rounds), )
| mit | 6,903,355,429,168,822,000 | 30.866667 | 79 | 0.678571 | false |
avedaee/DIRAC | DataManagementSystem/Client/ReplicaContainers.py | 1 | 4513 | # $HeadURL$
__RCSID__ = "$Id$"
""" This module contains three classes associated to Replicas.
The Replica class contains simply three member elements: SE, PFN and Status and provides access methods for each (inluding type checking).
The CatalogReplica class inherits the Replica class.
The PhysicalReplica class inherits the Replica class and adds the 'size','checksum','online' and 'migrated' members.
In this context Replica refers to any copy of a file. This can be the first or an additional copy.
OBSOLETE?
K.C.
"""
import types
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Utilities.CFG import CFG
class Replica:
def __init__(self,pfn='',storageElement='',status=''):
# These are the possible attributes for a replica
if not type(pfn) in types.StringTypes:
raise AttributeError, "pfn should be string type"
self.pfn = str(pfn)
if not type(storageElement) in types.StringTypes:
raise AttributeError, "storageElement should be string type"
self.se = str(storageElement)
if not type(status) in types.StringTypes:
raise AttributeError, "status should be string type"
self.status = str(status)
def setPFN(self,pfn):
if not type(pfn) in types.StringTypes:
return S_ERROR("PFN should be %s and not %s" % (types.StringType,type(pfn)))
self.pfn = str(pfn)
return S_OK()
def setSE(self,se):
if not type(se) in types.StringTypes:
return S_ERROR("SE should be %s and not %s" % (types.StringType,type(se)))
self.se = str(se)
return S_OK()
def setStatus(self,status):
if not type(status) in types.StringTypes:
return S_ERROR("Status should be %s and not %s" % (types.StringType,type(status)))
self.status = str(status)
return S_OK()
def getPFN(self):
return S_OK(self.pfn)
def getSE(self):
return S_OK(self.se)
def getStatus(self):
return S_OK(self.status)
def digest(self):
""" Get short description string of replica and status
"""
return S_OK("%s:%s:%s" % (self.se,self.pfn,self.status))
def toCFG(self):
oCFG = CFG()
oCFG.createNewSection(self.se)
oCFG.setOption('%s/Status' % (self.se), self.status)
oCFG.setOption('%s/PFN' % (self.se), self.pfn)
return S_OK(str(oCFG))
class CatalogReplica(Replica):
def __init__(self,pfn='',storageElement='',status='U'):
Replica.__init__(self,pfn,storageElement,status)
class PhysicalReplica(Replica):
def __init__(self,pfn='',storageElement='',status='',size=0,checksum='',online=False,migrated=False):
# These are the possible attributes for a physical replica
Replica.__init__(self,pfn,storageElement,status)
try:
self.size = int(size)
except:
raise AttributeError, "size should be integer type"
if not type(checksum) in types.StringTypes:
raise AttributeError, "checksum should be string type"
self.checksum = str(checksum)
if not type(online) == types.BooleanType:
raise AttributeError, "online should be bool type"
self.online = online
if not type(migrated) == types.BooleanType:
raise AttributeError, "migrated should be bool type"
self.migrated = migrated
def setSize(self,size):
try:
self.size = int(size)
return S_OK()
except:
return S_ERROR("Size should be %s and not %s" % (types.IntType,type(size)))
def setChecksum(self,checksum):
if not type(checksum) in types.StringTypes:
return S_ERROR("Checksum should be %s and not %s" % (types.StringType,type(checksum)))
self.checksum = str(checksum)
return S_OK()
def setOnline(self,online):
if not type(online) == types.BooleanType:
return S_ERROR("online should be %s and not %s" % (types.BooleanType,type(online)))
self.online = online
return S_OK()
def setMigrated(self,migrated):
if not type(migrated) == types.BooleanType:
return S_ERROR("migrated should be %s and not %s" % (types.BooleanType,type(migrated)))
self.migrated = migrated
return S_OK()
def getSize(self):
return S_OK(self.size)
def getChecksum(self):
return S_OK(self.checksum)
def getOnline(self):
return S_OK(self.online)
def getMigrated(self):
return S_OK(self.migrated)
def digest(self):
online = 'NotOnline'
if self.online:
online = 'Online'
migrated = 'NotMigrated'
if self.migrated:
migrated = 'Migrated'
return S_OK("%s:%s:%d:%s:%s:%s" % (self.se,self.pfn,self.size,self.status,online,migrated))
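# Illustrative usage sketch (added; all values are made up):
#   r = PhysicalReplica(pfn='srm://se.example/file', storageElement='EXAMPLE-disk',
#                       status='Active', size=1024, checksum='1a2b3c4d',
#                       online=True, migrated=False)
#   r.digest()['Value'] == 'EXAMPLE-disk:srm://se.example/file:1024:Active:Online:NotMigrated'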
| gpl-3.0 | 2,198,887,395,661,303,000 | 31.007092 | 143 | 0.670286 | false |
erikrose/oedipus | oedipus/results.py | 1 | 4275 | class SearchResults(object):
"""Results in the order in which they came out of Sphinx
Since Sphinx stores no non-numerical attributes, we have to reach into the
DB to pull them out.
"""
def __init__(self, type, ids, fields):
self.type = type
# Sphinx may return IDs of objects since deleted from the DB.
self.ids = ids
self.fields = fields # tuple
self.objects = dict(self._objects()) # {id: obj/tuple/dict, ...}
def _queryset(self):
"""Return a QuerySet of the objects parallel to the found docs."""
return self.type.objects.filter(id__in=self.ids)
def __iter__(self):
"""Iterate over results in the same order they came out of Sphinx."""
# Ripped off from elasticutils
return (self.objects[id] for id in self.ids if id in self.objects)
class DictResults(SearchResults):
"""Results as an iterable of dictionaries"""
def _dicts_with_ids(self):
"""Return an iterable of dicts with ``id`` attrs, each representing a matched DB object."""
fields = self.fields
# Append ID to the requested fields so we can keep track of object
# identity to sort by weight (or whatever Sphinx sorted by). We could
# optimize slightly by not prepending ID if the user already
# specifically asked for it, but then we'd have to keep track of its
# offset.
if fields and 'id' not in fields:
fields += ('id',)
# Get values rather than values_list, because we need to be able to
# find the ID afterward, and we don't want to have to go rooting around
# in the Django model to figure out what order the fields were declared
        # in, in the case that no fields were passed in.
return self._queryset().values(*fields)
def _objects(self):
"""Return an iterable of (document ID, dict) pairs."""
should_strip_ids = self.fields and 'id' not in self.fields
for d in self._dicts_with_ids():
id = d.pop('id') if should_strip_ids else d['id']
yield id, d
@classmethod
def content_for_fields(klass, result, fields, highlight_fields):
"""Returns a tuple with content values for highlight_fields.
:param result: A result generated by this class.
:param fields: Iterable of fields for a result from this class.
:param highlight_fields: Iterable of the fields to highlight.
This should be a subset of ``fields``.
:returns: Tuple with content in the field indexes specified by
``highlight_fields``.
:raises KeyError: If there is a field in ``highlight_fields``
that doesn't exist in ``fields``.
"""
return tuple(result[field] for field in highlight_fields)
class TupleResults(DictResults):
"""Results as an iterable of tuples, like Django's values_list()"""
def _objects(self):
"""Return an iterable of (document ID, tuple) pairs."""
for d in self._dicts_with_ids():
yield d['id'], tuple(d[k] for k in self.fields)
@classmethod
def content_for_fields(klass, result, fields, highlight_fields):
"""See ``DictResults.content_for_fields``.
:raises ValueError: If there is a field in
``highlight_fields`` that doesn't exist in ``fields``.
"""
return tuple(result[fields.index(field)]
for field in highlight_fields)
class ObjectResults(SearchResults):
"""Results as an iterable of Django model-like objects"""
def _objects(self):
"""Return an iterable of (document ID, model object) pairs."""
# Assuming the document ID is called "id" lets us depend on fewer
# Djangoisms than assuming it's the pk; we'd have to get
# self.type._meta to get the name of the pk.
return ((o.id, o) for o in self._queryset())
@classmethod
def content_for_fields(klass, result, fields, highlight_fields):
"""See ``DictResults.content_for_fields``.
:raises AttributeError: If there is a field in
``highlight_fields`` that doesn't exist in ``fields``.
"""
return tuple(getattr(result, field) for field in highlight_fields)
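# --- Usage sketch (added; the model name and ids are illustrative) ---
# Given some Django model ``Book`` and the ids Sphinx returned, the three
# result classes expose the same hits in different shapes:
#
#   DictResults(Book, ids=[3, 1, 2], fields=('title',))    # iterates dicts
#   TupleResults(Book, ids=[3, 1, 2], fields=('title',))   # iterates tuples
#   ObjectResults(Book, ids=[3, 1, 2], fields=())          # iterates Book objects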
| bsd-3-clause | 2,535,138,606,245,989 | 39.330189 | 99 | 0.629474 | false |
arjunasuresh3/Mypykoans | python 2/koans/about_monkey_patching.py | 1 | 1451 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Related to AboutOpenClasses in the Ruby Koans
#
from runner.koan import *
class AboutMonkeyPatching(Koan):
class Dog(object):
def bark(self):
return "WOOF"
def test_as_defined_dogs_do_bark(self):
fido = self.Dog()
self.assertEqual("WOOF", fido.bark())
# ------------------------------------------------------------------
# Add a new method to an existing class.
def test_after_patching_dogs_can_both_wag_and_bark(self):
def wag(self): return "HAPPY"
self.Dog.wag = wag
fido = self.Dog()
self.assertEqual("HAPPY", fido.wag())
self.assertEqual("WOOF", fido.bark())
# ------------------------------------------------------------------
def test_most_built_in_classes_cannot_be_monkey_patched(self):
try:
int.is_even = lambda self: (self % 2) == 0
except StandardError as ex:
self.assertMatch("can't set attributes of built-in/extension type 'int'", ex[0])
# ------------------------------------------------------------------
class MyInt(int): pass
    def test_subclasses_of_built_in_classes_can_be_monkey_patched(self):
self.MyInt.is_even = lambda self: (self % 2) == 0
self.assertEqual(False, self.MyInt(1).is_even())
self.assertEqual(True, self.MyInt(2).is_even())
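# Note (added): the asymmetry above can also be checked interactively;
# assigning to ``int.is_even`` fails with "can't set attributes of built-in/
# extension type 'int'", while the user-defined subclass MyInt accepts new
# attributes like any ordinary class.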
| mit | 6,103,702,846,106,463,000 | 29.229167 | 92 | 0.500345 | false |
tiagocoutinho/bliss | examples/ct2/card/acq1.py | 1 | 5788 | """
ESRF-BCU acquisition with internal master:
counter 11 counts the acquisition time (using internal clock);
counter 12 counts the number of points
"""
import os
import sys
import time
import pprint
import logging
import argparse
import datetime
import numpy
from bliss.controllers.ct2.card import (P201Card, Clock, Level, CtConfig, \
OutputSrc, CtClockSrc, CtGateSrc,
CtHardStartSrc, CtHardStopSrc)
def configure(device, channels):
device.request_exclusive_access()
device.set_interrupts()
device.reset()
device.software_reset()
# -------------------------------------------------------------------------
# Channel configuration (could be loaded from beacon, for example. We
# choose to hard code it here)
# -------------------------------------------------------------------------
# for counters we only care about clock source, gate source here. The rest
# will be up to the actual acquisition to setup according to the type of
# acquisition
for _, ch_nb in channels.items():
ct_config = CtConfig(clock_source=CtClockSrc(ch_nb % 5),
gate_source=CtGateSrc.GATE_CMPT,
# anything will do for the remaining fields. It
# will be properly setup in the acquisition slave
# setup
hard_start_source=CtHardStartSrc.SOFTWARE,
hard_stop_source=CtHardStopSrc.SOFTWARE,
reset_from_hard_soft_stop=False,
stop_from_hard_stop=False)
device.set_counter_config(ch_nb, ct_config)
# TODO: Set input and output channel configuration (TTL/NIM level, 50ohm,
# edge interrupt, etc)
# internal clock 100 Mhz
device.set_clock(Clock.CLK_100_MHz)
def prepare_master(device, acq_time, nb_points):
ct_11_config = CtConfig(clock_source=CtClockSrc.CLK_100_MHz,
gate_source=CtGateSrc.CT_12_GATE_ENVELOP,
hard_start_source=CtHardStartSrc.SOFTWARE,
hard_stop_source=CtHardStopSrc.CT_11_EQ_CMP_11,
reset_from_hard_soft_stop=True,
stop_from_hard_stop=False)
ct_12_config = CtConfig(clock_source=CtClockSrc.INC_CT_11_STOP,
gate_source=CtGateSrc.GATE_CMPT,
hard_start_source=CtHardStartSrc.SOFTWARE,
hard_stop_source=CtHardStopSrc.CT_12_EQ_CMP_12,
reset_from_hard_soft_stop=True,
stop_from_hard_stop=True)
device.set_counters_config({11:ct_11_config, 12:ct_12_config})
device.set_counter_comparator_value(11, int(acq_time * 1E8))
device.set_counter_comparator_value(12, nb_points)
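# Worked example (added): with acq_time=0.5 the comparator for counter 11 is
# int(0.5 * 1E8) = 50000000 ticks of the 100 MHz clock; counter 12 increments
# once per counter-11 stop and ends the scan after nb_points points.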
def prepare_slaves(device, acq_time, nb_points, channels):
channel_nbs = list(channels.values())
    for _, ch_nb in channels.items():
ct_config = device.get_counter_config(ch_nb)
ct_config['gate_source'] = CtGateSrc.CT_11_GATE_ENVELOP
ct_config['hard_start_source'] = CtHardStartSrc.SOFTWARE
ct_config['hard_stop_source'] = CtHardStopSrc.CT_11_EQ_CMP_11
ct_config['reset_from_hard_soft_stop'] = True
ct_config['stop_from_hard_stop'] = False
        device.set_counter_config(ch_nb, ct_config)
# counter 11 will latch all active counters/channels
latch_sources = dict([(ct, 11) for ct in channel_nbs + [12]])
device.set_counters_latch_sources(latch_sources)
# make all counters enabled by software
device.enable_counters_software(channel_nbs + [11, 12])
def main():
def to_str(values, fmt="9d"):
fmt = "%" + fmt
return "[" + "".join([fmt % value for value in values]) + "]"
def out(msg=""):
sys.stdout.write(msg)
sys.stdout.flush()
channels = {
"I0": 3,
"V2F": 5,
"SCA": 6,
}
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--log-level', type=str, default='info',
help='log level (debug, info, warning, error) [default: info]')
parser.add_argument('--nb-points', type=int,
help='number of points', default=10)
parser.add_argument('--acq-time', type=float, default=1,
help='acquisition time')
args = parser.parse_args()
logging.basicConfig(level=getattr(logging, args.log_level.upper()),
format="%(asctime)s %(levelname)s %(name)s: %(message)s")
nb_points = args.nb_points
acq_time = args.acq_time
device = P201Card()
configure(device, channels)
prepare_master(device, acq_time, nb_points)
prepare_slaves(device, acq_time, nb_points, channels)
# start counting...
nap = 0.1
start_time = time.time()
device.start_counters_software(channels.values() + [11, 12])
while True:
time.sleep(nap)
counter_values = device.get_counters_values()
latch_values = device.get_latches_values()
status = device.get_counters_status()
if not status[12]['run']:
stop_time = time.time()
break
msg = "\r{0} {1}".format(to_str(counter_values), to_str(latch_values))
out(msg)
print("\n{0} {1}".format(to_str(counter_values), to_str(latch_values)))
print("Took ~{0}s (err: {1}s)".format(stop_time-start_time, nap))
pprint.pprint(device.get_counters_status())
device.relinquish_exclusive_access()
if __name__ == "__main__":
main()
| lgpl-3.0 | 7,870,878,238,078,015,000 | 35.866242 | 87 | 0.57671 | false |
eirannejad/pyRevit | pyrevitlib/pyrevit/versionmgr/upgrade.py | 1 | 1082 | """Perform upgrades between version, e.g. adding a new config parameter"""
#pylint: disable=W0611
import os
import os.path as op
from pyrevit.coreutils import appdata
def upgrade_user_config(user_config): #pylint: disable=W0613
    """Upgrade user configurations.
    Args:
        user_config (:obj:`pyrevit.userconfig.PyRevitConfig`): config object
    """
# upgrade value formats
for section in user_config:
for option in section:
setattr(section, option, getattr(section, option))
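# A concrete upgrade would follow the same getattr/setattr pattern as the loop
# above, e.g. copying a value from a renamed option (hypothetical names, not
# actual pyRevit config keys):
#   setattr(section, 'new_option_name', getattr(section, 'old_option_name'))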
def remove_leftover_temp_files():
"""4.8.5 had a bug that would create temp files with extension ..bak
This cleans them up
"""
univ_path = op.dirname(appdata.get_universal_data_file("X", 'bak'))
if op.exists(univ_path):
for entry in os.listdir(univ_path):
            entry_path = op.join(univ_path, entry)
            if op.isfile(entry_path) and entry.lower().endswith('..bak'):
                appdata.garbage_data_file(entry_path)
def upgrade_existing_pyrevit():
"""Upgrade existing pyRevit deployment."""
remove_leftover_temp_files()
| gpl-3.0 | -1,800,407,789,069,525,000 | 29.914286 | 76 | 0.663586 | false |
wtsi-hgi/docker-icat | tests/test_builds.py | 1 | 3327 | import logging
import os
import unittest
from abc import ABCMeta
from typing import List, Optional, Tuple, Union
import docker
from hgicommon.docker.client import create_client
from hgicommon.helpers import create_random_string
from hgicommon.testing import create_tests, TestUsingObject, ObjectTypeUsedInTest
from tests._common import setups
from useintest.predefined.irods import build_irods_service_controller_type, IrodsDockerisedService
_PROJECT_ROOT = "%s/.." % os.path.dirname(os.path.realpath(__file__))
class _TestICAT(TestUsingObject[ObjectTypeUsedInTest], metaclass=ABCMeta):
"""
Tests for an iCAT setup.
"""
@staticmethod
def _build_image(top_level_image: Tuple[Optional[Tuple], Tuple[str, str]]):
"""
Builds images bottom up, building the top level image last.
:param top_level_image: representation of the top level image
"""
image = top_level_image
images = [] # type: List[Tuple[str, str]]
while image is not None:
images.insert(0, image[1])
image = image[0]
docker_client = create_client()
for image in images:
tag = image[0]
directory = "%s/%s" % (_PROJECT_ROOT, image[1])
for line in docker_client.build(tag=tag, path=directory):
logging.debug(line)
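    # Shape of ``top_level_image`` (added note, derived from the loop above):
    # a linked chain of (parent, (tag, directory)) pairs, e.g. for a two-level
    # build where the base image itself has no parent:
    #   ((None, ("base:tag", "base-dir")), ("top:tag", "top-dir"))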
@staticmethod
def _run(command: Union[str, List[str]], service: IrodsDockerisedService) -> str:
"""
Run the given commend on the containerised server.
:param command: the command to run
:param service: the containerised service managing the iCAT
"""
container_id = service.container["Id"]
docker_client = create_client()
        exec_id = docker_client.exec_create(container_id, cmd=command)
        chunks = []
        for chunk in docker_client.exec_start(exec_id, stream=True):
logging.debug(chunk)
chunks.append(chunk.decode("utf-8"))
return "".join(chunks)
def setUp(self):
self.setup = self.get_object_to_test()
self.test_image_name = create_random_string(self.setup.image_name)
type(self)._build_image((self.setup.base_image_to_build, (self.test_image_name, self.setup.location)))
repository, tag = self.test_image_name.split(":")
ServiceController = build_irods_service_controller_type(repository, tag, self.setup.superclass)
self.service_controller = ServiceController()
self.service = self.service_controller.start_service()
def tearDown(self):
self.service_controller.stop_service(self.service)
client = docker.from_env()
client.images.remove(self.test_image_name, force=True)
def test_starts(self):
test_file_name = "test123"
self._run(["touch", test_file_name], self.service)
self._run(["iput", test_file_name], self.service)
self.assertIn(test_file_name, self._run(["ils"], self.service))
# Setup tests
globals().update(create_tests(_TestICAT, setups, lambda superclass, test_object: "TestICATWith%s"
% test_object.location.split("/")[1]))
# Fix for stupidity of test runners
del _TestICAT, TestUsingObject, create_tests
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | -837,343,037,614,442,200 | 38.141176 | 119 | 0.641118 | false |
tedlaz/pyted | misthodosia/m13a/fmy.py | 1 | 2802 | # -*- coding: utf-8 -*-
'''
Created on 16 Jan 2013
@author: tedlaz
'''
from utils import dec as d
def f13(poso):
poso = d(poso)
ekp = d(0)
if poso < d(21500):
ekp = d(2100)
elif poso < d(22500):
ekp = d(2000)
elif poso < d(23500):
ekp = d(1900)
elif poso < d(24500):
ekp = d(1800)
elif poso < d(25500):
ekp = d(1700)
elif poso < d(26500):
ekp = d(1600)
elif poso < d(27500):
ekp = d(1500)
elif poso < d(28500):
ekp = d(1400)
elif poso < d(29500):
ekp = d(1300)
elif poso < d(30500):
ekp = d(1200)
elif poso < d(31500):
ekp = d(1100)
elif poso < d(32500):
ekp = d(1000)
elif poso < d(33500):
ekp = d(900)
elif poso < d(34500):
ekp = d(800)
elif poso < d(35500):
ekp = d(700)
elif poso < d(36500):
ekp = d(600)
elif poso < d(37500):
ekp = d(500)
elif poso < d(38500):
ekp = d(400)
elif poso < d(39500):
ekp = d(300)
elif poso < d(40500):
ekp = d(200)
elif poso < d(41500):
ekp = d(100)
else:
ekp = d(0)
#print 'ekptosi',poso,ekp
foros = d(0)
if poso <= d(25000):
foros = d(poso * d(22) / d(100))
else:
foros = d(5500)
poso = poso - d(25000)
if poso <= d(17000):
foros += d(poso * d(32) / d(100))
else:
foros += d(5440)
poso = poso - d(17000)
foros += d(poso * d(42) / d(100))
foros = foros - ekp
if foros < d(0) :
foros = d(0)
return foros
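# Worked example (added, computed by hand from the brackets above):
#   f13(30000) -> 25000 * 22% + 5000 * 32% = 7100, minus the 1200 credit that
#   applies to incomes below 30500, i.e. a Decimal of 5900.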
def eea(poso):
poso = d(poso)
if poso <= d(12000):
synt = d(0)
elif poso <= d(20000):
synt = d(1)
elif poso <= d(50000):
synt = d(2)
elif poso <= d(100000):
synt = d(3)
else:
synt = d(4)
return d(poso * synt / d(100))
def eeap(poso,bar=1): #bar: 1 for a full period, 2 for a half period (e.g. vacation allowance)
poso = d(poso)
tb = d(14) * d(bar)
eis = poso * tb
ee = eea(eis)
return d(ee / tb)
def fp13(poso,bar=1):
tb = 14 * bar
eis = poso * tb
f = f13(eis)
#pf = d(f - d(0.015,3) * f)
return f / tb
def fpXrisis(poso,bar=1,xrisi=2013):
if xrisi == 2013:
return fp13(poso,bar)
else:
return 0
def eeaXrisis(poso,bar=1,xrisi=2013):
if xrisi == 2012 or xrisi == 2013:
return eeap(poso,bar)
else:
return d(0)
if __name__ == '__main__':
p = 2035.72
print fpXrisis(p,1,2013)
print eeaXrisis(p,1,2013) | gpl-3.0 | -9,046,916,456,819,823,000 | 21.389831 | 83 | 0.451414 | false |
perchrn/TaktPlayer | gui/configurationGui/CurveGui.py | 1 | 32833 | '''
Created on 27. dec. 2012
@author: pcn
'''
import wx
from widgets.PcnImageButton import PcnImageButton
from widgets.PcnCurveDisplayWindget import PcnCurveDisplayWidget
from widgets.PcnEvents import EVT_DOUBLE_CLICK_EVENT, EVT_MOUSE_MOVE_EVENT
from configurationGui.UtilityDialogs import updateChoices
from video.Curve import Curve
class CurveGui(object):
def __init__(self, mainConfing):
self._mainConfig = mainConfing
self._updateWidget = None
self._closeCallback = None
self._saveCallback = None
self._saveArgument = None
self._curveConfig = Curve()
self._helpBitmap = wx.Bitmap("graphics/helpButton.png") #@UndefinedVariable
self._helpPressedBitmap = wx.Bitmap("graphics/helpButtonPressed.png") #@UndefinedVariable
self._closeButtonBitmap = wx.Bitmap("graphics/closeButton.png") #@UndefinedVariable
self._closeButtonPressedBitmap = wx.Bitmap("graphics/closeButtonPressed.png") #@UndefinedVariable
self._updateButtonBitmap = wx.Bitmap("graphics/updateButton.png") #@UndefinedVariable
self._updateButtonPressedBitmap = wx.Bitmap("graphics/updateButtonPressed.png") #@UndefinedVariable
self._updateRedButtonBitmap = wx.Bitmap("graphics/updateButtonRed.png") #@UndefinedVariable
self._updateRedButtonPressedBitmap = wx.Bitmap("graphics/updateButtonRedPressed.png") #@UndefinedVariable
self._saveBigBitmap = wx.Bitmap("graphics/saveButtonBig.png") #@UndefinedVariable
self._saveBigPressedBitmap = wx.Bitmap("graphics/saveButtonBigPressed.png") #@UndefinedVariable
self._saveBigGreyBitmap = wx.Bitmap("graphics/saveButtonBigGrey.png") #@UndefinedVariable
self._deleteColourButtonBitmap = wx.Bitmap("graphics/deleteColourButton.png") #@UndefinedVariable
self._deleteColourButtonPressedBitmap = wx.Bitmap("graphics/deleteColourButtonPressed.png") #@UndefinedVariable
self._deletePointButtonBitmap = wx.Bitmap("graphics/deletePointButton.png") #@UndefinedVariable
self._deletePointButtonPressedBitmap = wx.Bitmap("graphics/deletePointButtonPressed.png") #@UndefinedVariable
def setupCurveGui(self, plane, sizer, parentSizer, parentClass):
self._mainCurveGuiPlane = plane
self._mainCurveGuiSizer = sizer
self._parentSizer = parentSizer
self._hideCurveCallback = parentClass.hideCurveGui
self._fixCurveGuiLayout = parentClass.fixCurveGuiLayout
headerLabel = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Curve editor:") #@UndefinedVariable
headerFont = headerLabel.GetFont()
headerFont.SetWeight(wx.BOLD) #@UndefinedVariable
headerLabel.SetFont(headerFont)
self._mainCurveGuiSizer.Add(headerLabel, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
        curveModeSizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
tmpText1 = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Curve mode:") #@UndefinedVariable
self._curveModeField = wx.ComboBox(self._mainCurveGuiPlane, wx.ID_ANY, size=(200, -1), choices=["Off"], style=wx.CB_READONLY) #@UndefinedVariable
updateChoices(self._curveModeField, self._curveConfig.getChoices, "Off", "Off")
curveModeButton = PcnImageButton(self._mainCurveGuiPlane, self._helpBitmap, self._helpPressedBitmap, (-1, -1), wx.ID_ANY, size=(17, 17)) #@UndefinedVariable
curveModeButton.Bind(wx.EVT_BUTTON, self._onCurveModeHelp) #@UndefinedVariable
curveModeSizer.Add(tmpText1, 1, wx.ALL, 5) #@UndefinedVariable
curveModeSizer.Add(self._curveModeField, 2, wx.ALL, 5) #@UndefinedVariable
curveModeSizer.Add(curveModeButton, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(curveModeSizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
self._mainCurveGuiPlane.Bind(wx.EVT_COMBOBOX, self._onCurveModeChosen, id=self._curveModeField.GetId()) #@UndefinedVariable
        self._curveSubModeSizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
tmpText1 = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Curve sub mode:") #@UndefinedVariable
self._curveSubModeField = wx.ComboBox(self._mainCurveGuiPlane, wx.ID_ANY, size=(200, -1), choices=["Linear"], style=wx.CB_READONLY) #@UndefinedVariable
updateChoices(self._curveSubModeField, self._curveConfig.getSubChoices, "Linear", "Linear")
curveSubModeButton = PcnImageButton(self._mainCurveGuiPlane, self._helpBitmap, self._helpPressedBitmap, (-1, -1), wx.ID_ANY, size=(17, 17)) #@UndefinedVariable
curveSubModeButton.Bind(wx.EVT_BUTTON, self._onCurveSubModeHelp) #@UndefinedVariable
self._curveSubModeSizer.Add(tmpText1, 1, wx.ALL, 5) #@UndefinedVariable
self._curveSubModeSizer.Add(self._curveSubModeField, 2, wx.ALL, 5) #@UndefinedVariable
self._curveSubModeSizer.Add(curveSubModeButton, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._curveSubModeSizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
self._mainCurveGuiPlane.Bind(wx.EVT_COMBOBOX, self._onCurveSubModeChosen, id=self._curveSubModeField.GetId()) #@UndefinedVariable
        self._curveChannelSizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
tmpText1 = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Edit channel:") #@UndefinedVariable
self._curveChannelField = wx.ComboBox(self._mainCurveGuiPlane, wx.ID_ANY, size=(200, -1), choices=["Red"], style=wx.CB_READONLY) #@UndefinedVariable
updateChoices(self._curveChannelField, None, "Red", "Red", ["Red", "Green", "Blue"])
curveChannelButton = PcnImageButton(self._mainCurveGuiPlane, self._helpBitmap, self._helpPressedBitmap, (-1, -1), wx.ID_ANY, size=(17, 17)) #@UndefinedVariable
curveChannelButton.Bind(wx.EVT_BUTTON, self._onCurveChannelHelp) #@UndefinedVariable
self._curveChannelSizer.Add(tmpText1, 1, wx.ALL, 5) #@UndefinedVariable
self._curveChannelSizer.Add(self._curveChannelField, 2, wx.ALL, 5) #@UndefinedVariable
self._curveChannelSizer.Add(curveChannelButton, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._curveChannelSizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
self._mainCurveGuiPlane.Bind(wx.EVT_COMBOBOX, self._onCurveChannelChosen, id=self._curveChannelField.GetId()) #@UndefinedVariable
        self._curveGraphicsSizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
self._curveGraphicsLabel = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Curve graph:") #@UndefinedVariable
self._curveGraphicsDisplay = PcnCurveDisplayWidget(self._mainCurveGuiPlane)
curveGraphicsValueButton = PcnImageButton(self._mainCurveGuiPlane, self._helpBitmap, self._helpPressedBitmap, (-1, -1), wx.ID_ANY, size=(17, 17)) #@UndefinedVariable
curveGraphicsValueButton.Bind(wx.EVT_BUTTON, self._onCurveGraphicsHelp) #@UndefinedVariable
self._curveGraphicsDisplay.Bind(wx.EVT_BUTTON, self._onCurveSingleClick) #@UndefinedVariable
self._curveGraphicsDisplay.Bind(EVT_DOUBLE_CLICK_EVENT, self._onCurveDoubleClick) #@UndefinedVariable
self._curveGraphicsDisplay.Bind(EVT_MOUSE_MOVE_EVENT, self._onMouseMove) #@UndefinedVariable
self._curveGraphicsSizer.Add(self._curveGraphicsLabel, 1, wx.ALL, 5) #@UndefinedVariable
self._curveGraphicsSizer.Add(self._curveGraphicsDisplay, 2, wx.ALL, 5) #@UndefinedVariable
self._curveGraphicsSizer.Add(curveGraphicsValueButton, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._curveGraphicsSizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
        self._pointSelectSizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
self._pointSelectLabel = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Select:") #@UndefinedVariable
self._pointSelectSlider = wx.Slider(plane, wx.ID_ANY, minValue=0, maxValue=255, size=(200, -1)) #@UndefinedVariable
self._pointSelectDisplay = wx.StaticText(plane, wx.ID_ANY, "1", size=(30,-1)) #@UndefinedVariable
self._pointSelectSizer.Add(self._pointSelectLabel, 1, wx.ALL, 5) #@UndefinedVariable
self._pointSelectSizer.Add(self._pointSelectSlider, 2, wx.ALL, 5) #@UndefinedVariable
self._pointSelectSizer.Add(self._pointSelectDisplay, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._pointSelectSizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
        self._pointPositionSizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
self._pointPositionLabel = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Position:") #@UndefinedVariable
self._pointPositionSlider = wx.Slider(plane, wx.ID_ANY, minValue=0, maxValue=255, size=(200, -1)) #@UndefinedVariable
self._pointPositionDisplay = wx.StaticText(plane, wx.ID_ANY, "1", size=(30,-1)) #@UndefinedVariable
self._pointPositionSizer.Add(self._pointPositionLabel, 1, wx.ALL, 5) #@UndefinedVariable
self._pointPositionSizer.Add(self._pointPositionSlider, 2, wx.ALL, 5) #@UndefinedVariable
self._pointPositionSizer.Add(self._pointPositionDisplay, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._pointPositionSizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
        self._pointValue1Sizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
self._pointValue1Label = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Red:") #@UndefinedVariable
self._pointValue1Slider = wx.Slider(plane, wx.ID_ANY, minValue=0, maxValue=255, size=(200, -1)) #@UndefinedVariable
self._pointValue1Display = wx.StaticText(plane, wx.ID_ANY, "1", size=(30,-1)) #@UndefinedVariable
self._pointValue1Sizer.Add(self._pointValue1Label, 1, wx.ALL, 5) #@UndefinedVariable
self._pointValue1Sizer.Add(self._pointValue1Slider, 2, wx.ALL, 5) #@UndefinedVariable
self._pointValue1Sizer.Add(self._pointValue1Display, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._pointValue1Sizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
        self._pointValue2Sizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
self._pointValue2Label = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Green:") #@UndefinedVariable
self._pointValue2Slider = wx.Slider(plane, wx.ID_ANY, minValue=0, maxValue=255, size=(200, -1)) #@UndefinedVariable
self._pointValue2Display = wx.StaticText(plane, wx.ID_ANY, "1", size=(30,-1)) #@UndefinedVariable
self._pointValue2Sizer.Add(self._pointValue2Label, 1, wx.ALL, 5) #@UndefinedVariable
self._pointValue2Sizer.Add(self._pointValue2Slider, 2, wx.ALL, 5) #@UndefinedVariable
self._pointValue2Sizer.Add(self._pointValue2Display, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._pointValue2Sizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
        self._pointValue3Sizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
self._pointValue3Label = wx.StaticText(self._mainCurveGuiPlane, wx.ID_ANY, "Blue:") #@UndefinedVariable
self._pointValue3Slider = wx.Slider(plane, wx.ID_ANY, minValue=0, maxValue=255, size=(200, -1)) #@UndefinedVariable
self._pointValue3Display = wx.StaticText(plane, wx.ID_ANY, "1", size=(30,-1)) #@UndefinedVariable
self._pointValue3Sizer.Add(self._pointValue3Label, 1, wx.ALL, 5) #@UndefinedVariable
self._pointValue3Sizer.Add(self._pointValue3Slider, 2, wx.ALL, 5) #@UndefinedVariable
self._pointValue3Sizer.Add(self._pointValue3Display, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._pointValue3Sizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
self._selectSliderId = self._pointSelectSlider.GetId()
self._selectedPointId = 0
self._positionSliderId = self._pointPositionSlider.GetId()
self._value1SliderId = self._pointValue1Slider.GetId()
self._value2SliderId = self._pointValue2Slider.GetId()
self._value3SliderId = self._pointValue3Slider.GetId()
plane.Bind(wx.EVT_SLIDER, self._onSlide) #@UndefinedVariable
"""Buttons"""
        self._buttonsSizer = wx.BoxSizer(wx.HORIZONTAL) #@UndefinedVariable
closeButton = PcnImageButton(self._mainCurveGuiPlane, self._closeButtonBitmap, self._closeButtonPressedBitmap, (-1, -1), wx.ID_ANY, size=(55, 17)) #@UndefinedVariable
closeButton.Bind(wx.EVT_BUTTON, self._onCloseButton) #@UndefinedVariable
self._saveButton = PcnImageButton(self._mainCurveGuiPlane, self._updateButtonBitmap, self._updateButtonPressedBitmap, (-1, -1), wx.ID_ANY, size=(67, 17)) #@UndefinedVariable
self._saveButton.Bind(wx.EVT_BUTTON, self._onSaveButton) #@UndefinedVariable
self._deleteButton = PcnImageButton(self._mainCurveGuiPlane, self._deletePointButtonBitmap, self._deletePointButtonPressedBitmap, (-1, -1), wx.ID_ANY, size=(97, 17)) #@UndefinedVariable
self._deleteButton.Bind(wx.EVT_BUTTON, self._onDeleteButton) #@UndefinedVariable
self._buttonsSizer.Add(closeButton, 0, wx.ALL, 5) #@UndefinedVariable
self._buttonsSizer.Add(self._saveButton, 0, wx.ALL, 5) #@UndefinedVariable
self._buttonsSizer.Add(self._deleteButton, 0, wx.ALL, 5) #@UndefinedVariable
self._mainCurveGuiSizer.Add(self._buttonsSizer, proportion=0, flag=wx.EXPAND) #@UndefinedVariable
def _onCurveModeHelp(self, event):
text = "Selects curve mode.\n"
text += "\n"
text += "Off:\tNo curve modifications are done.\n"
        text += "All:\tOne curve controls all channels.\n"
        text += "Threshold:\tSets colours for different levels. Use BW input.\n"
text += "RGB:\tOne curve for each RGB colour.\n"
text += "HSV:\tOne curve for each HSV channel.\n"
dlg = wx.MessageDialog(self._mainCurveGuiPlane, text, 'Curve mode help', wx.OK|wx.ICON_INFORMATION) #@UndefinedVariable
dlg.ShowModal()
dlg.Destroy()
def _onCurveModeChosen(self, event):
updateChoices(self._curveModeField, self._curveConfig.getChoices, self._curveModeField.GetValue(), "Off")
self._curveConfig.changeModeString(self._curveModeField.GetValue())
self._onCurveChannelChosen(None)
if((self._curveConfig.getMode() == Curve.Threshold) or (self._curveConfig.getMode() == Curve.Off)):
self._mainCurveGuiSizer.Hide(self._curveSubModeSizer)
else:
self._mainCurveGuiSizer.Show(self._curveSubModeSizer)
self._autoUpdateSliders()
self._updateCurveGraph()
def _onCurveSubModeHelp(self, event):
text = "Selects how we edit the curve.\n"
text += "\n"
        text += "Linear:\tAdd points to define a curve.\n"
        text += "Curve:\tAdd points to define a bent curve.\n"
text += "Array:\tDraw the curve pixel by pixel.\n"
dlg = wx.MessageDialog(self._mainCurveGuiPlane, text, 'Curve sub mode help', wx.OK|wx.ICON_INFORMATION) #@UndefinedVariable
dlg.ShowModal()
dlg.Destroy()
def _onCurveSubModeChosen(self, event):
updateChoices(self._curveSubModeField, self._curveConfig.getSubChoices, self._curveSubModeField.GetValue(), "Linear")
self._curveConfig.changeSubModeString(self._curveSubModeField.GetValue())
self._autoUpdateSliders()
self._updateCurveGraph()
def _onCurveChannelHelp(self, event):
if(self._curveConfig.getMode() == Curve.HSV):
text = "Selects which channel we are editing now.\n"
text += "\n"
text += "Hue:\tEdits hue curve. (Colour rotation.)\n"
text += "Saturation:\tEdits saturation curve.\n"
text += "Value:\tEdits value curve.\n"
else:
text = "Selects which channel we are editing now.\n"
text += "\n"
text += "Red:\tEdits red colour curve.\n"
text += "Green:\tEdits green colour curve.\n"
text += "Blue:\tEdits blue colour curve.\n"
dlg = wx.MessageDialog(self._mainCurveGuiPlane, text, 'Curve sub mode help', wx.OK|wx.ICON_INFORMATION) #@UndefinedVariable
dlg.ShowModal()
dlg.Destroy()
def _onCurveChannelChosen(self, event):
if(self._curveConfig.getMode() == Curve.HSV):
self._mainCurveGuiSizer.Show(self._curveChannelSizer)
updateChoices(self._curveChannelField, None, self._curveChannelField.GetValue(), "Hue", ["Hue", "Saturation", "Value"])
elif(self._curveConfig.getMode() == Curve.RGB):
self._mainCurveGuiSizer.Show(self._curveChannelSizer)
updateChoices(self._curveChannelField, None, self._curveChannelField.GetValue(), "Red", ["Red", "Green", "Blue"])
else:
self._mainCurveGuiSizer.Hide(self._curveChannelSizer)
self._autoUpdateSliders()
self._fixCurveGuiLayout()
def _onCurveGraphicsHelp(self, event):
        if(self._curveConfig.getSubMode() == Curve.Linear):
            text = "Shows the curve\n"
            text += "\n"
            text += "Add points by double clicking.\n"
            text += "Select and drag points with left button."
        elif(self._curveConfig.getSubMode() == Curve.Curve):
            text = "Shows the curve\n"
            text += "\n"
            text += "Add points by double clicking.\n"
            text += "Select and drag points with left button."
        elif(self._curveConfig.getSubMode() == Curve.Array):
            text = "Shows the curve\n"
            text += "\n"
            text += "Set point(s) with left button."
        else:
            text = "Shows the curve."
dlg = wx.MessageDialog(self._mainCurveGuiPlane, text, 'Curve display help', wx.OK|wx.ICON_INFORMATION) #@UndefinedVariable
dlg.ShowModal()
dlg.Destroy()
def getSubId(self):
if(self._curveConfig.getMode() == Curve.Off):
return -1
if(self._curveConfig.getMode() == Curve.All):
return -1
channelString = self._curveChannelField.GetValue()
if((channelString == "Red") or (channelString == "Hue")):
return 0
if((channelString == "Green") or (channelString == "Saturation")):
return 1
if((channelString == "Blue") or (channelString == "Value")):
return 2
def _onSlide(self, event):
sliderId = event.GetEventObject().GetId()
curveMode = self._curveConfig.getMode()
if(sliderId == self._selectSliderId):
if(curveMode == Curve.Threshold):
self._updateThresholdId(False)
            elif((curveMode != Curve.Off) and (self._curveConfig.getSubMode() != Curve.Array)):
self._updatePointId(False)
else:
self._hideSliders(curveMode == Curve.Off)
elif(sliderId == self._positionSliderId):
value = self._pointPositionSlider.GetValue()
self._pointPositionDisplay.SetLabel(str(value))
if(curveMode == Curve.Threshold):
self._updateThresholdSetting(value, None, None, None)
else:
self._updatePointSetting(value, None)
elif(sliderId == self._value1SliderId):
value = self._pointValue1Slider.GetValue()
if(curveMode == Curve.Threshold):
self._pointValue1Display.SetLabel("%02X" %(value))
self._updateThresholdSetting(None, value, None, None)
else:
self._pointValue1Display.SetLabel(str(value))
self._updatePointSetting(None, value)
elif(sliderId == self._value2SliderId):
value = self._pointValue2Slider.GetValue()
self._pointValue2Display.SetLabel("%02X" %(value))
self._updateThresholdSetting(None, None, value, None)
elif(sliderId == self._value3SliderId):
value = self._pointValue3Slider.GetValue()
self._pointValue3Display.SetLabel("%02X" %(value))
self._updateThresholdSetting(None, None, None, value)
def _updateThresholdSetting(self, value, red, green, blue):
if(self._selectedPointId != None):
settingsList = self._curveConfig.getThresholdsSettings()
settingsListLen = len(settingsList)
if((self._selectedPointId >= 0) and (self._selectedPointId < settingsListLen)):
colour, xPos = settingsList[self._selectedPointId]
if(value != None):
settingsList[self._selectedPointId] = colour, value
elif(red != None):
newColour = (colour & 0x00ffff) + (red * 0x010000)
settingsList[self._selectedPointId] = newColour, xPos
elif(green != None):
newColour = (colour & 0xff00ff) + (green * 0x000100)
settingsList[self._selectedPointId] = newColour, xPos
elif(blue != None):
newColour = (colour & 0xffff00) + blue
settingsList[self._selectedPointId] = newColour, xPos
self._curveConfig.updateFromThresholdsSettings()
self._updateCurveGraph()
def _updatePointSetting(self, pos, value):
if(self._selectedPointId != None):
settingsList = self._curveConfig.getPoints(self.getSubId())[0]
settingsListLen = len(settingsList)
if((self._selectedPointId >= 0) and (self._selectedPointId < settingsListLen)):
xPos, yPos = settingsList[self._selectedPointId]
if(pos != None):
self._curveConfig.movePoint((xPos, yPos), (pos, yPos), self.getSubId())
elif(value != None):
self._curveConfig.movePoint((xPos, yPos), (xPos, value), self.getSubId())
self._updateCurveGraph()
def _autoUpdateSliders(self):
curveMode = self._curveConfig.getMode()
curveSubMode = self._curveConfig.getSubMode()
if(curveMode == Curve.Threshold):
self._updateThresholdId(True)
elif(curveMode == Curve.Off):
self._hideSliders(True)
elif(curveSubMode == Curve.Array):
self._hideSliders(False)
else:
self._updatePointId(True)
self._fixCurveGuiLayout()
def _hideSliders(self, isOff):
self._mainCurveGuiSizer.Hide(self._pointSelectSizer)
if(isOff == True):
self._mainCurveGuiSizer.Hide(self._pointPositionSizer)
self._mainCurveGuiSizer.Hide(self._pointValue1Sizer)
else:
self._mainCurveGuiSizer.Show(self._pointPositionSizer)
self._mainCurveGuiSizer.Show(self._pointValue1Sizer)
self._pointValue1Label.SetLabel("Value:")
self._mainCurveGuiSizer.Hide(self._pointValue2Sizer)
self._mainCurveGuiSizer.Hide(self._pointValue3Sizer)
self._buttonsSizer.Hide(self._deleteButton)
def _updateThresholdId(self, forceUpdate):
settingsList = self._curveConfig.getThresholdsSettings()
settingsListLen = len(settingsList)
if(forceUpdate == True):
thresholdId = self._selectedPointId
self._selectedPointId = -1
else:
value = self._pointSelectSlider.GetValue()
thresholdId = int((float(value) / 256) * settingsListLen)
self._mainCurveGuiSizer.Show(self._pointSelectSizer)
self._mainCurveGuiSizer.Show(self._pointPositionSizer)
self._pointValue1Label.SetLabel("Red:")
self._mainCurveGuiSizer.Show(self._pointValue1Sizer)
self._mainCurveGuiSizer.Show(self._pointValue2Sizer)
self._mainCurveGuiSizer.Show(self._pointValue3Sizer)
self._deleteButton.setBitmaps(self._deleteColourButtonBitmap, self._deleteColourButtonPressedBitmap)
self._buttonsSizer.Show(self._deleteButton)
if(thresholdId >= settingsListLen):
thresholdId = settingsListLen - 1
if(thresholdId != self._selectedPointId):
self._selectedPointId = thresholdId
self._pointSelectSlider.SetValue(int((thresholdId + 0.5)*256/settingsListLen))
self._pointSelectDisplay.SetLabel(str(thresholdId+1))
colour, xPos = settingsList[thresholdId]
red = (int(colour)&0xff0000) / 0x010000
green = (int(colour)&0x00ff00) / 0x000100
blue = (int(colour)&0x0000ff)
self._pointPositionSlider.SetValue(int(xPos))
self._pointPositionDisplay.SetLabel(str(xPos))
self._pointValue1Slider.SetValue(red)
self._pointValue1Display.SetLabel("%02X" %(red))
self._pointValue2Slider.SetValue(green)
self._pointValue2Display.SetLabel("%02X" %(green))
self._pointValue3Slider.SetValue(blue)
self._pointValue3Display.SetLabel("%02X" %(blue))
def _updatePointId(self, forceUpdate):
subId = self.getSubId()
curveMode = self._curveConfig.getMode()
self._mainCurveGuiSizer.Show(self._pointSelectSizer)
self._mainCurveGuiSizer.Show(self._pointPositionSizer)
self._pointValue1Label.SetLabel("Value:")
self._mainCurveGuiSizer.Show(self._pointValue1Sizer)
self._mainCurveGuiSizer.Hide(self._pointValue2Sizer)
self._mainCurveGuiSizer.Hide(self._pointValue3Sizer)
self._deleteButton.setBitmaps(self._deletePointButtonBitmap, self._deletePointButtonPressedBitmap)
self._buttonsSizer.Show(self._deleteButton)
if((subId == -1) and (curveMode != Curve.All)):
return
settingsList = self._curveConfig.getPoints(self.getSubId())[0]
settingsListLen = len(settingsList)
if(forceUpdate == True):
pointId = self._selectedPointId
self._selectedPointId = -1
else:
value = self._pointSelectSlider.GetValue()
pointId = int((float(value) / 256) * settingsListLen)
if(pointId >= settingsListLen):
pointId = settingsListLen - 1
if(pointId != self._selectedPointId):
self._selectedPointId = pointId
self._pointSelectSlider.SetValue(int((pointId + 0.5)*256/settingsListLen))
self._pointSelectDisplay.SetLabel(str(pointId+1))
xPos, yPos = settingsList[pointId]
self._pointPositionSlider.SetValue(int(xPos))
self._pointPositionDisplay.SetLabel(str(xPos))
self._pointValue1Slider.SetValue(yPos)
self._pointValue1Display.SetLabel("%d" %(yPos))
def _onCurveSingleClick(self, event):
self._curveConfig.drawingDone(self.getSubId())
self._updateCurveGraph()
self._autoUpdateSliders()
if((self._curveConfig.getSubMode() == Curve.Linear) or (self._curveConfig.getSubMode() == Curve.Curve)):
self._curveConfig.findActivePointId(self.getSubId(), self._curveGraphicsDisplay.getLastPos())
curveActivePoint = self._curveConfig.getActivePointId(self.getSubId())
if(curveActivePoint != None):
self._selectedPointId = curveActivePoint
self._updatePointId(True)
def _onCurveDoubleClick(self, event):
thresholdPointId = self._curveConfig.addPoint(self._curveGraphicsDisplay.getLastPos(), self.getSubId())
if(self._curveConfig.getMode() == Curve.Threshold):
self._curveConfig.updateFromThresholdsSettings()
self._selectedPointId = thresholdPointId
self._updateCurveGraph()
self._autoUpdateSliders()
if((self._curveConfig.getSubMode() == Curve.Linear) or (self._curveConfig.getSubMode() == Curve.Curve)):
self._curveConfig.findActivePointId(self.getSubId(), self._curveGraphicsDisplay.getLastPos())
curveActivePoint = self._curveConfig.getActivePointId(self.getSubId())
if(curveActivePoint != None):
self._selectedPointId = curveActivePoint
self._updatePointId(True)
def _onMouseMove(self, event):
if(event.mousePressed == True):
self._curveConfig.drawPoint(event.mousePosition, self.getSubId())
self._updateCurveGraph()
else:
self._curveConfig.drawingDone(-1)
if(self._curveConfig.getSubMode() == Curve.Array):
xPos, yPos = event.mousePosition
self._pointPositionSlider.SetValue(int(xPos))
self._pointPositionDisplay.SetLabel(str(xPos))
self._pointValue1Slider.SetValue(yPos)
self._pointValue1Display.SetLabel("%d" %(yPos))
def _updateCurveGraph(self):
self._curveGraphicsDisplay.drawCurve(self._curveConfig)
self._checkForUpdates()
def _onCloseButton(self, event):
if(self._closeCallback != None):
self._closeCallback()
self._hideCurveCallback()
def _onSaveButton(self, event):
curveString = self._curveConfig.getString()
if(self._updateWidget != None):
self._updateWidget.SetValue(curveString)
if(self._saveCallback):
if(self._saveArgument != None):
self._saveCallback(self._saveArgument, curveString)
else:
self._saveCallback(None)
self._lastSavedCurveString = curveString
self._checkForUpdates()
def _onDeleteButton(self, event):
curveMode = self._curveConfig.getMode()
curveSubMode = self._curveConfig.getSubMode()
if(curveMode == Curve.Threshold):
if(self._selectedPointId != None):
settingsList = self._curveConfig.getThresholdsSettings()
settingsListLen = len(settingsList)
if((self._selectedPointId >= 0) and (self._selectedPointId < settingsListLen)):
settingsList.pop(self._selectedPointId)
self._curveConfig.updateFromThresholdsSettings()
elif(curveMode == Curve.Off):
pass
elif(curveSubMode == Curve.Array):
pass
else:
if(self._selectedPointId != None):
settingsList = self._curveConfig.getPoints(self.getSubId())[0]
settingsListLen = len(settingsList)
if((self._selectedPointId >= 0) and (self._selectedPointId < settingsListLen)):
settingsList.pop(self._selectedPointId)
self._autoUpdateSliders()
self._updateCurveGraph()
def _checkForUpdates(self, event = None):
newCurveString = self._curveConfig.getString()
if(self._lastSavedCurveString != newCurveString):
if(self._saveArgument == None):
self._saveButton.setBitmaps(self._updateRedButtonBitmap, self._updateRedButtonPressedBitmap)
else:
self._saveButton.setBitmaps(self._saveBigBitmap, self._saveBigPressedBitmap)
else:
if(self._saveArgument == None):
self._saveButton.setBitmaps(self._updateButtonBitmap, self._updateButtonPressedBitmap)
else:
self._saveButton.setBitmaps(self._saveBigGreyBitmap, self._saveBigGreyBitmap)
def updateGui(self, curveConfigString, widget, closeCallback, saveCallback, saveArgument):
self._updateWidget = widget
self._closeCallback = closeCallback
self._saveCallback = saveCallback
self._saveArgument = saveArgument
self._lastSavedCurveString = curveConfigString
self._curveConfig.setString(curveConfigString)
updateChoices(self._curveModeField, self._curveConfig.getChoices, self._curveConfig.getChoices()[self._curveConfig.getMode()], "Off")
updateChoices(self._curveSubModeField, self._curveConfig.getSubChoices, self._curveConfig.getSubChoices()[self._curveConfig.getSubMode()], "Linear")
self._onCurveChannelChosen(None)
if((self._curveConfig.getMode() == Curve.Threshold) or (self._curveConfig.getMode() == Curve.Off)):
self._mainCurveGuiSizer.Hide(self._curveSubModeSizer)
else:
self._mainCurveGuiSizer.Show(self._curveSubModeSizer)
self._autoUpdateSliders()
self._updateCurveGraph()
self._checkForUpdates()
| gpl-2.0 | 7,028,322,448,341,386,000 | 58.1337 | 193 | 0.65559 | false |
xpclove/autofp | strategy/strategy_neutron_tof.py | 1 | 2601 | '''Strategy settings file. Note: this is an important file; be careful when modifying it.
Please keep the format.
All names are case-sensitive.
You can modify the parameter-group order in 'param_order' and the parameter groups in 'param_group'.
'''
strategy={
"neutron_tof":{
# task type
"type":"neutron_tof",
# param group format: 'group_name':[ group_member1,group_member2,...]
"param_group":{
'scale': ["Scale","Extinct"],
'zero': ["Transparency","Zero"],
'simple background': ["BACK[0]"],
'cell a,b,c': ["a-Pha","b-P","c-P"],
'W': ["W-Pr"],
'complex background': ["BACK"],
"UVW": ["Sig2-Pr", "Sig1-Pr","Sig0-Pr"],
"Asym": ["ALPH","BETA"],
'Y,X': ["Gam1-Pr","Gam2-Pr"],
'Atom x y z': ["X-Atom","Y-Atom","Z-Atom"],
'Pref,Bov': ["Pref","Bov"],
'Biso-Atom': ["Biso-Atom"],
'GausS,1G': ["Z1","GausS","1G"],
'Occ-Atom': ["Occ-Atom"],
'Anisotropic Thermal factors': ["B1","B2","B3"],
'D_H': ["D_HG2","D_HL","Shift"],
'S_L,D_L': ["PA", "S_L","D_L"],
#"Sysin","Displacement",
#"Dtt1", # == "Sysin","Displacement",
#"Gam0" # == "LorSiz","SZ",
#"LStr","LSiz","Str",
'instrument': ["Dtt2"#,"Sycos","Transparency"
#"Str",
],
"manual background": ["BCK"],
'ABS':["ABS"]
},
# param order format: [group_name1,group_name2,...]
'param_order': [
"scale",
"cell a,b,c",
"simple background" ,
"zero",
"Atom x y z",
"Asym",
"Biso-Atom",
"complex background" ,
"UVW",
"Y,X",
'D_H',
"Pref,Bov",
"GausS,1G",
"instrument",
"Occ-Atom",
"Anisotropic Thermal factors",
"ABS",
"manual background"
],
# target function, the valid variable : R_Factor["Rwp"], R_Factor["Rp"], R_Factor["Chi2"]
# MIN = minimum function
'target':'MIN=R_Factor["Rwp"]'
}
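# Usage sketch (added): a consumer would typically look the task up by type:
#   cfg = strategy["neutron_tof"]
#   for group_name in cfg["param_order"]:
#       members = cfg["param_group"][group_name]  # list of parameter name patterns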
} | gpl-3.0 | 4,291,717,651,290,399,000 | 39.65625 | 105 | 0.381392 | false |
Sorsly/subtle | google-cloud-sdk/platform/gsutil/third_party/apitools/run_pylint.py | 3 | 8173 | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Custom script to run PyLint on apitools codebase.
"Inspired" by the similar script in gcloud-python.
This runs pylint as a script via subprocess in two different
subprocesses. The first lints the production/library code
using the default rc file (PRODUCTION_RC). The second lints the
demo/test code using an rc file (TEST_RC) which allows more style
violations (hence it has a reduced number of style checks).
"""
import ConfigParser
import copy
import os
import subprocess
import sys
IGNORED_DIRECTORIES = [
'apitools/gen/testdata',
'samples/storage_sample/storage',
'venv',
]
IGNORED_FILES = [
'ez_setup.py',
'run_pylint.py',
'setup.py',
]
PRODUCTION_RC = 'default.pylintrc'
TEST_RC = 'reduced.pylintrc'
TEST_DISABLED_MESSAGES = [
'exec-used',
'invalid-name',
'missing-docstring',
'protected-access',
]
TEST_RC_ADDITIONS = {
'MESSAGES CONTROL': {
'disable': ',\n'.join(TEST_DISABLED_MESSAGES),
},
}
def read_config(filename):
"""Reads pylintrc config onto native ConfigParser object."""
config = ConfigParser.ConfigParser()
with open(filename, 'r') as file_obj:
config.readfp(file_obj)
return config
def make_test_rc(base_rc_filename, additions_dict, target_filename):
"""Combines a base rc and test additions into single file."""
main_cfg = read_config(base_rc_filename)
# Create fresh config for test, which must extend production.
test_cfg = ConfigParser.ConfigParser()
test_cfg._sections = copy.deepcopy(main_cfg._sections)
for section, opts in additions_dict.items():
curr_section = test_cfg._sections.setdefault(
section, test_cfg._dict())
for opt, opt_val in opts.items():
curr_val = curr_section.get(opt)
if curr_val is None:
raise KeyError('Expected to be adding to existing option.')
curr_section[opt] = '%s\n%s' % (curr_val, opt_val)
with open(target_filename, 'w') as file_obj:
test_cfg.write(file_obj)
def valid_filename(filename):
"""Checks if a file is a Python file and is not ignored."""
for directory in IGNORED_DIRECTORIES:
if filename.startswith(directory):
return False
return (filename.endswith('.py') and
filename not in IGNORED_FILES)
def is_production_filename(filename):
"""Checks if the file contains production code.
:rtype: boolean
:returns: Boolean indicating production status.
"""
return not ('demo' in filename or 'test' in filename or
filename.startswith('regression'))
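# For instance (added, illustrative paths): 'apitools/base/py/base_api.py'
# counts as production code, while 'apitools/base/py/base_api_test.py' and
# anything starting with 'regression' fall through to the reduced test rc.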
def get_files_for_linting(allow_limited=True, diff_base=None):
"""Gets a list of files in the repository.
By default, returns all files via ``git ls-files``. However, in some cases
uses a specific commit or branch (a so-called diff base) to compare
against for changed files. (This requires ``allow_limited=True``.)
To speed up linting on Travis pull requests against master, we manually
set the diff base to origin/master. We don't do this on non-pull requests
since origin/master will be equivalent to the currently checked out code.
One could potentially use ${TRAVIS_COMMIT_RANGE} to find a diff base but
this value is not dependable.
:type allow_limited: boolean
:param allow_limited: Boolean indicating if a reduced set of files can
be used.
:rtype: pair
    :returns: Tuple of the list of filenames to be linted and the diff base
              used.
"""
if os.getenv('TRAVIS') == 'true':
# In travis, don't default to master.
diff_base = None
if (os.getenv('TRAVIS_BRANCH') == 'master' and
os.getenv('TRAVIS_PULL_REQUEST') != 'false'):
# In the case of a pull request into master, we want to
# diff against HEAD in master.
diff_base = 'origin/master'
if diff_base is not None and allow_limited:
result = subprocess.check_output(['git', 'diff', '--name-only',
diff_base])
print 'Using files changed relative to %s:' % (diff_base,)
print '-' * 60
print result.rstrip('\n') # Don't print trailing newlines.
print '-' * 60
else:
print 'Diff base not specified, listing all files in repository.'
result = subprocess.check_output(['git', 'ls-files'])
return result.rstrip('\n').split('\n'), diff_base
def get_python_files(all_files=None, diff_base=None):
"""Gets a list of all Python files in the repository that need linting.
Relies on :func:`get_files_for_linting()` to determine which files should
be considered.
NOTE: This requires ``git`` to be installed and requires that this
is run within the ``git`` repository.
:type all_files: list or ``NoneType``
:param all_files: Optional list of files to be linted.
:rtype: tuple
:returns: A tuple containing two lists and a boolean. The first list
contains all production files, the next all test/demo files and
the boolean indicates if a restricted fileset was used.
"""
using_restricted = False
if all_files is None:
all_files, diff_base = get_files_for_linting(diff_base=diff_base)
using_restricted = diff_base is not None
library_files = []
non_library_files = []
for filename in all_files:
if valid_filename(filename):
if is_production_filename(filename):
library_files.append(filename)
else:
non_library_files.append(filename)
return library_files, non_library_files, using_restricted
def lint_fileset(filenames, rcfile, description):
"""Lints a group of files using a given rcfile."""
# Only lint filenames that exist. For example, 'git diff --name-only'
# could spit out deleted / renamed files. Another alternative could
# be to use 'git diff --name-status' and filter out files with a
# status of 'D'.
filenames = [filename for filename in filenames
if os.path.exists(filename)]
if filenames:
rc_flag = '--rcfile=%s' % (rcfile,)
pylint_shell_command = ['pylint', rc_flag] + filenames
status_code = subprocess.call(pylint_shell_command)
if status_code != 0:
error_message = ('Pylint failed on %s with '
'status %d.' % (description, status_code))
print >> sys.stderr, error_message
sys.exit(status_code)
else:
print 'Skipping %s, no files to lint.' % (description,)
def main(argv):
"""Script entry point. Lints both sets of files."""
diff_base = argv[1] if len(argv) > 1 else None
make_test_rc(PRODUCTION_RC, TEST_RC_ADDITIONS, TEST_RC)
library_files, non_library_files, using_restricted = get_python_files(
diff_base=diff_base)
try:
lint_fileset(library_files, PRODUCTION_RC, 'library code')
lint_fileset(non_library_files, TEST_RC, 'test and demo code')
except SystemExit:
if not using_restricted:
raise
message = 'Restricted lint failed, expanding to full fileset.'
print >> sys.stderr, message
all_files, _ = get_files_for_linting(allow_limited=False)
library_files, non_library_files, _ = get_python_files(
all_files=all_files)
lint_fileset(library_files, PRODUCTION_RC, 'library code')
lint_fileset(non_library_files, TEST_RC, 'test and demo code')
if __name__ == '__main__':
main(sys.argv)
| mit | -4,517,023,543,270,012,400 | 34.534783 | 78 | 0.650679 | false |
angr/angr | angr/analyses/decompiler/optimization_passes/mod_simplifier.py | 1 | 2880 | import logging
from ailment import Expr
from ... import AnalysesHub
from .engine_base import SimplifierAILEngine, SimplifierAILState
from .optimization_pass import OptimizationPass, OptimizationPassStage
_l = logging.getLogger(name=__name__)
class ModSimplifierAILEngine(SimplifierAILEngine):
def _ail_handle_Sub(self, expr):
operand_0 = self._expr(expr.operands[0])
operand_1 = self._expr(expr.operands[1])
x_0, c_0, x_1, c_1 = None, None, None, None
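        # Pattern being matched (added comment): expr has the shape
        # ``x - (x / c) * c`` where the inner division may be wrapped in a
        # Convert; when both x's and both constants agree, it folds to x % c.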
if isinstance(operand_1, Expr.BinaryOp) \
and isinstance(operand_1.operands[1], Expr.Const) \
and operand_1.op == 'Mul':
if isinstance(operand_1.operands[0], Expr.BinaryOp) \
and isinstance(operand_1.operands[0].operands[1], Expr.Const) \
and operand_1.operands[0].op in ['Div', 'DivMod']:
x_0 = operand_1.operands[0].operands[0]
x_1 = operand_0
c_0 = operand_1.operands[1]
c_1 = operand_1.operands[0].operands[1]
elif isinstance(operand_1.operands[0], Expr.Convert) \
and isinstance(operand_1.operands[0].operand, Expr.BinaryOp) \
and operand_1.operands[0].operand.op in ['Div', 'DivMod']:
x_0 = operand_1.operands[0].operand.operands[0]
x_1 = operand_0
c_0 = operand_1.operands[1]
c_1 = operand_1.operands[0].operand.operands[1]
if x_0 is not None and x_1 is not None and x_0 == x_1 and c_0.value == c_1.value:
return Expr.BinaryOp(expr.idx, 'Mod', [x_0, c_0], expr.signed, **expr.tags)
if (operand_0, operand_1) != (expr.operands[0], expr.operands[1]):
return Expr.BinaryOp(expr.idx, 'Sub', [operand_0, operand_1], expr.signed, **expr.tags)
return expr
def _ail_handle_Mod(self, expr): #pylint: disable=no-self-use
return expr
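# Illustrative effect (added): an AIL expression like ``rax - (rax / 10) * 10``
# is rewritten to ``rax % 10``; the pass below applies the engine to every
# block repeatedly until the block stops changing.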
class ModSimplifier(OptimizationPass):
ARCHES = ["X86", "AMD64"]
PLATFORMS = ["linux", "windows"]
STAGE = OptimizationPassStage.AFTER_GLOBAL_SIMPLIFICATION
def __init__(self, func, **kwargs):
super().__init__(func, **kwargs)
self.state = SimplifierAILState(self.project.arch)
self.engine = ModSimplifierAILEngine()
self.analyze()
def _check(self):
return True, None
def _analyze(self, cache=None):
for block in list(self._graph.nodes()):
new_block = block
old_block = None
while new_block != old_block:
old_block = new_block
new_block = self.engine.process(state=self.state.copy(), block=old_block.copy())
_l.debug("new block: %s", new_block.statements)
self._update_block(block, new_block)
AnalysesHub.register_default("ModSimplifier", ModSimplifier)
| bsd-2-clause | 3,081,194,925,430,433,000 | 35.923077 | 99 | 0.591667 | false |
pjz/Zappa | test_settings.py | 1 | 1325 | APP_MODULE = 'tests.test_app'
APP_FUNCTION = 'hello_world'
DJANGO_SETTINGS = None
DEBUG = 'True'
LOG_LEVEL = 'DEBUG'
SCRIPT_NAME = 'hello_world'
DOMAIN = None
API_STAGE = 'ttt888'
PROJECT_NAME = 'ttt888'
REMOTE_ENV='s3://lmbda/test_env.json'
## test_env.json
#{
# "hello": "world"
#}
#
AWS_EVENT_MAPPING = {
'arn:aws:s3:1': 'test_settings.aws_s3_event',
'arn:aws:sns:1': 'test_settings.aws_sns_event',
'arn:aws:dynamodb:1': 'test_settings.aws_dynamodb_event',
'arn:aws:kinesis:1': 'test_settings.aws_kinesis_event',
'arn:aws:sqs:1': 'test_settings.aws_sqs_event'
}
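# Note (added): the ARNs above are dummies for the test-suite. Zappa's handler
# is expected to route an incoming event whose source ARN matches a key to the
# dotted-path function named in the value, e.g. test_settings.aws_s3_event.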
ENVIRONMENT_VARIABLES={'testenv': 'envtest'}
AUTHORIZER_FUNCTION='test_settings.authorizer_event'
def prebuild_me():
print("This is a prebuild script!")
def callback(self):
print("this is a callback")
def aws_s3_event(event, content):
return "AWS S3 EVENT"
def aws_sns_event(event, content):
return "AWS SNS EVENT"
def aws_async_sns_event(arg1, arg2, arg3):
return "AWS ASYNC SNS EVENT"
def aws_dynamodb_event(event, content):
return "AWS DYNAMODB EVENT"
def aws_kinesis_event(event, content):
return "AWS KINESIS EVENT"
def aws_sqs_event(event, content):
return "AWS SQS EVENT"
def authorizer_event(event, content):
return "AUTHORIZER_EVENT"
def command():
print("command")
| mit | -1,091,853,218,482,797,700 | 18.485294 | 61 | 0.676981 | false |
BayesianLogic/blog | tools/blog_py_lexer/blog/lexer.py | 1 | 3373 | from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import *
class BlogLexer(RegexLexer):
name = 'BLOG'
aliases = ['blog']
filenames = ['*.blog', '*.dblog']
operators = ['\\-\\>', ':', '\\+', '\\-', '\\*', '/', '\\[', ']',
'\\{', '}', '!', '\\<', '\\>', '\\<=', '\\>=', '==', '!=',
'&', '\\|', '=\\>', '#', '\\^', '%', '@']
wordops = ['isEmptyString', 'succ', 'pred',
'prev', 'inv', 'det', 'min', 'max',
'round', 'transpose', 'sin', 'cos', 'tan',
'atan2', 'sum', 'vstack', 'eye', 'zeros',
'ones', 'toInt', 'toReal', 'diag', 'repmat',
'hstack', 'vstack', 'pi', 'trace']
deliminators = [',', ';', '\\(', '\\)', '=', '~']
keywords = ['extern','import','fixed','distinct','random','origin',
'param','type', 'forall', 'exists', 'obs', 'query',
'if', 'then', 'else', 'for', 'case', 'in']
types = ['Integer','Real','Boolean','NaturalNum','List','Map',
'Timestep','RealMatrix','IntegerMatrix']
distribs = ['TabularCPD', 'Distribution','Gaussian',
'UniformChoice', 'MultivarGaussian', 'Poisson',
                'Bernoulli', 'BooleanDistrib', 'Binomial', 'Beta', 'BoundedGeometric',
'Categorical', 'Dirichlet', 'EqualsCPD', 'Gamma', 'Geometric', 'Iota',
'LinearGaussian', 'MixtureDistrib', 'Multinomial',
                'NegativeBinomial', 'RoundedLogNormal', 'TabularInterp',
'UniformVector', 'UnivarGaussian',
'Exponential', 'UniformInt', 'UniformReal']
idname_reg = '[a-zA-Z_]\\w*'
def gen_regex(ops):
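        """Join the given token alternatives into a single regex alternation."""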
return "|".join(ops)
tokens = {
'root' : [
(r'//.*?\n', Comment.Single),
(r'(?s)/\*.*?\*/', Comment.Multiline),
('('+idname_reg+')(\\()', bygroups(Name.Function, Punctuation)),
('('+gen_regex(types)+')\\b', Keyword.Type),
('('+gen_regex(distribs)+')\\b', Name.Class),
('('+gen_regex(keywords)+')\\b', Keyword),
(gen_regex(operators), Operator),
('(' + gen_regex(wordops) +')\\b', Operator.Word),
('(true|false|null)\\b', Keyword.Constant),
('('+idname_reg+')\\b', Name),
(r'"(\\\\|\\"|[^"])*"', String),
            (gen_regex(delimiters), Punctuation),
(r'\d*\.\d+', Number.Float),
(r'\d+', Number.Integer),
(r'\s+', Text),
]
}
def run_tests():
tests = [
"type Person;",
"distinct Person Alice, Bob, P[100];",
"random Real x1_x2x3 ~ Gaussian(0, 1);\nrandom Real y ~ Gaussian(x, 1);",
"random type0 funcname(type1 x) =expression;\nrandom type0 funcname(type1 x) dependency-expression;",
"random NaturalNum x ~ Poisson(a);",
"param Real a: 0 < a & a < 10 ;"
"random Real funcname(type1 x);",
"1.0 + 2.0 * 3.0 - 4.0",
"Twice( 10.0 ) * 5.5",
"fixed NaturalNum[] c = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];",
"fixed NaturalNum[][] table = [1, 2, 3; 4, 5, 6];",
"fixed List<NaturalNum> a = List(1, 2, 3, 4, 5, 6);",
"fixed Map<Boolean, Real> map1 = {true -> 0.3, false -> 0.7};",
"Categorical<Boolean> cpd1 =Categorical({true -> 0.3, false -> 0.7});",
"List",
"/*abc */",
"""
/* Evidence for the Hidden Markov Model.
*/
"""
]
lexer = BlogLexer()
for test in tests:
print(test)
for token in (lexer.get_tokens(test)):
print(token)
if __name__ == '__main__':
run_tests()
| bsd-3-clause | 4,312,323,977,143,371,000 | 37.770115 | 105 | 0.501927 | false |
haematologic/cellcounter | cellcounter/accounts/views.py | 1 | 7496 | from braces.views import LoginRequiredMixin
from django.contrib import messages
from django.contrib.auth import authenticate, login
from django.contrib.auth.forms import PasswordChangeForm
from django.contrib.auth.forms import SetPasswordForm
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.utils.http import urlsafe_base64_decode
from django.utils.safestring import mark_safe
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic import FormView, UpdateView, DetailView, DeleteView
from ratelimit.exceptions import Ratelimited
from ratelimit.mixins import RatelimitMixin
from ratelimit.utils import is_ratelimited
from .forms import EmailUserCreationForm, PasswordResetForm
class RateLimitedFormView(FormView):
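    """Base FormView that raises Ratelimited once the configured per-key rate is exceeded."""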
ratelimit_key = 'ip'
ratelimit_block = True
ratelimit_rate = '1/h'
ratelimit_group = None
def dispatch(self, *args, **kwargs):
ratelimited = is_ratelimited(request=self.request,
group=self.ratelimit_group,
key=self.ratelimit_key,
rate=self.ratelimit_rate,
increment=False)
if ratelimited and self.ratelimit_block:
raise Ratelimited()
return super(RateLimitedFormView, self).dispatch(*args, **kwargs)
class RegistrationView(RateLimitedFormView):
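    """Sign-up view: creates the user, logs them in, and counts the request against the registration rate limit."""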
template_name = 'accounts/register.html'
form_class = EmailUserCreationForm
ratelimit_group = 'registration'
def form_valid(self, form):
user = form.save()
messages.success(self.request,
mark_safe(
"Successfully registered, you are now logged in! <a href='%s'>View your profile</a>" %
reverse('user-detail', kwargs={'pk': user.id})))
user = authenticate(username=form.cleaned_data['username'],
password=form.cleaned_data['password1'])
login(self.request, user)
is_ratelimited(request=self.request, group=self.ratelimit_group, key=self.ratelimit_key,
rate=self.ratelimit_rate, increment=True)
return super(RegistrationView, self).form_valid(form)
def get_success_url(self):
return reverse('new_count')
class PasswordChangeView(LoginRequiredMixin, FormView):
template_name = 'accounts/password_change.html'
form_class = PasswordChangeForm
def get_form_kwargs(self):
kwargs = super(PasswordChangeView, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def form_valid(self, form):
form.save()
messages.success(self.request, "Password changed successfully")
return HttpResponseRedirect(reverse('new_count'))
class UserDetailView(LoginRequiredMixin, DetailView):
model = User
context_object_name = 'user_detail'
template_name = 'accounts/user_detail.html'
def get_object(self, queryset=None):
if self.request.user.id == int(self.kwargs['pk']):
return super(UserDetailView, self).get_object()
else:
raise PermissionDenied
def get_context_data(self, **kwargs):
context = super(UserDetailView, self).get_context_data(**kwargs)
context['keyboards'] = self.object.keyboard_set.all().order_by('-is_primary')
return context
class UserDeleteView(LoginRequiredMixin, DeleteView):
model = User
context_object_name = 'user_object'
template_name = 'accounts/user_check_delete.html'
def get_object(self, queryset=None):
if self.request.user.id == int(self.kwargs['pk']):
return super(UserDeleteView, self).get_object()
else:
raise PermissionDenied
def get_success_url(self):
messages.success(self.request, "User account deleted")
return reverse('new_count')
class UserUpdateView(LoginRequiredMixin, UpdateView):
model = User
fields = ['first_name', 'last_name', 'email', ]
template_name = 'accounts/user_update.html'
def get_object(self, queryset=None):
if self.request.user.id == int(self.kwargs['pk']):
return super(UserUpdateView, self).get_object()
else:
raise PermissionDenied
def get_success_url(self):
messages.success(self.request, "User details updated")
return reverse('user-detail', kwargs={'pk': self.kwargs['pk']})
class PasswordResetView(RatelimitMixin, FormView):
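    """Sends password-reset emails, limited to five requests per hour per IP, without exposing form errors."""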
template_name = 'accounts/reset_form.html'
form_class = PasswordResetForm
ratelimit_rate = '5/h'
ratelimit_group = 'pwdreset'
ratelimit_key = 'ip'
ratelimit_block = True
def form_valid(self, form):
form.save(request=self.request)
messages.success(self.request, 'Reset email sent')
return super(PasswordResetView, self).form_valid(form)
def form_invalid(self, form):
"""Don't expose form errors to the user"""
return HttpResponseRedirect(self.get_success_url())
def get_success_url(self):
return reverse('new_count')
class PasswordResetConfirmView(FormView):
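    """Validates the uidb64/token pair from the reset link before accepting a new password."""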
template_name = 'accounts/reset_confirm.html'
form_class = SetPasswordForm
@method_decorator(sensitive_post_parameters())
def dispatch(self, request, *args, **kwargs):
return super(PasswordResetConfirmView, self).dispatch(request, *args, **kwargs)
@staticmethod
def valid_user(uidb64):
try:
uid = urlsafe_base64_decode(uidb64)
user = User.objects.get(pk=uid)
except (TypeError, ValueError, OverflowError, User.DoesNotExist):
return None
return user
@staticmethod
def valid_token(user, token):
if user is not None:
return default_token_generator.check_token(user, token)
else:
return False
def _valid_inputs(self, uidb64, token):
self.user_object = self.valid_user(uidb64)
return self.valid_token(self.user_object, token)
def get(self, request, *args, **kwargs):
if self._valid_inputs(self.kwargs['uidb64'], self.kwargs['token']):
form = self.get_form(self.get_form_class())
return self.render_to_response(self.get_context_data(form=form, validlink=True))
else:
return self.render_to_response(self.get_context_data(validlink=False))
def post(self, request, *args, **kwargs):
if self._valid_inputs(self.kwargs['uidb64'], self.kwargs['token']):
return super(PasswordResetConfirmView, self).post(request, *args, **kwargs)
else:
return self.render_to_response(self.get_context_data(validlink=False))
def get_form_kwargs(self):
kwargs = super(PasswordResetConfirmView, self).get_form_kwargs()
kwargs['user'] = self.user_object
return kwargs
def form_valid(self, form):
form.save()
messages.success(self.request, 'Password reset successfully')
return HttpResponseRedirect(reverse('new_count'))
def rate_limited(request, exception):
messages.error(request, 'You have been rate limited')
return HttpResponseRedirect(reverse('new_count'))
| mit | -4,984,765,481,467,892,000 | 36.293532 | 115 | 0.665555 | false |
rohitwaghchaure/digitales_erpnext | erpnext/accounts/report/budget_variance_report/budget_variance_report.py | 1 | 4804 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, msgprint
from frappe.utils import flt
from frappe.utils import formatdate
import time
from erpnext.accounts.utils import get_fiscal_year
from erpnext.controllers.trends import get_period_date_ranges, get_period_month_ranges
def execute(filters=None):
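	# Report entry point: returns columns plus one row per (cost center, account) with per-period target/actual/variance.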
if not filters: filters = {}
columns = get_columns(filters)
period_month_ranges = get_period_month_ranges(filters["period"], filters["fiscal_year"])
cam_map = get_costcenter_account_month_map(filters)
data = []
for cost_center, cost_center_items in cam_map.items():
for account, monthwise_data in cost_center_items.items():
row = [cost_center, account]
totals = [0, 0, 0]
for relevant_months in period_month_ranges:
period_data = [0, 0, 0]
for month in relevant_months:
month_data = monthwise_data.get(month, {})
for i, fieldname in enumerate(["target", "actual", "variance"]):
value = flt(month_data.get(fieldname))
period_data[i] += value
totals[i] += value
period_data[2] = period_data[0] - period_data[1]
row += period_data
totals[2] = totals[0] - totals[1]
row += totals
data.append(row)
return columns, sorted(data, key=lambda x: (x[0], x[1]))
def get_columns(filters):
for fieldname in ["fiscal_year", "period", "company"]:
if not filters.get(fieldname):
label = (" ".join(fieldname.split("_"))).title()
msgprint(_("Please specify") + ": " + label,
raise_exception=True)
columns = [_("Cost Center") + ":Link/Cost Center:120", _("Account") + ":Link/Account:120"]
group_months = False if filters["period"] == "Monthly" else True
for from_date, to_date in get_period_date_ranges(filters["period"], filters["fiscal_year"]):
for label in [_("Target") + " (%s)", _("Actual") + " (%s)", _("Variance") + " (%s)"]:
if group_months:
				label = label % (formatdate(from_date, format_string="MMM") + " - " + formatdate(to_date, format_string="MMM"))
else:
label = label % formatdate(from_date, format_string="MMM")
columns.append(label+":Float:120")
return columns + [_("Total Target") + ":Float:120", _("Total Actual") + ":Float:120",
_("Total Variance") + ":Float:120"]
# Get cost center & target details
def get_costcenter_target_details(filters):
return frappe.db.sql("""select cc.name, cc.distribution_id,
cc.parent_cost_center, bd.account, bd.budget_allocated
from `tabCost Center` cc, `tabBudget Detail` bd
where bd.parent=cc.name and bd.fiscal_year=%s and
cc.company=%s order by cc.name""" % ('%s', '%s'),
(filters.get("fiscal_year"), filters.get("company")), as_dict=1)
# Get target distribution details of accounts of cost center
def get_target_distribution_details(filters):
target_details = {}
for d in frappe.db.sql("""select bd.name, bdd.month, bdd.percentage_allocation
from `tabBudget Distribution Detail` bdd, `tabBudget Distribution` bd
where bdd.parent=bd.name and bd.fiscal_year=%s""", (filters["fiscal_year"]), as_dict=1):
target_details.setdefault(d.name, {}).setdefault(d.month, flt(d.percentage_allocation))
return target_details
# Get actual details from gl entry
def get_actual_details(filters):
ac_details = frappe.db.sql("""select gl.account, gl.debit, gl.credit,
gl.cost_center, MONTHNAME(gl.posting_date) as month_name
from `tabGL Entry` gl, `tabBudget Detail` bd
where gl.fiscal_year=%s and company=%s
and bd.account=gl.account and bd.parent=gl.cost_center""" % ('%s', '%s'),
(filters.get("fiscal_year"), filters.get("company")), as_dict=1)
cc_actual_details = {}
for d in ac_details:
cc_actual_details.setdefault(d.cost_center, {}).setdefault(d.account, []).append(d)
return cc_actual_details
def get_costcenter_account_month_map(filters):
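	# Build a {cost_center: {account: {month: {target, actual}}}} map combining budget targets and GL actuals.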
import datetime
costcenter_target_details = get_costcenter_target_details(filters)
tdd = get_target_distribution_details(filters)
actual_details = get_actual_details(filters)
cam_map = {}
for ccd in costcenter_target_details:
for month_id in range(1, 13):
month = datetime.date(2013, month_id, 1).strftime('%B')
cam_map.setdefault(ccd.name, {}).setdefault(ccd.account, {})\
.setdefault(month, frappe._dict({
"target": 0.0, "actual": 0.0
}))
tav_dict = cam_map[ccd.name][ccd.account][month]
month_percentage = tdd.get(ccd.distribution_id, {}).get(month, 0) \
if ccd.distribution_id else 100.0/12
tav_dict.target = flt(ccd.budget_allocated) * month_percentage / 100
for ad in actual_details.get(ccd.name, {}).get(ccd.account, []):
if ad.month_name == month:
tav_dict.actual += flt(ad.debit) - flt(ad.credit)
return cam_map
| agpl-3.0 | 4,049,678,227,760,201,700 | 36.826772 | 117 | 0.678601 | false |
lexman/tuttle | tests/test_log_follower.py | 1 | 8283 | # -*- coding: utf-8 -*-
from tests.functional_tests import isolate, run_tuttle_file
from cStringIO import StringIO
from tuttle.log_follower import LogTracer, LogsFollower
import logging
import sys
class CaptureOutputs(object):
"""
    Captures stdout and stderr
"""
def __init__(self):
self._oldout, self._olderr = sys.stdout, sys.stderr
def __enter__(self):
self._out = StringIO()
        sys.stdout, sys.stderr = self._out, self._out
return self
def __exit__(self, *args):
sys.stdout, sys.stderr = self._oldout, self._olderr
self.output = self._out.getvalue()
class TestLogFollower():
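    """Exercises LogTracer and LogsFollower by tailing files written to disk during each test."""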
@isolate([])
def test_log_single_file(self):
"""LogTracer should log the content of a file"""
with CaptureOutputs() as co:
logger = LogsFollower.get_logger()
lt = LogTracer(logger, logging.INFO, "test.log")
with open("test.log", "w") as f:
f.write("line 1\n")
f.write("line 2\n")
f.write("line 3\n")
lt.trace()
output = co.output
assert output.find("line 1") >= 0, output
assert output.find("line 2") >= 0, output
assert output.find("line 3") >= 0, output
@isolate([])
def test_log_should_not_double_carriage_return(self):
""" """
with CaptureOutputs() as co:
logger = LogsFollower.get_logger()
lt = LogTracer(logger, logging.INFO, "test.log")
with open("test.log", "w") as f:
f.write("line 1\n")
f.write("line 2\n")
lt.trace()
output = co.output
assert output.find("\n\n") == -1, output
@isolate([])
    def test_log_should_log_last_char_without_trailing_newline(self):
""" The last char of the file must be logged even if the
file does not finish with CR """
with CaptureOutputs() as co:
logger = LogsFollower.get_logger()
lt = LogTracer(logger, logging.INFO, "test.log")
with open("test.log", "w") as f:
f.write("line 1")
lt.trace()
output = co.output
assert output.find("line 1") >= 0, output
@isolate([])
def test_log_huge_file(self):
"""LogTracer should log the content of a big file in stdout"""
with CaptureOutputs() as co:
logger = LogsFollower.get_logger()
lt = LogTracer(logger, "namespace", "test.log")
with open("test.log", "w") as f:
for i in xrange(5000):
f.write("line {}\n".format(i))
while lt.trace():
pass
output = co.output
assert output.find("line 1") >= 0, output
assert output.find("line 2") >= 0, output
assert output.find("line 3") >= 0, output
assert output.find("line 4999") >= 0, output
@isolate([])
def test_log_multiple_files(self):
"""LogTracer should log the content of several files in stdout"""
with CaptureOutputs() as co:
lf = LogsFollower()
lf.follow_process("w1.stdout", "w1.stderr", "process1")
lf.follow_process("w2.stdout", "w2.stderr", "process2")
lf.follow_process("w3.stdout", "w3.stderr", "process3")
with open("w1.stdout", "w") as fo1, \
open("w1.stderr", "w") as fe1, \
open("w2.stdout", "w") as fo2, \
open("w2.stderr", "w") as fe2, \
open("w3.stdout", "w") as fo3, \
open("w3.stderr", "w") as fe3 :
for i in xrange(5000):
fo1.write("w1.stdout - line {}\n".format(i))
fe1.write("w1.stderr - line {}\n".format(i))
fo2.write("w2.stdout - line {}\n".format(i))
fe2.write("w2.stderr - line {}\n".format(i))
fo3.write("w3.stdout - line {}\n".format(i))
fe3.write("w3.stderr - line {}\n".format(i))
while lf.trace_logs():
pass
output = co.output
assert output.find("w1.stderr - line 1") >= 0, output
assert output.find("w1.stdout - line 1") >= 0, output
assert output.find("w2.stderr - line 1") >= 0, output
assert output.find("w2.stdout - line 1") >= 0, output
assert output.find("w3.stdout - line 1") >= 0, output
assert output.find("w3.stderr - line 1") >= 0, output
assert output.find("w1.stderr - line 4999") >= 0, output
assert output.find("w1.stdout - line 4999") >= 0, output
assert output.find("w2.stderr - line 4999") >= 0, output
assert output.find("w2.stdout - line 4999") >= 0, output
assert output.find("w3.stdout - line 4999") >= 0, output
assert output.find("w3.stderr - line 4999") >= 0, output
@isolate([])
def test_log_format(self):
"""logs should display log level and message"""
with CaptureOutputs() as co:
logger = LogsFollower.get_logger()
logger.info("MESSAGE")
assert co.output.find("MESSAGE") == 0, co.output
@isolate([])
def test_log_format_stdout_stderr(self):
"""logs should display log level and message"""
with CaptureOutputs() as co:
lf = LogsFollower()
lf.follow_process("stdout", "stderr", "process_id")
with open("stdout", "w") as fout, \
open("stderr", "w") as ferr:
fout.write("file stdout")
ferr.write("file stderr")
while lf.trace_logs():
pass
assert co.output.find("[process_id::stdout] file stdout") >= 0, co.output
assert co.output.find("[process_id::stderr] file stderr") >= 0, co.output
@isolate([])
def test_log_in_background(self):
"""Should log in background ans stop when foreground processing
is over"""
import time
with CaptureOutputs() as co:
lf = LogsFollower()
lf.follow_process("stdout", "stderr", "process_id")
lf.trace_in_background()
with open("stdout", "w") as fout, \
open("stderr", "w") as ferr:
fout.write("file stdout")
ferr.write("file stderr")
lf.terminate()
assert co.output.find("[process_id::stdout] file stdout") >= 0, co.output
assert co.output.find("[process_id::stderr] file stderr") >= 0, co.output
@isolate([])
def test_log_a_lot_in_background(self):
"""Should log in background ans stop when foreground processing
is over even with a lot a data"""
with CaptureOutputs() as co:
lf = LogsFollower()
lf.follow_process("stdout", "stderr", "process_id")
lf.trace_in_background()
with open("stdout", "w") as fout, \
open("stderr", "w") as ferr:
fout.write("file stdout")
ferr.write("file stderr")
for i in xrange(5000):
fout.write("stdout - line {}\n".format(i))
ferr.write("stderr - line {}\n".format(i))
lf.terminate()
assert co.output.find("[process_id::stdout] stdout - line 1") >= 0, co.output
assert co.output.find("[process_id::stderr] stderr - line 1") >= 0, co.output
assert co.output.find("[process_id::stdout] stdout - line 4999") >= 0, co.output
assert co.output.find("[process_id::stderr] stderr - line 4999") >= 0, co.output
@isolate([])
def test_thread_protection(self):
"""When a section of code using the LogsFollower is complete, the thread should stop"""
lf = LogsFollower()
lf.follow_process("stdout", "stderr", "process_id")
with lf.trace_in_background():
            assert lf._thread.is_alive(), "Background thread isn't running..."
with open("stdout", "w") as fout, \
open("stderr", "w") as ferr:
fout.write("file stdout")
ferr.write("file stderr")
        assert not lf._thread.is_alive(), "Background thread hasn't stopped!"
| mit | -3,655,338,017,070,619,600 | 39.802956 | 95 | 0.530243 | false |