repo_name (stringlengths 7–94) | repo_path (stringlengths 4–237) | repo_head_hexsha (stringlengths 40) | content (stringlengths 10–680k) | apis (stringlengths 2–680k) |
---|---|---|---|---|
wanderlog/posthog | ee/clickhouse/sql/person.py | a88b81d44ab31d262be07e84a85d045c4e28f2a3 | from ee.clickhouse.sql.clickhouse import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine
from ee.clickhouse.sql.table_engines import CollapsingMergeTree, ReplacingMergeTree
from ee.kafka_client.topics import KAFKA_PERSON, KAFKA_PERSON_DISTINCT_ID, KAFKA_PERSON_UNIQUE_ID
from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE
TRUNCATE_PERSON_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS person ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
DROP_PERSON_TABLE_SQL = f"DROP TABLE IF EXISTS person ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
TRUNCATE_PERSON_DISTINCT_ID_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS person_distinct_id ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
TRUNCATE_PERSON_DISTINCT_ID2_TABLE_SQL = (
f"TRUNCATE TABLE IF EXISTS person_distinct_id2 ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
)
PERSONS_TABLE = "person"
PERSONS_TABLE_BASE_SQL = """
CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
(
id UUID,
created_at DateTime64,
team_id Int64,
properties VARCHAR,
is_identified Int8,
is_deleted Int8 DEFAULT 0
{extra_fields}
) ENGINE = {engine}
"""
PERSONS_TABLE_ENGINE = lambda: ReplacingMergeTree(PERSONS_TABLE, ver="_timestamp")
PERSONS_TABLE_SQL = lambda: (
PERSONS_TABLE_BASE_SQL
+ """Order By (team_id, id)
{storage_policy}
"""
).format(
table_name=PERSONS_TABLE,
cluster=CLICKHOUSE_CLUSTER,
engine=PERSONS_TABLE_ENGINE(),
extra_fields=KAFKA_COLUMNS,
storage_policy=STORAGE_POLICY(),
)
KAFKA_PERSONS_TABLE_SQL = lambda: PERSONS_TABLE_BASE_SQL.format(
table_name="kafka_" + PERSONS_TABLE, cluster=CLICKHOUSE_CLUSTER, engine=kafka_engine(KAFKA_PERSON), extra_fields="",
)
# You must include the database here because of a bug in clickhouse
# related to https://github.com/ClickHouse/ClickHouse/issues/10471
PERSONS_TABLE_MV_SQL = """
CREATE MATERIALIZED VIEW {table_name}_mv ON CLUSTER '{cluster}'
TO {database}.{table_name}
AS SELECT
id,
created_at,
team_id,
properties,
is_identified,
is_deleted,
_timestamp,
_offset
FROM {database}.kafka_{table_name}
""".format(
table_name=PERSONS_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE,
)
GET_LATEST_PERSON_SQL = """
SELECT * FROM person JOIN (
SELECT id, max(_timestamp) as _timestamp, max(is_deleted) as is_deleted
FROM person
WHERE team_id = %(team_id)s
GROUP BY id
) as person_max ON person.id = person_max.id AND person._timestamp = person_max._timestamp
WHERE team_id = %(team_id)s
AND person_max.is_deleted = 0
{query}
"""
GET_LATEST_PERSON_ID_SQL = """
(select id from (
{latest_person_sql}
))
""".format(
latest_person_sql=GET_LATEST_PERSON_SQL
)
#
# person_distinct_id table - still use this in queries for now, but it will eventually be removed.
#
PERSONS_DISTINCT_ID_TABLE = "person_distinct_id"
PERSONS_DISTINCT_ID_TABLE_BASE_SQL = """
CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
(
distinct_id VARCHAR,
person_id UUID,
team_id Int64,
_sign Int8 DEFAULT 1,
is_deleted Int8 ALIAS if(_sign==-1, 1, 0)
{extra_fields}
) ENGINE = {engine}
"""
PERSONS_DISTINCT_ID_TABLE_SQL = lambda: (
PERSONS_DISTINCT_ID_TABLE_BASE_SQL
+ """Order By (team_id, distinct_id, person_id)
{storage_policy}
"""
).format(
table_name=PERSONS_DISTINCT_ID_TABLE,
cluster=CLICKHOUSE_CLUSTER,
engine=CollapsingMergeTree(PERSONS_DISTINCT_ID_TABLE, ver="_sign"),
extra_fields=KAFKA_COLUMNS,
storage_policy=STORAGE_POLICY(),
)
# :KLUDGE: We default is_deleted to 0 for backwards compatibility for when we drop `is_deleted` from message schema.
# Can't make DEFAULT if(_sign==-1, 1, 0) because Cyclic aliases error.
KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL = lambda: """
CREATE TABLE {table_name} ON CLUSTER '{cluster}'
(
distinct_id VARCHAR,
person_id UUID,
team_id Int64,
_sign Nullable(Int8),
is_deleted Nullable(Int8)
) ENGINE = {engine}
""".format(
table_name="kafka_" + PERSONS_DISTINCT_ID_TABLE,
cluster=CLICKHOUSE_CLUSTER,
engine=kafka_engine(KAFKA_PERSON_UNIQUE_ID),
)
# You must include the database here because of a bug in clickhouse
# related to https://github.com/ClickHouse/ClickHouse/issues/10471
PERSONS_DISTINCT_ID_TABLE_MV_SQL = """
CREATE MATERIALIZED VIEW {table_name}_mv ON CLUSTER '{cluster}'
TO {database}.{table_name}
AS SELECT
distinct_id,
person_id,
team_id,
coalesce(_sign, if(is_deleted==0, 1, -1)) AS _sign,
_timestamp,
_offset
FROM {database}.kafka_{table_name}
""".format(
table_name=PERSONS_DISTINCT_ID_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE,
)
#
# person_distinct_ids2 - new table!
#
PERSON_DISTINCT_ID2_TABLE = "person_distinct_id2"
PERSON_DISTINCT_ID2_TABLE_BASE_SQL = """
CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
(
team_id Int64,
distinct_id VARCHAR,
person_id UUID,
is_deleted Int8,
version Int64 DEFAULT 1
{extra_fields}
) ENGINE = {engine}
"""
PERSON_DISTINCT_ID2_TABLE_ENGINE = lambda: ReplacingMergeTree(PERSON_DISTINCT_ID2_TABLE, ver="version")
PERSON_DISTINCT_ID2_TABLE_SQL = lambda: (
PERSON_DISTINCT_ID2_TABLE_BASE_SQL
+ """
ORDER BY (team_id, distinct_id)
SETTINGS index_granularity = 512
"""
).format(
table_name=PERSON_DISTINCT_ID2_TABLE,
cluster=CLICKHOUSE_CLUSTER,
engine=PERSON_DISTINCT_ID2_TABLE_ENGINE(),
extra_fields=KAFKA_COLUMNS + "\n, _partition UInt64",
)
KAFKA_PERSON_DISTINCT_ID2_TABLE_SQL = lambda: PERSON_DISTINCT_ID2_TABLE_BASE_SQL.format(
table_name="kafka_" + PERSON_DISTINCT_ID2_TABLE,
cluster=CLICKHOUSE_CLUSTER,
engine=kafka_engine(KAFKA_PERSON_DISTINCT_ID),
extra_fields="",
)
# You must include the database here because of a bug in clickhouse
# related to https://github.com/ClickHouse/ClickHouse/issues/10471
PERSON_DISTINCT_ID2_MV_SQL = """
CREATE MATERIALIZED VIEW {table_name}_mv ON CLUSTER '{cluster}'
TO {database}.{table_name}
AS SELECT
team_id,
distinct_id,
person_id,
is_deleted,
version,
_timestamp,
_offset,
_partition
FROM {database}.kafka_{table_name}
""".format(
table_name=PERSON_DISTINCT_ID2_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE,
)
#
# Static Cohort
#
PERSON_STATIC_COHORT_TABLE = "person_static_cohort"
PERSON_STATIC_COHORT_BASE_SQL = """
CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
(
id UUID,
person_id UUID,
cohort_id Int64,
team_id Int64
{extra_fields}
) ENGINE = {engine}
"""
PERSON_STATIC_COHORT_TABLE_ENGINE = lambda: ReplacingMergeTree(PERSON_STATIC_COHORT_TABLE, ver="_timestamp")
PERSON_STATIC_COHORT_TABLE_SQL = lambda: (
PERSON_STATIC_COHORT_BASE_SQL
+ """Order By (team_id, cohort_id, person_id, id)
{storage_policy}
"""
).format(
table_name=PERSON_STATIC_COHORT_TABLE,
cluster=CLICKHOUSE_CLUSTER,
engine=PERSON_STATIC_COHORT_TABLE_ENGINE(),
storage_policy=STORAGE_POLICY(),
extra_fields=KAFKA_COLUMNS,
)
TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL = (
f"TRUNCATE TABLE IF EXISTS {PERSON_STATIC_COHORT_TABLE} ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
)
INSERT_PERSON_STATIC_COHORT = (
f"INSERT INTO {PERSON_STATIC_COHORT_TABLE} (id, person_id, cohort_id, team_id, _timestamp) VALUES"
)
#
# Other queries
#
GET_TEAM_PERSON_DISTINCT_IDS = """
SELECT distinct_id, argMax(person_id, _timestamp) as person_id
FROM (
SELECT distinct_id, person_id, max(_timestamp) as _timestamp
FROM person_distinct_id
WHERE team_id = %(team_id)s %(extra_where)s
GROUP BY person_id, distinct_id, team_id
HAVING max(is_deleted) = 0
)
GROUP BY distinct_id
"""
# Query for distinct ids using the new table; used once the 0003_fill_person_distinct_id2 migration is complete
GET_TEAM_PERSON_DISTINCT_IDS_NEW_TABLE = """
SELECT distinct_id, argMax(person_id, version) as person_id
FROM person_distinct_id2
WHERE team_id = %(team_id)s %(extra_where)s
GROUP BY distinct_id
HAVING argMax(is_deleted, version) = 0
"""
GET_PERSON_IDS_BY_FILTER = """
SELECT DISTINCT p.id
FROM ({latest_person_sql}) AS p
INNER JOIN ({GET_TEAM_PERSON_DISTINCT_IDS}) AS pdi ON p.id = pdi.person_id
WHERE team_id = %(team_id)s
{distinct_query}
{limit}
{offset}
""".format(
latest_person_sql=GET_LATEST_PERSON_SQL,
distinct_query="{distinct_query}",
limit="{limit}",
offset="{offset}",
GET_TEAM_PERSON_DISTINCT_IDS="{GET_TEAM_PERSON_DISTINCT_IDS}",
)
INSERT_PERSON_SQL = """
INSERT INTO person (id, created_at, team_id, properties, is_identified, _timestamp, _offset, is_deleted) SELECT %(id)s, %(created_at)s, %(team_id)s, %(properties)s, %(is_identified)s, %(_timestamp)s, 0, 0
"""
INSERT_PERSON_DISTINCT_ID = """
INSERT INTO person_distinct_id SELECT %(distinct_id)s, %(person_id)s, %(team_id)s, %(_sign)s, now(), 0 VALUES
"""
INSERT_PERSON_DISTINCT_ID2 = """
INSERT INTO person_distinct_id2 (distinct_id, person_id, team_id, is_deleted, version, _timestamp, _offset, _partition) SELECT %(distinct_id)s, %(person_id)s, %(team_id)s, 0, %(version)s, now(), 0, 0 VALUES
"""
DELETE_PERSON_BY_ID = """
INSERT INTO person (id, created_at, team_id, properties, is_identified, _timestamp, _offset, is_deleted) SELECT %(id)s, %(created_at)s, %(team_id)s, %(properties)s, %(is_identified)s, %(_timestamp)s, 0, 1
"""
DELETE_PERSON_EVENTS_BY_ID = """
ALTER TABLE events DELETE
WHERE distinct_id IN (
SELECT distinct_id FROM person_distinct_id WHERE person_id=%(id)s AND team_id = %(team_id)s
)
AND team_id = %(team_id)s
"""
INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID = """
INSERT INTO {cohort_table} SELECT generateUUIDv4(), actor_id, %(cohort_id)s, %(team_id)s, %(_timestamp)s, 0 FROM (
SELECT actor_id FROM ({query})
)
"""
INSERT_COHORT_ALL_PEOPLE_SQL = """
INSERT INTO {cohort_table} SELECT generateUUIDv4(), id, %(cohort_id)s, %(team_id)s, %(_timestamp)s, 0 FROM (
SELECT id FROM (
{latest_person_sql}
) as person INNER JOIN (
SELECT person_id, distinct_id FROM ({GET_TEAM_PERSON_DISTINCT_IDS}) WHERE person_id IN ({content_sql})
) as pdi ON person.id = pdi.person_id
WHERE team_id = %(team_id)s
GROUP BY id
)
"""
GET_DISTINCT_IDS_BY_PROPERTY_SQL = """
SELECT distinct_id
FROM (
{GET_TEAM_PERSON_DISTINCT_IDS}
)
WHERE person_id IN
(
SELECT id
FROM (
SELECT id, argMax(properties, person._timestamp) as properties, max(is_deleted) as is_deleted
FROM person
WHERE team_id = %(team_id)s
GROUP BY id
HAVING is_deleted = 0
)
WHERE {filters}
)
"""
GET_DISTINCT_IDS_BY_PERSON_ID_FILTER = """
SELECT distinct_id
FROM ({GET_TEAM_PERSON_DISTINCT_IDS})
WHERE {filters}
"""
GET_PERSON_PROPERTIES_COUNT = """
SELECT tupleElement(keysAndValues, 1) as key, count(*) as count
FROM person
ARRAY JOIN JSONExtractKeysAndValuesRaw(properties) as keysAndValues
WHERE team_id = %(team_id)s
GROUP BY tupleElement(keysAndValues, 1)
ORDER BY count DESC, key ASC
"""
GET_ACTORS_FROM_EVENT_QUERY = """
SELECT
{id_field} AS actor_id
{matching_events_select_statement}
FROM ({events_query})
GROUP BY actor_id
{limit}
{offset}
"""
COMMENT_DISTINCT_ID_COLUMN_SQL = (
lambda: f"ALTER TABLE person_distinct_id ON CLUSTER '{CLICKHOUSE_CLUSTER}' COMMENT COLUMN distinct_id 'skip_0003_fill_person_distinct_id2'"
)
SELECT_PERSON_PROP_VALUES_SQL = """
SELECT
value,
count(value)
FROM (
SELECT
{property_field} as value
FROM
person
WHERE
team_id = %(team_id)s AND
is_deleted = 0 AND
{property_field} IS NOT NULL AND
{property_field} != ''
ORDER BY id DESC
LIMIT 100000
)
GROUP BY value
ORDER BY count(value) DESC
LIMIT 20
"""
SELECT_PERSON_PROP_VALUES_SQL_WITH_FILTER = """
SELECT
value,
count(value)
FROM (
SELECT
{property_field} as value
FROM
person
WHERE
team_id = %(team_id)s AND
is_deleted = 0 AND
{property_field} ILIKE %(value)s
ORDER BY id DESC
LIMIT 100000
)
GROUP BY value
ORDER BY count(value) DESC
LIMIT 20
"""
| [((1110, 1161), 'ee.clickhouse.sql.table_engines.ReplacingMergeTree', 'ReplacingMergeTree', (['PERSONS_TABLE'], {'ver': '"""_timestamp"""'}), "(PERSONS_TABLE, ver='_timestamp')\n", (1128, 1161), False, 'from ee.clickhouse.sql.table_engines import CollapsingMergeTree, ReplacingMergeTree\n'), ((4949, 5009), 'ee.clickhouse.sql.table_engines.ReplacingMergeTree', 'ReplacingMergeTree', (['PERSON_DISTINCT_ID2_TABLE'], {'ver': '"""version"""'}), "(PERSON_DISTINCT_ID2_TABLE, ver='version')\n", (4967, 5009), False, 'from ee.clickhouse.sql.table_engines import CollapsingMergeTree, ReplacingMergeTree\n'), ((6462, 6526), 'ee.clickhouse.sql.table_engines.ReplacingMergeTree', 'ReplacingMergeTree', (['PERSON_STATIC_COHORT_TABLE'], {'ver': '"""_timestamp"""'}), "(PERSON_STATIC_COHORT_TABLE, ver='_timestamp')\n", (6480, 6526), False, 'from ee.clickhouse.sql.table_engines import CollapsingMergeTree, ReplacingMergeTree\n'), ((1430, 1446), 'ee.clickhouse.sql.clickhouse.STORAGE_POLICY', 'STORAGE_POLICY', ([], {}), '()\n', (1444, 1446), False, 'from ee.clickhouse.sql.clickhouse import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine\n'), ((1592, 1618), 'ee.clickhouse.sql.clickhouse.kafka_engine', 'kafka_engine', (['KAFKA_PERSON'], {}), '(KAFKA_PERSON)\n', (1604, 1618), False, 'from ee.clickhouse.sql.clickhouse import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine\n'), ((3307, 3366), 'ee.clickhouse.sql.table_engines.CollapsingMergeTree', 'CollapsingMergeTree', (['PERSONS_DISTINCT_ID_TABLE'], {'ver': '"""_sign"""'}), "(PERSONS_DISTINCT_ID_TABLE, ver='_sign')\n", (3326, 3366), False, 'from ee.clickhouse.sql.table_engines import CollapsingMergeTree, ReplacingMergeTree\n'), ((3419, 3435), 'ee.clickhouse.sql.clickhouse.STORAGE_POLICY', 'STORAGE_POLICY', ([], {}), '()\n', (3433, 3435), False, 'from ee.clickhouse.sql.clickhouse import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine\n'), ((3980, 4016), 'ee.clickhouse.sql.clickhouse.kafka_engine', 'kafka_engine', (['KAFKA_PERSON_UNIQUE_ID'], {}), '(KAFKA_PERSON_UNIQUE_ID)\n', (3992, 4016), False, 'from ee.clickhouse.sql.clickhouse import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine\n'), ((5559, 5597), 'ee.clickhouse.sql.clickhouse.kafka_engine', 'kafka_engine', (['KAFKA_PERSON_DISTINCT_ID'], {}), '(KAFKA_PERSON_DISTINCT_ID)\n', (5571, 5597), False, 'from ee.clickhouse.sql.clickhouse import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine\n'), ((6831, 6847), 'ee.clickhouse.sql.clickhouse.STORAGE_POLICY', 'STORAGE_POLICY', ([], {}), '()\n', (6845, 6847), False, 'from ee.clickhouse.sql.clickhouse import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine\n')] |
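The *_SQL callables above are lambdas so that CLICKHOUSE_CLUSTER and STORAGE_POLICY are only read when the DDL is rendered, while the query constants keep %(name)s placeholders for the ClickHouse client to bind. A minimal rendering sketch, not taken from the repository (the direct % substitution is only to show the final SQL text; PostHog's client normally binds the parameters):

from ee.clickhouse.sql.person import (PERSONS_TABLE_SQL, KAFKA_PERSONS_TABLE_SQL,
                                      GET_TEAM_PERSON_DISTINCT_IDS_NEW_TABLE)

print(PERSONS_TABLE_SQL())        # CREATE TABLE ... ENGINE = ReplacingMergeTree(...) for `person`
print(KAFKA_PERSONS_TABLE_SQL())  # Kafka-engine ingestion table that person_mv reads from
print(GET_TEAM_PERSON_DISTINCT_IDS_NEW_TABLE % {"team_id": 2, "extra_where": ""})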
davmre/sigvisa | scripts/dump_training_data.py | 91a1f163b8f3a258dfb78d88a07f2a11da41bd04 | from sigvisa.learn.train_coda_models import get_shape_training_data
import numpy as np
X, y, evids = get_shape_training_data(runid=4, site="AS12", chan="SHZ", band="freq_2.0_3.0", phases=["P",], target="amp_transfer", max_acost=np.float("inf"), min_amp=-2)
np.savetxt("X.txt", X)
np.savetxt("y.txt", y)
np.savetxt("evids.txt", evids)
| [((258, 280), 'numpy.savetxt', 'np.savetxt', (['"""X.txt"""', 'X'], {}), "('X.txt', X)\n", (268, 280), True, 'import numpy as np\n'), ((281, 303), 'numpy.savetxt', 'np.savetxt', (['"""y.txt"""', 'y'], {}), "('y.txt', y)\n", (291, 303), True, 'import numpy as np\n'), ((304, 334), 'numpy.savetxt', 'np.savetxt', (['"""evids.txt"""', 'evids'], {}), "('evids.txt', evids)\n", (314, 334), True, 'import numpy as np\n'), ((229, 244), 'numpy.float', 'np.float', (['"""inf"""'], {}), "('inf')\n", (237, 244), True, 'import numpy as np\n')] |
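The three dumps above are plain-text matrices, so they can be reloaded directly; a small round-trip check (not part of the original script):

import numpy as np

X = np.loadtxt("X.txt")
y = np.loadtxt("y.txt")
evids = np.loadtxt("evids.txt")
assert X.shape[0] == y.shape[0] == evids.shape[0]   # one row, label and evid per training example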
wbrandenburger/ShadowDetection | shdw/tools/welford.py | 2a58df93e32e8baf99806555655a7daf7e68735a | import math
import numpy as np
# plt.style.use('seaborn')
# plt.rcParams['figure.figsize'] = (12, 8)
def welford(x_array):
k = 0
M = 0
S = 0
for x in x_array:
k += 1
Mnext = M + (x - M) / k
S = S + (x - M)*(x - Mnext)
M = Mnext
return (M, S/(k-1))
class Welford(object):
""" Implements Welford's algorithm for computing a running mean
and standard deviation as described at:
http://www.johndcook.com/standard_deviation.html
can take single values or iterables
Properties:
mean - returns the mean
std - returns the std
meanfull- returns the mean and std of the mean
Usage:
>>> foo = Welford()
>>> foo(range(100))
>>> foo
<Welford: 49.5 +- 29.0114919759>
>>> foo([1]*1000)
>>> foo
<Welford: 5.40909090909 +- 16.4437417146>
>>> foo.mean
5.409090909090906
>>> foo.std
16.44374171455467
>>> foo.meanfull
(5.409090909090906, 0.4957974674244838)
"""
def __init__(self,lst=None, num=1, mean=0, std=0):
self._num = num
self._mean = mean
self._std = math.pow(std, 2)*(num-1)
self.__call__(lst)
@property
def num(self):
return self._num
@property
def mean(self):
return self._mean
@property
def std(self):
if self._num==1:
return 0
return math.sqrt(self._std/(self._num-1))
@property
def meanfull(self):
return self._mean, self._std/math.sqrt(self._num)
@property
def stats(self):
return self._mean, self.std
def update(self, lst):
if lst is None:
return
if hasattr(lst, "__iter__"):
for x in lst:
self.update_welford(x)
else:
self.update_welford(lst)
def update_welford(self, x):
if x is None:
return
new_mean = self._mean + (x - self._mean)*1./self._num
new_std = self._std + (x - self._mean)*(x - new_mean)
self._num += 1
self._mean, self._std = new_mean, new_std
def consume(self,lst):
if isinstance(lst, np.ndarray):
npfunc = np.vectorize(self.update)
npfunc(lst)
else:
lst = iter(lst)
for x in lst:
self.update(x)
def __call__(self,x):
if hasattr(x,"__iter__"):
self.consume(x)
else:
self.update(x)
def __repr__(self):
return "<Stats: {} +- {}>".format(self.mean, self.std)
| [((1475, 1513), 'math.sqrt', 'math.sqrt', (['(self._std / (self._num - 1))'], {}), '(self._std / (self._num - 1))\n', (1484, 1513), False, 'import math\n'), ((1195, 1211), 'math.pow', 'math.pow', (['std', '(2)'], {}), '(std, 2)\n', (1203, 1211), False, 'import math\n'), ((2268, 2293), 'numpy.vectorize', 'np.vectorize', (['self.update'], {}), '(self.update)\n', (2280, 2293), True, 'import numpy as np\n'), ((1586, 1606), 'math.sqrt', 'math.sqrt', (['self._num'], {}), '(self._num)\n', (1595, 1606), False, 'import math\n')] |
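Both entry points above implement the same one-pass recurrence (M_k = M_{k-1} + (x_k - M_{k-1})/k, S_k = S_{k-1} + (x_k - M_{k-1})(x_k - M_k)). A quick cross-check of the standalone welford() against numpy, illustrative and not part of the original module:

import numpy as np
from shdw.tools.welford import welford   # module path taken from the row above

data = np.random.randn(10000)
mean, var = welford(data)
assert abs(mean - data.mean()) < 1e-8
assert abs(var - data.var(ddof=1)) < 1e-8   # welford() returns S/(k-1), the sample variance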
lilbond/bitis | day3/functions.py | 58e5eeebade6cea99fbf86fdf285721fb602e4ef |
def greet():
print("Hi")
def greet_again(message):
print(message)
def greet_again_with_type(message):
print(type(message))
print(message)
greet()
greet_again("Hello Again")
greet_again_with_type("One Last Time")
greet_again_with_type(1234)
# multiple types
def multiple_types(x):
if x < 0:
return -1
else:
return "Returning Hello"
print(multiple_types(-2))
print(multiple_types(10))
# variable arguments
def var_arguments(*args): # args will be a tuple containing all the positional values
for value in args:
print(value)
var_arguments(1, 2, 3)
a = [1, 2, 3]
var_arguments(a)
var_arguments(*a) # expanding
def key_arg(**kwargs):
for key,value in kwargs.items():
print(key, value)
b = {"first" : "python", "second" : "python again"}
key_arg(**b)  # unpack the dict so its entries are passed as keyword arguments
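A short follow-on example (not in the original lesson file): *args and **kwargs combine in one signature, and both can be expanded at the call site.

def mixed(*args, **kwargs):
    print(args, kwargs)

mixed(1, 2, first="python")   # prints: (1, 2) {'first': 'python'}
mixed(*a, **b)                # reuses the list `a` and dict `b` defined above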
KipCrossing/Micropython-AD9833 | test.py | c684f5a9543bc5b67dcbf357c50f4d8f4057b2bf | from ad9833 import AD9833
# DUMMY classes for testing without board
class SBI(object):
def __init__(self):
pass
def send(self, data):
print(data)
class Pin(object):
def __init__(self):
pass
def low(self):
print(" 0")
def high(self):
print(" 1")
# Code
SBI1 = SBI()
PIN3 = Pin()
wave = AD9833(SBI1, PIN3)
wave.set_freq(14500)
wave.set_type(2)
wave.send()
print(wave.shape_type)
| [((387, 405), 'ad9833.AD9833', 'AD9833', (['SBI1', 'PIN3'], {}), '(SBI1, PIN3)\n', (393, 405), False, 'from ad9833 import AD9833\n')] |
devicehive/devicehive-plugin-python-template | tests/test_api_network.py | ad532a57ebf9ae52f12afc98eeb867380707d47d | # Copyright (C) 2018 DataArt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from six.moves import range
def test_subscribe_events(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-e', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-e', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
command_name = '%s-command' % device_id
notification_name = '%s-notification' % device_id
return {'device': device,
'network': network,
'command_name': command_name,
'notification_name': notification_name}
def send_data(device, command_name, notification_name):
command = device.send_command(command_name)
command.status = 'status'
command.save()
notification = device.send_notification(notification_name)
return command.id, command.id, notification.id
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/insert', command_insert_id),
('command/update', command_update_id),
('notification/insert', notification_id)]
def handle_event(handler, event):
action_id_pair = (event.action, event.data.id)
assert action_id_pair in handler.data['event_ids']
handler.data['event_ids'].remove(action_id_pair)
if handler.data['event_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id])
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/insert', command_insert_id),
('command/update', command_update_id)]
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_notifications=False)
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/insert', command_insert_id),
('notification/insert', notification_id)]
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_update_commands=False)
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
event_ids = send_data(handler.data['device'],
handler.data['command_name'],
handler.data['notification_name'])
command_insert_id, command_update_id, notification_id = event_ids
handler.data['event_ids'] = [('command/update', command_update_id),
('notification/insert', notification_id)]
data = init_data()
name = test.generate_id('n-s-e', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_insert_commands=False)
test.run(plugin, handle_connect, handle_event, data=data)
plugin_api.remove_plugin(plugin['topicName'])
def test_subscribe_insert_commands(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-i-c', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-i-c', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
command_names = ['%s-name-%s' % (device_id, i) for i in range(2)]
return {'device': device,
'network': network,
'command_names': command_names}
def send_data(device, command_names):
return [device.send_command(name).id for name in command_names]
def handle_connect(handler):
handler.data['command_ids'] = send_data(handler.data['device'],
handler.data['command_names'])
def handle_command_insert(handler, command):
assert command.id in handler.data['command_ids']
handler.data['command_ids'].remove(command.id)
if handler.data['command_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-i-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_update_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_insert=handle_command_insert, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
handler.data['command_ids'] = send_data(
handler.data['device'], handler.data['command_names'])[-1:]
data = init_data()
name = test.generate_id('n-s-i-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
names=data['command_names'][-1:],
subscribe_update_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_insert=handle_command_insert, data=data)
plugin_api.remove_plugin(plugin['topicName'])
def test_subscribe_update_commands(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-u-c', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-u-c', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
command_names = ['%s-name-%s' % (device_id, i) for i in range(2)]
return {'device': device,
'network': network,
'command_names': command_names}
def send_data(device, command_names):
command_ids = []
for name in command_names:
command = device.send_command(name)
command.status = 'status'
command.save()
command_ids.append(command.id)
return command_ids
def handle_connect(handler):
handler.data['command_ids'] = send_data(handler.data['device'],
handler.data['command_names'])
def handle_command_update(handler, command):
assert command.id in handler.data['command_ids']
assert command.status == 'status'
handler.data['command_ids'].remove(command.id)
if handler.data['command_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-u-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_insert_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_update=handle_command_update, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
handler.data['command_ids'] = send_data(
handler.data['device'], handler.data['command_names'])[-1:]
data = init_data()
name = test.generate_id('n-s-u-c', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
names=data['command_names'][-1:],
subscribe_insert_commands=False,
subscribe_notifications=False)
test.run(plugin, handle_connect,
handle_command_update=handle_command_update, data=data)
plugin_api.remove_plugin(plugin['topicName'])
def test_subscribe_notifications(test):
test.only_admin_implementation()
plugin_api = test.plugin_api()
device_hive_api = test.device_hive_api()
def init_data():
net_name = test.generate_id('n-s-n', test.NETWORK_ENTITY)
net_description = '%s-description' % net_name
network = device_hive_api.create_network(net_name, net_description)
device_id = test.generate_id('n-s-n', test.DEVICE_ENTITY)
device = device_hive_api.put_device(device_id, network_id=network.id)
notification_names = ['%s-name-%s' % (device_id, i) for i in range(2)]
return {'device': device,
'network': network,
'notification_names': notification_names}
def send_data(device, notification_names):
return [device.send_notification(name).id for name in
notification_names]
def handle_connect(handler):
handler.data['notification_ids'] = send_data(
handler.data['device'], handler.data['notification_names'])
def handle_notification(handler, notification):
assert notification.id in handler.data['notification_ids']
handler.data['notification_ids'].remove(notification.id)
if handler.data['notification_ids']:
return
handler.data['device'].remove()
handler.disconnect()
data = init_data()
name = test.generate_id('n-s-n', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
subscribe_insert_commands=False,
subscribe_update_commands=False)
test.run(plugin, handle_connect,
handle_notification=handle_notification, data=data)
plugin_api.remove_plugin(plugin['topicName'])
# =========================================================================
def handle_connect(handler):
handler.data['notification_ids'] = send_data(
handler.data['device'], handler.data['notification_names'])[-1:]
data = init_data()
name = test.generate_id('n-s-n', test.PLUGIN_ENTITY)
description = '%s-description' % name
plugin = plugin_api.create_plugin(name, description,
network_ids=[data['network'].id],
names=data['notification_names'][-1:],
subscribe_insert_commands=False,
subscribe_update_commands=False)
test.run(plugin, handle_connect,
handle_notification=handle_notification, data=data)
plugin_api.remove_plugin(plugin['topicName'])
| [((6449, 6457), 'six.moves.range', 'range', (['(2)'], {}), '(2)\n', (6454, 6457), False, 'from six.moves import range\n'), ((9168, 9176), 'six.moves.range', 'range', (['(2)'], {}), '(2)\n', (9173, 9176), False, 'from six.moves import range\n'), ((12099, 12107), 'six.moves.range', 'range', (['(2)'], {}), '(2)\n', (12104, 12107), False, 'from six.moves import range\n')] |
miciux/telegram-bot-admin | filehandler.py | feb267ba6ce715b734b1a5911487c1080410a4a9 | import logging
import abstracthandler
import os
class FileHandler(abstracthandler.AbstractHandler):
def __init__(self, conf, bot):
abstracthandler.AbstractHandler.__init__(self, 'file', conf, bot)
self.log = logging.getLogger(__name__)
self.commands={}
self.commands['list'] = self.get_file_list
def handle_message(self,cid, command, args):
try:
self.commands[command](cid,args)
except Exception as e:
self.send_formatted_message(cid,self.get_sorry_message())
self.log.error(e)
def get_file_list(self, cid, args):
if len(args) >= 1:
for folder in args:
self.send_formatted_message(cid,self.get_folder_content(folder))
else:
self.send_formatted_message(cid,'*file list* usage: file list _[DIRECTORY]_...')
def get_folder_content(self, folder):
        message = 'List of files in *%s*:\n_%s_'
files = '\n'.join(os.listdir(folder))
return message % (folder,files);
| [((145, 210), 'abstracthandler.AbstractHandler.__init__', 'abstracthandler.AbstractHandler.__init__', (['self', '"""file"""', 'conf', 'bot'], {}), "(self, 'file', conf, bot)\n", (185, 210), False, 'import abstracthandler\n'), ((230, 257), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (247, 257), False, 'import logging\n'), ((981, 999), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (991, 999), False, 'import os\n')] |
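Because handle_message dispatches through the self.commands dict, extending the handler needs only one new method and one entry. A hypothetical sketch building on the class above (the 'size' command, get_file_size and the message format are illustrative assumptions, not part of the repository):

class ExtendedFileHandler(FileHandler):
    def __init__(self, conf, bot):
        FileHandler.__init__(self, conf, bot)
        self.commands['size'] = self.get_file_size

    def get_file_size(self, cid, args):
        for path in args:
            self.send_formatted_message(cid, '*%s*: _%d bytes_' % (path, os.path.getsize(path)))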
Tensorflow-Devs/federated | tensorflow_federated/python/learning/federated_evaluation.py | 5df96d42d72fa43a050df6465271a38175a5fd7a | # Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple implementation of federated evaluation."""
import collections
from typing import Callable, Optional
import tensorflow as tf
from tensorflow_federated.python.core.api import computation_base
from tensorflow_federated.python.core.api import computations
from tensorflow_federated.python.core.impl.federated_context import intrinsics
from tensorflow_federated.python.core.impl.types import computation_types
from tensorflow_federated.python.core.templates import measured_process
from tensorflow_federated.python.learning import model as model_lib
from tensorflow_federated.python.learning import model_utils
from tensorflow_federated.python.learning.framework import dataset_reduce
from tensorflow_federated.python.learning.framework import optimizer_utils
# Convenience aliases.
SequenceType = computation_types.SequenceType
def build_federated_evaluation(
model_fn: Callable[[], model_lib.Model],
broadcast_process: Optional[measured_process.MeasuredProcess] = None,
use_experimental_simulation_loop: bool = False,
) -> computation_base.Computation:
"""Builds the TFF computation for federated evaluation of the given model.
Args:
model_fn: A no-arg function that returns a `tff.learning.Model`. This method
must *not* capture TensorFlow tensors or variables and use them. The model
must be constructed entirely from scratch on each invocation, returning
the same pre-constructed model each call will result in an error.
broadcast_process: A `tff.templates.MeasuredProcess` that broadcasts the
model weights on the server to the clients. It must support the signature
`(input_values@SERVER -> output_values@CLIENTS)` and have empty state. If
set to default None, the server model is broadcast to the clients using
the default tff.federated_broadcast.
use_experimental_simulation_loop: Controls the reduce loop function for
input dataset. An experimental reduce loop is used for simulation.
Returns:
A federated computation (an instance of `tff.Computation`) that accepts
model parameters and federated data, and returns the evaluation metrics
as aggregated by `tff.learning.Model.federated_output_computation`.
"""
if broadcast_process is not None:
if not isinstance(broadcast_process, measured_process.MeasuredProcess):
raise ValueError('`broadcast_process` must be a `MeasuredProcess`, got '
f'{type(broadcast_process)}.')
if optimizer_utils.is_stateful_process(broadcast_process):
raise ValueError(
'Cannot create a federated evaluation with a stateful '
'broadcast process, must be stateless, has state: '
f'{broadcast_process.initialize.type_signature.result!r}')
# Construct the model first just to obtain the metadata and define all the
# types needed to define the computations that follow.
# TODO(b/124477628): Ideally replace the need for stamping throwaway models
# with some other mechanism.
with tf.Graph().as_default():
model = model_fn()
model_weights_type = model_utils.weights_type_from_model(model)
batch_type = computation_types.to_type(model.input_spec)
@computations.tf_computation(model_weights_type, SequenceType(batch_type))
@tf.function
def client_eval(incoming_model_weights, dataset):
"""Returns local outputs after evaluting `model_weights` on `dataset`."""
with tf.init_scope():
model = model_fn()
model_weights = model_utils.ModelWeights.from_model(model)
tf.nest.map_structure(lambda v, t: v.assign(t), model_weights,
incoming_model_weights)
def reduce_fn(num_examples, batch):
model_output = model.forward_pass(batch, training=False)
if model_output.num_examples is None:
# Compute shape from the size of the predictions if model didn't use the
# batch size.
return num_examples + tf.shape(
model_output.predictions, out_type=tf.int64)[0]
else:
return num_examples + tf.cast(model_output.num_examples, tf.int64)
dataset_reduce_fn = dataset_reduce.build_dataset_reduce_fn(
use_experimental_simulation_loop)
num_examples = dataset_reduce_fn(
reduce_fn=reduce_fn,
dataset=dataset,
initial_state_fn=lambda: tf.zeros([], dtype=tf.int64))
return collections.OrderedDict(
local_outputs=model.report_local_outputs(), num_examples=num_examples)
@computations.federated_computation(
computation_types.at_server(model_weights_type),
computation_types.at_clients(SequenceType(batch_type)))
def server_eval(server_model_weights, federated_dataset):
if broadcast_process is not None:
# TODO(b/179091838): Zip the measurements from the broadcast_process with
# the result of `model.federated_output_computation` below to avoid
# dropping these metrics.
broadcast_output = broadcast_process.next(broadcast_process.initialize(),
server_model_weights)
client_outputs = intrinsics.federated_map(
client_eval, (broadcast_output.result, federated_dataset))
else:
client_outputs = intrinsics.federated_map(client_eval, [
intrinsics.federated_broadcast(server_model_weights),
federated_dataset
])
model_metrics = model.federated_output_computation(
client_outputs.local_outputs)
statistics = collections.OrderedDict(
num_examples=intrinsics.federated_sum(client_outputs.num_examples))
return intrinsics.federated_zip(
collections.OrderedDict(eval=model_metrics, stat=statistics))
return server_eval
| [((3079, 3133), 'tensorflow_federated.python.learning.framework.optimizer_utils.is_stateful_process', 'optimizer_utils.is_stateful_process', (['broadcast_process'], {}), '(broadcast_process)\n', (3114, 3133), False, 'from tensorflow_federated.python.learning.framework import optimizer_utils\n'), ((3679, 3721), 'tensorflow_federated.python.learning.model_utils.weights_type_from_model', 'model_utils.weights_type_from_model', (['model'], {}), '(model)\n', (3714, 3721), False, 'from tensorflow_federated.python.learning import model_utils\n'), ((3739, 3782), 'tensorflow_federated.python.core.impl.types.computation_types.to_type', 'computation_types.to_type', (['model.input_spec'], {}), '(model.input_spec)\n', (3764, 3782), False, 'from tensorflow_federated.python.core.impl.types import computation_types\n'), ((4077, 4119), 'tensorflow_federated.python.learning.model_utils.ModelWeights.from_model', 'model_utils.ModelWeights.from_model', (['model'], {}), '(model)\n', (4112, 4119), False, 'from tensorflow_federated.python.learning import model_utils\n'), ((4700, 4772), 'tensorflow_federated.python.learning.framework.dataset_reduce.build_dataset_reduce_fn', 'dataset_reduce.build_dataset_reduce_fn', (['use_experimental_simulation_loop'], {}), '(use_experimental_simulation_loop)\n', (4738, 4772), False, 'from tensorflow_federated.python.learning.framework import dataset_reduce\n'), ((5098, 5145), 'tensorflow_federated.python.core.impl.types.computation_types.at_server', 'computation_types.at_server', (['model_weights_type'], {}), '(model_weights_type)\n', (5125, 5145), False, 'from tensorflow_federated.python.core.impl.types import computation_types\n'), ((4015, 4030), 'tensorflow.init_scope', 'tf.init_scope', ([], {}), '()\n', (4028, 4030), True, 'import tensorflow as tf\n'), ((5666, 5753), 'tensorflow_federated.python.core.impl.federated_context.intrinsics.federated_map', 'intrinsics.federated_map', (['client_eval', '(broadcast_output.result, federated_dataset)'], {}), '(client_eval, (broadcast_output.result,\n federated_dataset))\n', (5690, 5753), False, 'from tensorflow_federated.python.core.impl.federated_context import intrinsics\n'), ((6192, 6252), 'collections.OrderedDict', 'collections.OrderedDict', ([], {'eval': 'model_metrics', 'stat': 'statistics'}), '(eval=model_metrics, stat=statistics)\n', (6215, 6252), False, 'import collections\n'), ((3606, 3616), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (3614, 3616), True, 'import tensorflow as tf\n'), ((6092, 6145), 'tensorflow_federated.python.core.impl.federated_context.intrinsics.federated_sum', 'intrinsics.federated_sum', (['client_outputs.num_examples'], {}), '(client_outputs.num_examples)\n', (6116, 6145), False, 'from tensorflow_federated.python.core.impl.federated_context import intrinsics\n'), ((4630, 4674), 'tensorflow.cast', 'tf.cast', (['model_output.num_examples', 'tf.int64'], {}), '(model_output.num_examples, tf.int64)\n', (4637, 4674), True, 'import tensorflow as tf\n'), ((4907, 4935), 'tensorflow.zeros', 'tf.zeros', (['[]'], {'dtype': 'tf.int64'}), '([], dtype=tf.int64)\n', (4915, 4935), True, 'import tensorflow as tf\n'), ((5844, 5896), 'tensorflow_federated.python.core.impl.federated_context.intrinsics.federated_broadcast', 'intrinsics.federated_broadcast', (['server_model_weights'], {}), '(server_model_weights)\n', (5874, 5896), False, 'from tensorflow_federated.python.core.impl.federated_context import intrinsics\n'), ((4518, 4571), 'tensorflow.shape', 'tf.shape', (['model_output.predictions'], {'out_type': 
'tf.int64'}), '(model_output.predictions, out_type=tf.int64)\n', (4526, 4571), True, 'import tensorflow as tf\n')] |
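The docstring above fixes the computation's signature: model weights placed at the server plus a federated dataset placed at the clients go in, and aggregated metrics come out. A minimal usage sketch with hypothetical names (my_model_fn must build a tff.learning.Model, server_weights must match its weight structure, and client_datasets is a list of tf.data.Datasets matching the model's input_spec):

eval_comp = build_federated_evaluation(my_model_fn)
metrics = eval_comp(server_weights, client_datasets)
print(metrics)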
joaompinto/pylibcontainer | pylibcontainer/image.py | 794f12e7511dc2452521bad040a7873eff40f50b | from __future__ import print_function
import os
import shutil
import hashlib
import requests
import click
from tempfile import NamedTemporaryFile
from hashlib import sha256
from os.path import expanduser, join, exists, basename
from .utils import HumanSize
from .tar import extract_layer
from . import trust
from . import container
from .colorhelper import print_info, print_error, print_warn, print_success
from .colorhelper import success
from .image_index import get_url
from clint.textui import progress
from dateutil.parser import parse as parsedate
from datetime import datetime
CACHE_PATH = join(expanduser("~"), ".pylibcontainer", "images_cache")
class Cache(object):
cache_dir = CACHE_PATH
""" Provides an image caching mechanism on disk """
def __init__(self):
if not exists(CACHE_PATH):
os.makedirs(CACHE_PATH, 0o700)
def get(self, cache_key, default=None):
""" return info for cached file """
cache_hash = sha256(cache_key.encode()).hexdigest()
cache_fn = join(CACHE_PATH, "url_" + cache_hash)
if exists(cache_fn):
file_stat = os.stat(cache_fn)
last_modified = datetime.fromtimestamp(file_stat.st_mtime)
file_size = file_stat.st_size
return cache_fn, cache_hash, last_modified, file_size
return default
def put(self, filename, cache_key):
""" put a file into cache """
cache_hash = sha256(cache_key.encode()).hexdigest()
cache_fn = join(CACHE_PATH, "url_" + cache_hash)
shutil.move(filename, cache_fn)
return cache_hash, cache_fn
def download(image_url):
""" Download image (if not found in cache) and return it's filename """
response = requests.head(image_url)
file_size = remote_file_size = int(response.headers.get("Content-Length"))
remote_last_modified = parsedate(response.headers.get("Last-Modified")).replace(
tzinfo=None
)
remote_is_valid = response.status_code == 200 and file_size and remote_last_modified
# Check if image is on cache
cache = Cache()
cached_image = cache.get(image_url)
if cached_image:
if remote_is_valid:
cache_fn, cache_hash, last_modified, file_size = cached_image
if remote_file_size == file_size and remote_last_modified < last_modified:
print_info("Using file from cache", CACHE_PATH)
return cache_hash, cache_fn
print_info("Downloading new remote file because an update was found")
else:
print_warn("Unable to check the status for " + image_url)
print_warn("Assuming local cache is valid")
# Not cached, and no valid remote information was found
if not remote_is_valid:
print_error(
"Unable to get file, http_code=%s, size=%s, last_modified=%s"
% (response.status_code, remote_file_size, remote_last_modified)
)
exit(2)
# Dowload image
print_info(
"Downloading image... ",
"{0} [{1:.2S}]".format(basename(image_url), HumanSize(file_size)),
)
remote_sha256 = hashlib.sha256()
response = requests.get(image_url, stream=True)
with NamedTemporaryFile(delete=False) as tmp_file:
for chunk in progress.bar(
response.iter_content(chunk_size=1024), expected_size=(file_size / 1024) + 1
):
if chunk:
remote_sha256.update(chunk)
tmp_file.write(chunk)
tmp_file.flush()
# Verify image integrity
trust_verify = trust.verify(image_url, tmp_file.name, remote_sha256.hexdigest())
if not trust_verify or not trust_verify.valid or not trust_verify.username:
print_error("Integrity/authenticity error - GPG signature mismatch!")
exit(3)
print("{0:>10}: {1}".format("GPG Signer", success(trust_verify.username)))
print("{0:>10}: {1}".format("GPG ID", success(trust_verify.pubkey_fingerprint)))
print("{0:>10}: {1}".format("Creation", success(trust_verify.creation_date)))
return cache.put(tmp_file.name, image_url)
@click.command()
@click.argument("image_url")
@click.option("--as_root", is_flag=True)
@click.option("--overlay", "-o", multiple=True)
@click.argument("command", nargs=-1)
def run(image_url, command, as_root, overlay):
url = get_url(image_url)
image_url = url or image_url
if not image_url:
print_info("No index was found for image", image_url)
exit(5)
is_validate_only = False
if not command:
command = ["/bin/sh"]
image_protocol = image_url.split(":")[0].lower()
if image_protocol in ["http", "https"]:
_, image_fn = download(image_url)
else:
        _, image_fn = sha256(image_url.encode()).hexdigest(), image_url
rootfs = extract_layer(image_fn)
if len(command) == 1 and command[0] == "-":
is_validate_only = True
print("Validating container setup with the rootfs")
else:
print_info("Executing", " ".join(command))
_, exit_code = container.runc(rootfs, command, as_root, overlay)
if exit_code != 0:
print_error("Last command returned an error")
elif is_validate_only:
print_success("OK")
| [((4116, 4131), 'click.command', 'click.command', ([], {}), '()\n', (4129, 4131), False, 'import click\n'), ((4133, 4160), 'click.argument', 'click.argument', (['"""image_url"""'], {}), "('image_url')\n", (4147, 4160), False, 'import click\n'), ((4162, 4201), 'click.option', 'click.option', (['"""--as_root"""'], {'is_flag': '(True)'}), "('--as_root', is_flag=True)\n", (4174, 4201), False, 'import click\n'), ((4203, 4249), 'click.option', 'click.option', (['"""--overlay"""', '"""-o"""'], {'multiple': '(True)'}), "('--overlay', '-o', multiple=True)\n", (4215, 4249), False, 'import click\n'), ((4251, 4286), 'click.argument', 'click.argument', (['"""command"""'], {'nargs': '(-1)'}), "('command', nargs=-1)\n", (4265, 4286), False, 'import click\n'), ((604, 619), 'os.path.expanduser', 'expanduser', (['"""~"""'], {}), "('~')\n", (614, 619), False, 'from os.path import expanduser, join, exists, basename\n'), ((1738, 1762), 'requests.head', 'requests.head', (['image_url'], {}), '(image_url)\n', (1751, 1762), False, 'import requests\n'), ((3134, 3150), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (3148, 3150), False, 'import hashlib\n'), ((3166, 3202), 'requests.get', 'requests.get', (['image_url'], {'stream': '(True)'}), '(image_url, stream=True)\n', (3178, 3202), False, 'import requests\n'), ((1034, 1071), 'os.path.join', 'join', (['CACHE_PATH', "('url_' + cache_hash)"], {}), "(CACHE_PATH, 'url_' + cache_hash)\n", (1038, 1071), False, 'from os.path import expanduser, join, exists, basename\n'), ((1084, 1100), 'os.path.exists', 'exists', (['cache_fn'], {}), '(cache_fn)\n', (1090, 1100), False, 'from os.path import expanduser, join, exists, basename\n'), ((1505, 1542), 'os.path.join', 'join', (['CACHE_PATH', "('url_' + cache_hash)"], {}), "(CACHE_PATH, 'url_' + cache_hash)\n", (1509, 1542), False, 'from os.path import expanduser, join, exists, basename\n'), ((1551, 1582), 'shutil.move', 'shutil.move', (['filename', 'cache_fn'], {}), '(filename, cache_fn)\n', (1562, 1582), False, 'import shutil\n'), ((3212, 3244), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (3230, 3244), False, 'from tempfile import NamedTemporaryFile\n'), ((803, 821), 'os.path.exists', 'exists', (['CACHE_PATH'], {}), '(CACHE_PATH)\n', (809, 821), False, 'from os.path import expanduser, join, exists, basename\n'), ((835, 863), 'os.makedirs', 'os.makedirs', (['CACHE_PATH', '(448)'], {}), '(CACHE_PATH, 448)\n', (846, 863), False, 'import os\n'), ((1126, 1143), 'os.stat', 'os.stat', (['cache_fn'], {}), '(cache_fn)\n', (1133, 1143), False, 'import os\n'), ((1172, 1214), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['file_stat.st_mtime'], {}), '(file_stat.st_mtime)\n', (1194, 1214), False, 'from datetime import datetime\n'), ((3064, 3083), 'os.path.basename', 'basename', (['image_url'], {}), '(image_url)\n', (3072, 3083), False, 'from os.path import expanduser, join, exists, basename\n'), ((4746, 4763), 'hashlib.sha256', 'sha256', (['image_url'], {}), '(image_url)\n', (4752, 4763), False, 'from hashlib import sha256\n')] |
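download() above reuses the cached file only when a HEAD request succeeds, the remote Content-Length equals the cached size, and the remote Last-Modified is older than the cache entry's mtime. That freshness rule in isolation, as an illustrative sketch rather than code from the repository:

import requests
from dateutil.parser import parse as parsedate

def cache_is_fresh(url, local_size, local_mtime):
    head = requests.head(url)
    if head.status_code != 200 or "Last-Modified" not in head.headers:
        return False   # remote side cannot be validated
    remote_size = int(head.headers.get("Content-Length", 0))
    remote_mtime = parsedate(head.headers["Last-Modified"]).replace(tzinfo=None)
    return remote_size == local_size and remote_mtime < local_mtime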
michel117/robotframework-doctestlibrary | utest/test_compareimage.py | 305b220b73846bd389c47d74c2e0431c7bfaff94 | from DocTest.CompareImage import CompareImage
import pytest
from pathlib import Path
import numpy
def test_single_png(testdata_dir):
img = CompareImage(testdata_dir / 'text_big.png')
assert len(img.opencv_images)==1
assert type(img.opencv_images)==list
    assert type(img.opencv_images[0])==numpy.ndarray
def test_single_pdf(testdata_dir):
pass
def test_multipage_pdf(testdata_dir):
pass
def test_huge_pdf(testdata_dir):
pass
def test_image_text_content(testdata_dir):
pass
def test_pdf_text_content(testdata_dir):
pass
def test_non_existing_file(testdata_dir):
with pytest.raises(AssertionError):
img = CompareImage(testdata_dir / 'does_not_exist.png')
def test_corrupt_image(testdata_dir):
with pytest.raises(AssertionError):
img = CompareImage(testdata_dir / 'corrupt_image.png')
def test_corrupt_pdf(testdata_dir):
with pytest.raises(AssertionError):
img = CompareImage(testdata_dir / 'corrupt_pdf.pdf')
| [((144, 187), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'text_big.png')"], {}), "(testdata_dir / 'text_big.png')\n", (156, 187), False, 'from DocTest.CompareImage import CompareImage\n'), ((604, 633), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (617, 633), False, 'import pytest\n'), ((649, 698), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'does_not_exist.png')"], {}), "(testdata_dir / 'does_not_exist.png')\n", (661, 698), False, 'from DocTest.CompareImage import CompareImage\n'), ((755, 784), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (768, 784), False, 'import pytest\n'), ((800, 848), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'corrupt_image.png')"], {}), "(testdata_dir / 'corrupt_image.png')\n", (812, 848), False, 'from DocTest.CompareImage import CompareImage\n'), ((895, 924), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (908, 924), False, 'import pytest\n'), ((940, 986), 'DocTest.CompareImage.CompareImage', 'CompareImage', (["(testdata_dir / 'corrupt_pdf.pdf')"], {}), "(testdata_dir / 'corrupt_pdf.pdf')\n", (952, 986), False, 'from DocTest.CompareImage import CompareImage\n')] |
haruiz/PytorchCvStudio | cvstudio/view/widgets/labels_tableview/__init__.py | ccf79dd0cc0d61f3fd01b1b5d96f7cda7b681eef | from .labels_tableview import LabelsTableView
| [] |
vishalbelsare/ags_nlp_solver | experiments/solve_different_methods.py | 3558e8aae5507285d0c5e74f163c01d09a9cb805 | import functools
import numpy as np
import math
import argparse
import ags_solver
import go_problems
import nlopt
import sys
from Simple import SimpleTuner
import itertools
from scipy.spatial import Delaunay
from scipy.optimize import differential_evolution
from scipy.optimize import basinhopping
from sdaopt import sda
from stochopy import Evolutionary
from pyOpt import Optimization
from pyOpt import MIDACO
import pyOpt
from shgo import shgo
from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass
from benchmark_tools.plot import plot_cmcs
from benchmark_tools.stats import save_stats, compute_stats
class AGSWrapper(Solver):
def __init__(self, dist_stop, max_iters, class_name, eps=0.01, mixedFast=False):
params = self.class_name2params(class_name)
params.mixedFastMode = mixedFast
if dist_stop:
params.eps = 0
params.itersLimit = max_iters
self.solver = ags_solver.Solver()
self.solver.SetParameters(params)
self.dist_stop = dist_stop
self.eps = eps
def class_name2params(self, name):
params = ags_solver.Parameters()
if 'grish' in name:
params.r = 3
elif 'gklss2' in name:
params.r = 4.6
elif 'gklsh2' in name:
params.r = 6.5
elif 'gklss3' in name:
params.r = 3.7
elif 'gklsh3' in name:
params.r = 4.4
elif 'gklss4' in name:
params.r = 4.7
elif 'gklsh4' in name:
params.r = 4.9
elif 'gklss5' in name:
params.r = 4
params.evolventDensity = 10
elif 'gklsh5' in name:
params.r = 4
params.evolventDensity = 10
return params
def Solve(self, problem):
self.solver.SetProblem([lambda x: problem.Calculate(x)], *problem.GetBounds())
#self.solver.SetProblem(problem)
if not self.dist_stop:
point, val, idx = self.solver.Solve()
else:
opt_pt = np.array(problem.GetOptimumPoint())
point, val, idx = self.solver.Solve(lambda x: np.linalg.norm(np.array(x)-opt_pt, np.inf) < self.eps)
#calcCounters = self.solver.GetCalculationsStatistics()
calcCounters = problem.GetCalculationsStatistics()
return point, val, calcCounters
class SDAWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.class_name = class_name
def Solve(self, problem):
lb, ub = problem.GetBounds()
ret = sda(lambda x: problem.Calculate(x), None, bounds=list(zip(lb, ub)), \
seed=100, maxfun=self.max_iters, visit=2.72, maxiter=self.max_iters)
n_evals = problem.GetCalculationsStatistics()
return ret.x, ret.fun, n_evals
class SCBasinhoppingWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.class_name = class_name
def Solve(self, problem):
lb, ub = problem.GetBounds()
#pop_size = self.class_name2params(self.class_name)
class MyBounds(object):
def __init__(self, xmax=[1.1,1.1], xmin=[-1.1,-1.1] ):
self.xmax = np.array(xmax)
self.xmin = np.array(xmin)
def __call__(self, **kwargs):
x = kwargs["x_new"]
tmax = bool(np.all(x <= self.xmax))
tmin = bool(np.all(x >= self.xmin))
return tmax and tmin
x0 = [.5]*problem.GetDimension()
result = \
basinhopping(lambda x: problem.Calculate(x), x0, accept_test=MyBounds(ub, lb), seed=100, T=10, stepsize=0.3)
n_evals = problem.GetCalculationsStatistics()
return result.x, result.fun, n_evals
class SCDEWrapper(Solver):
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.class_name = class_name
def class_name2params(self, name):
if 'grish' in name:
popsize = 60
elif 'gklss2' in name:
popsize = 60
elif 'gklsh2' in name:
popsize = 60
elif 'gklss3' in name:
popsize = 70
elif 'gklsh3' in name:
popsize = 80
elif 'gklss4' in name:
popsize = 90
elif 'gklsh4' in name:
popsize = 100
elif 'gklss5' in name:
popsize = 120
elif 'gklsh5' in name:
popsize = 140
return popsize
def Solve(self, problem):
lb, ub = problem.GetBounds()
bounds = [(l, u) for l, u in zip(lb, ub)]
pop_size = self.class_name2params(self.class_name)
result = \
differential_evolution(
lambda x: problem.Calculate(x), bounds, mutation=(1.1,1.9),
tol=1e-12, maxiter=int(float(self.max_iters) / (pop_size*problem.GetDimension())), popsize=pop_size, disp=False, seed=100)
n_evals = problem.GetCalculationsStatistics()
return result.x, result.fun, n_evals
class PyEvolveWrapper(Solver):
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
    def Solve(self, problem):
        # Pyevolve is never imported in the original file; this lazy import is an assumption,
        # kept inside the method so the rest of the module still loads when the package is absent.
        from pyevolve import G1DList, GSimpleGA, Selectors, Initializators, Mutators, Consts
        lb, ub = problem.GetBounds()
# Genome instance
genome = G1DList.G1DList(2)
genome.setParams(rangemin=lb[0], rangemax=ub[0], bestRawScore=-100, roundDecimal=2)
genome.initializator.set(Initializators.G1DListInitializatorReal)
genome.mutator.set(Mutators.G1DListMutatorRealGaussian)
# The evaluator function (objective function)
genome.evaluator.set(lambda x: problem.Calculate(x) + 100)
# Genetic Algorithm Instance
ga = GSimpleGA.GSimpleGA(genome)
ga.selector.set(Selectors.GRouletteWheel)
ga.minimax = Consts.minimaxType["minimize"]
ga.setGenerations(5000)
ga.setMutationRate(0.05)
ga.terminationCriteria.set(GSimpleGA.ConvergenceCriteria)
        # Do the evolution, with stats dump
        # every 100 generations (freq_stats below)
ga.evolve(freq_stats=100)
# Best individual
best = ga.bestIndividual()
print ("\nBest individual score: %.2f" % (best.score - 100,))
print (best)
from bayes_opt import BayesianOptimization
class BOptWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
def Solve(self, problem):
lb, ub = problem.GetBounds()
bo = BayesianOptimization(lambda x, y: -problem.Calculate([x, y]),
{'x': (lb[0], ub[0]), 'y': (lb[1], ub[1])})
bo.maximize(init_points=5, n_iter=20, kappa=1.5)
n_evals = problem.GetCalculationsStatistics()
opt_val = -bo.res['max']['max_val']
opt_point = [bo.res['max']['max_params']['x'], bo.res['max']['max_params']['y']]
return opt_point, opt_val, n_evals
class SimpleWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.exploration = self.class_name2params(class_name)
def class_name2params(self, name):
if 'grish' in name:
return 0.1
elif 'gklss2' in name:
return 0.15
elif 'gklsh2' in name:
return 0.15
elif 'gklss3' in name:
return 0.15
elif 'gklsh3' in name:
return 0.25
elif 'gklss4' in name:
return 0.2
elif 'gklsh4' in name:
return 0.25
def Solve(self, problem):
objective_function = lambda x: -problem.Calculate(x)
lb, ub = problem.GetBounds()
opt_pt = problem.GetOptimumPoint()
bounds = [[l, u] for l, u in zip(lb, ub)]
points = np.array([point for point in itertools.product(*bounds)])
tri = Delaunay(points)
optimization_domain_vertices = points[tri.simplices]
exploration = self.exploration # optional, default 0.15
tuner = SimpleTuner(optimization_domain_vertices, objective_function, \
exploration_preference=exploration,
stop_criterion=lambda x:np.linalg.norm(np.array(x)-opt_pt, np.inf) < self.eps)
tuner.optimize(self.max_iters)
opt_val, opt_point = tuner.get_best()
#tuner.plot() # only works in 2D
n_evals = problem.GetCalculationsStatistics()
return opt_point, -opt_val, n_evals
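# Note: SimpleTuner maximizes, so the objective is negated on the way in and the best
# value is negated back before being returned.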
class NLOptWrapper:
def __init__(self, dist_stop, max_iters, class_name, method=nlopt.GD_STOGO, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.method = method
self.max_iters = max_iters
self.pop_size = self.class_name2params(class_name)
def class_name2params(self, name):
if 'grish' in name:
popsize = 150
elif 'gklss2' in name:
popsize = 200
elif 'gklsh2' in name:
popsize = 400
elif 'gklss3' in name:
popsize = 1000
elif 'gklsh3' in name:
popsize = 2000
elif 'gklss4' in name:
popsize = 8000
elif 'gklsh4' in name:
popsize = 16000
elif 'gklss5' in name:
popsize = 25000
elif 'gklsh5' in name:
popsize = 30000
return popsize
def Solve(self, problem):
lb, ub = problem.GetBounds()
self.opt = nlopt.opt(self.method, problem.GetDimension())
self.opt.set_local_optimizer(nlopt.opt(nlopt.LN_SBPLX, problem.GetDimension()))
self.opt.set_lower_bounds(lb)
self.opt.set_upper_bounds(ub)
self.opt.set_min_objective(lambda x, grad: problem.Calculate(x))
self.opt.set_maxeval(self.max_iters)
self.opt.set_xtol_rel(1e-13)
if self.method == nlopt.GN_CRS2_LM:
self.opt.set_population(self.pop_size)
x = self.opt.optimize([.5]*problem.GetDimension())
minf = self.opt.last_optimum_value()
n_evals = problem.GetCalculationsStatistics()
return x, minf, n_evals
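# Note: an explicit population size is only set for CRS (GN_CRS2_LM) above; for the other
# NLopt methods no population override is applied.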
class StochOpyWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
self.popsize = self.class_name2params(class_name)
def class_name2params(self, name):
if 'grish' in name:
popsize = 60
elif 'gklss2' in name:
popsize = 60
elif 'gklsh2' in name:
popsize = 60
elif 'gklss3' in name:
popsize = 70
elif 'gklsh3' in name:
popsize = 80
elif 'gklss4' in name:
popsize = 90
elif 'gklsh4' in name:
popsize = 100
elif 'gklss5' in name:
popsize = 120
elif 'gklsh5' in name:
popsize = 140
return popsize
def Solve(self, problem):
objective_function = lambda x: 50 + problem.Calculate(x)
lb, ub = problem.GetBounds()
ea = Evolutionary(objective_function, lower=lb, upper=ub, popsize=self.popsize, \
max_iter=int(self.max_iters/self.popsize), eps1=1e-16, eps2=1e-16)
xopt, gfit = ea.optimize(solver='cpso', sync=False, CR=0.4, F=0.5)
n_evals = problem.GetCalculationsStatistics()
return xopt, gfit, n_evals
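# Note: the +50 offset in the objective presumably just keeps fitness values positive for
# the CPSO solver; it does not move the location of the minimum.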
class PyOptWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
def Solve(self, problem):
objective_function = lambda x: [problem.Calculate(x), 0, 0]
lb, ub = problem.GetBounds()
opt_prob = pyOpt.Optimization('Problem', objective_function)
opt_prob.addObj('f')
for i in range(problem.GetDimension()):
opt_prob.addVar('x'+str(i),'c',lower=lb[i],upper=ub[i],value=(lb[i] + ub[i])/2.)
midaco_none = MIDACO(pll_type=None)
midaco_none.setOption('IPRINT',-1)
midaco_none.setOption('ISEED', 100)
midaco_none.setOption('MAXEVAL',self.max_iters)
midaco_none.setOption('FOCUS', -4)
fstr, xstr, inform = midaco_none(opt_prob)
n_evals = problem.GetCalculationsStatistics()
return xstr, fstr[0], n_evals
class SHGOWrapper:
def __init__(self, dist_stop, max_iters, class_name, eps=0.01):
self.dist_stop = dist_stop
self.eps = eps
self.max_iters = max_iters
def Solve(self, problem):
objective_function = lambda x: problem.Calculate(x)
        bounds = list(zip(*problem.GetBounds()))  # shgo expects an indexable sequence of (lb, ub) pairs, not a one-shot zip iterator
opts = {'maxfev': self.max_iters}
result = shgo(objective_function, bounds, options=opts)
n_evals = problem.GetCalculationsStatistics()
return result.x, result.fun, n_evals
algos = {'scd': SCDEWrapper, 'ags': AGSWrapper,
'agsd': functools.partial(AGSWrapper, mixedFast=True),
'direct': functools.partial(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT),
'directl': functools.partial(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT_L),
'stogo': functools.partial(NLOptWrapper, method=nlopt.GD_STOGO),
'mlsl': functools.partial(NLOptWrapper, method=nlopt.G_MLSL_LDS),
'crs': functools.partial(NLOptWrapper, method=nlopt.GN_CRS2_LM),
'simple': SimpleWrapper, 'scb': SCBasinhoppingWrapper,
'sda': SDAWrapper, 'stochopy': StochOpyWrapper, 'shgo': SHGOWrapper,
'pyopt': PyOptWrapper}
algo2cature = {'scd': 'Scipy DE', 'ags': 'AGS', 'direct': 'DIRECT', 'agsd': 'AGSd',
'directl': 'DIRECTl', 'simple': 'Simple',
'stogo': 'StoGO', 'mlsl': 'MLSL', 'crs':'CRS', 'scb': 'Scipy B-H',
'sda': 'SDA', 'stochopy': 'Stochopy', 'pysot': 'PySOT', 'pyopt': 'PyOpt', 'shgo': 'SHGO'}
serg_eps = {2: 0.01, 3: 0.01, 4: math.pow(1e-6, 1./4), 5: math.pow(1e-7, 1./5)}
def main(args):
wrapper_class = algos[args.algo]
if args.problems_class == 'grish':
problems = GrishClass()
else:
assert args.problems_dim > 1 and args.problems_dim < 6
if args.problems_class == 'gklss':
problems = GKLSClass(args.problems_dim, go_problems.GKLSClass.Simple)
else:
problems = GKLSClass(args.problems_dim, go_problems.GKLSClass.Hard)
eps = 0.01
if args.serg_eps:
eps = serg_eps[args.problems_dim]
    wrapper = wrapper_class(args.dist_stop, args.max_iters, args.problems_class+str(args.problems_dim), eps=eps)  # use the eps computed above (honors --serg_eps)
calc_stats, solved_status = solve_class(problems, wrapper, verbose=args.verbose, eps_check=eps)
stats = compute_stats(calc_stats, solved_status)
print('Problems solved: {}'.format(stats['num_solved']))
for i, avg in enumerate(stats['avg_calcs'][:-1]):
print('Average number of calculations of constraint #{}: {}'.format(i, avg))
print('Average number of calculations of objective: {}'.format(stats['avg_calcs'][-1]))
#plot_cmcs([stats['cmc']], captures=[algo2cature(args.algo)], show=True, filename='')
save_stats(stats, args.stats_fname, capture=algo2cature[args.algo])
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Sample for AGS solver')
parser.add_argument('--max_iters', type=int, default=10000, help='limit of iterations for the method')
parser.add_argument('--problems_class', type=str, choices=['grish','gklss','gklsh'], default='grish')
parser.add_argument('--algo', type=str, choices=algos.keys(), default='scd')
parser.add_argument('--problems_dim', type=int, default=2)
parser.add_argument('--verbose', action='store_true', help='Print additional info to console')
    parser.add_argument('--dist_stop', action='store_true', help='Stop the algorithm when the next point is close enough to the optimum')
parser.add_argument('--serg_eps', action='store_true')
parser.add_argument('--stats_fname', type=str, default='')
main(parser.parse_args())
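# Example invocation (script/file name assumed):
#   python solver_benchmark.py --algo ags --problems_class gklss --problems_dim 3 --max_iters 15000 --stats_fname ags_gklss3.json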
| [((13158, 13203), 'functools.partial', 'functools.partial', (['AGSWrapper'], {'mixedFast': '(True)'}), '(AGSWrapper, mixedFast=True)\n', (13175, 13203), False, 'import functools\n'), ((13224, 13284), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GN_ORIG_DIRECT'}), '(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT)\n', (13241, 13284), False, 'import functools\n'), ((13306, 13368), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GN_ORIG_DIRECT_L'}), '(NLOptWrapper, method=nlopt.GN_ORIG_DIRECT_L)\n', (13323, 13368), False, 'import functools\n'), ((13388, 13442), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GD_STOGO'}), '(NLOptWrapper, method=nlopt.GD_STOGO)\n', (13405, 13442), False, 'import functools\n'), ((13461, 13517), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.G_MLSL_LDS'}), '(NLOptWrapper, method=nlopt.G_MLSL_LDS)\n', (13478, 13517), False, 'import functools\n'), ((13535, 13591), 'functools.partial', 'functools.partial', (['NLOptWrapper'], {'method': 'nlopt.GN_CRS2_LM'}), '(NLOptWrapper, method=nlopt.GN_CRS2_LM)\n', (13552, 13591), False, 'import functools\n'), ((14130, 14154), 'math.pow', 'math.pow', (['(1e-06)', '(1.0 / 4)'], {}), '(1e-06, 1.0 / 4)\n', (14138, 14154), False, 'import math\n'), ((14155, 14179), 'math.pow', 'math.pow', (['(1e-07)', '(1.0 / 5)'], {}), '(1e-07, 1.0 / 5)\n', (14163, 14179), False, 'import math\n'), ((14821, 14888), 'benchmark_tools.core.solve_class', 'solve_class', (['problems', 'wrapper'], {'verbose': 'args.verbose', 'eps_check': 'eps'}), '(problems, wrapper, verbose=args.verbose, eps_check=eps)\n', (14832, 14888), False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((14901, 14941), 'benchmark_tools.stats.compute_stats', 'compute_stats', (['calc_stats', 'solved_status'], {}), '(calc_stats, solved_status)\n', (14914, 14941), False, 'from benchmark_tools.stats import save_stats, compute_stats\n'), ((15330, 15397), 'benchmark_tools.stats.save_stats', 'save_stats', (['stats', 'args.stats_fname'], {'capture': 'algo2cature[args.algo]'}), '(stats, args.stats_fname, capture=algo2cature[args.algo])\n', (15340, 15397), False, 'from benchmark_tools.stats import save_stats, compute_stats\n'), ((15439, 15499), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Sample for AGS solver"""'}), "(description='Sample for AGS solver')\n", (15462, 15499), False, 'import argparse\n'), ((944, 963), 'ags_solver.Solver', 'ags_solver.Solver', ([], {}), '()\n', (961, 963), False, 'import ags_solver\n'), ((1121, 1144), 'ags_solver.Parameters', 'ags_solver.Parameters', ([], {}), '()\n', (1142, 1144), False, 'import ags_solver\n'), ((8158, 8174), 'scipy.spatial.Delaunay', 'Delaunay', (['points'], {}), '(points)\n', (8166, 8174), False, 'from scipy.spatial import Delaunay\n'), ((11978, 12027), 'pyOpt.Optimization', 'pyOpt.Optimization', (['"""Problem"""', 'objective_function'], {}), "('Problem', objective_function)\n", (11996, 12027), False, 'import pyOpt\n'), ((12220, 12241), 'pyOpt.MIDACO', 'MIDACO', ([], {'pll_type': 'None'}), '(pll_type=None)\n', (12226, 12241), False, 'from pyOpt import MIDACO\n'), ((12946, 12992), 'shgo.shgo', 'shgo', (['objective_function', 'bounds'], {'options': 'opts'}), '(objective_function, bounds, options=opts)\n', (12950, 12992), False, 'from shgo import shgo\n'), ((14290, 14302), 'benchmark_tools.core.GrishClass', 'GrishClass', ([], {}), '()\n', (14300, 14302), 
False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((14442, 14500), 'benchmark_tools.core.GKLSClass', 'GKLSClass', (['args.problems_dim', 'go_problems.GKLSClass.Simple'], {}), '(args.problems_dim, go_problems.GKLSClass.Simple)\n', (14451, 14500), False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((14538, 14594), 'benchmark_tools.core.GKLSClass', 'GKLSClass', (['args.problems_dim', 'go_problems.GKLSClass.Hard'], {}), '(args.problems_dim, go_problems.GKLSClass.Hard)\n', (14547, 14594), False, 'from benchmark_tools.core import Solver, solve_class, GrishClass, GKLSClass\n'), ((3378, 3392), 'numpy.array', 'np.array', (['xmax'], {}), '(xmax)\n', (3386, 3392), True, 'import numpy as np\n'), ((3421, 3435), 'numpy.array', 'np.array', (['xmin'], {}), '(xmin)\n', (3429, 3435), True, 'import numpy as np\n'), ((3542, 3564), 'numpy.all', 'np.all', (['(x <= self.xmax)'], {}), '(x <= self.xmax)\n', (3548, 3564), True, 'import numpy as np\n'), ((3594, 3616), 'numpy.all', 'np.all', (['(x >= self.xmin)'], {}), '(x >= self.xmin)\n', (3600, 3616), True, 'import numpy as np\n'), ((8115, 8141), 'itertools.product', 'itertools.product', (['*bounds'], {}), '(*bounds)\n', (8132, 8141), False, 'import itertools\n'), ((2144, 2155), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (2152, 2155), True, 'import numpy as np\n'), ((8487, 8498), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (8495, 8498), True, 'import numpy as np\n')] |
mattyschell/geodatabase-toiler | src/py/fc.py | c8231999c3156bf41f9b80f151085afa97ba8586 | import arcpy
import logging
import pathlib
import subprocess
import gdb
import cx_sde
class Fc(object):
def __init__(self
,gdb
,name):
# gdb object
self.gdb = gdb
# ex BUILDING
self.name = name.upper()
# esri tools usually expect this C:/sdefiles/bldg.sde/BUILDING
# also acceptable: C:/sdefiles/bldg.sde/BLDG.BUILDING
self.featureclass = self.gdb.sdeconn + "/" + self.name
def getfields(self):
desc = arcpy.Describe(self.featureclass)
fields = desc.fields
fieldsameslist = []
for field in fields:
fieldsameslist.append(field.name)
return fieldsameslist
def exists(self):
return arcpy.Exists(self.featureclass)
def delete(self):
logging.info('deleting {0}'.format(self.name))
desc = arcpy.Describe(self.featureclass)
if desc.IsArchived == True:
            # disable archiving and axe the _H table
arcpy.DisableArchiving_management(self.featureclass,
'DELETE')
arcpy.Delete_management(self.featureclass)
def locksexist(self):
if arcpy.TestSchemaLock(self.featureclass):
# "True A schema lock can be applied to the dataset"
return False
else:
return True
def interpret(self
,resobject):
# could also work with resobject.status
output = 0
if 'succeeded' not in resobject.getMessages().lower():
output = 1
            logging.warning('response code is {0}'.format(resobject.status))
            logging.warning('response messages are {0}'.format(resobject.getMessages()))
return output
def version(self):
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/register-as-versioned.htm
logging.info('versioning {0}'.format(self.name))
arcpy.RegisterAsVersioned_management(self.featureclass
,"NO_EDITS_TO_BASE")
# https://support.esri.com/en/technical-article/000023226
# When an ArcGIS 10.8 / ArcGIS Pro 2.5 (or newer) client connects to a
# 10.7.1, or earlier, release of an Enterprise geodatabase in Oracle,
# and registers the data as versioned, the versioned view is not created
# for the associated table or feature class.
        # I can't get this shell out to python27 to work
        # so, like a dummy, I'm gonna print it to the screen for now
        # the test will fail until I (or esri) get it right, that's honest at least
py2versionedviews = pathlib.Path(__file__).parent.parent \
.joinpath('py27') \
.joinpath('create_versionedviews.py')
# see gdb class for this path, perhaps 'C:\Python27\ArcGIS10.6'
callcmd = r'{0} {1} {2}'.format(self.gdb.arcpy2path, py2versionedviews, self.name)
logging.info('YOU MUST CREATE versioned views from py27 using {0}'.format(callcmd))
logging.info('YOU YES YOU MUST call this: {0}'.format(callcmd))
# From a script run a postprocess something like:
# C:\Python27\ArcGIS10.6\python.exe C:\matt_projects\geodatabase-toiler\src\py27\create_versionedviews.py TOILERTESTFC
# exit_code = subprocess.call(callcmd,shell=True)
# exit_code = subprocess.run([self.gdb.arcpy2path, 'C:\matt_projects\geodatabase-toiler\src\py27\create_versionedviews.py'])
# subprocess.Popen(["virtualenv1/bin/python", "my_script.py"])
# attempts above yield
# File "C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\Lib\site.py", line 177
#file=sys.stderr)
# ^
# SyntaxError: invalid syntax
def trackedits(self):
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/enable-editor-tracking.htm
# this will create fields only if they dont exist
# I am gonna fix the field names here. Reminder that our goal is to
# be opinionated and consistent across anything we manage
logging.info('enabling editor tracking on {0}'.format(self.name))
return self.interpret(arcpy.EnableEditorTracking_management(self.featureclass
,'CREATED_USER'
,'CREATED_DATE'
,'LAST_EDITED_USER'
,'LAST_EDITED_DATE'
,'NO_ADD_FIELDS'
,'UTC'))
def grantprivileges(self
,user
,edits='GRANT'): # or AS_IS
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/change-privileges.htm
        # the caller should know who the editors are; we don't concern ourselves with that here
        # always grant select; pass edits='GRANT' to also grant editing, or 'AS_IS' to grant select only
        # The knobs and dials on this tool are confounding
logging.info('granting privileges on {0} to {1}'.format(self.name
,user))
return self.interpret(arcpy.ChangePrivileges_management(self.featureclass
,user
,'GRANT'
,edits))
def index(self
,column):
# https://pro.arcgis.com/en/pro-app/tool-reference/data-management/add-attribute-index.htm
        # unique indexes can't be specified for multiversioned tables
logging.info('indexing column {0} on {1}'.format(column
,self.name))
# BUILDINGBINIX
# BUILDING_HISTORICDOITT_IDIX = 27 careful friend
return self.interpret(arcpy.AddIndex_management(self.featureclass
,column
,'{0}{1}{2}'.format(self.name
,column
,'IX')))
def analyze(self
,components=['BUSINESS','ADDS','DELETES']):
return self.interpret(arcpy.Analyze_management(self.featureclass
,components))
def rebuildindexes(self):
# https://pro.arcgis.com/en/pro-app/latest/tool-reference/data-management/rebuild-indexes.htm
return self.interpret(arcpy.RebuildIndexes_management(self.gdb.sdeconn
,'NO_SYSTEM'
,self.name
,'ALL'))
def enablearchiving(self):
desc = arcpy.Describe(self.featureclass)
if desc.IsArchived == False:
return self.interpret(arcpy.EnableArchiving_management(self.featureclass))
else:
return 0
def exporttoshp(self
,outputdir
,outputname):
# print('fc2fc {0} {1} {2}'.format(self.featureclass, outputdir, outputname))
arcpy.FeatureClassToFeatureClass_conversion(self.featureclass
,outputdir
,outputname)
# TODO exportogeopackage if ESRI ever fills in some functionality in
# https://pro.arcgis.com/en/pro-app/latest/tool-reference/conversion/an-overview-of-the-to-geopackage-toolset.htm
# TODO exportogeojson if ESRI tool does something other than error 99999 (guess: sdo_geometry not supported)
# For now export to shp, then ogr2ogr to other formats. Classic
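    # Usage sketch (assumes a connected gdb wrapper object; names are illustrative):
    #   fc = Fc(somegdb, 'BUILDING')
    #   if fc.exists() and not fc.locksexist():
    #       fc.analyze()
    #       fc.rebuildindexes()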
| [((513, 546), 'arcpy.Describe', 'arcpy.Describe', (['self.featureclass'], {}), '(self.featureclass)\n', (527, 546), False, 'import arcpy\n'), ((749, 780), 'arcpy.Exists', 'arcpy.Exists', (['self.featureclass'], {}), '(self.featureclass)\n', (761, 780), False, 'import arcpy\n'), ((877, 910), 'arcpy.Describe', 'arcpy.Describe', (['self.featureclass'], {}), '(self.featureclass)\n', (891, 910), False, 'import arcpy\n'), ((1140, 1182), 'arcpy.Delete_management', 'arcpy.Delete_management', (['self.featureclass'], {}), '(self.featureclass)\n', (1163, 1182), False, 'import arcpy\n'), ((1222, 1261), 'arcpy.TestSchemaLock', 'arcpy.TestSchemaLock', (['self.featureclass'], {}), '(self.featureclass)\n', (1242, 1261), False, 'import arcpy\n'), ((1982, 2057), 'arcpy.RegisterAsVersioned_management', 'arcpy.RegisterAsVersioned_management', (['self.featureclass', '"""NO_EDITS_TO_BASE"""'], {}), "(self.featureclass, 'NO_EDITS_TO_BASE')\n", (2018, 2057), False, 'import arcpy\n'), ((7326, 7359), 'arcpy.Describe', 'arcpy.Describe', (['self.featureclass'], {}), '(self.featureclass)\n', (7340, 7359), False, 'import arcpy\n'), ((7722, 7811), 'arcpy.FeatureClassToFeatureClass_conversion', 'arcpy.FeatureClassToFeatureClass_conversion', (['self.featureclass', 'outputdir', 'outputname'], {}), '(self.featureclass, outputdir,\n outputname)\n', (7765, 7811), False, 'import arcpy\n'), ((1021, 1083), 'arcpy.DisableArchiving_management', 'arcpy.DisableArchiving_management', (['self.featureclass', '"""DELETE"""'], {}), "(self.featureclass, 'DELETE')\n", (1054, 1083), False, 'import arcpy\n'), ((4345, 4505), 'arcpy.EnableEditorTracking_management', 'arcpy.EnableEditorTracking_management', (['self.featureclass', '"""CREATED_USER"""', '"""CREATED_DATE"""', '"""LAST_EDITED_USER"""', '"""LAST_EDITED_DATE"""', '"""NO_ADD_FIELDS"""', '"""UTC"""'], {}), "(self.featureclass, 'CREATED_USER',\n 'CREATED_DATE', 'LAST_EDITED_USER', 'LAST_EDITED_DATE', 'NO_ADD_FIELDS',\n 'UTC')\n", (4382, 4505), False, 'import arcpy\n'), ((5512, 5586), 'arcpy.ChangePrivileges_management', 'arcpy.ChangePrivileges_management', (['self.featureclass', 'user', '"""GRANT"""', 'edits'], {}), "(self.featureclass, user, 'GRANT', edits)\n", (5545, 5586), False, 'import arcpy\n'), ((6737, 6792), 'arcpy.Analyze_management', 'arcpy.Analyze_management', (['self.featureclass', 'components'], {}), '(self.featureclass, components)\n', (6761, 6792), False, 'import arcpy\n'), ((7013, 7098), 'arcpy.RebuildIndexes_management', 'arcpy.RebuildIndexes_management', (['self.gdb.sdeconn', '"""NO_SYSTEM"""', 'self.name', '"""ALL"""'], {}), "(self.gdb.sdeconn, 'NO_SYSTEM', self.name, 'ALL'\n )\n", (7044, 7098), False, 'import arcpy\n'), ((7441, 7492), 'arcpy.EnableArchiving_management', 'arcpy.EnableArchiving_management', (['self.featureclass'], {}), '(self.featureclass)\n', (7473, 7492), False, 'import arcpy\n'), ((2698, 2720), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (2710, 2720), False, 'import pathlib\n')] |
gomesGabriel/Pythonicos | desafiosCursoEmVideo/ex004.py | b491cefbb0479dd83fee267304d0fa30b99786a5 | n = input('Digite algo: ')
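# CeV exercise 004: the checks below print the primitive type of the input plus several
# str introspection results (isalnum, isalpha, isdecimal, ...); prompts/messages are in Portuguese.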
print('O tipo primitivo da variável é: ', type(n))
print('O que foi digitado é alfa numérico? ', n.isalnum())
print('O que foi digitado é alfabético? ', n.isalpha())
print('O que foi digitado é um decimal? ', n.isdecimal())
print('O que foi digitado é minúsculo? ', n.islower())
print('O que foi digitado é numérico? ', n.isnumeric())
print('O que foi digitado pode ser impresso? ', n.isprintable())
print('O que foi digitado é apenas espaço? ', n.isspace())
print('O que foi digitado está capitalizada? ', n.istitle())
print('O que foi digitado é maiúsculo? ', n.isupper())
| [] |
dalmia/Lisa-Lab-Tutorials | Machine learning book/3 - MultiLayer Perceptron/test_regression.py | ee1b0b4fcb82914085420bb289ebda09f248c8d1 | from numpy import *
import numpy as np
import matplotlib.pyplot as plt
from mlp import mlp
x = ones((1, 40)) * linspace(0, 1, 40)
t = sin(2 * pi * x) + cos(2 * pi * x) + np.random.randn(40) * 0.2
x = transpose(x)
t = transpose(t)
n_hidden = 3
eta = 0.25
n_iterations = 101
plt.plot(x, t, '.')
plt.show()
train = x[0::2, :]
test = x[1::4, :]
valid = x[3::4, :]
train_targets = t[0::2, :]
test_targets = t[1::4, :]
valid_targets = t[3::4, :]
net = mlp(train, train_targets, n_hidden, out_type='linear')
net.mlptrain(train, train_targets, eta, n_iterations)
best_err = net.earlystopping(train, train_targets, valid, valid_targets, eta)
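# Note: the test/test_targets split prepared above is not evaluated in this snippet;
# a follow-up step would typically score the early-stopped network on it.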
| [((276, 295), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 't', '"""."""'], {}), "(x, t, '.')\n", (284, 295), True, 'import matplotlib.pyplot as plt\n'), ((296, 306), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (304, 306), True, 'import matplotlib.pyplot as plt\n'), ((452, 506), 'mlp.mlp', 'mlp', (['train', 'train_targets', 'n_hidden'], {'out_type': '"""linear"""'}), "(train, train_targets, n_hidden, out_type='linear')\n", (455, 506), False, 'from mlp import mlp\n'), ((171, 190), 'numpy.random.randn', 'np.random.randn', (['(40)'], {}), '(40)\n', (186, 190), True, 'import numpy as np\n')] |
IllIIIllll/reinforcement-learning-omok | gomoku/networks/__init__.py | 1c76ba76c203a3b7c99095fde0626aff45b1b94b | # © 2020 지성. all rights reserved.
# <[email protected]>
# Apache License 2.0
from .small import *
from .medium import *
from .large import * | [] |
snowxmas/alipay-sdk-python-all | alipay/aop/api/domain/AlipayEbppInvoiceAuthSignModel.py | 96870ced60facd96c5bce18d19371720cbda3317 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayEbppInvoiceAuthSignModel(object):
def __init__(self):
self._authorization_type = None
self._m_short_name = None
self._user_id = None
@property
def authorization_type(self):
return self._authorization_type
@authorization_type.setter
def authorization_type(self, value):
self._authorization_type = value
@property
def m_short_name(self):
return self._m_short_name
@m_short_name.setter
def m_short_name(self, value):
self._m_short_name = value
@property
def user_id(self):
return self._user_id
@user_id.setter
def user_id(self, value):
self._user_id = value
def to_alipay_dict(self):
params = dict()
if self.authorization_type:
if hasattr(self.authorization_type, 'to_alipay_dict'):
params['authorization_type'] = self.authorization_type.to_alipay_dict()
else:
params['authorization_type'] = self.authorization_type
if self.m_short_name:
if hasattr(self.m_short_name, 'to_alipay_dict'):
params['m_short_name'] = self.m_short_name.to_alipay_dict()
else:
params['m_short_name'] = self.m_short_name
if self.user_id:
if hasattr(self.user_id, 'to_alipay_dict'):
params['user_id'] = self.user_id.to_alipay_dict()
else:
params['user_id'] = self.user_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayEbppInvoiceAuthSignModel()
if 'authorization_type' in d:
o.authorization_type = d['authorization_type']
if 'm_short_name' in d:
o.m_short_name = d['m_short_name']
if 'user_id' in d:
o.user_id = d['user_id']
return o
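# Minimal usage sketch (field values are made up):
#   model = AlipayEbppInvoiceAuthSignModel()
#   model.user_id = '2088000000000000'
#   payload = model.to_alipay_dict()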
| [] |
jmcshane/experimental | sdk/python/tekton_pipeline/models/v1beta1_embedded_task.py | 3c47c7e87bcdadc6172941169f3f24fc3f159ae0 | # Copyright 2020 The Tekton Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Tekton
Tekton Pipeline # noqa: E501
The version of the OpenAPI document: v0.17.2
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from tekton_pipeline.configuration import Configuration
class V1beta1EmbeddedTask(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'description': 'str',
'metadata': 'V1beta1PipelineTaskMetadata',
'params': 'list[V1beta1ParamSpec]',
'resources': 'V1beta1TaskResources',
'results': 'list[V1beta1TaskResult]',
'sidecars': 'list[V1beta1Sidecar]',
'step_template': 'V1Container',
'steps': 'list[V1beta1Step]',
'volumes': 'list[V1Volume]',
'workspaces': 'list[V1beta1WorkspaceDeclaration]'
}
attribute_map = {
'description': 'description',
'metadata': 'metadata',
'params': 'params',
'resources': 'resources',
'results': 'results',
'sidecars': 'sidecars',
'step_template': 'stepTemplate',
'steps': 'steps',
'volumes': 'volumes',
'workspaces': 'workspaces'
}
def __init__(self, description=None, metadata=None, params=None, resources=None, results=None, sidecars=None, step_template=None, steps=None, volumes=None, workspaces=None, local_vars_configuration=None): # noqa: E501
"""V1beta1EmbeddedTask - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._description = None
self._metadata = None
self._params = None
self._resources = None
self._results = None
self._sidecars = None
self._step_template = None
self._steps = None
self._volumes = None
self._workspaces = None
self.discriminator = None
if description is not None:
self.description = description
if metadata is not None:
self.metadata = metadata
if params is not None:
self.params = params
if resources is not None:
self.resources = resources
if results is not None:
self.results = results
if sidecars is not None:
self.sidecars = sidecars
if step_template is not None:
self.step_template = step_template
if steps is not None:
self.steps = steps
if volumes is not None:
self.volumes = volumes
if workspaces is not None:
self.workspaces = workspaces
@property
def description(self):
"""Gets the description of this V1beta1EmbeddedTask. # noqa: E501
Description is a user-facing description of the task that may be used to populate a UI. # noqa: E501
:return: The description of this V1beta1EmbeddedTask. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this V1beta1EmbeddedTask.
Description is a user-facing description of the task that may be used to populate a UI. # noqa: E501
:param description: The description of this V1beta1EmbeddedTask. # noqa: E501
:type: str
"""
self._description = description
@property
def metadata(self):
"""Gets the metadata of this V1beta1EmbeddedTask. # noqa: E501
:return: The metadata of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1beta1PipelineTaskMetadata
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta1EmbeddedTask.
:param metadata: The metadata of this V1beta1EmbeddedTask. # noqa: E501
:type: V1beta1PipelineTaskMetadata
"""
self._metadata = metadata
@property
def params(self):
"""Gets the params of this V1beta1EmbeddedTask. # noqa: E501
Params is a list of input parameters required to run the task. Params must be supplied as inputs in TaskRuns unless they declare a default value. # noqa: E501
:return: The params of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1ParamSpec]
"""
return self._params
@params.setter
def params(self, params):
"""Sets the params of this V1beta1EmbeddedTask.
Params is a list of input parameters required to run the task. Params must be supplied as inputs in TaskRuns unless they declare a default value. # noqa: E501
:param params: The params of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1ParamSpec]
"""
self._params = params
@property
def resources(self):
"""Gets the resources of this V1beta1EmbeddedTask. # noqa: E501
:return: The resources of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1beta1TaskResources
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this V1beta1EmbeddedTask.
:param resources: The resources of this V1beta1EmbeddedTask. # noqa: E501
:type: V1beta1TaskResources
"""
self._resources = resources
@property
def results(self):
"""Gets the results of this V1beta1EmbeddedTask. # noqa: E501
Results are values that this Task can output # noqa: E501
:return: The results of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1TaskResult]
"""
return self._results
@results.setter
def results(self, results):
"""Sets the results of this V1beta1EmbeddedTask.
Results are values that this Task can output # noqa: E501
:param results: The results of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1TaskResult]
"""
self._results = results
@property
def sidecars(self):
"""Gets the sidecars of this V1beta1EmbeddedTask. # noqa: E501
Sidecars are run alongside the Task's step containers. They begin before the steps start and end after the steps complete. # noqa: E501
:return: The sidecars of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1Sidecar]
"""
return self._sidecars
@sidecars.setter
def sidecars(self, sidecars):
"""Sets the sidecars of this V1beta1EmbeddedTask.
Sidecars are run alongside the Task's step containers. They begin before the steps start and end after the steps complete. # noqa: E501
:param sidecars: The sidecars of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1Sidecar]
"""
self._sidecars = sidecars
@property
def step_template(self):
"""Gets the step_template of this V1beta1EmbeddedTask. # noqa: E501
:return: The step_template of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1Container
"""
return self._step_template
@step_template.setter
def step_template(self, step_template):
"""Sets the step_template of this V1beta1EmbeddedTask.
:param step_template: The step_template of this V1beta1EmbeddedTask. # noqa: E501
:type: V1Container
"""
self._step_template = step_template
@property
def steps(self):
"""Gets the steps of this V1beta1EmbeddedTask. # noqa: E501
Steps are the steps of the build; each step is run sequentially with the source mounted into /workspace. # noqa: E501
:return: The steps of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1Step]
"""
return self._steps
@steps.setter
def steps(self, steps):
"""Sets the steps of this V1beta1EmbeddedTask.
Steps are the steps of the build; each step is run sequentially with the source mounted into /workspace. # noqa: E501
:param steps: The steps of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1Step]
"""
self._steps = steps
@property
def volumes(self):
"""Gets the volumes of this V1beta1EmbeddedTask. # noqa: E501
Volumes is a collection of volumes that are available to mount into the steps of the build. # noqa: E501
:return: The volumes of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1Volume]
"""
return self._volumes
@volumes.setter
def volumes(self, volumes):
"""Sets the volumes of this V1beta1EmbeddedTask.
Volumes is a collection of volumes that are available to mount into the steps of the build. # noqa: E501
:param volumes: The volumes of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1Volume]
"""
self._volumes = volumes
@property
def workspaces(self):
"""Gets the workspaces of this V1beta1EmbeddedTask. # noqa: E501
Workspaces are the volumes that this Task requires. # noqa: E501
:return: The workspaces of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1WorkspaceDeclaration]
"""
return self._workspaces
@workspaces.setter
def workspaces(self, workspaces):
"""Sets the workspaces of this V1beta1EmbeddedTask.
Workspaces are the volumes that this Task requires. # noqa: E501
:param workspaces: The workspaces of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1WorkspaceDeclaration]
"""
self._workspaces = workspaces
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1EmbeddedTask):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1EmbeddedTask):
return True
return self.to_dict() != other.to_dict()
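# Minimal usage sketch (step/param objects elided for brevity):
#   task = V1beta1EmbeddedTask(steps=[...], params=[...])
#   print(task.to_str())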
| [((10862, 10895), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (10875, 10895), False, 'import six\n'), ((2520, 2535), 'tekton_pipeline.configuration.Configuration', 'Configuration', ([], {}), '()\n', (2533, 2535), False, 'from tekton_pipeline.configuration import Configuration\n')] |
gmlunesa/zhat | tzp.py | 3bf62625d102bd40274fcd39c91f21c169e334a8 | import zmq
import curses
import argparse
import configparser
import threading
import time
from curses import wrapper
from client import Client
from ui import UI
def parse_args():
parser = argparse.ArgumentParser(description='Client for teezeepee')
# Please specify your username
parser.add_argument('username',
type=str,
help='Specified username')
parser.add_argument('--config-file',
type=str,
help='Default path for configuration file.')
return parser.parse_args()
def display_section(window, display):
window_lines, window_cols = window.getmaxyx()
bottom_line = window_lines - 1
window.bkgd(curses.A_NORMAL)
window.scrollok(1)
while True:
window.addstr(bottom_line, 1, display.recv_string())
window.move(bottom_line, 1)
window.scroll(1)
window.refresh()
def input_section(window, chat_sender):
window.bkgd(curses.A_NORMAL)
window.clear()
window.box()
window.refresh()
while True:
window.clear()
window.box()
window.refresh()
s = window.getstr(1, 1).decode('utf-8')
if s is not None and s != "":
chat_sender.send_string(s)
# Short pause
time.sleep(0.01)
def main(stdscr):
config_file = args.config_file if args.config_file is not None else 'tzp.cfg'
config = configparser.ConfigParser()
config.read(config_file)
config = config['default']
receiver = zmq.Context().instance().socket(zmq.PAIR)
receiver.bind("inproc://clientchat")
sender = zmq.Context().instance().socket(zmq.PAIR)
sender.connect("inproc://clientchat")
client = Client(args.username, config['server_host'],
config['chat_port'], receiver)
client.run()
show_receiver = zmq.Context().instance().socket(zmq.PAIR)
show_receiver.bind("inproc://clientdisplay")
show_sender = zmq.Context().instance().socket(zmq.PAIR)
show_sender.connect("inproc://clientdisplay")
ui = UI(config['server_host'], config['display_port'], show_sender)
ui.run()
curses.init_pair(1, curses.COLOR_BLACK, curses.COLOR_WHITE)
curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_WHITE)
curses.echo()
curses.curs_set(0)
window_height = curses.LINES
window_width = curses.COLS
divider = int(window_height * 0.5)
history_screen = stdscr.subpad(divider, window_width, 0, 0)
input_screen = stdscr.subpad(window_height - divider, window_width, divider, 0)
history_thread = threading.Thread(target=display_section, args=(history_screen, show_receiver))
history_thread.daemon = True
history_thread.start()
input_thread = threading.Thread(target=input_section, args=(input_screen, sender))
input_thread.daemon = True
input_thread.start()
history_thread.join()
input_thread.join()
if '__main__' == __name__:
try:
args = parse_args()
wrapper(main)
except KeyboardInterrupt as e:
pass
except:
raise
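# Example tzp.cfg (values are placeholders; the keys match the lookups above):
#   [default]
#   server_host = 127.0.0.1
#   chat_port = 5556
#   display_port = 5557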
| [((196, 255), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Client for teezeepee"""'}), "(description='Client for teezeepee')\n", (219, 255), False, 'import argparse\n'), ((1448, 1475), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1473, 1475), False, 'import configparser\n'), ((1747, 1822), 'client.Client', 'Client', (['args.username', "config['server_host']", "config['chat_port']", 'receiver'], {}), "(args.username, config['server_host'], config['chat_port'], receiver)\n", (1753, 1822), False, 'from client import Client\n'), ((2097, 2159), 'ui.UI', 'UI', (["config['server_host']", "config['display_port']", 'show_sender'], {}), "(config['server_host'], config['display_port'], show_sender)\n", (2099, 2159), False, 'from ui import UI\n'), ((2178, 2237), 'curses.init_pair', 'curses.init_pair', (['(1)', 'curses.COLOR_BLACK', 'curses.COLOR_WHITE'], {}), '(1, curses.COLOR_BLACK, curses.COLOR_WHITE)\n', (2194, 2237), False, 'import curses\n'), ((2242, 2301), 'curses.init_pair', 'curses.init_pair', (['(2)', 'curses.COLOR_BLACK', 'curses.COLOR_WHITE'], {}), '(2, curses.COLOR_BLACK, curses.COLOR_WHITE)\n', (2258, 2301), False, 'import curses\n'), ((2307, 2320), 'curses.echo', 'curses.echo', ([], {}), '()\n', (2318, 2320), False, 'import curses\n'), ((2325, 2343), 'curses.curs_set', 'curses.curs_set', (['(0)'], {}), '(0)\n', (2340, 2343), False, 'import curses\n'), ((2624, 2702), 'threading.Thread', 'threading.Thread', ([], {'target': 'display_section', 'args': '(history_screen, show_receiver)'}), '(target=display_section, args=(history_screen, show_receiver))\n', (2640, 2702), False, 'import threading\n'), ((2783, 2850), 'threading.Thread', 'threading.Thread', ([], {'target': 'input_section', 'args': '(input_screen, sender)'}), '(target=input_section, args=(input_screen, sender))\n', (2799, 2850), False, 'import threading\n'), ((1316, 1332), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1326, 1332), False, 'import time\n'), ((3032, 3045), 'curses.wrapper', 'wrapper', (['main'], {}), '(main)\n', (3039, 3045), False, 'from curses import wrapper\n'), ((1552, 1565), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (1563, 1565), False, 'import zmq\n'), ((1649, 1662), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (1660, 1662), False, 'import zmq\n'), ((1885, 1898), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (1896, 1898), False, 'import zmq\n'), ((1995, 2008), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (2006, 2008), False, 'import zmq\n')] |
Sam-Gresh/linkage-agent-tools | anonlink-entity-service/backend/entityservice/tasks/solver.py | f405c7efe3fa82d99bc047f130c0fac6f3f5bf82 | import anonlink
from anonlink.candidate_generation import _merge_similarities
from entityservice.object_store import connect_to_object_store
from entityservice.async_worker import celery, logger
from entityservice.settings import Config as config
from entityservice.tasks.base_task import TracedTask
from entityservice.tasks.permutation import save_and_permute
@celery.task(base=TracedTask, ignore_result=True, args_as_tags=('project_id', 'run_id'))
def solver_task(similarity_scores_filename, project_id, run_id, dataset_sizes, parent_span):
log = logger.bind(pid=project_id, run_id=run_id)
mc = connect_to_object_store()
solver_task.span.log_kv({'datasetSizes': dataset_sizes,
'filename': similarity_scores_filename})
score_file = mc.get_object(config.MINIO_BUCKET, similarity_scores_filename)
log.debug("Creating python sparse matrix from bytes data")
candidate_pairs_with_duplicates = anonlink.serialization.load_candidate_pairs(score_file)
similarity_scores, (dset_is0, dset_is1), (rec_is0, rec_is1) = candidate_pairs_with_duplicates
log.info(f"Number of candidate pairs before deduplication: {len(candidate_pairs_with_duplicates[0])}")
if len(candidate_pairs_with_duplicates[0]) > 0:
# TODO use public interface when available
# https://github.com/data61/anonlink/issues/271
candidate_pairs = _merge_similarities([zip(similarity_scores, dset_is0, dset_is1, rec_is0, rec_is1)], k=None)
log.info(f"Number of candidate pairs after deduplication: {len(candidate_pairs[0])}")
log.info("Calculating the optimal mapping from similarity matrix")
groups = anonlink.solving.greedy_solve(candidate_pairs)
else:
groups = []
log.info("Entity groups have been computed")
res = {
"groups": groups,
"datasetSizes": dataset_sizes
}
save_and_permute.delay(res, project_id, run_id, solver_task.get_serialized_span())
| [((365, 456), 'entityservice.async_worker.celery.task', 'celery.task', ([], {'base': 'TracedTask', 'ignore_result': '(True)', 'args_as_tags': "('project_id', 'run_id')"}), "(base=TracedTask, ignore_result=True, args_as_tags=('project_id',\n 'run_id'))\n", (376, 456), False, 'from entityservice.async_worker import celery, logger\n'), ((556, 598), 'entityservice.async_worker.logger.bind', 'logger.bind', ([], {'pid': 'project_id', 'run_id': 'run_id'}), '(pid=project_id, run_id=run_id)\n', (567, 598), False, 'from entityservice.async_worker import celery, logger\n'), ((608, 633), 'entityservice.object_store.connect_to_object_store', 'connect_to_object_store', ([], {}), '()\n', (631, 633), False, 'from entityservice.object_store import connect_to_object_store\n'), ((945, 1000), 'anonlink.serialization.load_candidate_pairs', 'anonlink.serialization.load_candidate_pairs', (['score_file'], {}), '(score_file)\n', (988, 1000), False, 'import anonlink\n'), ((1671, 1717), 'anonlink.solving.greedy_solve', 'anonlink.solving.greedy_solve', (['candidate_pairs'], {}), '(candidate_pairs)\n', (1700, 1717), False, 'import anonlink\n')] |
nickmvincent/ugc-val-est | portal/migrations/0007_auto_20170824_1341.py | b5cceda14ef5830f1befaddfccfd90a694c9677a | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-24 13:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('portal', '0006_auto_20170824_0950'),
]
operations = [
migrations.AddField(
model_name='sampledstackoverflowpost',
name='num_question_comments',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='question_score',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title',
field=models.CharField(default='', max_length=1182),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_coleman_liau_index',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_length',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_lexicon_count',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_percent_punctuation',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_percent_spaces',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_percent_uppercase',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_sentence_count',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='sampledstackoverflowpost',
name='title_starts_capitalized',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='sampledredditthread',
name='title',
field=models.CharField(default='', max_length=1182),
),
migrations.AlterField(
model_name='stackoverflowanswer',
name='owner_user_id',
field=models.IntegerField(blank=True, db_index=True, null=True),
),
migrations.AlterField(
model_name='stackoverflowanswer',
name='parent_id',
field=models.IntegerField(db_index=True),
),
migrations.AlterField(
model_name='stackoverflowquestion',
name='accepted_answer_id',
field=models.IntegerField(blank=True, db_index=True, null=True),
),
migrations.AlterField(
model_name='stackoverflowquestion',
name='owner_user_id',
field=models.IntegerField(db_index=True),
),
]
| [((431, 461), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (450, 461), False, 'from django.db import migrations, models\n'), ((607, 637), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (626, 637), False, 'from django.db import migrations, models\n'), ((774, 819), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(1182)'}), "(default='', max_length=1182)\n", (790, 819), False, 'from django.db import migrations, models\n'), ((975, 1005), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (994, 1005), False, 'from django.db import migrations, models\n'), ((1149, 1179), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1168, 1179), False, 'from django.db import migrations, models\n'), ((1330, 1360), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1349, 1360), False, 'from django.db import migrations, models\n'), ((1517, 1547), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1536, 1547), False, 'from django.db import migrations, models\n'), ((1699, 1729), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1718, 1729), False, 'from django.db import migrations, models\n'), ((1884, 1914), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1903, 1914), False, 'from django.db import migrations, models\n'), ((2066, 2096), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2085, 2096), False, 'from django.db import migrations, models\n'), ((2252, 2286), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2271, 2286), False, 'from django.db import migrations, models\n'), ((2420, 2465), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(1182)'}), "(default='', max_length=1182)\n", (2436, 2465), False, 'from django.db import migrations, models\n'), ((2607, 2664), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'db_index': '(True)', 'null': '(True)'}), '(blank=True, db_index=True, null=True)\n', (2626, 2664), False, 'from django.db import migrations, models\n'), ((2802, 2836), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (2821, 2836), False, 'from django.db import migrations, models\n'), ((2985, 3042), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'db_index': '(True)', 'null': '(True)'}), '(blank=True, db_index=True, null=True)\n', (3004, 3042), False, 'from django.db import migrations, models\n'), ((3186, 3220), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (3205, 3220), False, 'from django.db import migrations, models\n')] |
AleRiccardi/technical-neural-network-course | exercise-09/programming_assignment/hopfield.py | bfcca623a9dc3f7f4c20e1efe39abe986cd8869e | import numpy as np
import random
letter_C = np.array([
[1, 1, 1, 1, 1],
[1, 0, 0, 0, 0],
[1, 0, 0, 0, 0],
[1, 0, 0, 0, 0],
[1, 1, 1, 1, 1],
])
noisy_C = np.array([
[1, 1, 1, 1, 1],
[0, 1, 0, 0, 1],
[1, 0, 0, 0, 0],
[1, 0, 0, 1, 0],
[1, 0, 1, 1, 1],
])
letter_I = np.array([
[0, 1, 1, 1, 1],
[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0],
[1, 1, 1, 1, 1],
])
noisy_I = np.array([
[1, 1, 1, 1, 1],
[0, 0, 1, 0, 0],
[0, 1, 1, 0, 0],
[0, 0, 0, 0, 0],
[0, 1, 0, 1, 1],
])
letter_T = np.array([
[1, 1, 1, 1, 1],
[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 1, 0, 0],
])
noisy_T = np.array([
[1, 1, 0, 1, 0],
[0, 0, 1, 0, 0],
[0, 1, 1, 0, 0],
[0, 0, 0, 0, 0],
[0, 0, 1, 0, 0],
])
class HopfieldNet:
def __init__(self, num_neurons, threshold=None):
assert num_neurons <= 1000
self.weights = np.zeros((num_neurons, num_neurons)).astype(np.int)
self.state = np.array((1, num_neurons))
if threshold:
self.thresholds = np.array([threshold for _ in num_neurons])
else:
self.thresholds = np.zeros((num_neurons,))
def fit(self, X):
num_p = X.shape[0]
num_k = X.shape[1]
        # check that the number of stored patterns stays within the Hopfield storage capacity (~0.138 * number of neurons)
assert num_p < num_k * 0.138
num_k = X.shape[1]
for p in range(X.shape[0]):
X_p = X[p, :].reshape((1, num_k))
matrix_lr = np.dot(X_p.T, X_p).astype(np.int)
np.fill_diagonal(matrix_lr, 0)
self.weights += matrix_lr
def predict(self, X, show_energy=False, show_char=False):
num_k = X.shape[1]
X_pred = X.copy()
        # loop over every pattern
for p in range(X_pred.shape[0]):
differ = True
time_s = 0
            # loop until the state
            # stays the same
while differ:
X_prev = X_pred[p].copy()
# print energy
if show_energy:
self.print_energy(X_pred[p], p, time_s)
# print char
if show_char and num_k <= 100:
self.print_char(X_pred[p], p, time_s)
                # loop over every neuron
for k in range(num_k):
val = np.dot(X_pred[p], self.weights[:, k])
val_thres = 1 if val > self.thresholds[k] else -1
X_pred[p, k] = val_thres
# check if the new state differs from the previous one
differ = False if np.array_equal(X_pred[p], X_prev) else True
time_s += 1
return X_pred
def print_energy(self, state, num_p, time_s):
first_term = 0
second_term = 0
for i in range(state.shape[0]):
for j in range(state.shape[0]):
first_term += self.weights[i, j] * state[i] * state[j]
for k in range(state.shape[0]):
second_term += self.thresholds[k] * state[k]
energy = -0.5 * first_term + second_term
print('Pattern: {}\t||\tTime stamp: {}\t||\tEnergy: {:7.0f}'.format(num_p, time_s, energy))
return energy
def print_char(self, sequence, num_p, time_s):
sqrtK = np.sqrt(sequence.shape[0])
# check if correct sequence
assert sqrtK % 1 == 0
print('Pattern: {}\t||\tTime stamp: {}'.format(num_p, time_s))
for y in range(int(sqrtK)):
for x in range(int(sqrtK)):
idx = int(y * sqrtK + x)
val = '*' if sequence[idx] > 0 else ' '
print(val, end=' ')
print('', sep='', end='\n')
print('', sep='', end='\n')
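# The test helpers below store a few 5x5 letter patterns (or random vectors) with the
# Hebbian rule and then try to recall them from noisy probes; {0,1} patterns are mapped
# to {-1,+1} states first via np.where.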
def test_w_less_101():
print('\n================')
print('K < 101')
print('================\n')
X = np.array([
letter_C.flatten(),
letter_I.flatten(),
letter_T.flatten(),
])
X = np.where(X > 0, 1, -1)
net = HopfieldNet(X.shape[1])
net.fit(X)
X_test = np.array([
noisy_C.flatten(),
noisy_I.flatten(),
noisy_T.flatten(),
])
X_test = np.where(X_test > 0, 1, -1)
_ = net.predict(X_test, show_char=True)
def test_w_more_100():
print('\n================')
print('K > 100')
print('================\n')
num_k = random.randint(101, 1000)
binary = 2
X = np.array([
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
])
X = np.where(X > 0, 1, -1)
net = HopfieldNet(X.shape[1])
net.fit(X)
X_test = np.array([
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
np.random.randint(binary, size=num_k),
])
X_test = np.where(X_test > 0, 1, -1)
_ = net.predict(X_test, show_energy=True)
if __name__ == '__main__':
test_w_less_101()
test_w_more_100()
| [((45, 145), 'numpy.array', 'np.array', (['[[1, 1, 1, 1, 1], [1, 0, 0, 0, 0], [1, 0, 0, 0, 0], [1, 0, 0, 0, 0], [1, 1,\n 1, 1, 1]]'], {}), '([[1, 1, 1, 1, 1], [1, 0, 0, 0, 0], [1, 0, 0, 0, 0], [1, 0, 0, 0, 0\n ], [1, 1, 1, 1, 1]])\n', (53, 145), True, 'import numpy as np\n'), ((174, 274), 'numpy.array', 'np.array', (['[[1, 1, 1, 1, 1], [0, 1, 0, 0, 1], [1, 0, 0, 0, 0], [1, 0, 0, 1, 0], [1, 0,\n 1, 1, 1]]'], {}), '([[1, 1, 1, 1, 1], [0, 1, 0, 0, 1], [1, 0, 0, 0, 0], [1, 0, 0, 1, 0\n ], [1, 0, 1, 1, 1]])\n', (182, 274), True, 'import numpy as np\n'), ((305, 405), 'numpy.array', 'np.array', (['[[0, 1, 1, 1, 1], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [1, 1,\n 1, 1, 1]]'], {}), '([[0, 1, 1, 1, 1], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0\n ], [1, 1, 1, 1, 1]])\n', (313, 405), True, 'import numpy as np\n'), ((434, 534), 'numpy.array', 'np.array', (['[[1, 1, 1, 1, 1], [0, 0, 1, 0, 0], [0, 1, 1, 0, 0], [0, 0, 0, 0, 0], [0, 1,\n 0, 1, 1]]'], {}), '([[1, 1, 1, 1, 1], [0, 0, 1, 0, 0], [0, 1, 1, 0, 0], [0, 0, 0, 0, 0\n ], [0, 1, 0, 1, 1]])\n', (442, 534), True, 'import numpy as np\n'), ((565, 665), 'numpy.array', 'np.array', (['[[1, 1, 1, 1, 1], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0,\n 1, 0, 0]]'], {}), '([[1, 1, 1, 1, 1], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [0, 0, 1, 0, 0\n ], [0, 0, 1, 0, 0]])\n', (573, 665), True, 'import numpy as np\n'), ((694, 794), 'numpy.array', 'np.array', (['[[1, 1, 0, 1, 0], [0, 0, 1, 0, 0], [0, 1, 1, 0, 0], [0, 0, 0, 0, 0], [0, 0,\n 1, 0, 0]]'], {}), '([[1, 1, 0, 1, 0], [0, 0, 1, 0, 0], [0, 1, 1, 0, 0], [0, 0, 0, 0, 0\n ], [0, 0, 1, 0, 0]])\n', (702, 794), True, 'import numpy as np\n'), ((3973, 3995), 'numpy.where', 'np.where', (['(X > 0)', '(1)', '(-1)'], {}), '(X > 0, 1, -1)\n', (3981, 3995), True, 'import numpy as np\n'), ((4171, 4198), 'numpy.where', 'np.where', (['(X_test > 0)', '(1)', '(-1)'], {}), '(X_test > 0, 1, -1)\n', (4179, 4198), True, 'import numpy as np\n'), ((4366, 4391), 'random.randint', 'random.randint', (['(101)', '(1000)'], {}), '(101, 1000)\n', (4380, 4391), False, 'import random\n'), ((4864, 4886), 'numpy.where', 'np.where', (['(X > 0)', '(1)', '(-1)'], {}), '(X > 0, 1, -1)\n', (4872, 4886), True, 'import numpy as np\n'), ((5122, 5149), 'numpy.where', 'np.where', (['(X_test > 0)', '(1)', '(-1)'], {}), '(X_test > 0, 1, -1)\n', (5130, 5149), True, 'import numpy as np\n'), ((1018, 1044), 'numpy.array', 'np.array', (['(1, num_neurons)'], {}), '((1, num_neurons))\n', (1026, 1044), True, 'import numpy as np\n'), ((3294, 3320), 'numpy.sqrt', 'np.sqrt', (['sequence.shape[0]'], {}), '(sequence.shape[0])\n', (3301, 3320), True, 'import numpy as np\n'), ((1097, 1139), 'numpy.array', 'np.array', (['[threshold for _ in num_neurons]'], {}), '([threshold for _ in num_neurons])\n', (1105, 1139), True, 'import numpy as np\n'), ((1184, 1208), 'numpy.zeros', 'np.zeros', (['(num_neurons,)'], {}), '((num_neurons,))\n', (1192, 1208), True, 'import numpy as np\n'), ((1543, 1573), 'numpy.fill_diagonal', 'np.fill_diagonal', (['matrix_lr', '(0)'], {}), '(matrix_lr, 0)\n', (1559, 1573), True, 'import numpy as np\n'), ((4434, 4471), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4451, 4471), True, 'import numpy as np\n'), ((4481, 4518), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4498, 4518), True, 'import numpy as np\n'), ((4528, 4565), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, 
size=num_k)\n', (4545, 4565), True, 'import numpy as np\n'), ((4575, 4612), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4592, 4612), True, 'import numpy as np\n'), ((4622, 4659), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4639, 4659), True, 'import numpy as np\n'), ((4669, 4706), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4686, 4706), True, 'import numpy as np\n'), ((4716, 4753), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4733, 4753), True, 'import numpy as np\n'), ((4763, 4800), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4780, 4800), True, 'import numpy as np\n'), ((4810, 4847), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4827, 4847), True, 'import numpy as np\n'), ((4969, 5006), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (4986, 5006), True, 'import numpy as np\n'), ((5016, 5053), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (5033, 5053), True, 'import numpy as np\n'), ((5063, 5100), 'numpy.random.randint', 'np.random.randint', (['binary'], {'size': 'num_k'}), '(binary, size=num_k)\n', (5080, 5100), True, 'import numpy as np\n'), ((945, 981), 'numpy.zeros', 'np.zeros', (['(num_neurons, num_neurons)'], {}), '((num_neurons, num_neurons))\n', (953, 981), True, 'import numpy as np\n'), ((1497, 1515), 'numpy.dot', 'np.dot', (['X_p.T', 'X_p'], {}), '(X_p.T, X_p)\n', (1503, 1515), True, 'import numpy as np\n'), ((2347, 2386), 'numpy.dot', 'np.dot', (['X_pred[p]', 'self.weights[:, (k)]'], {}), '(X_pred[p], self.weights[:, (k)])\n', (2353, 2386), True, 'import numpy as np\n'), ((2606, 2639), 'numpy.array_equal', 'np.array_equal', (['X_pred[p]', 'X_prev'], {}), '(X_pred[p], X_prev)\n', (2620, 2639), True, 'import numpy as np\n')] |
cdla/murfi2 | util/infoclient/test_infoclient.py | 45dba5eb90e7f573f01706a50e584265f0f8ffa7 |
from infoclientLib import InfoClient
ic = InfoClient('localhost', 15002, 'localhost', 15003)
ic.add('roi-weightedave', 'active')
ic.start()
| [((43, 93), 'infoclientLib.InfoClient', 'InfoClient', (['"""localhost"""', '(15002)', '"""localhost"""', '(15003)'], {}), "('localhost', 15002, 'localhost', 15003)\n", (53, 93), False, 'from infoclientLib import InfoClient\n')] |
MovestaDev/low-resource-text-classification-framework | lrtc_lib/experiment_runners/experiment_runner.py | 4380755a65b35265e84ecbf4b87e872d79e8f079 | # (c) Copyright IBM Corporation 2020.
# LICENSE: Apache License 2.0 (Apache-2.0)
# http://www.apache.org/licenses/LICENSE-2.0
import abc
import logging
import time
from collections import defaultdict
from typing import List
import numpy as np
from dataclasses import dataclass
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')
import lrtc_lib.data_access.data_access_factory as data_access_factory
import lrtc_lib.experiment_runners.experiments_results_handler as res_handler
from lrtc_lib.oracle_data_access import oracle_data_access_api
from lrtc_lib.active_learning.diversity_calculator import DiversityCalculator
from lrtc_lib.active_learning.knn_outlier_calculator import KnnOutlierCalculator
from lrtc_lib.active_learning.strategies import ActiveLearningStrategies
from lrtc_lib.data_access.core.data_structs import TextElement
from lrtc_lib.data_access.data_access_api import DataAccessApi
from lrtc_lib.data_access.data_access_factory import get_data_access
from lrtc_lib.orchestrator import orchestrator_api
from lrtc_lib.orchestrator.orchestrator_api import DeleteModels
from lrtc_lib.train_and_infer_service.model_type import ModelType
from lrtc_lib.training_set_selector.train_and_dev_set_selector_api import TrainingSetSelectionStrategy
@dataclass
class ExperimentParams:
experiment_name: str
train_dataset_name: str
dev_dataset_name: str
test_dataset_name: str
category_name: str
workspace_id: str
model: ModelType
active_learning_strategies: list
repeat_id: int
train_params: dict
def compute_batch_scores(config, elements):
data_access = get_data_access()
unlabeled = data_access.sample_unlabeled_text_elements(config.workspace_id, config.train_dataset_name,
config.category_name, 10 ** 6)["results"]
unlabeled_emb = np.array(orchestrator_api.infer(config.workspace_id, config.category_name, unlabeled)["embeddings"])
batch_emb = np.array(orchestrator_api.infer(config.workspace_id, config.category_name, elements)["embeddings"])
outlier_calculator = KnnOutlierCalculator(unlabeled_emb)
outlier_value = outlier_calculator.compute_batch_score(batch_emb)
representativeness_value = 1 / outlier_value
diversity_calculator = DiversityCalculator(unlabeled_emb)
diversity_value = diversity_calculator.compute_batch_score(batch_emb)
return diversity_value, representativeness_value
class ExperimentRunner(object, metaclass=abc.ABCMeta):
NO_AL = 'no_active_learning'
def __init__(self, first_model_positives_num: int, first_model_negatives_num: int,
active_learning_suggestions_num: int):
"""
        Init the ExperimentRunner
        :param first_model_positives_num: the number of positive instances to provide for the first model.
:param first_model_negatives_num: the number of negative instances to provide for the first model.
:param active_learning_suggestions_num: the number of instances to be suggested by the active learning strategy
for the training of the second model.
"""
self.first_model_positives_num = first_model_positives_num
self.first_model_negatives_num = first_model_negatives_num
self.active_learning_suggestions_num = active_learning_suggestions_num
self.data_access: DataAccessApi = data_access_factory.get_data_access()
self.cached_first_model_scores = False
orchestrator_api.set_training_set_selection_strategy(TrainingSetSelectionStrategy.ALL_LABELED)
def run(self, config: ExperimentParams, active_learning_iterations_num: int, results_file_path: str,
delete_workspaces: bool = True):
        # key: active learning name, value: list of results over iterations (first model has no iterations)
results_per_active_learning = defaultdict(dict)
# train first model
iteration = 0
res_dict = self.train_first_model(config=config)
res_handler.save_results(results_file_path, [res_dict])
results_per_active_learning[self.NO_AL][iteration] = res_dict
original_workspace_id = config.workspace_id
for al in config.active_learning_strategies:
orchestrator_api.set_active_learning_strategy(al)
if not orchestrator_api.is_model_compatible_with_active_learning(al, config.model):
logging.info(f'skipping active learning strategy {al.name} for model {config.model.name} '
f'since the strategy does not support this model.')
continue
al_workspace_id = original_workspace_id + "-" + al.name
if orchestrator_api.workspace_exists(al_workspace_id):
orchestrator_api.delete_workspace(al_workspace_id)
orchestrator_api.copy_workspace(original_workspace_id, al_workspace_id)
config.workspace_id = al_workspace_id
for iteration in range(1, active_learning_iterations_num + 1):
logging.info(f'Run AL strategy: {al.name}, iteration num: {iteration}, repeat num: {config.repeat_id}\t'
f'workspace: {config.workspace_id}')
res_dict, train_id = self.run_active_learning_iteration(config, al, iteration)
res_handler.save_results(results_file_path, [res_dict])
results_per_active_learning[al.name][iteration] = res_dict
if delete_workspaces:
orchestrator_api.delete_workspace(config.workspace_id, DeleteModels.ALL_BUT_FIRST_MODEL)
if delete_workspaces:
orchestrator_api.delete_workspace(original_workspace_id)
return results_per_active_learning
def train_first_model(self, config: ExperimentParams):
if orchestrator_api.workspace_exists(config.workspace_id):
orchestrator_api.delete_workspace(config.workspace_id)
orchestrator_api.create_workspace(config.workspace_id, config.train_dataset_name,
dev_dataset_name=config.dev_dataset_name)
orchestrator_api.create_new_category(config.workspace_id, config.category_name, "No description for you")
dev_text_elements_uris = orchestrator_api.get_all_text_elements_uris(config.dev_dataset_name)
dev_text_elements_and_labels = oracle_data_access_api.get_gold_labels(config.dev_dataset_name,
dev_text_elements_uris)
if dev_text_elements_and_labels is not None:
orchestrator_api.set_labels(config.workspace_id, dev_text_elements_and_labels)
random_seed = sum([ord(c) for c in config.workspace_id])
logging.info(str(config))
logging.info(f'random seed: {random_seed}')
self.set_first_model_positives(config, random_seed)
self.set_first_model_negatives(config, random_seed)
# train first model
logging.info(f'Starting first model training (model: {config.model.name})\tworkspace: {config.workspace_id}')
new_model_id = orchestrator_api.train(config.workspace_id, config.category_name, config.model, train_params=config.train_params)
if new_model_id is None:
raise Exception(f'a new model was not trained\tworkspace: {config.workspace_id}')
eval_dataset = config.test_dataset_name
res_dict = self.evaluate(config, al=self.NO_AL, iteration=0, eval_dataset=eval_dataset)
res_dict.update(self.generate_al_batch_dict(config)) # ensures AL-related keys are in the results dictionary
logging.info(f'Evaluation on dataset: {eval_dataset}, iteration: 0, first model (id: {new_model_id}) '
f'repeat: {config.repeat_id}, is: {res_dict}\t'
f'workspace: {config.workspace_id}')
return res_dict
def run_active_learning_iteration(self, config: ExperimentParams, al, iteration):
# get suggested elements for labeling (and their gold labels)
suggested_text_elements, suggested_uris_and_gold_labels = \
self.get_suggested_elements_and_gold_labels(config, al)
# calculate metrics for the batch suggested by the active learning strategy
al_batch_dict = self.generate_al_batch_dict(config, suggested_text_elements)
# set gold labels as the user-provided labels of the elements suggested by the active learning strategy
orchestrator_api.set_labels(config.workspace_id, suggested_uris_and_gold_labels)
# train a new model with the additional elements suggested by the active learning strategy
new_model_id = orchestrator_api.train(config.workspace_id, config.category_name, config.model, train_params=config.train_params)
if new_model_id is None:
raise Exception('New model was not trained')
# evaluate the new model
eval_dataset = config.test_dataset_name
res_dict = self.evaluate(config, al.name, iteration, eval_dataset, suggested_text_elements)
res_dict.update(al_batch_dict)
logging.info(f'Evaluation on dataset: {eval_dataset}, with AL: {al.name}, iteration: {iteration}, '
f'repeat: {config.repeat_id}, model (id: {new_model_id}) is: {res_dict}\t'
f'workspace: {config.workspace_id}')
return res_dict, new_model_id
def get_suggested_elements_and_gold_labels(self, config, al):
start = time.time()
suggested_text_elements_for_labeling = \
orchestrator_api.get_elements_to_label(config.workspace_id, config.category_name,
self.active_learning_suggestions_num)
end = time.time()
logging.info(f'{len(suggested_text_elements_for_labeling)} instances '
f'suggested by active learning strategy: {al.name} '
f'for dataset: {config.train_dataset_name} and category: {config.category_name}.\t'
f'runtime: {end - start}\tworkspace: {config.workspace_id}')
uris_for_labeling = [elem.uri for elem in suggested_text_elements_for_labeling]
uris_and_gold_labels = oracle_data_access_api.get_gold_labels(config.train_dataset_name, uris_for_labeling,
config.category_name)
return suggested_text_elements_for_labeling, uris_and_gold_labels
def evaluate(self, config: ExperimentParams, al, iteration, eval_dataset,
suggested_text_elements_for_labeling=None):
metadata_dict = res_handler.generate_metadata_dict(config, eval_dataset, al, iteration)
labels_counts_dict = res_handler.generate_train_labels_counts_dict(config)
performance_dict = res_handler.generate_performance_metrics_dict(config, eval_dataset)
experiment_specific_metrics_dict = \
self.generate_additional_metrics_dict(config, suggested_text_elements_for_labeling)
res_dict = {**metadata_dict, **labels_counts_dict, **performance_dict, **experiment_specific_metrics_dict}
return res_dict
@abc.abstractmethod
def set_first_model_positives(self, config, random_seed) -> List[TextElement]:
"""
Set the positive instances for the training of the first model.
:param config: experiment config for this run
:param random_seed: a seed for the Random being used for sampling
:return: a list of TextElements and a log message
"""
func_name = self.set_first_model_positives.__name__
raise NotImplementedError('users must define ' + func_name + ' to use this base class')
@abc.abstractmethod
def set_first_model_negatives(self, config, random_seed) -> List[TextElement]:
"""
Set the negative instances for the training of the first model.
:param config: experiment config for this run
:param random_seed: a seed for the Random being used for sampling
:return: a list of TextElements and a log message
"""
func_name = self.set_first_model_negatives.__name__
raise NotImplementedError('users must define ' + func_name + ' to use this base class')
@staticmethod
def generate_al_batch_dict(config, batch_elements=None):
batch_dict = {}
model_supports_embeddings = \
orchestrator_api.is_model_compatible_with_active_learning(ActiveLearningStrategies.DAL, config.model)
if batch_elements is not None and model_supports_embeddings:
diversity_value, representativeness_value = compute_batch_scores(config, batch_elements)
batch_dict["diversity"] = diversity_value
batch_dict["representativeness"] = representativeness_value
else:
batch_dict["diversity"] = "NA"
batch_dict["representativeness"] = "NA"
return batch_dict
def generate_additional_metrics_dict(self, config, suggested_text_elements_for_labeling):
return {}
| [((281, 401), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')\n", (300, 401), False, 'import logging\n'), ((1673, 1690), 'lrtc_lib.data_access.data_access_factory.get_data_access', 'get_data_access', ([], {}), '()\n', (1688, 1690), False, 'from lrtc_lib.data_access.data_access_factory import get_data_access\n'), ((2162, 2197), 'lrtc_lib.active_learning.knn_outlier_calculator.KnnOutlierCalculator', 'KnnOutlierCalculator', (['unlabeled_emb'], {}), '(unlabeled_emb)\n', (2182, 2197), False, 'from lrtc_lib.active_learning.knn_outlier_calculator import KnnOutlierCalculator\n'), ((2344, 2378), 'lrtc_lib.active_learning.diversity_calculator.DiversityCalculator', 'DiversityCalculator', (['unlabeled_emb'], {}), '(unlabeled_emb)\n', (2363, 2378), False, 'from lrtc_lib.active_learning.diversity_calculator import DiversityCalculator\n'), ((3436, 3473), 'lrtc_lib.data_access.data_access_factory.get_data_access', 'data_access_factory.get_data_access', ([], {}), '()\n', (3471, 3473), True, 'import lrtc_lib.data_access.data_access_factory as data_access_factory\n'), ((3529, 3628), 'lrtc_lib.orchestrator.orchestrator_api.set_training_set_selection_strategy', 'orchestrator_api.set_training_set_selection_strategy', (['TrainingSetSelectionStrategy.ALL_LABELED'], {}), '(\n TrainingSetSelectionStrategy.ALL_LABELED)\n', (3581, 3628), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((3922, 3939), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (3933, 3939), False, 'from collections import defaultdict\n'), ((4055, 4110), 'lrtc_lib.experiment_runners.experiments_results_handler.save_results', 'res_handler.save_results', (['results_file_path', '[res_dict]'], {}), '(results_file_path, [res_dict])\n', (4079, 4110), True, 'import lrtc_lib.experiment_runners.experiments_results_handler as res_handler\n'), ((5855, 5909), 'lrtc_lib.orchestrator.orchestrator_api.workspace_exists', 'orchestrator_api.workspace_exists', (['config.workspace_id'], {}), '(config.workspace_id)\n', (5888, 5909), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((5987, 6115), 'lrtc_lib.orchestrator.orchestrator_api.create_workspace', 'orchestrator_api.create_workspace', (['config.workspace_id', 'config.train_dataset_name'], {'dev_dataset_name': 'config.dev_dataset_name'}), '(config.workspace_id, config.\n train_dataset_name, dev_dataset_name=config.dev_dataset_name)\n', (6020, 6115), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((6161, 6271), 'lrtc_lib.orchestrator.orchestrator_api.create_new_category', 'orchestrator_api.create_new_category', (['config.workspace_id', 'config.category_name', '"""No description for you"""'], {}), "(config.workspace_id, config.\n category_name, 'No description for you')\n", (6197, 6271), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((6301, 6369), 'lrtc_lib.orchestrator.orchestrator_api.get_all_text_elements_uris', 'orchestrator_api.get_all_text_elements_uris', (['config.dev_dataset_name'], {}), '(config.dev_dataset_name)\n', (6344, 6369), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((6409, 6500), 'lrtc_lib.oracle_data_access.oracle_data_access_api.get_gold_labels', 'oracle_data_access_api.get_gold_labels', (['config.dev_dataset_name', 'dev_text_elements_uris'], {}), '(config.dev_dataset_name,\n 
dev_text_elements_uris)\n', (6447, 6500), False, 'from lrtc_lib.oracle_data_access import oracle_data_access_api\n'), ((6827, 6870), 'logging.info', 'logging.info', (['f"""random seed: {random_seed}"""'], {}), "(f'random seed: {random_seed}')\n", (6839, 6870), False, 'import logging\n'), ((7029, 7148), 'logging.info', 'logging.info', (['f"""Starting first model training (model: {config.model.name})\tworkspace: {config.workspace_id}"""'], {}), "(\n f'Starting first model training (model: {config.model.name})\\tworkspace: {config.workspace_id}'\n )\n", (7041, 7148), False, 'import logging\n'), ((7162, 7280), 'lrtc_lib.orchestrator.orchestrator_api.train', 'orchestrator_api.train', (['config.workspace_id', 'config.category_name', 'config.model'], {'train_params': 'config.train_params'}), '(config.workspace_id, config.category_name, config.\n model, train_params=config.train_params)\n', (7184, 7280), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((7675, 7864), 'logging.info', 'logging.info', (['f"""Evaluation on dataset: {eval_dataset}, iteration: 0, first model (id: {new_model_id}) repeat: {config.repeat_id}, is: {res_dict}\tworkspace: {config.workspace_id}"""'], {}), "(\n f'Evaluation on dataset: {eval_dataset}, iteration: 0, first model (id: {new_model_id}) repeat: {config.repeat_id}, is: {res_dict}\\tworkspace: {config.workspace_id}'\n )\n", (7687, 7864), False, 'import logging\n'), ((8514, 8599), 'lrtc_lib.orchestrator.orchestrator_api.set_labels', 'orchestrator_api.set_labels', (['config.workspace_id', 'suggested_uris_and_gold_labels'], {}), '(config.workspace_id, suggested_uris_and_gold_labels\n )\n', (8541, 8599), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((8718, 8836), 'lrtc_lib.orchestrator.orchestrator_api.train', 'orchestrator_api.train', (['config.workspace_id', 'config.category_name', 'config.model'], {'train_params': 'config.train_params'}), '(config.workspace_id, config.category_name, config.\n model, train_params=config.train_params)\n', (8740, 8836), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((9152, 9365), 'logging.info', 'logging.info', (['f"""Evaluation on dataset: {eval_dataset}, with AL: {al.name}, iteration: {iteration}, repeat: {config.repeat_id}, model (id: {new_model_id}) is: {res_dict}\tworkspace: {config.workspace_id}"""'], {}), "(\n f'Evaluation on dataset: {eval_dataset}, with AL: {al.name}, iteration: {iteration}, repeat: {config.repeat_id}, model (id: {new_model_id}) is: {res_dict}\\tworkspace: {config.workspace_id}'\n )\n", (9164, 9365), False, 'import logging\n'), ((9527, 9538), 'time.time', 'time.time', ([], {}), '()\n', (9536, 9538), False, 'import time\n'), ((9600, 9724), 'lrtc_lib.orchestrator.orchestrator_api.get_elements_to_label', 'orchestrator_api.get_elements_to_label', (['config.workspace_id', 'config.category_name', 'self.active_learning_suggestions_num'], {}), '(config.workspace_id, config.\n category_name, self.active_learning_suggestions_num)\n', (9638, 9724), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((9785, 9796), 'time.time', 'time.time', ([], {}), '()\n', (9794, 9796), False, 'import time\n'), ((10256, 10366), 'lrtc_lib.oracle_data_access.oracle_data_access_api.get_gold_labels', 'oracle_data_access_api.get_gold_labels', (['config.train_dataset_name', 'uris_for_labeling', 'config.category_name'], {}), '(config.train_dataset_name,\n uris_for_labeling, config.category_name)\n', (10294, 10366), False, 'from lrtc_lib.oracle_data_access import 
oracle_data_access_api\n'), ((10671, 10742), 'lrtc_lib.experiment_runners.experiments_results_handler.generate_metadata_dict', 'res_handler.generate_metadata_dict', (['config', 'eval_dataset', 'al', 'iteration'], {}), '(config, eval_dataset, al, iteration)\n', (10705, 10742), True, 'import lrtc_lib.experiment_runners.experiments_results_handler as res_handler\n'), ((10772, 10825), 'lrtc_lib.experiment_runners.experiments_results_handler.generate_train_labels_counts_dict', 'res_handler.generate_train_labels_counts_dict', (['config'], {}), '(config)\n', (10817, 10825), True, 'import lrtc_lib.experiment_runners.experiments_results_handler as res_handler\n'), ((10853, 10920), 'lrtc_lib.experiment_runners.experiments_results_handler.generate_performance_metrics_dict', 'res_handler.generate_performance_metrics_dict', (['config', 'eval_dataset'], {}), '(config, eval_dataset)\n', (10898, 10920), True, 'import lrtc_lib.experiment_runners.experiments_results_handler as res_handler\n'), ((12449, 12555), 'lrtc_lib.orchestrator.orchestrator_api.is_model_compatible_with_active_learning', 'orchestrator_api.is_model_compatible_with_active_learning', (['ActiveLearningStrategies.DAL', 'config.model'], {}), '(\n ActiveLearningStrategies.DAL, config.model)\n', (12506, 12555), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((1928, 2004), 'lrtc_lib.orchestrator.orchestrator_api.infer', 'orchestrator_api.infer', (['config.workspace_id', 'config.category_name', 'unlabeled'], {}), '(config.workspace_id, config.category_name, unlabeled)\n', (1950, 2004), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((2045, 2120), 'lrtc_lib.orchestrator.orchestrator_api.infer', 'orchestrator_api.infer', (['config.workspace_id', 'config.category_name', 'elements'], {}), '(config.workspace_id, config.category_name, elements)\n', (2067, 2120), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((4300, 4349), 'lrtc_lib.orchestrator.orchestrator_api.set_active_learning_strategy', 'orchestrator_api.set_active_learning_strategy', (['al'], {}), '(al)\n', (4345, 4349), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((4743, 4793), 'lrtc_lib.orchestrator.orchestrator_api.workspace_exists', 'orchestrator_api.workspace_exists', (['al_workspace_id'], {}), '(al_workspace_id)\n', (4776, 4793), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((4874, 4945), 'lrtc_lib.orchestrator.orchestrator_api.copy_workspace', 'orchestrator_api.copy_workspace', (['original_workspace_id', 'al_workspace_id'], {}), '(original_workspace_id, al_workspace_id)\n', (4905, 4945), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((5684, 5740), 'lrtc_lib.orchestrator.orchestrator_api.delete_workspace', 'orchestrator_api.delete_workspace', (['original_workspace_id'], {}), '(original_workspace_id)\n', (5717, 5740), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((5923, 5977), 'lrtc_lib.orchestrator.orchestrator_api.delete_workspace', 'orchestrator_api.delete_workspace', (['config.workspace_id'], {}), '(config.workspace_id)\n', (5956, 5977), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((6640, 6718), 'lrtc_lib.orchestrator.orchestrator_api.set_labels', 'orchestrator_api.set_labels', (['config.workspace_id', 'dev_text_elements_and_labels'], {}), '(config.workspace_id, dev_text_elements_and_labels)\n', (6667, 6718), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((4369, 4444), 
'lrtc_lib.orchestrator.orchestrator_api.is_model_compatible_with_active_learning', 'orchestrator_api.is_model_compatible_with_active_learning', (['al', 'config.model'], {}), '(al, config.model)\n', (4426, 4444), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((4462, 4610), 'logging.info', 'logging.info', (['f"""skipping active learning strategy {al.name} for model {config.model.name} since the strategy does not support this model."""'], {}), "(\n f'skipping active learning strategy {al.name} for model {config.model.name} since the strategy does not support this model.'\n )\n", (4474, 4610), False, 'import logging\n'), ((4811, 4861), 'lrtc_lib.orchestrator.orchestrator_api.delete_workspace', 'orchestrator_api.delete_workspace', (['al_workspace_id'], {}), '(al_workspace_id)\n', (4844, 4861), False, 'from lrtc_lib.orchestrator import orchestrator_api\n'), ((5088, 5235), 'logging.info', 'logging.info', (['f"""Run AL strategy: {al.name}, iteration num: {iteration}, repeat num: {config.repeat_id}\tworkspace: {config.workspace_id}"""'], {}), "(\n f'Run AL strategy: {al.name}, iteration num: {iteration}, repeat num: {config.repeat_id}\\tworkspace: {config.workspace_id}'\n )\n", (5100, 5235), False, 'import logging\n'), ((5371, 5426), 'lrtc_lib.experiment_runners.experiments_results_handler.save_results', 'res_handler.save_results', (['results_file_path', '[res_dict]'], {}), '(results_file_path, [res_dict])\n', (5395, 5426), True, 'import lrtc_lib.experiment_runners.experiments_results_handler as res_handler\n'), ((5553, 5646), 'lrtc_lib.orchestrator.orchestrator_api.delete_workspace', 'orchestrator_api.delete_workspace', (['config.workspace_id', 'DeleteModels.ALL_BUT_FIRST_MODEL'], {}), '(config.workspace_id, DeleteModels.\n ALL_BUT_FIRST_MODEL)\n', (5586, 5646), False, 'from lrtc_lib.orchestrator import orchestrator_api\n')] |
alexbjorling/acquisition-framework | contrast/environment/data.py | 4090381344aabca05155612845ba4e4a47455dc3 | try:
from tango import DeviceProxy, DevError
except ModuleNotFoundError:
pass
class PathFixer(object):
"""
Basic pathfixer which takes a path manually.
"""
def __init__(self):
self.directory = None
class SdmPathFixer(object):
"""
MAX IV pathfixer which takes a path from a Tango device.
"""
def __init__(self, sdm_device):
self.device = DeviceProxy(sdm_device)
self.TRIALS = 10
self.cache = None
@property
def directory(self):
for trial in range(self.TRIALS):
try:
val = self.device.SamplePath
self.cache = val
return val
except DevError:
print('Failed in getting SDM path from Tango. Trying again...')
print('Failed %u times, using cached value: %s'
% (self.TRIALS, self.cache))
return self.cache
| [((397, 420), 'tango.DeviceProxy', 'DeviceProxy', (['sdm_device'], {}), '(sdm_device)\n', (408, 420), False, 'from tango import DeviceProxy, DevError\n')] |
fung04/csrw_game | game_2048/views.py | 9673fdd311583057d5bf756dec7b99959d961d0c | import json
from django.contrib.auth.models import User
from django.http import JsonResponse
from django.shortcuts import redirect, render
from .models import Game2048
# Create your views here.
# test_user
# 8!S#5RP!WVMACg
def game(request):
return render(request, 'game_2048/index.html')
def set_result(request):
user = request.user if str(
request.user) != "AnonymousUser" else User.objects.get(username='test_user')
if request.method == 'POST':
# Get the game state from the POST request
game_state = request.body
obj = Game2048.objects.get(user=user)
        # Check if the game state is identical to the server game state
if game_state != obj.game_state:
            # parse the JSON string into a Python object
json_game_state = json.loads(game_state)
            # extract the value of 'best' from the JSON object
obj.best_score = json_game_state['best']
obj.game_state = json_game_state # save JSON object to game_state
obj.save()
else:
return redirect('game_2048:game')
return JsonResponse("", safe=False)
def get_result(request):
    # Check if the user is logged in; if not, fall back to test_user
user = request.user if str(
request.user) != "AnonymousUser" else User.objects.get(username='test_user')
if request.method == 'GET':
obj, created = Game2048.objects.get_or_create(user=user)
game_state = obj.game_state
return JsonResponse(game_state, safe=False)
| [((260, 299), 'django.shortcuts.render', 'render', (['request', '"""game_2048/index.html"""'], {}), "(request, 'game_2048/index.html')\n", (266, 299), False, 'from django.shortcuts import redirect, render\n'), ((1086, 1114), 'django.http.JsonResponse', 'JsonResponse', (['""""""'], {'safe': '(False)'}), "('', safe=False)\n", (1098, 1114), False, 'from django.http import JsonResponse\n'), ((1467, 1503), 'django.http.JsonResponse', 'JsonResponse', (['game_state'], {'safe': '(False)'}), '(game_state, safe=False)\n', (1479, 1503), False, 'from django.http import JsonResponse\n'), ((405, 443), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""test_user"""'}), "(username='test_user')\n", (421, 443), False, 'from django.contrib.auth.models import User\n'), ((1047, 1073), 'django.shortcuts.redirect', 'redirect', (['"""game_2048:game"""'], {}), "('game_2048:game')\n", (1055, 1073), False, 'from django.shortcuts import redirect, render\n'), ((1282, 1320), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""test_user"""'}), "(username='test_user')\n", (1298, 1320), False, 'from django.contrib.auth.models import User\n'), ((791, 813), 'json.loads', 'json.loads', (['game_state'], {}), '(game_state)\n', (801, 813), False, 'import json\n')] |
Silvicek/distributional-dqn | distdeepq/__init__.py | 41a9095393dd25b7375119b4af7d2c35ee3ec6cc | from distdeepq import models # noqa
from distdeepq.build_graph import build_act, build_train # noqa
from distdeepq.simple import learn, load, make_session # noqa
from distdeepq.replay_buffer import ReplayBuffer, PrioritizedReplayBuffer # noqa
from distdeepq.static import *
from distdeepq.plots import PlotMachine
| [] |
17nikhil/codecademy | python/10.Authentication-&-API-Keys.py | 58fbd652691c9df8139544965ebb0e9748142538 | # Authentication & API Keys
# Many APIs require an API key. Just as a real-world key allows you to access something, an API key grants you access to a particular API. Moreover, an API key identifies you to the API, which helps the API provider keep track of how their service is used and prevent unauthorized or malicious activity.
#
# Some APIs require authentication using a protocol called OAuth. We won't get into the details, but if you've ever been redirected to a page asking for permission to link an application with your account, you've probably used OAuth.
#
# API keys are often long alphanumeric strings. We've made one up in the editor to the right! (It won't actually work on anything, but when you receive your own API keys in future projects, they'll look a lot like this.)
api_key = "string"
| [] |
takkaria/json-plucker | plucker/__init__.py | 6407dcc9a21d99d8f138128e9ee80c901a08c2e1 | from .plucker import pluck, Path
from .exceptions import PluckError
__all__ = ["pluck", "Path", "PluckError"]
| [] |
gimbo/arviz | arviz/plots/pairplot.py | c1df1847aa5170ad2810ae3d705d576d2643e3ec | """Plot a scatter or hexbin of sampled parameters."""
import warnings
import numpy as np
from ..data import convert_to_dataset, convert_to_inference_data
from .plot_utils import xarray_to_ndarray, get_coords, get_plotting_function
from ..utils import _var_names
def plot_pair(
data,
group="posterior",
var_names=None,
coords=None,
figsize=None,
textsize=None,
kind="scatter",
gridsize="auto",
contour=True,
fill_last=True,
divergences=False,
colorbar=False,
ax=None,
divergences_kwargs=None,
plot_kwargs=None,
backend=None,
backend_kwargs=None,
show=None,
):
"""
Plot a scatter or hexbin matrix of the sampled parameters.
Parameters
----------
data : obj
Any object that can be converted to an az.InferenceData object
Refer to documentation of az.convert_to_dataset for details
group : str, optional
Specifies which InferenceData group should be plotted. Defaults to 'posterior'.
var_names : list of variable names
        Variables to be plotted, if None all variables are plotted
coords : mapping, optional
Coordinates of var_names to be plotted. Passed to `Dataset.sel`
figsize : figure size tuple
If None, size is (8 + numvars, 8 + numvars)
textsize: int
Text size for labels. If None it will be autoscaled based on figsize.
kind : str
Type of plot to display (scatter, kde or hexbin)
gridsize : int or (int, int), optional
Only works for kind=hexbin.
The number of hexagons in the x-direction. The corresponding number of hexagons in the
y-direction is chosen such that the hexagons are approximately regular.
Alternatively, gridsize can be a tuple with two elements specifying the number of hexagons
in the x-direction and the y-direction.
contour : bool
If True plot the 2D KDE using contours, otherwise plot a smooth 2D KDE. Defaults to True.
fill_last : bool
If True fill the last contour of the 2D KDE plot. Defaults to True.
divergences : Boolean
If True divergences will be plotted in a different color, only if group is either 'prior'
or 'posterior'.
colorbar : bool
If True a colorbar will be included as part of the plot (Defaults to False).
Only works when kind=hexbin
ax: axes, optional
Matplotlib axes or bokeh figures.
divergences_kwargs : dicts, optional
Additional keywords passed to ax.scatter for divergences
plot_kwargs : dicts, optional
Additional keywords passed to ax.plot, az.plot_kde or ax.hexbin
backend: str, optional
Select plotting backend {"matplotlib","bokeh"}. Default "matplotlib".
backend_kwargs: bool, optional
These are kwargs specific to the backend being used. For additional documentation
check the plotting method of the backend.
show : bool, optional
Call backend show function.
Returns
-------
axes : matplotlib axes or bokeh figures
Examples
--------
KDE Pair Plot
.. plot::
:context: close-figs
>>> import arviz as az
>>> centered = az.load_arviz_data('centered_eight')
>>> coords = {'school': ['Choate', 'Deerfield']}
>>> az.plot_pair(centered,
>>> var_names=['theta', 'mu', 'tau'],
>>> kind='kde',
>>> coords=coords,
>>> divergences=True,
>>> textsize=18)
Hexbin pair plot
.. plot::
:context: close-figs
>>> az.plot_pair(centered,
>>> var_names=['theta', 'mu'],
>>> coords=coords,
>>> textsize=18,
>>> kind='hexbin')
Pair plot showing divergences
.. plot::
:context: close-figs
>>> az.plot_pair(centered,
... var_names=['theta', 'mu', 'tau'],
... coords=coords,
... divergences=True,
... textsize=18)
"""
valid_kinds = ["scatter", "kde", "hexbin"]
if kind not in valid_kinds:
raise ValueError(
("Plot type {} not recognized." "Plot type must be in {}").format(kind, valid_kinds)
)
if coords is None:
coords = {}
if plot_kwargs is None:
plot_kwargs = {}
if kind == "scatter":
plot_kwargs.setdefault("marker", ".")
plot_kwargs.setdefault("lw", 0)
if divergences_kwargs is None:
divergences_kwargs = {}
divergences_kwargs.setdefault("marker", "o")
divergences_kwargs.setdefault("markeredgecolor", "k")
divergences_kwargs.setdefault("color", "C1")
divergences_kwargs.setdefault("lw", 0)
# Get posterior draws and combine chains
data = convert_to_inference_data(data)
grouped_data = convert_to_dataset(data, group=group)
var_names = _var_names(var_names, grouped_data)
flat_var_names, infdata_group = xarray_to_ndarray(
get_coords(grouped_data, coords), var_names=var_names, combined=True
)
divergent_data = None
diverging_mask = None
# Assigning divergence group based on group param
if group == "posterior":
divergent_group = "sample_stats"
elif group == "prior":
divergent_group = "sample_stats_prior"
else:
divergences = False
# Get diverging draws and combine chains
if divergences:
if hasattr(data, divergent_group) and hasattr(getattr(data, divergent_group), "diverging"):
divergent_data = convert_to_dataset(data, group=divergent_group)
_, diverging_mask = xarray_to_ndarray(
divergent_data, var_names=("diverging",), combined=True
)
diverging_mask = np.squeeze(diverging_mask)
else:
divergences = False
warnings.warn(
"Divergences data not found, plotting without divergences. "
"Make sure the sample method provides divergences data and "
"that it is present in the `diverging` field of `sample_stats` "
"or `sample_stats_prior` or set divergences=False",
SyntaxWarning,
)
if gridsize == "auto":
gridsize = int(len(infdata_group[0]) ** 0.35)
numvars = len(flat_var_names)
if numvars < 2:
raise Exception("Number of variables to be plotted must be 2 or greater.")
pairplot_kwargs = dict(
ax=ax,
infdata_group=infdata_group,
numvars=numvars,
figsize=figsize,
textsize=textsize,
kind=kind,
plot_kwargs=plot_kwargs,
contour=contour,
fill_last=fill_last,
gridsize=gridsize,
colorbar=colorbar,
divergences=divergences,
diverging_mask=diverging_mask,
divergences_kwargs=divergences_kwargs,
flat_var_names=flat_var_names,
backend_kwargs=backend_kwargs,
show=show,
)
if backend == "bokeh":
pairplot_kwargs.pop("gridsize", None)
pairplot_kwargs.pop("colorbar", None)
pairplot_kwargs.pop("divergences_kwargs", None)
pairplot_kwargs.pop("hexbin_values", None)
# TODO: Add backend kwargs
plot = get_plotting_function("plot_pair", "pairplot", backend)
ax = plot(**pairplot_kwargs)
return ax
| [((5835, 5861), 'numpy.squeeze', 'np.squeeze', (['diverging_mask'], {}), '(diverging_mask)\n', (5845, 5861), True, 'import numpy as np\n'), ((5920, 6188), 'warnings.warn', 'warnings.warn', (['"""Divergences data not found, plotting without divergences. Make sure the sample method provides divergences data and that it is present in the `diverging` field of `sample_stats` or `sample_stats_prior` or set divergences=False"""', 'SyntaxWarning'], {}), "(\n 'Divergences data not found, plotting without divergences. Make sure the sample method provides divergences data and that it is present in the `diverging` field of `sample_stats` or `sample_stats_prior` or set divergences=False'\n , SyntaxWarning)\n", (5933, 6188), False, 'import warnings\n')] |
LisandroCanteros/Grupo2_COM06_Info2021 | cuestionario/formularios.py | 86ad9e08db4e8935bf397b6e4db0b3d9d72cb320 | from django.forms import ModelForm
from .models import Cuestionario, Categoria
from preguntas.models import Pregunta, Respuesta
class CuestionarioForm(ModelForm):
class Meta:
model = Cuestionario
fields = '__all__'
class PreguntaForm(ModelForm):
class Meta:
model = Pregunta
fields = '__all__'
class RespuestaForm(ModelForm):
class Meta:
model = Respuesta
fields = '__all__'
class CategoriaForm(ModelForm):
class Meta:
model = Categoria
fields = '__all__'
| [] |
biocross/VITCloud | vitcloud/views.py | 9656bd489c6d05717bf529d0661e07da0cd2551a | from django.views.generic import View
from django.http import HttpResponse
import os, json, datetime
from django.shortcuts import redirect
from django.shortcuts import render_to_response
from vitcloud.models import File
from django.views.decorators.csrf import csrf_exempt
from listingapikeys import findResult
import sys # sys.setdefaultencoding is cancelled by site.py
reload(sys) # to re-enable sys.setdefaultencoding()
sys.setdefaultencoding('utf-8')
#Custom Functions:
def repeated(fname, fsize, fblock, froom):
if(File.objects.filter(name=fname, size=fsize, block=fblock, room=froom)):
return True
else:
return False
#**Not for Production** Views
def clean(request):
q = File.objects.filter(block__iexact="L")
q.delete()
#Views:
def home(request):
no_of_files = len(File.objects.values_list('name').distinct())
no_of_blocks = len(File.objects.values_list('block').distinct())
no_of_rooms = len(File.objects.values_list('room').distinct())
file_sizes = File.objects.all()
total_file_size = 0
for x in file_sizes:
total_file_size = total_file_size + int(x.size)
total_file_size = (total_file_size/1024)
return render_to_response('home/home.htm' , {'x' : no_of_files, 'y' : no_of_blocks, 'z' : no_of_rooms, 'w' : total_file_size })
def pageSearch(request):
return render_to_response('home/search.htm')
def pageHowitworks(request):
return render_to_response('home/howitworks.htm')
def pageTopsharers(request):
return render_to_response('home/topsharers.htm')
def pageGettheapp(request):
return render_to_response('home/gettheapp.htm')
def search(request):
if request.method == "GET":
if (request.GET['thesearchbox'] == ""):
if ('latest' in request.GET):
results = File.objects.all().order_by("-id")
no = len(results)
paragraph = True
return render_to_response('home/results.htm', {'results' : results, 'paragraph' : paragraph, 'no' : no })
else:
return redirect('/blockwise')
else:
filename = str(request.GET['thesearchbox'])
paragraph = False
results = File.objects.filter(name__icontains=filename).order_by("-id")
no = len(results)
return render_to_response('home/results.htm', {'results' : results, 'paragraph': paragraph, 'no' : no })
def blockwise(request):
blockNames = File.objects.values_list('block').distinct()
for x in blockNames:
print str(x[0])
return render_to_response('home/blockwise.htm', {'blocks' : blockNames})
def blockwiseFeeder(request):
if request.method == "GET":
block = request.GET['block']
blockFiles = File.objects.filter(block__iexact=block).order_by("-id")
return render_to_response('home/blockwiseFeeder.htm', {'block': block, 'results': blockFiles})
def suggestions(request):
if request.method == "GET":
filename = str(request.GET['q'])
results = File.objects.filter(name__icontains=filename)
length = len(results)
suggestions = [filename, []]
for x in range(0, length, 1):
suggestions[1].append(results[x].name)
return HttpResponse(json.dumps(suggestions))
class Block:
name = ""
total = ""
def statistics(request):
finalArray = []
blockNames = []
blockSizes = []
blocks = File.objects.values_list('block').distinct()
for x in blocks:
blockName = str(str(x[0]).upper())
blockName = blockName + " Block"
blockNames.append(str(blockName).encode('utf-8'))
blockFiles = File.objects.filter(block__iexact=x[0])
totalSize = 0
for y in blockFiles:
totalSize = totalSize + int(y.size)
blockSizes.append(totalSize/1024)
return render_to_response('home/stats.htm', { 'blockNames' : blockNames, 'blockSizes' : blockSizes })
def apiFeed(request):
if request.method == "GET":
if("q" in request.GET):
filename = str(request.GET['q'])
result = findResult(filename)
return HttpResponse(json.dumps(result))
else:
return HttpResponse("Need The Required Parameters to work!")
def fileDetails(request):
if request.method == "GET":
filename = str(request.GET['q'])
results = File.objects.filter(name__icontains=filename)
filen = "NOTFOUND.404"
for x in results:
filen = x.name
return render_to_response('home/file.htm', {'results' : results, 'filen': filen })
def submitOne(request):
error = False
if 'filename' in request.GET:
filename = request.GET['filename']
filesize = 100000
fileblock = "MN"
fileroom = "447"
if not filename:
error = True
else:
now = datetime.datetime.now()
p1 = File.objects.create(name=filename, size = filesize, block = fileblock, room = fileroom, date = now)
results = File.objects.all()
return render_to_response('home/success.htm', { 'results': results })
return render_to_response('home/submitone.htm', { 'error': error })
@csrf_exempt
def interface(request):
if request.method == "POST":
data = json.loads(request.body)
currentBlock = str(data['Block'])
currentRoom = str(data['Room'])
currentHostelType = str(data['Hostel'])
no = len(data['Files'])
inserted = 0
data=data['Files']
for x in range(0, no, 2):
data[x+1] = int(data[x+1])
data[x+1] = (data[x+1]/1048576)
if not repeated(fname = data[x], fsize = str(data[x+1]), fblock=currentBlock, froom = currentRoom):
now = datetime.datetime.now()
temp = File.objects.create(name=data[x], size=str(data[x+1]), block = currentBlock, room = currentRoom, date = now)
inserted = (inserted + 1)
files_inserted = inserted
result = "inserted files: \n\n" + str(files_inserted)
return HttpResponse(result)
else:
return HttpResponse("<h2>VITCloud</h2> <h4>Desktop App Interface</h4><br/><br/><strong>Current Status:</strong> Listening at /interface...<br/><br/>Copyright 2012-2013<br/>Siddharth Gupta<br/>Saurabh Joshi")
| [] |
jeremytiki/blurple.py | blurple/ui/base.py | c8f65955539cc27be588a06592b1c81c03f59c37 | from abc import ABC
import discord
class Base(discord.Embed, ABC):
async def send(self, client: discord.abc.Messageable):
""" Send the component as a message in discord.
:param client: The client used, usually a :class:`discord.abc.Messageable`. Must have implemented :func:`.send`
:returns: :class:`discord.Message`
"""
return await client.send(embed=self)
| [] |
jonzxz/project-piscator | migrations/versions/e86dd3bc539c_change_admin_to_boolean.py | 588c8b1ac9355f9a82ac449fdbeaa1ef7eb441ef | """change admin to boolean
Revision ID: e86dd3bc539c
Revises: 6f63ef516cdc
Create Date: 2020-11-11 22:32:00.707936
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e86dd3bc539c'
down_revision = '6f63ef516cdc'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('email_address', sa.Column('active', sa.Boolean(), nullable=False))
op.add_column('email_address', sa.Column('email_password', sa.String(length=255), nullable=False))
op.add_column('email_address', sa.Column('last_mailbox_size', sa.Integer(), nullable=True))
op.add_column('email_address', sa.Column('last_updated', sa.DateTime(), nullable=True))
op.add_column('email_address', sa.Column('phishing_mail_detected', sa.Integer(), nullable=True))
op.add_column('user', sa.Column('is_active', sa.Boolean(), nullable=False))
op.add_column('user', sa.Column('is_admin', sa.Boolean(), nullable=True))
op.add_column('user', sa.Column('last_logged_in', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'last_logged_in')
op.drop_column('user', 'is_admin')
op.drop_column('user', 'is_active')
op.drop_column('email_address', 'phishing_mail_detected')
op.drop_column('email_address', 'last_updated')
op.drop_column('email_address', 'last_mailbox_size')
op.drop_column('email_address', 'email_password')
op.drop_column('email_address', 'active')
# ### end Alembic commands ###
| [((1235, 1275), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""last_logged_in"""'], {}), "('user', 'last_logged_in')\n", (1249, 1275), False, 'from alembic import op\n'), ((1280, 1314), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""is_admin"""'], {}), "('user', 'is_admin')\n", (1294, 1314), False, 'from alembic import op\n'), ((1319, 1354), 'alembic.op.drop_column', 'op.drop_column', (['"""user"""', '"""is_active"""'], {}), "('user', 'is_active')\n", (1333, 1354), False, 'from alembic import op\n'), ((1359, 1416), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""phishing_mail_detected"""'], {}), "('email_address', 'phishing_mail_detected')\n", (1373, 1416), False, 'from alembic import op\n'), ((1421, 1468), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""last_updated"""'], {}), "('email_address', 'last_updated')\n", (1435, 1468), False, 'from alembic import op\n'), ((1473, 1525), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""last_mailbox_size"""'], {}), "('email_address', 'last_mailbox_size')\n", (1487, 1525), False, 'from alembic import op\n'), ((1530, 1579), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""email_password"""'], {}), "('email_address', 'email_password')\n", (1544, 1579), False, 'from alembic import op\n'), ((1584, 1625), 'alembic.op.drop_column', 'op.drop_column', (['"""email_address"""', '"""active"""'], {}), "('email_address', 'active')\n", (1598, 1625), False, 'from alembic import op\n'), ((445, 457), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (455, 457), True, 'import sqlalchemy as sa\n'), ((539, 560), 'sqlalchemy.String', 'sa.String', ([], {'length': '(255)'}), '(length=255)\n', (548, 560), True, 'import sqlalchemy as sa\n'), ((645, 657), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (655, 657), True, 'import sqlalchemy as sa\n'), ((736, 749), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (747, 749), True, 'import sqlalchemy as sa\n'), ((838, 850), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (848, 850), True, 'import sqlalchemy as sa\n'), ((917, 929), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (927, 929), True, 'import sqlalchemy as sa\n'), ((996, 1008), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1006, 1008), True, 'import sqlalchemy as sa\n'), ((1080, 1093), 'sqlalchemy.DateTime', 'sa.DateTime', ([], {}), '()\n', (1091, 1093), True, 'import sqlalchemy as sa\n')] |
adrianomqsmts/django-escola | school/migrations/0010_alter_sala_unique_together.py | a69541bceb3f30bdd2e9f0f41aa9c2da6081a1d1 | # Generated by Django 4.0.3 on 2022-03-16 03:09
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('school', '0009_rename_periodo_semestre_alter_semestre_options_and_more'),
]
operations = [
migrations.AlterUniqueTogether(
name='sala',
unique_together={('porta', 'predio')},
),
]
| [((263, 349), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether', ([], {'name': '"""sala"""', 'unique_together': "{('porta', 'predio')}"}), "(name='sala', unique_together={('porta',\n 'predio')})\n", (293, 349), False, 'from django.db import migrations\n')] |
chris4540/DD2430-ds-proj | code_trunk/trainer/abc.py | b876efabe949392b27a7ebd4afb2be623174e287 | """
Abstract training class
"""
from abc import ABC as AbstractBaseClass
from abc import abstractmethod
class AbstractTrainer(AbstractBaseClass):
@abstractmethod
def run(self):
pass
@abstractmethod
def prepare_data_loaders(self):
"""
        Prepare the data loaders and save them as instance attributes.
"""
pass
@abstractmethod
def prepare_exp_settings(self):
"""
        Define the components needed before the actual run. For example:
- Optimizer
- Model
"""
pass
@abstractmethod
def prepare_logging(self):
pass
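# A minimal sketch of a concrete subclass; the attribute names and method bodies are
# placeholders, meant only to illustrate how the abstract hooks fit together in run():
class ExampleTrainer(AbstractTrainer):
    def prepare_data_loaders(self):
        # e.g. build the data loaders here and keep them on the instance
        self.train_loader = []
        self.val_loader = []
    def prepare_exp_settings(self):
        # e.g. construct the model and optimizer before the run
        self.model = None
        self.optimizer = None
    def prepare_logging(self):
        self.log = print
    def run(self):
        self.prepare_data_loaders()
        self.prepare_exp_settings()
        self.prepare_logging()
        self.log("training loop would go here")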
| [] |
melvyniandrag/quadpy | quadpy/triangle/cools_haegemans.py | ae28fc17351be8e76909033f03d71776c7ef8280 | # -*- coding: utf-8 -*-
#
from mpmath import mp
from .helpers import untangle2
class CoolsHaegemans(object):
"""
R. Cools, A. Haegemans,
Construction of minimal cubature formulae for the square and the triangle
using invariant theory,
Department of Computer Science, K.U.Leuven,
TW Reports vol:TW96, Sept. 1987,
<https://lirias.kuleuven.be/handle/123456789/131869>.
"""
def __init__(self, index, mpmath=False):
self.name = "CoolsHaegemans({})".format(index)
assert index == 1
self.degree = 8
flt = mp.mpf if mpmath else float
mp.dps = 20
data = {
"rot": [
[
flt("0.16058343856681218798E-09"),
flt("0.34579201116826902882E+00"),
flt("0.36231682215692616667E+01"),
],
[
flt("0.26530624434780379347E-01"),
flt("0.65101993458939166328E-01"),
flt("0.87016510156356306078E+00"),
],
[
flt("0.29285717640155892159E-01"),
flt("0.65177530364879570754E+00"),
flt("0.31347788752373300717E+00"),
],
[
flt("0.43909556791220782402E-01"),
flt("0.31325121067172530696E+00"),
flt("0.63062143431895614010E+00"),
],
[
flt("0.66940767639916174192E-01"),
flt("0.51334692063945414949E+00"),
flt("0.28104124731511039057E+00"),
],
]
}
# elif index == 2:
# self.degree = 10
# data = [
# (0.15319130036758557631E-06_r3(+0.58469201683584513031E-01, -0.54887778772527519316E+00)),
# (0.13260526227928785221E-01_r3(0.50849285064031410705E-01, 0.90799059794957813439E+00)),
# (0.15646439344539042136E-01_r3(0.51586732419949574487E+00, 0.46312452842927062902E+00)),
# (0.21704258224807323311E-01_r3(0.24311033191739048230E+00, 0.72180595182371959467E-00)),
# (0.21797613600129922367E-01_r3(0.75397765920922660134E-00, 0.20647569839132397633E+00)),
# (0.38587913508193459468E-01_r3(0.42209207910846960294E-00, 0.12689533413411127327E+00)),
# (0.39699584282594413022E-01_r3(0.19823878346663354068E+00, 0.62124412566393319745E+00)),
# (0.47910534861520060665E-01numpy.array([[1.0/3.0, 1.0/3.0, 1.0/3.0]])
# ]
self.bary, self.weights = untangle2(data)
self.points = self.bary[:, 1:]
self.weights *= 2
return
| [] |
RichardLeeH/invoce_sys | account/admin.py | 42a6f5750f45b25e0d7282114ccb7f9f72ee1761 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from account.models import Profile
admin.site.site_header = 'invoce'
class TokenAdmin(admin.ModelAdmin):
list_display = ('key', 'uid', 'user', 'created')
fields = ('user',)
ordering = ('-created',)
def uid(self, obj):
return obj.user.id
    uid.short_description = u'User ID'
admin.site.unregister(Token)
admin.site.register(Token, TokenAdmin)
class ProfileInline(admin.StackedInline):
model = Profile
class UserCustomAdmin(UserAdmin):
list_display = ('id', 'username', 'email', 'is_active', 'is_staff')
inlines = (ProfileInline, )
ordering = ('-id', )
admin.site.unregister(User)
admin.site.register(User, UserCustomAdmin)
| [((543, 571), 'django.contrib.admin.site.unregister', 'admin.site.unregister', (['Token'], {}), '(Token)\n', (564, 571), False, 'from django.contrib import admin\n'), ((572, 610), 'django.contrib.admin.site.register', 'admin.site.register', (['Token', 'TokenAdmin'], {}), '(Token, TokenAdmin)\n', (591, 610), False, 'from django.contrib import admin\n'), ((841, 868), 'django.contrib.admin.site.unregister', 'admin.site.unregister', (['User'], {}), '(User)\n', (862, 868), False, 'from django.contrib import admin\n'), ((869, 911), 'django.contrib.admin.site.register', 'admin.site.register', (['User', 'UserCustomAdmin'], {}), '(User, UserCustomAdmin)\n', (888, 911), False, 'from django.contrib import admin\n')] |
krishankansal/PythonPrograms | oops/#016exceptions.py | 6d4d989068195b8c8dd9d71cf4f920fef1177cf2 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 18 08:40:11 2020
@author: krishan
"""
def funny_division2(anumber):
try:
if anumber == 13:
raise ValueError("13 is an unlucky number")
return 100 / anumber
except (ZeroDivisionError, TypeError):
return "Enter a number other than zero"
def funny_division3(anumber):
try:
if anumber == 13:
raise ValueError("13 is an unlucky number")
return 100 / anumber
except ZeroDivisionError:
return "Enter a number other than zero"
except TypeError:
return "Enter a numerical value"
except ValueError as e:
print("The exception arguments were",e.args)
#raise
for val in (0, "hello", 50.0, 13):
print(f"Testing {val}:", funny_division3(val))
| [] |
denn-s/SimCLR | config/simclr_config.py | e2239ac52464b1271c3b8ad1ec4eb26f3b73c7d4 | import os
from datetime import datetime
import torch
from dataclasses import dataclass
class SimCLRConfig:
@dataclass()
class Base:
output_dir_path: str
log_dir_path: str
log_file_path: str
device: object
num_gpu: int
logger_name: str
@dataclass()
class Train:
# batch_size as usual. examples: 16,32,..
batch_size: int
# number of workers to be used for data loading. examples: 2,4,...
num_workers: int
# start training with this epoch. most likely: 0
start_epoch: int
# in case of restart this is where the saved model is expected to be located
restart_log_dir_path: str
# end training with this epoch. examples: 10, 100,...
epochs: int
# directory where the datasets are located. example: "/home/USER_NAME/Data"
data_dir_path: str
# dataset name. options: ["CIFAR10", "STL10", "iNaturalist2019", "ImageNet"]
dataset: str
# save trained model every n epochs. examples: 1,5,10,...
save_num_epochs: int
# image size obtained from last data preparation step
img_size: int
# name of the optimizer. options: ["Adam", "LARS"]
        # TODO: implement LARS optimizer
optimizer: str
weight_decay: float
temperature: float
global_step: int
current_epoch: int
@dataclass()
class Model:
# model architecture. options: ["resnet18", "resnet50"]
resnet: str
normalize: bool
projection_dim: int
@dataclass()
class SimCLR:
train: object
model: object
@dataclass()
class LogisticRegression:
epochs: int
batch_size: int
learning_rate: float
momentum: float
img_size: int
model_path: str
epoch_num: int
@dataclass()
class FineTuning:
epochs: int
batch_size: int
learning_rate: float
momentum: float
img_size: int
save_num_epochs: int
# decay "learning_rate" by a factor of "gamma" every "step_size" epochs
gamma: float
step_size: int
model_path: str
epoch_num: int
@dataclass()
class ONNX:
batch_size: int
img_size: int
model_path: str
epoch_num: int
def __init__(self, config):
global_step = 0
current_epoch = 0
simclr_train = SimCLRConfig.Train(**config['simclr']['train'], global_step=global_step,
current_epoch=current_epoch)
simclr_model = SimCLRConfig.Model(**config['simclr']['model'])
self.simclr = SimCLRConfig.SimCLR(simclr_train, simclr_model)
model_path = None
epoch_num = None
self.logistic_regression = SimCLRConfig.LogisticRegression(**config['logistic_regression'],
model_path=model_path, epoch_num=epoch_num)
model_path = None
epoch_num = None
self.fine_tuning = SimCLRConfig.FineTuning(**config['fine_tuning'], model_path=model_path,
epoch_num=epoch_num)
model_path = None
epoch_num = None
self.onnx = SimCLRConfig.ONNX(**config['onnx'], model_path=model_path, epoch_num=epoch_num)
logger_name = config['logger_name']
output_dir_path = 'output'
now = datetime.now()
dt_string: str = now.strftime("%Y_%m_%d_%H_%M_%S")
log_dir_name = dt_string + '_' + logger_name + '_' + self.simclr.train.dataset.lower()
log_dir_path = os.path.join(output_dir_path, log_dir_name)
log_file_path = os.path.join(log_dir_path, 'log.txt')
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
num_gpu = torch.cuda.device_count()
self.base = SimCLRConfig.Base(output_dir_path, log_dir_path, log_file_path, device, num_gpu, logger_name)
def __str__(self):
return str(self.__class__) + ": " + str(self.__dict__)
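# Illustrative usage (a minimal sketch; the config file name is an assumption,
# while the top-level keys follow the dict layout expected by __init__ above):
#   import yaml
#   with open('config.yaml') as f:
#       config = SimCLRConfig(yaml.safe_load(f))
#   print(config.base.log_dir_path, config.simclr.train.batch_size)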
| [((115, 126), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (124, 126), False, 'from dataclasses import dataclass\n'), ((300, 311), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (309, 311), False, 'from dataclasses import dataclass\n'), ((1430, 1441), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (1439, 1441), False, 'from dataclasses import dataclass\n'), ((1601, 1612), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (1610, 1612), False, 'from dataclasses import dataclass\n'), ((1681, 1692), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (1690, 1692), False, 'from dataclasses import dataclass\n'), ((1896, 1907), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (1905, 1907), False, 'from dataclasses import dataclass\n'), ((2257, 2268), 'dataclasses.dataclass', 'dataclass', ([], {}), '()\n', (2266, 2268), False, 'from dataclasses import dataclass\n'), ((3502, 3516), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3514, 3516), False, 'from datetime import datetime\n'), ((3695, 3738), 'os.path.join', 'os.path.join', (['output_dir_path', 'log_dir_name'], {}), '(output_dir_path, log_dir_name)\n', (3707, 3738), False, 'import os\n'), ((3763, 3800), 'os.path.join', 'os.path.join', (['log_dir_path', '"""log.txt"""'], {}), "(log_dir_path, 'log.txt')\n", (3775, 3800), False, 'import os\n'), ((3901, 3926), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (3924, 3926), False, 'import torch\n'), ((3844, 3869), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3867, 3869), False, 'import torch\n')] |
Priyanka-Askani/swift | test/unit/common/middleware/s3api/test_obj.py | 1ab691f63778008015b34ce004992844acee9968 | # Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from datetime import datetime
import hashlib
import os
from os.path import join
import time
from mock import patch
from swift.common import swob
from swift.common.swob import Request
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, \
Owner, Grant
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import mktime, S3Timestamp
from test.unit.common.middleware.s3api.helpers import FakeSwift
def _wrap_fake_auth_middleware(org_func):
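    # Wrap the fake auth middleware so that, unless an authorize override is
    # set, the tenant and user parsed from the AWS Authorization header are
    # exposed as keystone-style X-Tenant-Name / X-User-Name request headers.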
def fake_fake_auth_middleware(self, env):
org_func(env)
if 'swift.authorize_override' in env:
return
if 'HTTP_AUTHORIZATION' not in env:
return
_, authorization = env['HTTP_AUTHORIZATION'].split(' ')
tenant_user, sign = authorization.rsplit(':', 1)
tenant, user = tenant_user.rsplit(':', 1)
env['HTTP_X_TENANT_NAME'] = tenant
env['HTTP_X_USER_NAME'] = user
return fake_fake_auth_middleware
class TestS3ApiObj(S3ApiTestCase):
def setUp(self):
super(TestS3ApiObj, self).setUp()
self.object_body = 'hello'
self.etag = hashlib.md5(self.object_body).hexdigest()
self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
self.response_headers = {'Content-Type': 'text/html',
'Content-Length': len(self.object_body),
'Content-Disposition': 'inline',
'Content-Language': 'en',
'x-object-meta-test': 'swift',
'etag': self.etag,
'last-modified': self.last_modified,
'expires': 'Mon, 21 Sep 2015 12:00:00 GMT',
'x-robots-tag': 'nofollow',
'cache-control': 'private'}
self.swift.register('GET', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers,
self.object_body)
self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
swob.HTTPCreated,
{'etag': self.etag,
'last-modified': self.last_modified,
'x-object-meta-something': 'oh hai'},
None)
def _test_object_GETorHEAD(self, method):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': method},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
unexpected_headers = []
for key, val in self.response_headers.iteritems():
if key in ('Content-Length', 'Content-Type', 'content-encoding',
'last-modified', 'cache-control', 'Content-Disposition',
'Content-Language', 'expires', 'x-robots-tag'):
self.assertIn(key, headers)
self.assertEqual(headers[key], str(val))
elif key == 'etag':
self.assertEqual(headers[key], '"%s"' % val)
elif key.startswith('x-object-meta-'):
self.assertIn('x-amz-meta-' + key[14:], headers)
self.assertEqual(headers['x-amz-meta-' + key[14:]], val)
else:
unexpected_headers.append((key, val))
if unexpected_headers:
self.fail('unexpected headers: %r' % unexpected_headers)
self.assertEqual(headers['etag'],
'"%s"' % self.response_headers['etag'])
if method == 'GET':
self.assertEqual(body, self.object_body)
@s3acl
def test_object_HEAD_error(self):
        # Per the S3 REST API specification, HEAD does not return a body even
        # for an error response, so these error tests only check the response
        # code.
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPUnauthorized, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPForbidden, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPNotFound, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '404')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPPreconditionFailed, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '412')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPServerError, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '500')
self.assertEqual(body, '') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPServiceUnavailable, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '500')
self.assertEqual(body, '') # sanity
def test_object_HEAD(self):
self._test_object_GETorHEAD('HEAD')
def _test_object_HEAD_Range(self, range_value):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Range': range_value,
'Date': self.get_date_header()})
return self.call_s3api(req)
@s3acl
def test_object_HEAD_Range_with_invalid_value(self):
range_value = ''
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'hoge'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes='
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=1'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=5-1'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=5-10'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '416')
@s3acl
def test_object_HEAD_Range(self):
# update response headers
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers,
self.object_body)
range_value = 'bytes=0-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '4')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=3-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '1')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 3-3'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=1-'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '4')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 1-4'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '3')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 2-4'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
@s3acl
def test_object_GET_error(self):
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchKey')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPPreconditionFailed)
self.assertEqual(code, 'PreconditionFailed')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'InternalError')
@s3acl
def test_object_GET(self):
self._test_object_GETorHEAD('GET')
@s3acl(s3acl_only=True)
def test_object_GET_with_s3acl_and_keystone(self):
        # exercise the keystone authentication code path
fake_auth = self.swift._fake_auth_middleware
with patch.object(FakeSwift, '_fake_auth_middleware',
_wrap_fake_auth_middleware(fake_auth)):
self._test_object_GETorHEAD('GET')
_, _, headers = self.swift.calls_with_headers[-1]
self.assertNotIn('Authorization', headers)
_, _, headers = self.swift.calls_with_headers[0]
self.assertNotIn('Authorization', headers)
@s3acl
def test_object_GET_Range(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Range': 'bytes=0-3',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
@s3acl
def test_object_GET_Range_error(self):
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPRequestedRangeNotSatisfiable)
self.assertEqual(code, 'InvalidRange')
@s3acl
def test_object_GET_Response(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'GET',
'QUERY_STRING':
'response-content-type=%s&'
'response-content-language=%s&'
'response-expires=%s&'
'response-cache-control=%s&'
'response-content-disposition=%s&'
'response-content-encoding=%s&'
% ('text/plain', 'en',
'Fri, 01 Apr 2014 12:00:00 GMT',
'no-cache',
'attachment',
'gzip')},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-type' in headers)
self.assertEqual(headers['content-type'], 'text/plain')
self.assertTrue('content-language' in headers)
self.assertEqual(headers['content-language'], 'en')
self.assertTrue('expires' in headers)
self.assertEqual(headers['expires'], 'Fri, 01 Apr 2014 12:00:00 GMT')
self.assertTrue('cache-control' in headers)
self.assertEqual(headers['cache-control'], 'no-cache')
self.assertTrue('content-disposition' in headers)
self.assertEqual(headers['content-disposition'],
'attachment')
self.assertTrue('content-encoding' in headers)
self.assertEqual(headers['content-encoding'], 'gzip')
@s3acl
def test_object_PUT_error(self):
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPRequestEntityTooLarge)
self.assertEqual(code, 'EntityTooLarge')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPUnprocessableEntity)
self.assertEqual(code, 'BadDigest')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPLengthRequired)
self.assertEqual(code, 'MissingContentLength')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPPreconditionFailed)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': ''})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?foo=bar'})
self.assertEqual(code, 'InvalidArgument')
        # adding other query parameters will cause an error
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo&bar=baz'})
self.assertEqual(code, 'InvalidArgument')
        # ...even if versionId appears last
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?bar=baz&versionId=foo'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo'})
self.assertEqual(code, 'NotImplemented')
code = self._test_method_error(
'PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': '/src_bucket/src_object',
'X-Amz-Copy-Source-Range': 'bytes=0-0'})
self.assertEqual(code, 'InvalidArgument')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPRequestTimeout)
self.assertEqual(code, 'RequestTimeout')
@s3acl
def test_object_PUT(self):
etag = self.response_headers['etag']
content_md5 = etag.decode('hex').encode('base64').strip()
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'x-amz-storage-class': 'STANDARD',
'Content-MD5': content_md5,
'Date': self.get_date_header()},
body=self.object_body)
req.date = datetime.now()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
# Check that s3api returns an etag header.
self.assertEqual(headers['etag'], '"%s"' % etag)
_, _, headers = self.swift.calls_with_headers[-1]
# Check that s3api converts a Content-MD5 header into an etag.
self.assertEqual(headers['etag'], etag)
def test_object_PUT_headers(self):
content_md5 = self.etag.decode('hex').encode('base64').strip()
self.swift.register('HEAD', '/v1/AUTH_test/some/source',
swob.HTTPOk, {'last-modified': self.last_modified},
None)
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'X-Amz-Storage-Class': 'STANDARD',
'X-Amz-Meta-Something': 'oh hai',
'X-Amz-Meta-Unreadable-Prefix': '\x04w',
'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
'X-Amz-Copy-Source': '/some/source',
'Content-MD5': content_md5,
'Date': self.get_date_header()})
req.date = datetime.now()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
        # Check that s3api does not return an etag header
        # when a copy source is specified.
self.assertTrue(headers.get('etag') is None)
# Check that s3api does not return custom metadata in response
self.assertTrue(headers.get('x-amz-meta-something') is None)
_, _, headers = self.swift.calls_with_headers[-1]
# Check that s3api converts a Content-MD5 header into an etag.
self.assertEqual(headers['ETag'], self.etag)
self.assertEqual(headers['X-Object-Meta-Something'], 'oh hai')
self.assertEqual(headers['X-Object-Meta-Unreadable-Prefix'],
'=?UTF-8?Q?=04w?=')
self.assertEqual(headers['X-Object-Meta-Unreadable-Suffix'],
'=?UTF-8?Q?h=04?=')
self.assertEqual(headers['X-Object-Meta-Lots-Of-Unprintable'],
'=?UTF-8?B?BAQEBAQ=?=')
self.assertEqual(headers['X-Copy-From'], '/some/source')
self.assertEqual(headers['Content-Length'], '0')
def _test_object_PUT_copy(self, head_resp, put_header=None,
src_path='/some/source', timestamp=None):
account = 'test:tester'
grants = [Grant(User(account), 'FULL_CONTROL')]
head_headers = \
encode_acl('object',
ACL(Owner(account, account), grants))
head_headers.update({'last-modified': self.last_modified})
self.swift.register('HEAD', '/v1/AUTH_test/some/source',
head_resp, head_headers, None)
put_header = put_header or {}
return self._call_object_copy(src_path, put_header, timestamp)
def _test_object_PUT_copy_self(self, head_resp,
put_header=None, timestamp=None):
account = 'test:tester'
grants = [Grant(User(account), 'FULL_CONTROL')]
head_headers = \
encode_acl('object',
ACL(Owner(account, account), grants))
head_headers.update({'last-modified': self.last_modified})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
head_resp, head_headers, None)
put_header = put_header or {}
return self._call_object_copy('/bucket/object', put_header, timestamp)
def _call_object_copy(self, src_path, put_header, timestamp=None):
put_headers = {'Authorization': 'AWS test:tester:hmac',
'X-Amz-Copy-Source': src_path,
'Date': self.get_date_header()}
put_headers.update(put_header)
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers=put_headers)
req.date = datetime.now()
req.content_type = 'text/plain'
timestamp = timestamp or time.time()
with patch('swift.common.middleware.s3api.utils.time.time',
return_value=timestamp):
return self.call_s3api(req)
@s3acl
def test_object_PUT_copy(self):
def do_test(src_path=None):
date_header = self.get_date_header()
timestamp = mktime(date_header)
last_modified = S3Timestamp(timestamp).s3xmlformat
status, headers, body = self._test_object_PUT_copy(
swob.HTTPOk, put_header={'Date': date_header},
timestamp=timestamp, src_path=src_path)
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertTrue(headers.get('etag') is None)
self.assertTrue(headers.get('x-amz-meta-something') is None)
elem = fromstring(body, 'CopyObjectResult')
self.assertEqual(elem.find('LastModified').text, last_modified)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/some/source')
self.assertEqual(headers['Content-Length'], '0')
do_test('/some/source')
do_test('/some/source?')
do_test('/some/source?versionId=null')
# Some clients (like Boto) don't include the leading slash;
# AWS seems to tolerate this so we should, too
do_test('some/source')
@s3acl
def test_object_PUT_copy_self(self):
status, headers, body = \
self._test_object_PUT_copy_self(swob.HTTPOk)
self.assertEqual(status.split()[0], '400')
elem = fromstring(body, 'Error')
err_msg = ("This copy request is illegal because it is trying to copy "
"an object to itself without changing the object's "
"metadata, storage class, website redirect location or "
"encryption attributes.")
self.assertEqual(elem.find('Code').text, 'InvalidRequest')
self.assertEqual(elem.find('Message').text, err_msg)
@s3acl
def test_object_PUT_copy_self_metadata_copy(self):
header = {'x-amz-metadata-directive': 'COPY'}
status, headers, body = \
self._test_object_PUT_copy_self(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '400')
elem = fromstring(body, 'Error')
err_msg = ("This copy request is illegal because it is trying to copy "
"an object to itself without changing the object's "
"metadata, storage class, website redirect location or "
"encryption attributes.")
self.assertEqual(elem.find('Code').text, 'InvalidRequest')
self.assertEqual(elem.find('Message').text, err_msg)
@s3acl
def test_object_PUT_copy_self_metadata_replace(self):
date_header = self.get_date_header()
timestamp = mktime(date_header)
last_modified = S3Timestamp(timestamp).s3xmlformat
header = {'x-amz-metadata-directive': 'REPLACE',
'Date': date_header}
status, headers, body = self._test_object_PUT_copy_self(
swob.HTTPOk, header, timestamp=timestamp)
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertTrue(headers.get('etag') is None)
elem = fromstring(body, 'CopyObjectResult')
self.assertEqual(elem.find('LastModified').text, last_modified)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/bucket/object')
self.assertEqual(headers['Content-Length'], '0')
@s3acl
def test_object_PUT_copy_headers_error(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-None-Match': etag}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPNotModified,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPNotModified,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = \
{'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
def test_object_PUT_copy_headers_with_match(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 2)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-Match'], etag)
self.assertEqual(headers['If-Modified-Since'], last_modified_since)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_headers_with_match_and_s3acl(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 3)
        # Once the copy-source check passes in the s3acl case, s3api also
        # checks the write permission on the destination bucket.
_, _, headers = self.swift.calls_with_headers[-2]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-Match'], etag)
self.assertEqual(headers['If-Modified-Since'], last_modified_since)
def test_object_PUT_copy_headers_with_not_match(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-None-Match': etag,
'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 2)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_headers_with_not_match_and_s3acl(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-None-Match': etag,
'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
        # Once the copy-source check passes in the s3acl case, s3api also
        # checks the write permission on the destination bucket.
self.assertEqual(len(self.swift.calls_with_headers), 3)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
@s3acl
def test_object_POST_error(self):
code = self._test_method_error('POST', '/bucket/object', None)
self.assertEqual(code, 'NotImplemented')
@s3acl
def test_object_DELETE_error(self):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'InternalError')
with patch(
'swift.common.middleware.s3api.s3request.get_container_info',
return_value={'status': 204}):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchKey')
with patch(
'swift.common.middleware.s3api.s3request.get_container_info',
return_value={'status': 404}):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
@s3acl
def test_object_DELETE_no_multipart(self):
self.s3api.conf.allow_multipart_uploads = False
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertNotIn(('HEAD', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
_, path = self.swift.calls[-1]
self.assertEqual(path.count('?'), 0)
@s3acl
def test_object_DELETE_multipart(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
_, path = self.swift.calls[-1]
self.assertEqual(path.count('?'), 0)
@s3acl
def test_slo_object_DELETE(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk,
{'x-static-large-object': 'True'},
None)
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, {}, '<SLO delete results>')
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'Content-Type': 'foo/bar'})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual(body, '')
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'
'?multipart-manifest=delete'),
self.swift.calls)
_, path, headers = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['multipart-manifest'], 'delete')
self.assertNotIn('Content-Type', headers)
def _test_object_for_s3acl(self, method, account):
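        # Issue `method` on /bucket/object authenticated as `account` and
        # return the s3api response tuple.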
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': method},
headers={'Authorization': 'AWS %s:hmac' % account,
'Date': self.get_date_header()})
return self.call_s3api(req)
def _test_set_container_permission(self, account, permission):
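        # Register a container HEAD response whose ACL grants `permission`
        # on the bucket to `account`.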
grants = [Grant(User(account), permission)]
headers = \
encode_acl('container',
ACL(Owner('test:tester', 'test:tester'), grants))
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
swob.HTTPNoContent, headers, None)
@s3acl(s3acl_only=True)
def test_object_GET_without_permission(self):
status, headers, body = self._test_object_for_s3acl('GET',
'test:other')
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_GET_with_read_permission(self):
status, headers, body = self._test_object_for_s3acl('GET',
'test:read')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_GET_with_fullcontrol_permission(self):
status, headers, body = \
self._test_object_for_s3acl('GET', 'test:full_control')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_without_permission(self):
status, headers, body = self._test_object_for_s3acl('PUT',
'test:other')
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_PUT_with_owner_permission(self):
status, headers, body = self._test_object_for_s3acl('PUT',
'test:tester')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_with_write_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'WRITE')
status, headers, body = self._test_object_for_s3acl('PUT', account)
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_with_fullcontrol_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'FULL_CONTROL')
status, headers, body = \
self._test_object_for_s3acl('PUT', account)
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_DELETE_without_permission(self):
account = 'test:other'
status, headers, body = self._test_object_for_s3acl('DELETE',
account)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_owner_permission(self):
status, headers, body = self._test_object_for_s3acl('DELETE',
'test:tester')
self.assertEqual(status.split()[0], '204')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_write_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'WRITE')
status, headers, body = self._test_object_for_s3acl('DELETE',
account)
self.assertEqual(status.split()[0], '204')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_fullcontrol_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'FULL_CONTROL')
status, headers, body = self._test_object_for_s3acl('DELETE', account)
self.assertEqual(status.split()[0], '204')
def _test_object_copy_for_s3acl(self, account, src_permission=None,
src_path='/src_bucket/src_obj'):
owner = 'test:tester'
grants = [Grant(User(account), src_permission)] \
if src_permission else [Grant(User(owner), 'FULL_CONTROL')]
src_o_headers = \
encode_acl('object', ACL(Owner(owner, owner), grants))
src_o_headers.update({'last-modified': self.last_modified})
self.swift.register(
'HEAD', join('/v1/AUTH_test', src_path.lstrip('/')),
swob.HTTPOk, src_o_headers, None)
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS %s:hmac' % account,
'X-Amz-Copy-Source': src_path,
'Date': self.get_date_header()})
return self.call_s3api(req)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_owner_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:tester')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_fullcontrol_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:full_control',
'FULL_CONTROL')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_grantee_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:write', 'READ')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_without_src_obj_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:write')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_without_dst_container_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:other', 'READ')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_empty_src_path(self):
self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
swob.HTTPPreconditionFailed, {}, None)
status, headers, body = self._test_object_copy_for_s3acl(
'test:write', 'READ', src_path='')
self.assertEqual(status.split()[0], '400')
class TestS3ApiObjNonUTC(TestS3ApiObj):
def setUp(self):
self.orig_tz = os.environ.get('TZ', '')
os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
time.tzset()
super(TestS3ApiObjNonUTC, self).setUp()
def tearDown(self):
super(TestS3ApiObjNonUTC, self).tearDown()
os.environ['TZ'] = self.orig_tz
time.tzset()
if __name__ == '__main__':
unittest.main()
| [((13899, 13921), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (13904, 13921), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((32337, 32359), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (32342, 32359), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((34580, 34602), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (34585, 34602), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41001, 41023), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41006, 41023), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41290, 41312), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41295, 41312), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41562, 41584), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41567, 41584), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((41803, 41825), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (41808, 41825), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42092, 42114), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (42097, 42114), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42367, 42389), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (42372, 42389), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42669, 42691), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (42674, 42691), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((42998, 43020), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43003, 43020), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((43319, 43341), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43324, 43341), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((43600, 43622), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43605, 43622), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((43968, 43990), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (43973, 43990), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45205, 45227), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (45210, 45227), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45437, 45459), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), 
'(s3acl_only=True)\n', (45442, 45459), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45742, 45764), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (45747, 45764), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((45983, 46005), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (45988, 46005), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((46219, 46241), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (46224, 46241), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((46469, 46491), 'test.unit.common.middleware.s3api.test_s3_acl.s3acl', 's3acl', ([], {'s3acl_only': '(True)'}), '(s3acl_only=True)\n', (46474, 46491), False, 'from test.unit.common.middleware.s3api.test_s3_acl import s3acl\n'), ((47243, 47258), 'unittest.main', 'unittest.main', ([], {}), '()\n', (47256, 47258), False, 'import unittest\n'), ((21839, 21853), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (21851, 21853), False, 'from datetime import datetime\n'), ((23225, 23239), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (23237, 23239), False, 'from datetime import datetime\n'), ((25924, 26016), 'swift.common.swob.Request.blank', 'Request.blank', (['"""/bucket/object"""'], {'environ': "{'REQUEST_METHOD': 'PUT'}", 'headers': 'put_headers'}), "('/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, headers=\n put_headers)\n", (25937, 26016), False, 'from swift.common.swob import Request\n'), ((26088, 26102), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (26100, 26102), False, 'from datetime import datetime\n'), ((27897, 27922), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""Error"""'], {}), "(body, 'Error')\n", (27907, 27922), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((28610, 28635), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""Error"""'], {}), "(body, 'Error')\n", (28620, 28635), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((29172, 29191), 'swift.common.middleware.s3api.utils.mktime', 'mktime', (['date_header'], {}), '(date_header)\n', (29178, 29191), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((29654, 29690), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""CopyObjectResult"""'], {}), "(body, 'CopyObjectResult')\n", (29664, 29690), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((46926, 46950), 'os.environ.get', 'os.environ.get', (['"""TZ"""', '""""""'], {}), "('TZ', '')\n", (46940, 46950), False, 'import os\n'), ((47013, 47025), 'time.tzset', 'time.tzset', ([], {}), '()\n', (47023, 47025), False, 'import time\n'), ((47198, 47210), 'time.tzset', 'time.tzset', ([], {}), '()\n', (47208, 47210), False, 'import time\n'), ((26176, 26187), 'time.time', 'time.time', ([], {}), '()\n', (26185, 26187), False, 'import time\n'), ((26201, 26279), 'mock.patch', 'patch', (['"""swift.common.middleware.s3api.utils.time.time"""'], {'return_value': 'timestamp'}), "('swift.common.middleware.s3api.utils.time.time', return_value=timestamp)\n", (26206, 26279), False, 'from mock import patch\n'), ((26497, 26516), 'swift.common.middleware.s3api.utils.mktime', 'mktime', 
(['date_header'], {}), '(date_header)\n', (26503, 26516), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((27041, 27077), 'swift.common.middleware.s3api.etree.fromstring', 'fromstring', (['body', '"""CopyObjectResult"""'], {}), "(body, 'CopyObjectResult')\n", (27051, 27077), False, 'from swift.common.middleware.s3api.etree import fromstring\n'), ((29216, 29238), 'swift.common.middleware.s3api.utils.S3Timestamp', 'S3Timestamp', (['timestamp'], {}), '(timestamp)\n', (29227, 29238), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((36662, 36763), 'mock.patch', 'patch', (['"""swift.common.middleware.s3api.s3request.get_container_info"""'], {'return_value': "{'status': 204}"}), "('swift.common.middleware.s3api.s3request.get_container_info',\n return_value={'status': 204})\n", (36667, 36763), False, 'from mock import patch\n'), ((36989, 37090), 'mock.patch', 'patch', (['"""swift.common.middleware.s3api.s3request.get_container_info"""'], {'return_value': "{'status': 404}"}), "('swift.common.middleware.s3api.s3request.get_container_info',\n return_value={'status': 404})\n", (36994, 37090), False, 'from mock import patch\n'), ((1894, 1923), 'hashlib.md5', 'hashlib.md5', (['self.object_body'], {}), '(self.object_body)\n', (1905, 1923), False, 'import hashlib\n'), ((24535, 24548), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (24539, 24548), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((24652, 24675), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['account', 'account'], {}), '(account, account)\n', (24657, 24675), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((25164, 25177), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (25168, 25177), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((25281, 25304), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['account', 'account'], {}), '(account, account)\n', (25286, 25304), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((26545, 26567), 'swift.common.middleware.s3api.utils.S3Timestamp', 'S3Timestamp', (['timestamp'], {}), '(timestamp)\n', (26556, 26567), False, 'from swift.common.middleware.s3api.utils import mktime, S3Timestamp\n'), ((40715, 40728), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (40719, 40728), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((40826, 40861), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['"""test:tester"""', '"""test:tester"""'], {}), "('test:tester', 'test:tester')\n", (40831, 40861), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((44648, 44667), 'swift.common.middleware.s3api.subresource.Owner', 'Owner', (['owner', 'owner'], {}), '(owner, owner)\n', (44653, 44667), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((44479, 44492), 'swift.common.middleware.s3api.subresource.User', 'User', (['account'], {}), '(account)\n', (44483, 44492), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n'), ((44555, 44566), 
'swift.common.middleware.s3api.subresource.User', 'User', (['owner'], {}), '(owner)\n', (44559, 44566), False, 'from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, Owner, Grant\n')] |
EggPool/pynyzo | pynyzo/pynyzo/keyutil.py | 7f3b86f15caa51a975e6a428f4dff578a1f24bcb | """
Eddsa Ed25519 key handling
From
https://github.com/n-y-z-o/nyzoVerifier/blob/b73bc25ba3094abe3470ec070ce306885ad9a18f/src/main/java/co/nyzo/verifier/KeyUtil.java
plus
https://github.com/n-y-z-o/nyzoVerifier/blob/17509f03a7f530c0431ce85377db9b35688c078e/src/main/java/co/nyzo/verifier/util/SignatureUtil.java
"""
# Uses https://github.com/warner/python-ed25519 , c binding, fast
import ed25519
import hashlib
from pynyzo.byteutil import ByteUtil
class KeyUtil:
@staticmethod
def main():
"""Temp test, not to be used"""
signing_key, verifying_key = ed25519.create_keypair()
print("Original private key", ByteUtil.bytes_as_string_with_dashes(signing_key.to_bytes()[:32]))
# signing key has signing + verifying, we keep the first 32 to only get the private part.
print("Original public key", ByteUtil.bytes_as_string_with_dashes(verifying_key.to_bytes()))
@staticmethod
    def generateSeed(hashable_keyword: str='') -> bytes:
        """Generate a private key, with an optional keyword for reproducible test results or, later, HD wallets."""
if len(hashable_keyword):
            # sha256 needs bytes, so encode the keyword before hashing it.
            seed = hashlib.sha256(hashable_keyword.encode('utf-8')).digest()
signing_key = ed25519.SigningKey(seed)
else:
signing_key, _ = ed25519.create_keypair()
return signing_key.to_bytes()[:32]
@staticmethod
    def private_to_public(private: str) -> str:
        """Return the hex public key derived from a hex private key (temporary test helper)."""
keydata = bytes.fromhex(private)
signing_key = ed25519.SigningKey(keydata)
verifying_key = signing_key.get_verifying_key()
vkey_hex = verifying_key.to_ascii(encoding="hex")
return vkey_hex.decode('utf-8')
@staticmethod
def get_from_private_seed_file(filename: str):
"""returns priv and pub key - as object - from the stored nyzo text id format"""
with open(filename) as f:
nyzo = f.read(80).replace('-', '').encode('utf-8').strip()
signing_key = ed25519.SigningKey(nyzo, encoding="hex")
verifying_key = signing_key.get_verifying_key()
return signing_key, verifying_key
@staticmethod
def get_from_private_seed(seed: str):
"""returns priv and pub key - as object - from an hex seed"""
seed = seed.replace('-', '').encode('utf-8').strip()
signing_key = ed25519.SigningKey(seed, encoding="hex")
verifying_key = signing_key.get_verifying_key()
return signing_key, verifying_key
@staticmethod
def save_to_private_seed_file(filename: str, key: bytes) -> None:
"""Saves the privkey to the nyzo formatted file"""
nyzo_format = ByteUtil.bytes_as_string_with_dashes(key)
with open(filename, 'w') as f:
f.write(nyzo_format)
@staticmethod
def sign_bytes(bytes_to_sign: bytes, private_key: ed25519.SigningKey) -> bytes:
sig = private_key.sign(bytes_to_sign)
return sig
@staticmethod
def signature_is_valid(signature: bytes, signed_bytes: bytes, public_id: bytes) -> bool:
verifying_key = ed25519.VerifyingKey(public_id)
# todo: cache key from id, see https://github.com/n-y-z-o/nyzoVerifier/blob/17509f03a7f530c0431ce85377db9b35688c078e/src/main/java/co/nyzo/verifier/util/SignatureUtil.java
try:
verifying_key.verify(signature, signed_bytes)
# print("signature is good")
return True
except ed25519.BadSignatureError:
# print("signature is bad!")
return False
if __name__ == "__main__":
KeyUtil.main()
# KeyUtil.private_to_public('nyzo-formatted-private-key'.replace('-', ''))
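    # Illustrative sign/verify round trip (a sketch; the seed and message are
    # hypothetical values chosen here, not taken from the original code):
    #   priv, pub = KeyUtil.get_from_private_seed('00' * 32)
    #   sig = KeyUtil.sign_bytes(b'some message', priv)
    #   assert KeyUtil.signature_is_valid(sig, b'some message', pub.to_bytes())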
| [((582, 606), 'ed25519.create_keypair', 'ed25519.create_keypair', ([], {}), '()\n', (604, 606), False, 'import ed25519\n'), ((1512, 1539), 'ed25519.SigningKey', 'ed25519.SigningKey', (['keydata'], {}), '(keydata)\n', (1530, 1539), False, 'import ed25519\n'), ((2341, 2381), 'ed25519.SigningKey', 'ed25519.SigningKey', (['seed'], {'encoding': '"""hex"""'}), "(seed, encoding='hex')\n", (2359, 2381), False, 'import ed25519\n'), ((2651, 2692), 'pynyzo.byteutil.ByteUtil.bytes_as_string_with_dashes', 'ByteUtil.bytes_as_string_with_dashes', (['key'], {}), '(key)\n', (2687, 2692), False, 'from pynyzo.byteutil import ByteUtil\n'), ((3069, 3100), 'ed25519.VerifyingKey', 'ed25519.VerifyingKey', (['public_id'], {}), '(public_id)\n', (3089, 3100), False, 'import ed25519\n'), ((1222, 1246), 'ed25519.SigningKey', 'ed25519.SigningKey', (['seed'], {}), '(seed)\n', (1240, 1246), False, 'import ed25519\n'), ((1290, 1314), 'ed25519.create_keypair', 'ed25519.create_keypair', ([], {}), '()\n', (1312, 1314), False, 'import ed25519\n'), ((1984, 2024), 'ed25519.SigningKey', 'ed25519.SigningKey', (['nyzo'], {'encoding': '"""hex"""'}), "(nyzo, encoding='hex')\n", (2002, 2024), False, 'import ed25519\n'), ((1154, 1186), 'hashlib.sha256', 'hashlib.sha256', (['hashable_keyword'], {}), '(hashable_keyword)\n', (1168, 1186), False, 'import hashlib\n')] |
mir-group/CiderPress | mldftdat/scripts/train_gp.py | bf2b3536e6bd7432645c18dce5a745d63bc9df59 | from argparse import ArgumentParser
import os
import numpy as np
from joblib import dump
from mldftdat.workflow_utils import SAVE_ROOT
from mldftdat.models.gp import *
from mldftdat.data import load_descriptors, filter_descriptors
import yaml
def parse_settings(args):
fname = args.datasets_list[0]
if args.suffix is not None:
fname = fname + '_' + args.suffix
fname = os.path.join(SAVE_ROOT, 'DATASETS', args.functional,
args.basis, args.version, fname)
print(fname)
with open(os.path.join(fname, 'settings.yaml'), 'r') as f:
d = yaml.load(f, Loader=yaml.Loader)
args.gg_a0 = d.get('a0')
args.gg_amin = d.get('amin')
args.gg_facmul = d.get('fac_mul')
def parse_dataset(args, i, val=False):
if val:
fname = args.validation_set[2*i]
n = int(args.validation_set[2*i+1])
else:
fname = args.datasets_list[2*i]
n = int(args.datasets_list[2*i+1])
if args.suffix is not None:
fname = fname + '_' + args.suffix
fname = os.path.join(SAVE_ROOT, 'DATASETS', args.functional,
args.basis, args.version, fname)
print(fname)
X, y, rho_data = load_descriptors(fname)
if val:
# offset in case repeat datasets are used
X, y, rho_data = X[n//2+1:,:], y[n//2+1:], rho_data[:,n//2+1:]
X, y, rho, rho_data = filter_descriptors(X, y, rho_data,
tol=args.density_cutoff)
print(X.shape, n)
if args.randomize:
inds = np.arange(X.shape[0])
np.random.shuffle(inds)
X = X[inds,:]
y = y[inds]
rho = rho[inds]
rho_data = rho_data[:,inds]
return X[::n,:], y[::n], rho[::n], rho_data[:,::n]
def parse_list(lststr, T=int):
return [T(substr) for substr in lststr.split(',')]
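# e.g. parse_list('2,4,8') -> [2, 4, 8]; parse_list('0.5,1.5', T=float) -> [0.5, 1.5]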
def main():
parser = ArgumentParser(description='Trains a GP exchange model')
parser.add_argument('save_file', type=str)
parser.add_argument('feature_file', type=str,
help='serialized FeatureList object in yaml format')
parser.add_argument('datasets_list', nargs='+',
help='pairs of dataset names and inverse sampling densities')
parser.add_argument('basis', metavar='basis', type=str,
help='basis set code')
parser.add_argument('--functional', metavar='functional', type=str, default=None,
help='exchange-correlation functional, HF for Hartree-Fock')
parser.add_argument('-r', '--randomize', action='store_true')
parser.add_argument('-c', '--density-cutoff', type=float, default=1e-4)
#parser.add_argument('-m', '--model-class', type=str, default=None)
#parser.add_argument('-k', '--kernel', help='kernel initialization strategy', type=str, default=None)
parser.add_argument('-s', '--seed', help='random seed', default=0, type=int)
parser.add_argument('-vs', '--validation-set', nargs='+')
parser.add_argument('-d', '--delete-k', action='store_true',
help='Delete L (LL^T=K the kernel matrix) to save disk space. Need to refit when reloading to calculate covariance.')
parser.add_argument('--heg', action='store_true', help='HEG exact constraint')
parser.add_argument('--tail', action='store_true', help='atomic tail exact constraint')
parser.add_argument('-o', '--desc-order', default=None,
help='comma-separated list of descriptor order with no spaces. must start with 0,1.')
parser.add_argument('-l', '--length-scale', default=None,
help='comma-separated list initial length-scale guesses')
parser.add_argument('--length-scale-mul', type=float, default=1.0,
help='Used for automatic length-scale initial guess')
parser.add_argument('-a', '--agpr', action='store_true',
help='Whether to use Additive RBF. If False, use RBF')
parser.add_argument('-as', '--agpr-scale', default=None)
parser.add_argument('-ao', '--agpr-order', default=2, type=int)
parser.add_argument('-an', '--agpr-nsingle', default=1, type=int)
parser.add_argument('-x', '--xed-y-code', default='CHACHIYO', type=str)
parser.add_argument('-on', '--optimize-noise', action='store_true',
                        help='Whether to optimize exponent of density noise.')
parser.add_argument('-v', '--version', default='c', type=str,
help='version of descriptor set. Default c')
parser.add_argument('--suffix', default=None, type=str,
help='customize data directories with this suffix')
args = parser.parse_args()
parse_settings(args)
np.random.seed(args.seed)
feature_list = FeatureList.load(args.feature_file)
if args.length_scale is not None:
args.length_scale = parse_list(args.length_scale, T=float)
if args.agpr_scale is not None:
args.agpr_scale = parse_list(args.agpr_scale, T=float)
if args.desc_order is not None:
args.desc_order = parse_list(args.desc_order)
assert len(args.datasets_list) % 2 == 0, 'Need pairs of entries for datasets list.'
assert len(args.datasets_list) != 0, 'Need training data'
nd = len(args.datasets_list) // 2
if args.validation_set is None:
nv = 0
else:
assert len(args.validation_set) % 2 == 0, 'Need pairs of entries for datasets list.'
nv = len(args.validation_set) // 2
X, y, rho, rho_data = parse_dataset(args, 0)
for i in range(1, nd):
Xn, yn, rhon, rho_datan, = parse_dataset(args, i)
X = np.append(X, Xn, axis=0)
y = np.append(y, yn, axis=0)
rho = np.append(rho, rhon, axis=0)
rho_data = np.append(rho_data, rho_datan, axis=1)
if nv != 0:
Xv, yv, rhov, rho_datav = parse_dataset(args, 0, val=True)
for i in range(1, nv):
Xn, yn, rhon, rho_datan, = parse_dataset(args, i, val=True)
Xv = np.append(Xv, Xn, axis=0)
yv = np.append(yv, yn, axis=0)
rhov = np.append(rhov, rhon, axis=0)
rho_datav = np.append(rho_datav, rho_datan, axis=1)
gpcls = DFTGPR
gpr = gpcls.from_settings(X, feature_list, args)
gpr.fit(X, y, add_heg=args.heg, add_tail=args.tail)
#if args.heg:
# gpr.add_heg_limit()
print('FINAL KERNEL', gpr.gp.kernel_)
if nv != 0:
pred = gpr.xed_to_y(gpr.predict(Xv), Xv)
abserr = np.abs(pred - gpr.xed_to_y(yv, Xv))
print('MAE VAL SET', np.mean(abserr))
# Always attach the arguments to the object to keep track of settings.
gpr.args = args
if args.delete_k:
gpr.L_ = None
dump(gpr, args.save_file)
if __name__ == '__main__':
main()
| [((390, 480), 'os.path.join', 'os.path.join', (['SAVE_ROOT', '"""DATASETS"""', 'args.functional', 'args.basis', 'args.version', 'fname'], {}), "(SAVE_ROOT, 'DATASETS', args.functional, args.basis, args.\n version, fname)\n", (402, 480), False, 'import os\n'), ((1042, 1132), 'os.path.join', 'os.path.join', (['SAVE_ROOT', '"""DATASETS"""', 'args.functional', 'args.basis', 'args.version', 'fname'], {}), "(SAVE_ROOT, 'DATASETS', args.functional, args.basis, args.\n version, fname)\n", (1054, 1132), False, 'import os\n'), ((1191, 1214), 'mldftdat.data.load_descriptors', 'load_descriptors', (['fname'], {}), '(fname)\n', (1207, 1214), False, 'from mldftdat.data import load_descriptors, filter_descriptors\n'), ((1374, 1433), 'mldftdat.data.filter_descriptors', 'filter_descriptors', (['X', 'y', 'rho_data'], {'tol': 'args.density_cutoff'}), '(X, y, rho_data, tol=args.density_cutoff)\n', (1392, 1433), False, 'from mldftdat.data import load_descriptors, filter_descriptors\n'), ((1863, 1919), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Trains a GP exchange model"""'}), "(description='Trains a GP exchange model')\n", (1877, 1919), False, 'from argparse import ArgumentParser\n'), ((4702, 4727), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (4716, 4727), True, 'import numpy as np\n'), ((6664, 6689), 'joblib.dump', 'dump', (['gpr', 'args.save_file'], {}), '(gpr, args.save_file)\n', (6668, 6689), False, 'from joblib import dump\n'), ((593, 625), 'yaml.load', 'yaml.load', (['f'], {'Loader': 'yaml.Loader'}), '(f, Loader=yaml.Loader)\n', (602, 625), False, 'import yaml\n'), ((1539, 1560), 'numpy.arange', 'np.arange', (['X.shape[0]'], {}), '(X.shape[0])\n', (1548, 1560), True, 'import numpy as np\n'), ((1569, 1592), 'numpy.random.shuffle', 'np.random.shuffle', (['inds'], {}), '(inds)\n', (1586, 1592), True, 'import numpy as np\n'), ((5613, 5637), 'numpy.append', 'np.append', (['X', 'Xn'], {'axis': '(0)'}), '(X, Xn, axis=0)\n', (5622, 5637), True, 'import numpy as np\n'), ((5650, 5674), 'numpy.append', 'np.append', (['y', 'yn'], {'axis': '(0)'}), '(y, yn, axis=0)\n', (5659, 5674), True, 'import numpy as np\n'), ((5689, 5717), 'numpy.append', 'np.append', (['rho', 'rhon'], {'axis': '(0)'}), '(rho, rhon, axis=0)\n', (5698, 5717), True, 'import numpy as np\n'), ((5737, 5775), 'numpy.append', 'np.append', (['rho_data', 'rho_datan'], {'axis': '(1)'}), '(rho_data, rho_datan, axis=1)\n', (5746, 5775), True, 'import numpy as np\n'), ((5967, 5992), 'numpy.append', 'np.append', (['Xv', 'Xn'], {'axis': '(0)'}), '(Xv, Xn, axis=0)\n', (5976, 5992), True, 'import numpy as np\n'), ((6006, 6031), 'numpy.append', 'np.append', (['yv', 'yn'], {'axis': '(0)'}), '(yv, yn, axis=0)\n', (6015, 6031), True, 'import numpy as np\n'), ((6047, 6076), 'numpy.append', 'np.append', (['rhov', 'rhon'], {'axis': '(0)'}), '(rhov, rhon, axis=0)\n', (6056, 6076), True, 'import numpy as np\n'), ((6097, 6136), 'numpy.append', 'np.append', (['rho_datav', 'rho_datan'], {'axis': '(1)'}), '(rho_datav, rho_datan, axis=1)\n', (6106, 6136), True, 'import numpy as np\n'), ((532, 568), 'os.path.join', 'os.path.join', (['fname', '"""settings.yaml"""'], {}), "(fname, 'settings.yaml')\n", (544, 568), False, 'import os\n'), ((6503, 6518), 'numpy.mean', 'np.mean', (['abserr'], {}), '(abserr)\n', (6510, 6518), True, 'import numpy as np\n')] |
zaratec/picoCTF | picoCTF-web/api/routes/admin.py | b0a63f03625bb4657a8116f43bea26346ca6f010 | import api
import bson
from api.annotations import (
api_wrapper,
log_action,
require_admin,
require_login,
require_teacher
)
from api.common import WebError, WebSuccess
from flask import (
Blueprint,
Flask,
render_template,
request,
send_from_directory,
session
)
blueprint = Blueprint("admin_api", __name__)
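# Illustrative sketch (assumed, not shown in this module): these routes only
# become reachable once the blueprint is registered on the main Flask app,
# e.g.
#   app = Flask(__name__)
#   app.register_blueprint(blueprint, url_prefix="/api/admin")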
@blueprint.route('/problems', methods=['GET'])
@api_wrapper
@require_admin
def get_problem_data_hook():
has_instances = lambda p : len(p["instances"]) > 0
problems = list(filter(has_instances, api.problem.get_all_problems(show_disabled=True)))
for problem in problems:
problem["reviews"] = api.problem_feedback.get_problem_feedback(pid=problem["pid"])
data = {
"problems": problems,
"bundles": api.problem.get_all_bundles()
}
return WebSuccess(data=data)
@blueprint.route('/users', methods=['GET'])
@api_wrapper
@require_admin
def get_all_users_hook():
users = api.user.get_all_users()
if users is None:
return WebError("There was an error query users from the database.")
return WebSuccess(data=users)
@blueprint.route('/exceptions', methods=['GET'])
@api_wrapper
@require_admin
def get_exceptions_hook():
try:
limit = abs(int(request.args.get("limit")))
exceptions = api.admin.get_api_exceptions(result_limit=limit)
return WebSuccess(data=exceptions)
except (ValueError, TypeError):
return WebError("limit is not a valid integer.")
@blueprint.route('/exceptions/dismiss', methods=['POST'])
@api_wrapper
@require_admin
def dismiss_exceptions_hook():
trace = request.form.get("trace", None)
if trace:
api.admin.dismiss_api_exceptions(trace)
return WebSuccess(data="Successfuly changed exception visibility.")
else:
return WebError(message="You must supply a trace to hide.")
@blueprint.route("/problems/submissions", methods=["GET"])
@api_wrapper
@require_admin
def get_problem():
submission_data = {p["name"]:api.stats.get_problem_submission_stats(pid=p["pid"]) \
for p in api.problem.get_all_problems(show_disabled=True)}
return WebSuccess(data=submission_data)
@blueprint.route("/problems/availability", methods=["POST"])
@api_wrapper
@require_admin
def change_problem_availability_hook():
pid = request.form.get("pid", None)
desired_state = request.form.get("state", None)
    if desired_state is None:
return WebError("Problems are either enabled or disabled.")
else:
state = bson.json_util.loads(desired_state)
api.admin.set_problem_availability(pid, state)
return WebSuccess(data="Problem state changed successfully.")
@blueprint.route("/shell_servers", methods=["GET"])
@api_wrapper
@require_admin
def get_shell_servers():
return WebSuccess(data=api.shell_servers.get_servers())
@blueprint.route("/shell_servers/add", methods=["POST"])
@api_wrapper
@require_admin
def add_shell_server():
params = api.common.flat_multi(request.form)
api.shell_servers.add_server(params)
return WebSuccess("Shell server added.")
@blueprint.route("/shell_servers/update", methods=["POST"])
@api_wrapper
@require_admin
def update_shell_server():
params = api.common.flat_multi(request.form)
sid = params.get("sid", None)
if sid is None:
return WebError("Must specify sid to be updated")
api.shell_servers.update_server(sid, params)
return WebSuccess("Shell server updated.")
@blueprint.route("/shell_servers/remove", methods=["POST"])
@api_wrapper
@require_admin
def remove_shell_server():
sid = request.form.get("sid", None)
if sid is None:
return WebError("Must specify sid to be removed")
api.shell_servers.remove_server(sid)
return WebSuccess("Shell server removed.")
@blueprint.route("/shell_servers/load_problems", methods=["POST"])
@api_wrapper
@require_admin
def load_problems_from_shell_server():
sid = request.form.get("sid", None)
if sid is None:
return WebError("Must provide sid to load from.")
number = api.shell_servers.load_problems_from_server(sid)
return WebSuccess("Loaded {} problems from the server".format(number))
@blueprint.route("/shell_servers/check_status", methods=["GET"])
@api_wrapper
@require_admin
def check_status_of_shell_server():
sid = request.args.get("sid", None)
if sid is None:
return WebError("Must provide sid to load from.")
all_online, data = api.shell_servers.get_problem_status_from_server(sid)
if all_online:
return WebSuccess("All problems are online", data=data)
else:
return WebError("One or more problems are offline. Please connect and fix the errors.", data=data)
@blueprint.route("/bundle/dependencies_active", methods=["POST"])
@api_wrapper
@require_admin
def bundle_dependencies():
bid = request.form.get("bid", None)
state = request.form.get("state", None)
if bid is None:
return WebError("Must provide bid to load from.")
if state is None:
return WebError("Must provide a state to set.")
state = bson.json_util.loads(state)
api.problem.set_bundle_dependencies_enabled(bid, state)
return WebSuccess("Dependencies are now {}.".format("enabled" if state else "disabled"))
@blueprint.route("/settings", methods=["GET"])
@api_wrapper
@require_admin
def get_settings():
return WebSuccess(data=api.config.get_settings())
@blueprint.route("/settings/change", methods=["POST"])
@api_wrapper
@require_admin
def change_settings():
data = bson.json_util.loads(request.form["json"])
api.config.change_settings(data)
return WebSuccess("Settings updated")
| [((322, 354), 'flask.Blueprint', 'Blueprint', (['"""admin_api"""', '__name__'], {}), "('admin_api', __name__)\n", (331, 354), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((840, 861), 'api.common.WebSuccess', 'WebSuccess', ([], {'data': 'data'}), '(data=data)\n', (850, 861), False, 'from api.common import WebError, WebSuccess\n'), ((973, 997), 'api.user.get_all_users', 'api.user.get_all_users', ([], {}), '()\n', (995, 997), False, 'import api\n'), ((1108, 1130), 'api.common.WebSuccess', 'WebSuccess', ([], {'data': 'users'}), '(data=users)\n', (1118, 1130), False, 'from api.common import WebError, WebSuccess\n'), ((1634, 1665), 'flask.request.form.get', 'request.form.get', (['"""trace"""', 'None'], {}), "('trace', None)\n", (1650, 1665), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((2170, 2202), 'api.common.WebSuccess', 'WebSuccess', ([], {'data': 'submission_data'}), '(data=submission_data)\n', (2180, 2202), False, 'from api.common import WebError, WebSuccess\n'), ((2343, 2372), 'flask.request.form.get', 'request.form.get', (['"""pid"""', 'None'], {}), "('pid', None)\n", (2359, 2372), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((2393, 2424), 'flask.request.form.get', 'request.form.get', (['"""state"""', 'None'], {}), "('state', None)\n", (2409, 2424), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((2591, 2637), 'api.admin.set_problem_availability', 'api.admin.set_problem_availability', (['pid', 'state'], {}), '(pid, state)\n', (2625, 2637), False, 'import api\n'), ((2649, 2703), 'api.common.WebSuccess', 'WebSuccess', ([], {'data': '"""Problem state changed successfully."""'}), "(data='Problem state changed successfully.')\n", (2659, 2703), False, 'from api.common import WebError, WebSuccess\n'), ((2994, 3029), 'api.common.flat_multi', 'api.common.flat_multi', (['request.form'], {}), '(request.form)\n', (3015, 3029), False, 'import api\n'), ((3034, 3070), 'api.shell_servers.add_server', 'api.shell_servers.add_server', (['params'], {}), '(params)\n', (3062, 3070), False, 'import api\n'), ((3082, 3115), 'api.common.WebSuccess', 'WebSuccess', (['"""Shell server added."""'], {}), "('Shell server added.')\n", (3092, 3115), False, 'from api.common import WebError, WebSuccess\n'), ((3245, 3280), 'api.common.flat_multi', 'api.common.flat_multi', (['request.form'], {}), '(request.form)\n', (3266, 3280), False, 'import api\n'), ((3399, 3443), 'api.shell_servers.update_server', 'api.shell_servers.update_server', (['sid', 'params'], {}), '(sid, params)\n', (3430, 3443), False, 'import api\n'), ((3455, 3490), 'api.common.WebSuccess', 'WebSuccess', (['"""Shell server updated."""'], {}), "('Shell server updated.')\n", (3465, 3490), False, 'from api.common import WebError, WebSuccess\n'), ((3617, 3646), 'flask.request.form.get', 'request.form.get', (['"""sid"""', 'None'], {}), "('sid', None)\n", (3633, 3646), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((3730, 3766), 'api.shell_servers.remove_server', 'api.shell_servers.remove_server', (['sid'], {}), '(sid)\n', (3761, 3766), False, 'import api\n'), ((3778, 3813), 'api.common.WebSuccess', 'WebSuccess', (['"""Shell server removed."""'], {}), "('Shell server removed.')\n", (3788, 3813), False, 'from api.common import WebError, WebSuccess\n'), ((3959, 3988), 
'flask.request.form.get', 'request.form.get', (['"""sid"""', 'None'], {}), "('sid', None)\n", (3975, 3988), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((4082, 4130), 'api.shell_servers.load_problems_from_server', 'api.shell_servers.load_problems_from_server', (['sid'], {}), '(sid)\n', (4125, 4130), False, 'import api\n'), ((4346, 4375), 'flask.request.args.get', 'request.args.get', (['"""sid"""', 'None'], {}), "('sid', None)\n", (4362, 4375), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((4479, 4532), 'api.shell_servers.get_problem_status_from_server', 'api.shell_servers.get_problem_status_from_server', (['sid'], {}), '(sid)\n', (4527, 4532), False, 'import api\n'), ((4866, 4895), 'flask.request.form.get', 'request.form.get', (['"""bid"""', 'None'], {}), "('bid', None)\n", (4882, 4895), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((4908, 4939), 'flask.request.form.get', 'request.form.get', (['"""state"""', 'None'], {}), "('state', None)\n", (4924, 4939), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n'), ((5111, 5138), 'bson.json_util.loads', 'bson.json_util.loads', (['state'], {}), '(state)\n', (5131, 5138), False, 'import bson\n'), ((5144, 5199), 'api.problem.set_bundle_dependencies_enabled', 'api.problem.set_bundle_dependencies_enabled', (['bid', 'state'], {}), '(bid, state)\n', (5187, 5199), False, 'import api\n'), ((5562, 5604), 'bson.json_util.loads', 'bson.json_util.loads', (["request.form['json']"], {}), "(request.form['json'])\n", (5582, 5604), False, 'import bson\n'), ((5609, 5641), 'api.config.change_settings', 'api.config.change_settings', (['data'], {}), '(data)\n', (5635, 5641), False, 'import api\n'), ((5653, 5683), 'api.common.WebSuccess', 'WebSuccess', (['"""Settings updated"""'], {}), "('Settings updated')\n", (5663, 5683), False, 'from api.common import WebError, WebSuccess\n'), ((667, 728), 'api.problem_feedback.get_problem_feedback', 'api.problem_feedback.get_problem_feedback', ([], {'pid': "problem['pid']"}), "(pid=problem['pid'])\n", (708, 728), False, 'import api\n'), ((792, 821), 'api.problem.get_all_bundles', 'api.problem.get_all_bundles', ([], {}), '()\n', (819, 821), False, 'import api\n'), ((1035, 1096), 'api.common.WebError', 'WebError', (['"""There was an error query users from the database."""'], {}), "('There was an error query users from the database.')\n", (1043, 1096), False, 'from api.common import WebError, WebSuccess\n'), ((1318, 1366), 'api.admin.get_api_exceptions', 'api.admin.get_api_exceptions', ([], {'result_limit': 'limit'}), '(result_limit=limit)\n', (1346, 1366), False, 'import api\n'), ((1382, 1409), 'api.common.WebSuccess', 'WebSuccess', ([], {'data': 'exceptions'}), '(data=exceptions)\n', (1392, 1409), False, 'from api.common import WebError, WebSuccess\n'), ((1688, 1727), 'api.admin.dismiss_api_exceptions', 'api.admin.dismiss_api_exceptions', (['trace'], {}), '(trace)\n', (1720, 1727), False, 'import api\n'), ((1743, 1803), 'api.common.WebSuccess', 'WebSuccess', ([], {'data': '"""Successfuly changed exception visibility."""'}), "(data='Successfuly changed exception visibility.')\n", (1753, 1803), False, 'from api.common import WebError, WebSuccess\n'), ((1829, 1881), 'api.common.WebError', 'WebError', ([], {'message': '"""You must supply a trace to hide."""'}), "(message='You must supply a trace to hide.')\n", 
(1837, 1881), False, 'from api.common import WebError, WebSuccess\n'), ((2022, 2074), 'api.stats.get_problem_submission_stats', 'api.stats.get_problem_submission_stats', ([], {'pid': "p['pid']"}), "(pid=p['pid'])\n", (2060, 2074), False, 'import api\n'), ((2471, 2523), 'api.common.WebError', 'WebError', (['"""Problems are either enabled or disabled."""'], {}), "('Problems are either enabled or disabled.')\n", (2479, 2523), False, 'from api.common import WebError, WebSuccess\n'), ((2550, 2585), 'bson.json_util.loads', 'bson.json_util.loads', (['desired_state'], {}), '(desired_state)\n', (2570, 2585), False, 'import bson\n'), ((3351, 3393), 'api.common.WebError', 'WebError', (['"""Must specify sid to be updated"""'], {}), "('Must specify sid to be updated')\n", (3359, 3393), False, 'from api.common import WebError, WebSuccess\n'), ((3682, 3724), 'api.common.WebError', 'WebError', (['"""Must specify sid to be removed"""'], {}), "('Must specify sid to be removed')\n", (3690, 3724), False, 'from api.common import WebError, WebSuccess\n'), ((4025, 4067), 'api.common.WebError', 'WebError', (['"""Must provide sid to load from."""'], {}), "('Must provide sid to load from.')\n", (4033, 4067), False, 'from api.common import WebError, WebSuccess\n'), ((4412, 4454), 'api.common.WebError', 'WebError', (['"""Must provide sid to load from."""'], {}), "('Must provide sid to load from.')\n", (4420, 4454), False, 'from api.common import WebError, WebSuccess\n'), ((4568, 4616), 'api.common.WebSuccess', 'WebSuccess', (['"""All problems are online"""'], {'data': 'data'}), "('All problems are online', data=data)\n", (4578, 4616), False, 'from api.common import WebError, WebSuccess\n'), ((4642, 4738), 'api.common.WebError', 'WebError', (['"""One or more problems are offline. Please connect and fix the errors."""'], {'data': 'data'}), "('One or more problems are offline. Please connect and fix the errors.'\n , data=data)\n", (4650, 4738), False, 'from api.common import WebError, WebSuccess\n'), ((4976, 5018), 'api.common.WebError', 'WebError', (['"""Must provide bid to load from."""'], {}), "('Must provide bid to load from.')\n", (4984, 5018), False, 'from api.common import WebError, WebSuccess\n'), ((5057, 5097), 'api.common.WebError', 'WebError', (['"""Must provide a state to set."""'], {}), "('Must provide a state to set.')\n", (5065, 5097), False, 'from api.common import WebError, WebSuccess\n'), ((557, 605), 'api.problem.get_all_problems', 'api.problem.get_all_problems', ([], {'show_disabled': '(True)'}), '(show_disabled=True)\n', (585, 605), False, 'import api\n'), ((1462, 1503), 'api.common.WebError', 'WebError', (['"""limit is not a valid integer."""'], {}), "('limit is not a valid integer.')\n", (1470, 1503), False, 'from api.common import WebError, WebSuccess\n'), ((2109, 2157), 'api.problem.get_all_problems', 'api.problem.get_all_problems', ([], {'show_disabled': '(True)'}), '(show_disabled=True)\n', (2137, 2157), False, 'import api\n'), ((2838, 2869), 'api.shell_servers.get_servers', 'api.shell_servers.get_servers', ([], {}), '()\n', (2867, 2869), False, 'import api\n'), ((5417, 5442), 'api.config.get_settings', 'api.config.get_settings', ([], {}), '()\n', (5440, 5442), False, 'import api\n'), ((1269, 1294), 'flask.request.args.get', 'request.args.get', (['"""limit"""'], {}), "('limit')\n", (1285, 1294), False, 'from flask import Blueprint, Flask, render_template, request, send_from_directory, session\n')] |
thongnbui/MIDS_251_project | python code/influxdb_worker.py | 8eee0f4569268e11c2d1d356024dbdc10f180b10 | #!/usr/bin/python
import json
import argparse
from influxdb import InfluxDBClient
parser = argparse.ArgumentParser(description = 'pull data for softlayer queue' )
parser.add_argument( 'measurement' , help = 'measurement001' )
args = parser.parse_args()
client_influxdb = InfluxDBClient('50.23.117.76', '8086', 'cricket', 'cricket', 'cricket_data')
query = 'SELECT "data_center", "device", "value" FROM "cricket_data"."cricket_retention".'+args.measurement+' WHERE time > now() - 10m order by time'
result = client_influxdb.query(query)
for r in result:
i = 0
for data_center, device, value, time in r:
print args.measurement,'\t',r[i][data_center],'\t',r[i][device],'\t',r[i][time],'\t',r[i][value]
i += 1
| [] |
xyzst/pants | src/python/pants/backend/docker/lint/hadolint/subsystem.py | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from typing import cast
from pants.core.util_rules.config_files import ConfigFilesRequest
from pants.core.util_rules.external_tool import TemplatedExternalTool
from pants.option.custom_types import file_option, shell_str
class Hadolint(TemplatedExternalTool):
options_scope = "hadolint"
name = "hadolint"
help = "A linter for Dockerfiles."
default_version = "v2.8.0"
# TODO: https://github.com/hadolint/hadolint/issues/411 tracks building and releasing
# hadolint for Linux ARM64.
default_known_versions = [
"v2.8.0|macos_x86_64|27985f257a216ecab06a16e643e8cb0123e7145b5d526cfcb4ce7a31fe99f357|2428944",
"v2.8.0|macos_arm64 |27985f257a216ecab06a16e643e8cb0123e7145b5d526cfcb4ce7a31fe99f357|2428944", # same as mac x86
"v2.8.0|linux_x86_64|9dfc155139a1e1e9b3b28f3de9907736b9dfe7cead1c3a0ae7ff0158f3191674|5895708",
]
default_url_template = (
"https://github.com/hadolint/hadolint/releases/download/{version}/hadolint-{platform}"
)
default_url_platform_mapping = {
"macos_arm64": "Darwin-x86_64",
"macos_x86_64": "Darwin-x86_64",
"linux_x86_64": "Linux-x86_64",
}
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--skip",
type=bool,
default=False,
help="Don't use Hadolint when running `./pants lint`.",
)
register(
"--args",
type=list,
member_type=shell_str,
help=(
"Arguments to pass directly to Hadolint, e.g. `--hadolint-args='--format json'`.'"
),
)
register(
"--config",
type=file_option,
default=None,
advanced=True,
help=(
"Path to an YAML config file understood by Hadolint "
"(https://github.com/hadolint/hadolint#configure).\n\n"
f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
"this option if the config is located in a non-standard location."
),
)
register(
"--config-discovery",
type=bool,
default=True,
advanced=True,
help=(
"If true, Pants will include all relevant config files during runs "
"(`.hadolint.yaml` and `.hadolint.yml`).\n\n"
f"Use `[{cls.options_scope}].config` instead if your config is in a "
"non-standard location."
),
)
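    # Illustrative example (not part of this file): a minimal `.hadolint.yaml`
    # that the config/config_discovery options above would pick up; `ignored`
    # and `trustedRegistries` are standard Hadolint config keys:
    #
    #   ignored:
    #     - DL3008
    #   trustedRegistries:
    #     - docker.io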
@property
def skip(self) -> bool:
return cast(bool, self.options.skip)
@property
def args(self) -> tuple[str, ...]:
return tuple(self.options.args)
@property
def config(self) -> str | None:
return cast("str | None", self.options.config)
def config_request(self) -> ConfigFilesRequest:
# Refer to https://github.com/hadolint/hadolint#configure for how config files are
# discovered.
return ConfigFilesRequest(
specified=self.config,
specified_option_name=f"[{self.options_scope}].config",
discovery=cast(bool, self.options.config_discovery),
check_existence=[".hadolint.yaml", ".hadolint.yml"],
)
| [((2848, 2877), 'typing.cast', 'cast', (['bool', 'self.options.skip'], {}), '(bool, self.options.skip)\n', (2852, 2877), False, 'from typing import cast\n'), ((3038, 3077), 'typing.cast', 'cast', (['"""str | None"""', 'self.options.config'], {}), "('str | None', self.options.config)\n", (3042, 3077), False, 'from typing import cast\n'), ((3404, 3445), 'typing.cast', 'cast', (['bool', 'self.options.config_discovery'], {}), '(bool, self.options.config_discovery)\n', (3408, 3445), False, 'from typing import cast\n')] |
LucaCilibrasi/docker_viruclust | venv/lib/python3.9/site-packages/biorun/fetch.py | 88149c17fd4b94a54397d0cb4a9daece00122c49 | """
Handles functionality related to data storage.
"""
import sys, os, glob, re, gzip, json
from biorun import const, utils, objects, ncbi
from biorun.models import jsonrec
import biorun.libs.placlib as plac
# Module level logger.
logger = utils.logger
# A nicer error message on incorrect installation.
try:
from Bio import SeqIO
except ImportError as exc:
print(f"*** Error: {exc}", file=sys.stderr)
print(f"*** This program requires biopython", file=sys.stderr)
print(f"*** Install: conda install -y biopython>=1.78", file=sys.stderr)
sys.exit(-1)
def resolve_fname(name, format='json'):
"""
Resolve a file name given an accession number.
"""
ext = format.lower()
fname = f"{name}.{ext}.gz"
fname = os.path.join(utils.DATADIR, fname)
return fname
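# Illustrative example (accession made up): with the default format="json",
#   resolve_fname("NC_045512")
# returns a path of the form <utils.DATADIR>/NC_045512.json.gz.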
def delete_data(text):
"""
Deletes data under a filename.
"""
for name in text.split(","):
fname = resolve_fname(name)
if os.path.isfile(fname):
os.remove(fname)
logger.info(f"removed: {fname}")
else:
logger.info(f"file does not exist: {fname}")
def read_json_file(fname):
"""
Returns the content of a JSON file.
"""
fp = utils.gz_read(fname)
data = json.load(fp)
fp.close()
return data
def save_json_file(fname, data):
"""
Returns the content of a JSON file.
"""
fp = utils.gz_write(fname)
json.dump(data, fp)
fp.close()
logger.info(f"saved {fname}")
return data
def change_seqid(json_name, seqid):
"""
Changes the sequence id stored in a json file.
"""
if os.path.isfile(json_name):
data = read_json_file(json_name)
for item in data:
item[const.SEQID] = seqid
fp = utils.gz_write(json_name)
json.dump(data, fp)
fp.close()
def fetch_data(data, param):
"""
    Obtains data from NCBI for each accession name, saving both a GenBank and a JSON representation of the record.
"""
db = "protein" if param.protein else "nuccore"
# Ensure json DB is built
ncbi.build_db()
genbank, taxon_acc, refseq = ncbi.get_data()
for name in data:
# Pretend no data if it is an update.
json = None if param.update else get_json(name)
# The data exists, nothing needs to be done.
if json:
continue
# The JSON representation of the data.
json_name = resolve_fname(name=name, format="json")
# GenBank representation of the data.
gbk_name = resolve_fname(name=name, format="gb")
# Genome assembly data.
if name.startswith("GCA") or name.startswith("GCF"):
ncbi.genome(name=name, fname=gbk_name, update=param.update, genbank=genbank,
refseq=refseq)
else:
# Genbank data.
ncbi.genbank_save(name, db=db, fname=gbk_name)
# Convert Genbank to JSON.
data = jsonrec.parse_file(fname=gbk_name, seqid=param.seqid)
# Save JSON file.
save_json_file(fname=json_name, data=data)
def genbank_view(params):
for param in params:
altname = resolve_fname(param.acc, format="gb")
if os.path.isfile(param.acc):
stream = utils.gz_read(param.acc)
elif os.path.isfile(altname):
stream = utils.gz_read(altname)
else:
stream = []
utils.error(f"data not found: {param.acc}")
for line in stream:
print(line, end='')
def get_json(name, seqid=None, inter=False, strict=False):
"""
Attempts to return a JSON formatted data based on a name.
"""
# Data is an existing path to a JSON file.
if os.path.isfile(name):
try:
data = jsonrec.parse_file(name, seqid=seqid)
except Exception as exc:
logger.error(f"JSON parsing error for file {name}: {exc}")
sys.exit(-1)
return data
# The JSON representation of the data.
json_name = resolve_fname(name=name, format="json")
# GenBank representation of the data.
gbk_name = resolve_fname(name=name, format="gb")
# Found the JSON representation of the file.
if os.path.isfile(json_name):
logger.info(f"found {json_name}")
data = read_json_file(json_name)
return data
# There is no JSON file but there is a GenBank file.
if os.path.isfile(gbk_name):
logger.info(f"found {gbk_name}")
data = jsonrec.parse_file(fname=gbk_name, seqid=seqid)
data = save_json_file(fname=json_name, data=data)
return data
# Interactive input, make JSON from name
if inter:
data = jsonrec.make_jsonrec(name, seqid=seqid)
return data
# Raise error if in strict mode
if strict:
utils.error(f"data not found: {name}")
return None
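# Illustrative usage (accession made up): get_json("NC_045512") first treats
# the name as a JSON file path, then looks for <DATADIR>/NC_045512.json.gz,
# then converts <DATADIR>/NC_045512.gb.gz if present; inter=True builds a new
# record from the name itself, and strict=True turns a miss into an error.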
def rename_data(data, param, newname=None):
"""
Rename data.
"""
# Will only rename a single data
newnames = newname.split(",")
for name1, name2 in zip(data, newnames):
src_json = resolve_fname(name=name1, format="json")
dest_json = resolve_fname(name=name2, format="json")
src_gb = resolve_fname(name=name1, format="gb")
dest_gb = resolve_fname(name=name2, format="gb")
if os.path.isfile(src_json):
logger.info(f"renamed {name1} as {name2}")
os.rename(src_json, dest_json)
if param.seqid:
change_seqid(dest_json, seqid=param.seqid)
else:
logger.info(f"file not found: {src_json}")
if os.path.isfile(src_gb):
if not os.path.isfile(dest_gb):
os.symlink(src_gb, dest_gb)
else:
logger.info(f"file not found: {src_gb}")
def print_data_list():
"""
Returns a list of the files in the data directory
"""
pattern = os.path.join(os.path.join(utils.DATADIR, '*.json.gz'))
matched = glob.glob(pattern)
# Extract the definition from the JSON without parsing it.
patt = re.compile(r'(definition\":\s*)(?P<value>\".+?\")')
collect = []
for path in matched:
fsize = utils.human_size(os.path.getsize(path))
base, fname = os.path.split(path)
fname = fname.rsplit(".", maxsplit=2)[0]
# Parse the first N lines
stream = gzip.open(path, 'rt') if path.endswith('gz') else open(path, 'rt')
text = stream.read(1000)
match = patt.search(text)
title = match.group("value") if match else ''
title = title.strip('", ')
# Trim the title
stitle = title[:100]
stitle = stitle + "..." if len(title) != len(stitle) else stitle
collect.append((str(fsize), f"{fname:10s}", stitle))
collect = sorted(collect, key=lambda x: x[2])
for row in collect:
line = "\t".join(row)
print(line)
@plac.pos("data", "data names")
@plac.flg('fetch', "download data as accessions")
@plac.flg('update', "updates data in storage")
@plac.opt('rename', "rename the data")
@plac.opt('seqid', "set the sequence id of the data")
@plac.flg('protein', "use the protein database")
@plac.flg('build', "build the database")
@plac.flg('verbose', "verbose mode")
def run(update=False, rename='', seqid='', protein=False, verbose=False, *data):
"""
Fetches and manages data in storage.
"""
# Set the verbosity
utils.set_verbosity(logger, level=int(verbose))
# Reset counter (needed for consistency during testing).
jsonrec.reset_counter()
# A simple wrapper class to represent input parameters.
param = objects.Param(seqid=seqid, rename=rename, start=1, protein=protein, update=update)
# Fetch the data.
fetch_data(data, param=param)
# Renaming after fetching.
if rename:
rename_data(data, param=param, newname=rename)
@plac.opt('delete', "deletes foo from storage", metavar='foo')
@plac.flg('verbose', "verbose mode")
def data(delete, verbose=False):
"""
Shows the data in the storage.
Usage:
bio data : lists the data
bio data --delete foo : deletes data called foo
bio data --delete foo,bar : deletes multiple datasets
"""
# Set the verbosity
utils.set_verbosity(logger, level=int(verbose))
# Reset counter (needed for consistency during testing).
jsonrec.reset_counter()
# Delete should be the first to execute.
if delete:
delete_data(delete)
else:
# Prints the data listing.
print_data_list() | [((6836, 6866), 'biorun.libs.placlib.pos', 'plac.pos', (['"""data"""', '"""data names"""'], {}), "('data', 'data names')\n", (6844, 6866), True, 'import biorun.libs.placlib as plac\n'), ((6868, 6916), 'biorun.libs.placlib.flg', 'plac.flg', (['"""fetch"""', '"""download data as accessions"""'], {}), "('fetch', 'download data as accessions')\n", (6876, 6916), True, 'import biorun.libs.placlib as plac\n'), ((6918, 6963), 'biorun.libs.placlib.flg', 'plac.flg', (['"""update"""', '"""updates data in storage"""'], {}), "('update', 'updates data in storage')\n", (6926, 6963), True, 'import biorun.libs.placlib as plac\n'), ((6965, 7002), 'biorun.libs.placlib.opt', 'plac.opt', (['"""rename"""', '"""rename the data"""'], {}), "('rename', 'rename the data')\n", (6973, 7002), True, 'import biorun.libs.placlib as plac\n'), ((7004, 7056), 'biorun.libs.placlib.opt', 'plac.opt', (['"""seqid"""', '"""set the sequence id of the data"""'], {}), "('seqid', 'set the sequence id of the data')\n", (7012, 7056), True, 'import biorun.libs.placlib as plac\n'), ((7058, 7105), 'biorun.libs.placlib.flg', 'plac.flg', (['"""protein"""', '"""use the protein database"""'], {}), "('protein', 'use the protein database')\n", (7066, 7105), True, 'import biorun.libs.placlib as plac\n'), ((7107, 7146), 'biorun.libs.placlib.flg', 'plac.flg', (['"""build"""', '"""build the database"""'], {}), "('build', 'build the database')\n", (7115, 7146), True, 'import biorun.libs.placlib as plac\n'), ((7148, 7183), 'biorun.libs.placlib.flg', 'plac.flg', (['"""verbose"""', '"""verbose mode"""'], {}), "('verbose', 'verbose mode')\n", (7156, 7183), True, 'import biorun.libs.placlib as plac\n'), ((7807, 7868), 'biorun.libs.placlib.opt', 'plac.opt', (['"""delete"""', '"""deletes foo from storage"""'], {'metavar': '"""foo"""'}), "('delete', 'deletes foo from storage', metavar='foo')\n", (7815, 7868), True, 'import biorun.libs.placlib as plac\n'), ((7870, 7905), 'biorun.libs.placlib.flg', 'plac.flg', (['"""verbose"""', '"""verbose mode"""'], {}), "('verbose', 'verbose mode')\n", (7878, 7905), True, 'import biorun.libs.placlib as plac\n'), ((749, 783), 'os.path.join', 'os.path.join', (['utils.DATADIR', 'fname'], {}), '(utils.DATADIR, fname)\n', (761, 783), False, 'import sys, os, glob, re, gzip, json\n'), ((1219, 1239), 'biorun.utils.gz_read', 'utils.gz_read', (['fname'], {}), '(fname)\n', (1232, 1239), False, 'from biorun import const, utils, objects, ncbi\n'), ((1251, 1264), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (1260, 1264), False, 'import sys, os, glob, re, gzip, json\n'), ((1396, 1417), 'biorun.utils.gz_write', 'utils.gz_write', (['fname'], {}), '(fname)\n', (1410, 1417), False, 'from biorun import const, utils, objects, ncbi\n'), ((1422, 1441), 'json.dump', 'json.dump', (['data', 'fp'], {}), '(data, fp)\n', (1431, 1441), False, 'import sys, os, glob, re, gzip, json\n'), ((1619, 1644), 'os.path.isfile', 'os.path.isfile', (['json_name'], {}), '(json_name)\n', (1633, 1644), False, 'import sys, os, glob, re, gzip, json\n'), ((2039, 2054), 'biorun.ncbi.build_db', 'ncbi.build_db', ([], {}), '()\n', (2052, 2054), False, 'from biorun import const, utils, objects, ncbi\n'), ((2089, 2104), 'biorun.ncbi.get_data', 'ncbi.get_data', ([], {}), '()\n', (2102, 2104), False, 'from biorun import const, utils, objects, ncbi\n'), ((3667, 3687), 'os.path.isfile', 'os.path.isfile', (['name'], {}), '(name)\n', (3681, 3687), False, 'import sys, os, glob, re, gzip, json\n'), ((4161, 4186), 'os.path.isfile', 'os.path.isfile', 
(['json_name'], {}), '(json_name)\n', (4175, 4186), False, 'import sys, os, glob, re, gzip, json\n'), ((4356, 4380), 'os.path.isfile', 'os.path.isfile', (['gbk_name'], {}), '(gbk_name)\n', (4370, 4380), False, 'import sys, os, glob, re, gzip, json\n'), ((5906, 5924), 'glob.glob', 'glob.glob', (['pattern'], {}), '(pattern)\n', (5915, 5924), False, 'import sys, os, glob, re, gzip, json\n'), ((6000, 6054), 're.compile', 're.compile', (['"""(definition\\\\":\\\\s*)(?P<value>\\\\".+?\\\\")"""'], {}), '(\'(definition\\\\":\\\\s*)(?P<value>\\\\".+?\\\\")\')\n', (6010, 6054), False, 'import sys, os, glob, re, gzip, json\n'), ((7465, 7488), 'biorun.models.jsonrec.reset_counter', 'jsonrec.reset_counter', ([], {}), '()\n', (7486, 7488), False, 'from biorun.models import jsonrec\n'), ((7562, 7649), 'biorun.objects.Param', 'objects.Param', ([], {'seqid': 'seqid', 'rename': 'rename', 'start': '(1)', 'protein': 'protein', 'update': 'update'}), '(seqid=seqid, rename=rename, start=1, protein=protein, update=\n update)\n', (7575, 7649), False, 'from biorun import const, utils, objects, ncbi\n'), ((8321, 8344), 'biorun.models.jsonrec.reset_counter', 'jsonrec.reset_counter', ([], {}), '()\n', (8342, 8344), False, 'from biorun.models import jsonrec\n'), ((560, 572), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (568, 572), False, 'import sys, os, glob, re, gzip, json\n'), ((957, 978), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (971, 978), False, 'import sys, os, glob, re, gzip, json\n'), ((1764, 1789), 'biorun.utils.gz_write', 'utils.gz_write', (['json_name'], {}), '(json_name)\n', (1778, 1789), False, 'from biorun import const, utils, objects, ncbi\n'), ((1798, 1817), 'json.dump', 'json.dump', (['data', 'fp'], {}), '(data, fp)\n', (1807, 1817), False, 'import sys, os, glob, re, gzip, json\n'), ((2910, 2963), 'biorun.models.jsonrec.parse_file', 'jsonrec.parse_file', ([], {'fname': 'gbk_name', 'seqid': 'param.seqid'}), '(fname=gbk_name, seqid=param.seqid)\n', (2928, 2963), False, 'from biorun.models import jsonrec\n'), ((3163, 3188), 'os.path.isfile', 'os.path.isfile', (['param.acc'], {}), '(param.acc)\n', (3177, 3188), False, 'import sys, os, glob, re, gzip, json\n'), ((4438, 4485), 'biorun.models.jsonrec.parse_file', 'jsonrec.parse_file', ([], {'fname': 'gbk_name', 'seqid': 'seqid'}), '(fname=gbk_name, seqid=seqid)\n', (4456, 4485), False, 'from biorun.models import jsonrec\n'), ((4639, 4678), 'biorun.models.jsonrec.make_jsonrec', 'jsonrec.make_jsonrec', (['name'], {'seqid': 'seqid'}), '(name, seqid=seqid)\n', (4659, 4678), False, 'from biorun.models import jsonrec\n'), ((4759, 4797), 'biorun.utils.error', 'utils.error', (['f"""data not found: {name}"""'], {}), "(f'data not found: {name}')\n", (4770, 4797), False, 'from biorun import const, utils, objects, ncbi\n'), ((5257, 5281), 'os.path.isfile', 'os.path.isfile', (['src_json'], {}), '(src_json)\n', (5271, 5281), False, 'import sys, os, glob, re, gzip, json\n'), ((5549, 5571), 'os.path.isfile', 'os.path.isfile', (['src_gb'], {}), '(src_gb)\n', (5563, 5571), False, 'import sys, os, glob, re, gzip, json\n'), ((5850, 5890), 'os.path.join', 'os.path.join', (['utils.DATADIR', '"""*.json.gz"""'], {}), "(utils.DATADIR, '*.json.gz')\n", (5862, 5890), False, 'import sys, os, glob, re, gzip, json\n'), ((6173, 6192), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (6186, 6192), False, 'import sys, os, glob, re, gzip, json\n'), ((992, 1008), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (1001, 1008), False, 'import 
sys, os, glob, re, gzip, json\n'), ((2642, 2737), 'biorun.ncbi.genome', 'ncbi.genome', ([], {'name': 'name', 'fname': 'gbk_name', 'update': 'param.update', 'genbank': 'genbank', 'refseq': 'refseq'}), '(name=name, fname=gbk_name, update=param.update, genbank=genbank,\n refseq=refseq)\n', (2653, 2737), False, 'from biorun import const, utils, objects, ncbi\n'), ((2812, 2858), 'biorun.ncbi.genbank_save', 'ncbi.genbank_save', (['name'], {'db': 'db', 'fname': 'gbk_name'}), '(name, db=db, fname=gbk_name)\n', (2829, 2858), False, 'from biorun import const, utils, objects, ncbi\n'), ((3211, 3235), 'biorun.utils.gz_read', 'utils.gz_read', (['param.acc'], {}), '(param.acc)\n', (3224, 3235), False, 'from biorun import const, utils, objects, ncbi\n'), ((3249, 3272), 'os.path.isfile', 'os.path.isfile', (['altname'], {}), '(altname)\n', (3263, 3272), False, 'import sys, os, glob, re, gzip, json\n'), ((3721, 3758), 'biorun.models.jsonrec.parse_file', 'jsonrec.parse_file', (['name'], {'seqid': 'seqid'}), '(name, seqid=seqid)\n', (3739, 3758), False, 'from biorun.models import jsonrec\n'), ((5350, 5380), 'os.rename', 'os.rename', (['src_json', 'dest_json'], {}), '(src_json, dest_json)\n', (5359, 5380), False, 'import sys, os, glob, re, gzip, json\n'), ((6128, 6149), 'os.path.getsize', 'os.path.getsize', (['path'], {}), '(path)\n', (6143, 6149), False, 'import sys, os, glob, re, gzip, json\n'), ((6294, 6315), 'gzip.open', 'gzip.open', (['path', '"""rt"""'], {}), "(path, 'rt')\n", (6303, 6315), False, 'import sys, os, glob, re, gzip, json\n'), ((3295, 3317), 'biorun.utils.gz_read', 'utils.gz_read', (['altname'], {}), '(altname)\n', (3308, 3317), False, 'from biorun import const, utils, objects, ncbi\n'), ((3368, 3411), 'biorun.utils.error', 'utils.error', (['f"""data not found: {param.acc}"""'], {}), "(f'data not found: {param.acc}')\n", (3379, 3411), False, 'from biorun import const, utils, objects, ncbi\n'), ((3875, 3887), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (3883, 3887), False, 'import sys, os, glob, re, gzip, json\n'), ((5592, 5615), 'os.path.isfile', 'os.path.isfile', (['dest_gb'], {}), '(dest_gb)\n', (5606, 5615), False, 'import sys, os, glob, re, gzip, json\n'), ((5633, 5660), 'os.symlink', 'os.symlink', (['src_gb', 'dest_gb'], {}), '(src_gb, dest_gb)\n', (5643, 5660), False, 'import sys, os, glob, re, gzip, json\n')] |
LaverdeS/Genetic_Algorithm_EGame | game/items/game_item.py | 89ff8c7870fa90768f4616cab6803227c8613396 | import numpy as np
from random import randint
from PyQt5.QtGui import QImage
from PyQt5.QtCore import QPointF
class GameItem():
def __init__(self, parent, boundary, position=None):
self.parent = parent
self.config = parent.config
self.items_config = self.config.items
if position is None:
_left_border = boundary
_right_border = int(self.parent.frame_dimension[0]) - boundary
_top_border = boundary
_bottom_border = int(self.parent.frame_dimension[1]) - boundary
_x = float(randint(_left_border, _right_border))
_y = float(randint(_top_border, _bottom_border))
self._position = np.array([_x, _y])
else:
self._position = position
def draw_image(self, painter):
item_image = QImage(self.image)
        # center the image on its position: the x offset uses the width, the y offset the height
        painter.drawImage(QPointF(self._position[0]-(item_image.width()/2),
                          self._position[1]-(item_image.height()/2)),
item_image) | [((829, 847), 'PyQt5.QtGui.QImage', 'QImage', (['self.image'], {}), '(self.image)\n', (835, 847), False, 'from PyQt5.QtGui import QImage\n'), ((701, 719), 'numpy.array', 'np.array', (['[_x, _y]'], {}), '([_x, _y])\n', (709, 719), True, 'import numpy as np\n'), ((573, 609), 'random.randint', 'randint', (['_left_border', '_right_border'], {}), '(_left_border, _right_border)\n', (580, 609), False, 'from random import randint\n'), ((634, 670), 'random.randint', 'randint', (['_top_border', '_bottom_border'], {}), '(_top_border, _bottom_border)\n', (641, 670), False, 'from random import randint\n')] |
felipecosta09/cloudone-workload-controltower-lifecycle | source/deepsecurity/models/application_type_rights.py | 7927c84d164058b034fc872701b5ee117641f4d1 | # coding: utf-8
"""
Trend Micro Deep Security API
Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501
OpenAPI spec version: 12.5.841
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ApplicationTypeRights(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'can_create_new_application_types': 'bool',
'can_delete_application_types': 'bool',
'can_edit_application_type_properties': 'bool'
}
attribute_map = {
'can_create_new_application_types': 'canCreateNewApplicationTypes',
'can_delete_application_types': 'canDeleteApplicationTypes',
'can_edit_application_type_properties': 'canEditApplicationTypeProperties'
}
def __init__(self, can_create_new_application_types=None, can_delete_application_types=None, can_edit_application_type_properties=None): # noqa: E501
"""ApplicationTypeRights - a model defined in Swagger""" # noqa: E501
self._can_create_new_application_types = None
self._can_delete_application_types = None
self._can_edit_application_type_properties = None
self.discriminator = None
if can_create_new_application_types is not None:
self.can_create_new_application_types = can_create_new_application_types
if can_delete_application_types is not None:
self.can_delete_application_types = can_delete_application_types
if can_edit_application_type_properties is not None:
self.can_edit_application_type_properties = can_edit_application_type_properties
@property
def can_create_new_application_types(self):
"""Gets the can_create_new_application_types of this ApplicationTypeRights. # noqa: E501
Right to create new application types. # noqa: E501
:return: The can_create_new_application_types of this ApplicationTypeRights. # noqa: E501
:rtype: bool
"""
return self._can_create_new_application_types
@can_create_new_application_types.setter
def can_create_new_application_types(self, can_create_new_application_types):
"""Sets the can_create_new_application_types of this ApplicationTypeRights.
Right to create new application types. # noqa: E501
:param can_create_new_application_types: The can_create_new_application_types of this ApplicationTypeRights. # noqa: E501
:type: bool
"""
self._can_create_new_application_types = can_create_new_application_types
@property
def can_delete_application_types(self):
"""Gets the can_delete_application_types of this ApplicationTypeRights. # noqa: E501
Right to delete application types. # noqa: E501
:return: The can_delete_application_types of this ApplicationTypeRights. # noqa: E501
:rtype: bool
"""
return self._can_delete_application_types
@can_delete_application_types.setter
def can_delete_application_types(self, can_delete_application_types):
"""Sets the can_delete_application_types of this ApplicationTypeRights.
Right to delete application types. # noqa: E501
:param can_delete_application_types: The can_delete_application_types of this ApplicationTypeRights. # noqa: E501
:type: bool
"""
self._can_delete_application_types = can_delete_application_types
@property
def can_edit_application_type_properties(self):
"""Gets the can_edit_application_type_properties of this ApplicationTypeRights. # noqa: E501
Right to edit application type properties. # noqa: E501
:return: The can_edit_application_type_properties of this ApplicationTypeRights. # noqa: E501
:rtype: bool
"""
return self._can_edit_application_type_properties
@can_edit_application_type_properties.setter
def can_edit_application_type_properties(self, can_edit_application_type_properties):
"""Sets the can_edit_application_type_properties of this ApplicationTypeRights.
Right to edit application type properties. # noqa: E501
:param can_edit_application_type_properties: The can_edit_application_type_properties of this ApplicationTypeRights. # noqa: E501
:type: bool
"""
self._can_edit_application_type_properties = can_edit_application_type_properties
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApplicationTypeRights, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApplicationTypeRights):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [((5314, 5347), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (5327, 5347), False, 'import six\n')] |
hardikvasa/database-journal | code-samples/aws_neptune.py | 7932b5a7fe909f8adb3a909183532b43d450da7b | from __future__ import print_function # Python 2/3 compatibility
from gremlin_python import statics
from gremlin_python.structure.graph import Graph
from gremlin_python.process.graph_traversal import __
from gremlin_python.process.strategies import *
from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection
#initializing the graph object
graph = Graph()
#creating connection with the remote
remoteConn = DriverRemoteConnection('wss://<endpoint>:8182/gremlin','g')
# reuse the already-open connection rather than opening a second, unclosed one
g = graph.traversal().withRemote(remoteConn)
print('Connection created.')
#clearing out all the vertices to start fresh
g.V().drop().iterate()
print('Deleting everything and starting clean.')
#Adding some vertices (nodes)
gerald = g.addV('person').property('age','81').property('first_name','Gerald').property('stays_in','Portland').next()
edith = g.addV('person').property('age','78').property('first_name','Edith').property('stays_in','Portland').next()
peter = g.addV('person').property('age','52').property('first_name','Shane').property('stays_in','Seattle').next()
mary = g.addV('person').property('age','50').property('first_name','Mary').property('stays_in','Seattle').next()
betty = g.addV('person').property('age','19').property('first_name','Betty').property('stays_in','Chicago').next()
print('Added some vertices (nodes).')
#Adding relationships (edges)
edge = g.V().has('first_name', 'Gerald').addE('husband_of').to(g.V().has('first_name', 'Edith')).property('married_since','1947').next()
edge = g.V().has('first_name', 'Edith').addE('wife_of').to(g.V().has('first_name', 'Gerald')).property('married_since','1947').next()
edge = g.V().has('first_name', 'Shane').addE('son_of').to(g.V().has('first_name', 'Gerald')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Gerald').addE('father_of').to(g.V().has('first_name', 'Shane')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Shane').addE('son_of').to(g.V().has('first_name', 'Edith')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Edith').addE('mother_of').to(g.V().has('first_name', 'Shane')).property('known_since','1964').next()
edge = g.V().has('first_name', 'Shane').addE('husband_of').to(g.V().has('first_name', 'Mary')).property('known_since','1989').next()
edge = g.V().has('first_name', 'Mary').addE('wife_of').to(g.V().has('first_name', 'Shane')).property('known_since','1989').next()
edge = g.V().has('first_name', 'Shane').addE('father_of').to(g.V().has('first_name', 'Betty')).property('known_since','1991').next()
edge = g.V().has('first_name', 'Betty').addE('daughter_of').to(g.V().has('first_name', 'Shane')).property('known_since','1991').next()
edge = g.V().has('first_name', 'Mary').addE('mother_of').to(g.V().has('first_name', 'Betty')).property('known_since','1991').next()
edge = g.V().has('first_name', 'Betty').addE('daughter_of').to(g.V().has('first_name', 'Mary')).property('known_since','1991').next()
#print out all the node's first names
print('\n Printing first name from all nodes:')
print(g.V().first_name.toList())
#print out all the properties of person whose's first name is Shane
print('\n Printing all properties of person whose first name is Shane:')
print(g.V().has('person','first_name','Shane').valueMap().next())
#traversing the graph starting with Betty to then Shane to then Edith
print('\n Finding Betty and then looking up her parents:')
print(g.V().has('first_name', 'Betty').out('daughter_of').out('son_of').valueMap().toList())
#Print out all the nodes
print('\n Printing out all the nodes:')
people = g.V().valueMap().toList()
print(people)
#Print out all the connections (edges)
print('\n Print out all the connections (edges):')
connections = g.E().valueMap().toList()
print(connections)
#Closing the connection
remoteConn.close()
print('Connection closed!') | [((376, 383), 'gremlin_python.structure.graph.Graph', 'Graph', ([], {}), '()\n', (381, 383), False, 'from gremlin_python.structure.graph import Graph\n'), ((435, 495), 'gremlin_python.driver.driver_remote_connection.DriverRemoteConnection', 'DriverRemoteConnection', (['"""wss://<endpoint>:8182/gremlin"""', '"""g"""'], {}), "('wss://<endpoint>:8182/gremlin', 'g')\n", (457, 495), False, 'from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection\n'), ((528, 588), 'gremlin_python.driver.driver_remote_connection.DriverRemoteConnection', 'DriverRemoteConnection', (['"""wss://<endpoint>:8182/gremlin"""', '"""g"""'], {}), "('wss://<endpoint>:8182/gremlin', 'g')\n", (550, 588), False, 'from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection\n')] |
Ramsha04/kits19-2d-reproduce | kits19cnn/io/preprocess_train.py | 66678f1eda3688d6dc64389e9a80ae0b754a3052 | import os
from os.path import join, isdir
from pathlib import Path
from collections import defaultdict
from tqdm import tqdm
import nibabel as nib
import numpy as np
import json
from .resample import resample_patient
from .custom_augmentations import resize_data_and_seg, crop_to_bbox
class Preprocessor(object):
"""
Preprocesses the original dataset (interpolated).
Procedures:
* Resampled all volumes to have a thickness of 3mm.
* Clipped to [-30, 300] HU
* z-score standardization (zero mean and unit variance)
* Standardization per 3D image instead of ACROSS THE WHOLE
TRAINING SET
* save as .npy array
* imaging.npy
* segmentation.npy (if with_masks)
"""
def __init__(self, in_dir, out_dir, cases=None, kits_json_path=None,
bbox_json_path=None, clip_values=[-30, 300], with_mask=True,
fg_classes=[0, 1, 2], resize_xy_shape=(256, 256)):
"""
Attributes:
in_dir (str): directory with the input data. Should be the
kits19/data directory.
out_dir (str): output directory where you want to save each case
cases: list of case folders to preprocess
kits_json_path (str): path to the kits.json file in the kits19/data
                directory. This should only be specified if you're resampling.
Defaults to None.
bbox_json_path (str): path to the bbox_stage1.json file made from
stage1 post-processing. Triggers cropping to the bboxes.
Defaults to None.
            resize_xy_shape (list/tuple): in-plane (x, y) shape to resize each slice to
clip_values (list, tuple): values you want to clip CT scans to.
Defaults to None for no clipping.
with_mask (bool): whether or not to preprocess with masks or no
masks. Applicable to preprocessing test set (no labels
available).
fg_classes (list): of foreground class indices
if None, doesn't gather fg class stats.
"""
self.in_dir = in_dir
self.out_dir = out_dir
self._load_kits_json(kits_json_path)
self._load_bbox_json(bbox_json_path)
self.clip_values = clip_values
self.with_mask = with_mask
self.fg_classes = fg_classes
if not self.with_mask:
assert self.fg_classes is None, \
"When with_mask is False, fg_classes must be None."
self.cases = cases
# automatically collecting all of the case folder names
if self.cases is None:
self.cases = [os.path.join(self.in_dir, case) \
for case in os.listdir(self.in_dir) \
if case.startswith("case")]
self.cases = sorted(self.cases)
assert len(self.cases) > 0, \
"Please make sure that in_dir refers to the proper directory."
# making directory if out_dir doesn't exist
if not isdir(out_dir):
os.mkdir(out_dir)
print("Created directory: {0}".format(out_dir))
self.resize_xy_shape = tuple(resize_xy_shape)
def gen_data(self, save_fnames=["imaging", "segmentation"]):
"""
Generates and saves preprocessed data as numpy arrays (n, x, y).
Args:
save_fnames (List[str]): save names for [image, seg] respectively.
DOESN'T INCLUDE THE .npy
Returns:
None
"""
# Generating data and saving them recursively
for case in tqdm(self.cases):
x_path, y_path = join(case, "imaging.nii.gz"), join(case, "segmentation.nii.gz")
image = nib.load(x_path).get_fdata()[None]
label = nib.load(y_path).get_fdata()[None] if self.with_mask \
else None
preprocessed_img, preprocessed_label = self.preprocess(image,
label,
case)
if self.bbox_dict is not None:
preprocessed_img, preprocessed_label = self.crop_case_to_bbox(preprocessed_img,
preprocessed_label,
case)
self.save_imgs(preprocessed_img, preprocessed_label, case,
save_fnames=save_fnames)
def preprocess(self, image, mask, case=None):
"""
Clipping, cropping, and resampling.
Args:
image: numpy array
shape (c, n, x, y)
mask: numpy array or None
shape (c, n, x, y)
case (str): path to a case folder
Returns:
tuple of:
- preprocessed image
shape: (n, x, y)
- preprocessed mask or None
shape: (n, x, y)
"""
raw_case = Path(case).name # raw case name, i.e. case_00000
# resampling
if self.kits_json is not None:
for info_dict in self.kits_json:
# guaranteeing that the info is corresponding to the right
# case
if info_dict["case_id"] == raw_case:
case_info_dict = info_dict
break
# resampling the slices axis to 3mm
orig_spacing = (case_info_dict["captured_slice_thickness"],
case_info_dict["captured_pixel_width"],
case_info_dict["captured_pixel_width"])
target_spacing = (3,) + orig_spacing[1:]
image, mask = resample_patient(image, mask, np.array(orig_spacing),
target_spacing=np.array(target_spacing))
if self.clip_values is not None:
image = np.clip(image, self.clip_values[0], self.clip_values[1])
if self.resize_xy_shape is not None:
# image coming in : shape (c, n, h, w); mask is same shape
zdim_size = image.shape[1]
resize_xy_shape = (zdim_size,) + self.resize_xy_shape
image, mask = resize_data_and_seg(image, size=resize_xy_shape,
seg=mask)
image = standardize_per_image(image)
mask = mask.squeeze() if mask is not None else mask
return (image.squeeze(), mask)
def save_imgs(self, image, mask, case,
save_fnames=["imaging", "segmentation"]):
"""
Saves an image and mask pair as .npy arrays in the KiTS19 file structure
Args:
image: numpy array
mask: numpy array
case: path to a case folder (each element of self.cases)
save_fnames (List[str]): save names for [image, seg] respectively.
DOESN'T INCLUDE THE .npy
"""
for fname in save_fnames:
assert not ".npy" in fname, \
"Filenames in save_fnames should not include .npy in the name."
# saving the generated dataset
# output dir in KiTS19 format
# extracting the raw case folder name
case_raw = Path(case).name # extracting the raw case folder name
out_case_dir = join(self.out_dir, case_raw)
# checking to make sure that the output directories exist
if not isdir(out_case_dir):
os.mkdir(out_case_dir)
np.save(os.path.join(out_case_dir, f"{save_fnames[0]}.npy"), image)
if mask is not None:
np.save(os.path.join(out_case_dir, f"{save_fnames[1]}.npy"), mask)
def save_dir_as_2d(self, base_fnames=["imaging", "segmentation"],
delete3dcase=False):
"""
Takes preprocessed 3D numpy arrays and saves them as slices
in the same directory.
Arrays must have shape (n, h, w).
Args:
base_fnames (List[str]): names to read for [image, seg] respectively.
DOESN'T INCLUDE THE .npy
delete3dcase (bool): whether or not to delete the 3D volume after
saving the 2D sliced versions
"""
for fname in base_fnames:
assert not ".npy" in fname, \
"Filenames in base_fnames should not include .npy in the name."
self.pos_per_class_dict = {} # saves slices per class
self.pos_per_slice_dict = defaultdict(list) # saves classes per slice
# Generating data and saving them recursively
for case in tqdm(self.cases):
# output dir in KiTS19 format
case_raw = Path(case).name # extracting the raw case folder name
out_case_dir = join(self.out_dir, case_raw)
# checking to make sure that the output directories exist
if not isdir(out_case_dir):
os.mkdir(out_case_dir)
# assumes the .npy files have shape: (d, h, w)
paths = [join(out_case_dir, f"{base_fnames[0]}.npy"),
join(out_case_dir, f"{base_fnames[1]}.npy")]
image, label = np.load(paths[0]), np.load(paths[1])
self.save_3d_as_2d(image, label, case_raw, out_case_dir)
# to deal with colaboratory storage limitations
if delete3dcase:
os.remove(paths[0]), os.remove(paths[1])
if self.fg_classes is not None:
self._save_pos_slice_dict()
def save_3d_as_2d(self, image, mask, case_raw, out_case_dir):
"""
Saves a 3D volume as separate 2D arrays for each slice across the
axial axis. The naming convention is as follows:
imaging_{parsed_slice_idx}.npy
segmentation_{parsed_slice_idx}.npy
where parsed_slice_idx is just the slice index but filled with
            zeros until it hits 3 digits (so sorting is easier).
Args:
image: numpy array
mask: numpy array
case: raw case folder name
"""
# saving the generated dataset
# iterates through all slices and saves them individually as 2D arrays
assert len(image.shape) == 3, \
"Image shape should be (n, h, w)"
slice_idx_per_class = defaultdict(list)
for slice_idx in range(image.shape[0]):
# naming
slice_idx_str = parse_slice_idx_to_str(slice_idx)
case_str = f"{case_raw}_{slice_idx_str}"
if mask is not None:
label_slice = mask[slice_idx]
# appending fg slice indices
if self.fg_classes is not None:
for label_idx in self.fg_classes:
if label_idx != 0 and (label_slice == label_idx).any():
slice_idx_per_class[label_idx].append(slice_idx)
self.pos_per_slice_dict[case_str].append(label_idx)
elif label_idx == 0 and np.sum(label_slice) == 0:
# for completely blank labels
slice_idx_per_class[label_idx].append(slice_idx)
self.pos_per_slice_dict[case_str].append(label_idx)
self._save_slices(image, mask, out_case_dir=out_case_dir,
slice_idx=slice_idx, slice_idx_str=slice_idx_str)
if self.fg_classes is not None:
self.pos_per_class_dict[case_raw] = slice_idx_per_class
def _save_pos_slice_dict(self):
"""
Saves the foreground (positive) class dictionaries:
- slice_indices.json
saves the slice indices per class
{
case: {fg_class1: [slice indices...],
fg_class2: [slice indices...],
...}
}
- classes_per_slice.json
the keys are not cases, but the actual filenames that are
being read.
{
case_slice_idx_str: [classes_in_slice],
case_slice_idx_str2: [classes_in_slice],
}
"""
save_path_per_slice = join(self.out_dir, "classes_per_slice.json")
# saving the dictionaries
print(f"Logged the classes in {self.fg_classes} for each slice at",
f"{save_path_per_slice}.")
with open(save_path_per_slice, "w") as fp:
json.dump(self.pos_per_slice_dict, fp)
save_path = join(self.out_dir, "slice_indices.json")
# saving the dictionaries
print(f"Logged the slice indices for each class in {self.fg_classes} at",
f"{save_path}.")
with open(save_path, "w") as fp:
json.dump(self.pos_per_class_dict, fp)
def _save_slices(self, image, mask, out_case_dir, slice_idx,
slice_idx_str):
"""
For saving the slices in self.save_3d_as_2d()
"""
np.save(join(out_case_dir, f"imaging_{slice_idx_str}.npy"),
image[slice_idx])
if mask is not None:
label_slice = mask[slice_idx]
np.save(join(out_case_dir, f"segmentation_{slice_idx_str}.npy"),
label_slice)
def _load_kits_json(self, json_path):
"""
Loads the kits.json file into `self.kits_json`
"""
if json_path is None:
self.kits_json = None
print("`kits_json_path is empty, so not resampling.`")
elif json_path is not None:
with open(json_path, "r") as fp:
self.kits_json = json.load(fp)
def _load_bbox_json(self, json_path):
"""
        Loads the bbox json file (bbox_stage1.json) into `self.bbox_dict`
"""
if json_path is None:
self.bbox_dict = None
print("bbox_json_path, so not cropping volumes to their bbox.")
else:
with open(json_path, "r") as fp:
self.bbox_dict = json.load(fp)
def crop_case_to_bbox(self, image, label, case):
"""
Crops a 3D image and 3D label to the corresponding bounding box.
"""
bbox_coord = self.bbox_dict[case]
        return (crop_to_bbox(image, bbox_coord), crop_to_bbox(label, bbox_coord))
def standardize_per_image(image):
"""
Z-score standardization per image.
"""
mean, stddev = image.mean(), image.std()
return (image - mean) / stddev
def parse_slice_idx_to_str(slice_idx):
"""
Parse the slice index to a three digit string for saving and reading the
2D .npy files generated by io.preprocess.Preprocessor.
Naming convention: {type of slice}_{case}_{slice_idx}
* adding 0s to slice_idx until it reaches 3 digits,
* so sorting files is easier when stacking
"""
return f"{slice_idx:03}"
| [((3745, 3761), 'tqdm.tqdm', 'tqdm', (['self.cases'], {}), '(self.cases)\n', (3749, 3761), False, 'from tqdm import tqdm\n'), ((7526, 7554), 'os.path.join', 'join', (['self.out_dir', 'case_raw'], {}), '(self.out_dir, case_raw)\n', (7530, 7554), False, 'from os.path import join, isdir\n'), ((8671, 8688), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (8682, 8688), False, 'from collections import defaultdict\n'), ((8789, 8805), 'tqdm.tqdm', 'tqdm', (['self.cases'], {}), '(self.cases)\n', (8793, 8805), False, 'from tqdm import tqdm\n'), ((10485, 10502), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (10496, 10502), False, 'from collections import defaultdict\n'), ((12410, 12454), 'os.path.join', 'join', (['self.out_dir', '"""classes_per_slice.json"""'], {}), "(self.out_dir, 'classes_per_slice.json')\n", (12414, 12454), False, 'from os.path import join, isdir\n'), ((12729, 12769), 'os.path.join', 'join', (['self.out_dir', '"""slice_indices.json"""'], {}), "(self.out_dir, 'slice_indices.json')\n", (12733, 12769), False, 'from os.path import join, isdir\n'), ((3058, 3072), 'os.path.isdir', 'isdir', (['out_dir'], {}), '(out_dir)\n', (3063, 3072), False, 'from os.path import join, isdir\n'), ((3086, 3103), 'os.mkdir', 'os.mkdir', (['out_dir'], {}), '(out_dir)\n', (3094, 3103), False, 'import os\n'), ((5212, 5222), 'pathlib.Path', 'Path', (['case'], {}), '(case)\n', (5216, 5222), False, 'from pathlib import Path\n'), ((6124, 6180), 'numpy.clip', 'np.clip', (['image', 'self.clip_values[0]', 'self.clip_values[1]'], {}), '(image, self.clip_values[0], self.clip_values[1])\n', (6131, 6180), True, 'import numpy as np\n'), ((7449, 7459), 'pathlib.Path', 'Path', (['case'], {}), '(case)\n', (7453, 7459), False, 'from pathlib import Path\n'), ((7636, 7655), 'os.path.isdir', 'isdir', (['out_case_dir'], {}), '(out_case_dir)\n', (7641, 7655), False, 'from os.path import join, isdir\n'), ((7669, 7691), 'os.mkdir', 'os.mkdir', (['out_case_dir'], {}), '(out_case_dir)\n', (7677, 7691), False, 'import os\n'), ((7709, 7760), 'os.path.join', 'os.path.join', (['out_case_dir', 'f"""{save_fnames[0]}.npy"""'], {}), "(out_case_dir, f'{save_fnames[0]}.npy')\n", (7721, 7760), False, 'import os\n'), ((8953, 8981), 'os.path.join', 'join', (['self.out_dir', 'case_raw'], {}), '(self.out_dir, case_raw)\n', (8957, 8981), False, 'from os.path import join, isdir\n'), ((12669, 12707), 'json.dump', 'json.dump', (['self.pos_per_slice_dict', 'fp'], {}), '(self.pos_per_slice_dict, fp)\n', (12678, 12707), False, 'import json\n'), ((12970, 13008), 'json.dump', 'json.dump', (['self.pos_per_class_dict', 'fp'], {}), '(self.pos_per_class_dict, fp)\n', (12979, 13008), False, 'import json\n'), ((13206, 13256), 'os.path.join', 'join', (['out_case_dir', 'f"""imaging_{slice_idx_str}.npy"""'], {}), "(out_case_dir, f'imaging_{slice_idx_str}.npy')\n", (13210, 13256), False, 'from os.path import join, isdir\n'), ((2674, 2705), 'os.path.join', 'os.path.join', (['self.in_dir', 'case'], {}), '(self.in_dir, case)\n', (2686, 2705), False, 'import os\n'), ((3792, 3820), 'os.path.join', 'join', (['case', '"""imaging.nii.gz"""'], {}), "(case, 'imaging.nii.gz')\n", (3796, 3820), False, 'from os.path import join, isdir\n'), ((3822, 3855), 'os.path.join', 'join', (['case', '"""segmentation.nii.gz"""'], {}), "(case, 'segmentation.nii.gz')\n", (3826, 3855), False, 'from os.path import join, isdir\n'), ((5955, 5977), 'numpy.array', 'np.array', (['orig_spacing'], {}), '(orig_spacing)\n', (5963, 5977), True, 'import 
numpy as np\n'), ((7818, 7869), 'os.path.join', 'os.path.join', (['out_case_dir', 'f"""{save_fnames[1]}.npy"""'], {}), "(out_case_dir, f'{save_fnames[1]}.npy')\n", (7830, 7869), False, 'import os\n'), ((8872, 8882), 'pathlib.Path', 'Path', (['case'], {}), '(case)\n', (8876, 8882), False, 'from pathlib import Path\n'), ((9071, 9090), 'os.path.isdir', 'isdir', (['out_case_dir'], {}), '(out_case_dir)\n', (9076, 9090), False, 'from os.path import join, isdir\n'), ((9108, 9130), 'os.mkdir', 'os.mkdir', (['out_case_dir'], {}), '(out_case_dir)\n', (9116, 9130), False, 'import os\n'), ((9211, 9254), 'os.path.join', 'join', (['out_case_dir', 'f"""{base_fnames[0]}.npy"""'], {}), "(out_case_dir, f'{base_fnames[0]}.npy')\n", (9215, 9254), False, 'from os.path import join, isdir\n'), ((9277, 9320), 'os.path.join', 'join', (['out_case_dir', 'f"""{base_fnames[1]}.npy"""'], {}), "(out_case_dir, f'{base_fnames[1]}.npy')\n", (9281, 9320), False, 'from os.path import join, isdir\n'), ((9349, 9366), 'numpy.load', 'np.load', (['paths[0]'], {}), '(paths[0])\n', (9356, 9366), True, 'import numpy as np\n'), ((9368, 9385), 'numpy.load', 'np.load', (['paths[1]'], {}), '(paths[1])\n', (9375, 9385), True, 'import numpy as np\n'), ((13383, 13438), 'os.path.join', 'join', (['out_case_dir', 'f"""segmentation_{slice_idx_str}.npy"""'], {}), "(out_case_dir, f'segmentation_{slice_idx_str}.npy')\n", (13387, 13438), False, 'from os.path import join, isdir\n'), ((14208, 14221), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (14217, 14221), False, 'import json\n'), ((2746, 2769), 'os.listdir', 'os.listdir', (['self.in_dir'], {}), '(self.in_dir)\n', (2756, 2769), False, 'import os\n'), ((6037, 6061), 'numpy.array', 'np.array', (['target_spacing'], {}), '(target_spacing)\n', (6045, 6061), True, 'import numpy as np\n'), ((9561, 9580), 'os.remove', 'os.remove', (['paths[0]'], {}), '(paths[0])\n', (9570, 9580), False, 'import os\n'), ((9582, 9601), 'os.remove', 'os.remove', (['paths[1]'], {}), '(paths[1])\n', (9591, 9601), False, 'import os\n'), ((13840, 13853), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (13849, 13853), False, 'import json\n'), ((3876, 3892), 'nibabel.load', 'nib.load', (['x_path'], {}), '(x_path)\n', (3884, 3892), True, 'import nibabel as nib\n'), ((3931, 3947), 'nibabel.load', 'nib.load', (['y_path'], {}), '(y_path)\n', (3939, 3947), True, 'import nibabel as nib\n'), ((11171, 11190), 'numpy.sum', 'np.sum', (['label_slice'], {}), '(label_slice)\n', (11177, 11190), True, 'import numpy as np\n')] |
opywan/calm-dsl | setup.py | 1d89436d039a39265a0ae806022be5b52e757ac0 | import sys
import setuptools
from setuptools.command.test import test as TestCommand
def read_file(filename):
with open(filename, "r", encoding='utf8') as f:
return f.read()
class PyTest(TestCommand):
"""PyTest"""
def finalize_options(self):
"""finalize_options"""
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
"""run_tests"""
import pytest
errcode = pytest.main(self.test_args)
sys.exit(errcode)
setuptools.setup(
name="calm.dsl",
version="0.9.0-alpha",
author="Nutanix",
author_email="[email protected]",
description="Calm DSL for blueprints",
long_description=read_file("README.md"),
long_description_content_type="text/markdown",
url="https://github.com/nutanix/calm-dsl",
packages=setuptools.find_namespace_packages(include=["calm.*"]),
namespace_packages=["calm"],
install_requires=read_file("requirements.txt"),
tests_require=read_file("dev-requirements.txt"),
cmdclass={"test": PyTest},
zip_safe=False,
include_package_data=True,
entry_points={"console_scripts": ["calm=calm.dsl.cli:main"]},
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.7",
],
)
| [((306, 340), 'setuptools.command.test.test.finalize_options', 'TestCommand.finalize_options', (['self'], {}), '(self)\n', (334, 340), True, 'from setuptools.command.test import test as TestCommand\n'), ((491, 518), 'pytest.main', 'pytest.main', (['self.test_args'], {}), '(self.test_args)\n', (502, 518), False, 'import pytest\n'), ((527, 544), 'sys.exit', 'sys.exit', (['errcode'], {}), '(errcode)\n', (535, 544), False, 'import sys\n'), ((873, 927), 'setuptools.find_namespace_packages', 'setuptools.find_namespace_packages', ([], {'include': "['calm.*']"}), "(include=['calm.*'])\n", (907, 927), False, 'import setuptools\n')] |
chen88358323/HanLP | hanlp/pretrained/tok.py | ee9066c3b7aad405dfe0ccffb7f66c59017169ae | # -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-28 21:12
from hanlp_common.constant import HANLP_URL
SIGHAN2005_PKU_CONVSEG = HANLP_URL + 'tok/sighan2005-pku-convseg_20200110_153722.zip'
'Conv model (:cite:`wang-xu-2017-convolutional`) trained on sighan2005 pku dataset.'
SIGHAN2005_MSR_CONVSEG = HANLP_URL + 'tok/convseg-msr-nocrf-noembed_20200110_153524.zip'
'Conv model (:cite:`wang-xu-2017-convolutional`) trained on sighan2005 msr dataset.'
CTB6_CONVSEG = HANLP_URL + 'tok/ctb6_convseg_nowe_nocrf_20200110_004046.zip'
'Conv model (:cite:`wang-xu-2017-convolutional`) trained on CTB6 dataset.'
PKU_NAME_MERGED_SIX_MONTHS_CONVSEG = HANLP_URL + 'tok/pku98_6m_conv_ngram_20200110_134736.zip'
'Conv model (:cite:`wang-xu-2017-convolutional`) trained on pku98 six months dataset with familiy name and given name merged into one unit.'
LARGE_ALBERT_BASE = HANLP_URL + 'tok/large_corpus_cws_albert_base_20211228_160926.zip'
'ALBERT model (:cite:`Lan2020ALBERT:`) trained on the largest CWS dataset in the world.'
SIGHAN2005_PKU_BERT_BASE_ZH = HANLP_URL + 'tok/sighan2005_pku_bert_base_zh_20201231_141130.zip'
'BERT model (:cite:`devlin-etal-2019-bert`) trained on sighan2005 pku dataset.'
COARSE_ELECTRA_SMALL_ZH = HANLP_URL + 'tok/coarse_electra_small_20220220_013548.zip'
'Electra (:cite:`clark2020electra`) small model trained on coarse-grained CWS corpora. Its performance is P=96.97% R=96.87% F1=96.92% which is ' \
'much higher than that of MTL model '
FINE_ELECTRA_SMALL_ZH = HANLP_URL + 'tok/fine_electra_small_20220217_190117.zip'
'Electra (:cite:`clark2020electra`) small model trained on fine-grained CWS corpora. Its performance is P=97.44% R=97.40% F1=97.42% which is ' \
'much higher than that of MTL model '
CTB9_TOK_ELECTRA_SMALL = HANLP_URL + 'tok/ctb9_electra_small_20220215_205427.zip'
'Electra (:cite:`clark2020electra`) small model trained on CTB9. Its performance is P=97.15% R=97.36% F1=97.26% which is ' \
'much higher than that of MTL model '
# Will be filled up during runtime
ALL = {}
| [] |
bopopescu/webrtc-streaming-node | third_party/webrtc/src/chromium/src/tools/swarming_client/tests/logging_utils_test.py | 727a441204344ff596401b0253caac372b714d91 | #!/usr/bin/env python
# Copyright 2015 The Swarming Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0 that
# can be found in the LICENSE file.
import logging
import os
import subprocess
import sys
import tempfile
import shutil
import unittest
import re
THIS_FILE = os.path.abspath(__file__)
sys.path.insert(0, os.path.dirname(os.path.dirname(THIS_FILE)))
from utils import logging_utils
# PID YYYY-MM-DD HH:MM:SS.MMM
_LOG_HEADER = r'^%d \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d' % os.getpid()
_LOG_HEADER_PID = r'^\d+ \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d\.\d\d\d'
_PHASE = 'LOGGING_UTILS_TESTS_PHASE'
def call(phase, cwd):
"""Calls itself back."""
env = os.environ.copy()
env[_PHASE] = phase
return subprocess.call([sys.executable, '-u', THIS_FILE], env=env, cwd=cwd)
class Test(unittest.TestCase):
def setUp(self):
super(Test, self).setUp()
self.tmp = tempfile.mkdtemp(prefix='logging_utils')
def tearDown(self):
try:
shutil.rmtree(self.tmp)
finally:
super(Test, self).tearDown()
def test_capture(self):
root = logging.RootLogger(logging.DEBUG)
with logging_utils.CaptureLogs('foo', root) as log:
root.debug('foo')
result = log.read()
expected = _LOG_HEADER + ': DEBUG foo\n$'
if sys.platform == 'win32':
expected = expected.replace('\n', '\r\n')
self.assertTrue(re.match(expected, result), (expected, result))
def test_prepare_logging(self):
root = logging.RootLogger(logging.DEBUG)
filepath = os.path.join(self.tmp, 'test.log')
logging_utils.prepare_logging(filepath, root)
root.debug('foo')
with open(filepath, 'rb') as f:
result = f.read()
# It'd be nice to figure out a way to ensure it's properly in UTC but it's
# tricky to do reliably.
expected = _LOG_HEADER + ' D: foo\n$'
self.assertTrue(re.match(expected, result), (expected, result))
def test_rotating(self):
# Create a rotating log. Create a subprocess then delete the file. Make sure
# nothing blows up.
# Everything is done in a child process because the called functions mutate
# the global state.
self.assertEqual(0, call('test_rotating_phase_1', cwd=self.tmp))
self.assertEqual({'shared.1.log'}, set(os.listdir(self.tmp)))
with open(os.path.join(self.tmp, 'shared.1.log'), 'rb') as f:
lines = f.read().splitlines()
expected = [
r' I: Parent1',
r' I: Child1',
r' I: Child2',
r' I: Parent2',
]
for e, l in zip(expected, lines):
ex = _LOG_HEADER_PID + e + '$'
self.assertTrue(re.match(ex, l), (ex, l))
self.assertEqual(len(expected), len(lines))
def test_rotating_phase_1():
logging_utils.prepare_logging('shared.log')
logging.info('Parent1')
r = call('test_rotating_phase_2', None)
logging.info('Parent2')
return r
def test_rotating_phase_2():
# Simulate rotating the log.
logging_utils.prepare_logging('shared.log')
logging.info('Child1')
os.rename('shared.log', 'shared.1.log')
logging.info('Child2')
return 0
def main():
phase = os.environ.get(_PHASE)
if phase:
return getattr(sys.modules[__name__], phase)()
verbose = '-v' in sys.argv
logging.basicConfig(level=logging.DEBUG if verbose else logging.ERROR)
unittest.main()
if __name__ == '__main__':
sys.exit(main())
| [((323, 348), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (338, 348), False, 'import os\n'), ((543, 554), 'os.getpid', 'os.getpid', ([], {}), '()\n', (552, 554), False, 'import os\n'), ((721, 738), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (736, 738), False, 'import os\n'), ((770, 838), 'subprocess.call', 'subprocess.call', (["[sys.executable, '-u', THIS_FILE]"], {'env': 'env', 'cwd': 'cwd'}), "([sys.executable, '-u', THIS_FILE], env=env, cwd=cwd)\n", (785, 838), False, 'import subprocess\n'), ((2726, 2769), 'utils.logging_utils.prepare_logging', 'logging_utils.prepare_logging', (['"""shared.log"""'], {}), "('shared.log')\n", (2755, 2769), False, 'from utils import logging_utils\n'), ((2772, 2795), 'logging.info', 'logging.info', (['"""Parent1"""'], {}), "('Parent1')\n", (2784, 2795), False, 'import logging\n'), ((2840, 2863), 'logging.info', 'logging.info', (['"""Parent2"""'], {}), "('Parent2')\n", (2852, 2863), False, 'import logging\n'), ((2939, 2982), 'utils.logging_utils.prepare_logging', 'logging_utils.prepare_logging', (['"""shared.log"""'], {}), "('shared.log')\n", (2968, 2982), False, 'from utils import logging_utils\n'), ((2985, 3007), 'logging.info', 'logging.info', (['"""Child1"""'], {}), "('Child1')\n", (2997, 3007), False, 'import logging\n'), ((3010, 3049), 'os.rename', 'os.rename', (['"""shared.log"""', '"""shared.1.log"""'], {}), "('shared.log', 'shared.1.log')\n", (3019, 3049), False, 'import os\n'), ((3052, 3074), 'logging.info', 'logging.info', (['"""Child2"""'], {}), "('Child2')\n", (3064, 3074), False, 'import logging\n'), ((3110, 3132), 'os.environ.get', 'os.environ.get', (['_PHASE'], {}), '(_PHASE)\n', (3124, 3132), False, 'import os\n'), ((3227, 3297), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '(logging.DEBUG if verbose else logging.ERROR)'}), '(level=logging.DEBUG if verbose else logging.ERROR)\n', (3246, 3297), False, 'import logging\n'), ((3300, 3315), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3313, 3315), False, 'import unittest\n'), ((384, 410), 'os.path.dirname', 'os.path.dirname', (['THIS_FILE'], {}), '(THIS_FILE)\n', (399, 410), False, 'import os\n'), ((936, 976), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""logging_utils"""'}), "(prefix='logging_utils')\n", (952, 976), False, 'import tempfile\n'), ((1125, 1158), 'logging.RootLogger', 'logging.RootLogger', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (1143, 1158), False, 'import logging\n'), ((1505, 1538), 'logging.RootLogger', 'logging.RootLogger', (['logging.DEBUG'], {}), '(logging.DEBUG)\n', (1523, 1538), False, 'import logging\n'), ((1554, 1588), 'os.path.join', 'os.path.join', (['self.tmp', '"""test.log"""'], {}), "(self.tmp, 'test.log')\n", (1566, 1588), False, 'import os\n'), ((1593, 1638), 'utils.logging_utils.prepare_logging', 'logging_utils.prepare_logging', (['filepath', 'root'], {}), '(filepath, root)\n', (1622, 1638), False, 'from utils import logging_utils\n'), ((1015, 1038), 'shutil.rmtree', 'shutil.rmtree', (['self.tmp'], {}), '(self.tmp)\n', (1028, 1038), False, 'import shutil\n'), ((1168, 1206), 'utils.logging_utils.CaptureLogs', 'logging_utils.CaptureLogs', (['"""foo"""', 'root'], {}), "('foo', root)\n", (1193, 1206), False, 'from utils import logging_utils\n'), ((1411, 1437), 're.match', 're.match', (['expected', 'result'], {}), '(expected, result)\n', (1419, 1437), False, 'import re\n'), ((1891, 1917), 're.match', 're.match', (['expected', 'result'], {}), '(expected, result)\n', (1899, 1917), 
False, 'import re\n'), ((2288, 2308), 'os.listdir', 'os.listdir', (['self.tmp'], {}), '(self.tmp)\n', (2298, 2308), False, 'import os\n'), ((2325, 2363), 'os.path.join', 'os.path.join', (['self.tmp', '"""shared.1.log"""'], {}), "(self.tmp, 'shared.1.log')\n", (2337, 2363), False, 'import os\n'), ((2619, 2634), 're.match', 're.match', (['ex', 'l'], {}), '(ex, l)\n', (2627, 2634), False, 'import re\n')] |
HrishikV/ineuron_inceome_prediction_internship | model_selection.py | 4a97a7f29d80198f394fcfd880cc5250fe2a0d1e | from featur_selection import df,race,occupation,workclass,country
import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import cross_val_score,KFold
from sklearn.linear_model import LogisticRegression
from imblearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from imblearn.combine import SMOTETomek
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier,AdaBoostClassifier
from sklearn.neighbors import KNeighborsClassifier
from catboost import CatBoostClassifier
from xgboost import XGBClassifier
from sklearn.svm import SVC
from matplotlib import pyplot as plt
import seaborn as sns
df1=df.copy()
salary=df1['salary'].reset_index(drop=True)
df1=df1.drop(['salary'],axis=1)
def concat_dataframes(data):
dataframe = pd.concat([data, workclass.iloc[data.index, :], race.iloc[data.index , :], occupation.iloc[data.index, :], country.iloc[data.index, :]], axis = 1)
dataframe = dataframe.dropna()
dataframe = dataframe.reset_index(drop=True)
return dataframe
df1= concat_dataframes(df1)
features=['age_logarthmic','hours_per_week']
scaler = ColumnTransformer(transformers = [('scale_num_features', StandardScaler(), features)], remainder='passthrough')
models = [LogisticRegression(), SVC(), AdaBoostClassifier(), RandomForestClassifier(), XGBClassifier(),DecisionTreeClassifier(), KNeighborsClassifier(), CatBoostClassifier()]
model_labels = ['LogisticReg.','SVC','AdaBoost','RandomForest','Xgboost','DecisionTree','KNN', 'CatBoost']
mean_validation_f1_scores = []
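# Wrapping the scaler, SMOTETomek resampling, and the classifier in one imblearn
# Pipeline means the resampling is re-fit inside every cross-validation fold, so
# synthetic samples never leak into the corresponding validation split.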
for model in models:
data_pipeline = Pipeline(steps = [
('scaler', scaler),
('resample', SMOTETomek()),
('model', model)
])
mean_validation_f1 = float(cross_val_score(data_pipeline, df1, salary, cv=KFold(n_splits=10), scoring='f1',n_jobs=-1).mean())
mean_validation_f1_scores.append(mean_validation_f1)
print(mean_validation_f1_scores)
fig, axes = plt.subplots(nrows = 2, ncols = 1, figsize = (15,8))
sns.set_style('dark')
sns.barplot(y = model_labels ,x = mean_validation_f1_scores, ax=axes[0])
axes[0].grid(True, color='k')
sns.set_style('whitegrid')
sns.lineplot(x = model_labels, y = mean_validation_f1_scores)
axes[1].grid(True, color='k')
fig.show() | [((706, 715), 'featur_selection.df.copy', 'df.copy', ([], {}), '()\n', (713, 715), False, 'from featur_selection import df, race, occupation, workclass, country\n'), ((2143, 2190), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': '(2)', 'ncols': '(1)', 'figsize': '(15, 8)'}), '(nrows=2, ncols=1, figsize=(15, 8))\n', (2155, 2190), True, 'from matplotlib import pyplot as plt\n'), ((2197, 2218), 'seaborn.set_style', 'sns.set_style', (['"""dark"""'], {}), "('dark')\n", (2210, 2218), True, 'import seaborn as sns\n'), ((2219, 2287), 'seaborn.barplot', 'sns.barplot', ([], {'y': 'model_labels', 'x': 'mean_validation_f1_scores', 'ax': 'axes[0]'}), '(y=model_labels, x=mean_validation_f1_scores, ax=axes[0])\n', (2230, 2287), True, 'import seaborn as sns\n'), ((2323, 2349), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (2336, 2349), True, 'import seaborn as sns\n'), ((2350, 2407), 'seaborn.lineplot', 'sns.lineplot', ([], {'x': 'model_labels', 'y': 'mean_validation_f1_scores'}), '(x=model_labels, y=mean_validation_f1_scores)\n', (2362, 2407), True, 'import seaborn as sns\n'), ((837, 998), 'pandas.concat', 'pd.concat', (['[data, workclass.iloc[(data.index), :], race.iloc[(data.index), :],\n occupation.iloc[(data.index), :], country.iloc[(data.index), :]]'], {'axis': '(1)'}), '([data, workclass.iloc[(data.index), :], race.iloc[(data.index), :\n ], occupation.iloc[(data.index), :], country.iloc[(data.index), :]], axis=1\n )\n', (846, 998), True, 'import pandas as pd\n'), ((1374, 1394), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (1392, 1394), False, 'from sklearn.linear_model import LogisticRegression\n'), ((1396, 1401), 'sklearn.svm.SVC', 'SVC', ([], {}), '()\n', (1399, 1401), False, 'from sklearn.svm import SVC\n'), ((1403, 1423), 'sklearn.ensemble.AdaBoostClassifier', 'AdaBoostClassifier', ([], {}), '()\n', (1421, 1423), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n'), ((1425, 1449), 'sklearn.ensemble.RandomForestClassifier', 'RandomForestClassifier', ([], {}), '()\n', (1447, 1449), False, 'from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n'), ((1451, 1466), 'xgboost.XGBClassifier', 'XGBClassifier', ([], {}), '()\n', (1464, 1466), False, 'from xgboost import XGBClassifier\n'), ((1467, 1491), 'sklearn.tree.DecisionTreeClassifier', 'DecisionTreeClassifier', ([], {}), '()\n', (1489, 1491), False, 'from sklearn.tree import DecisionTreeClassifier\n'), ((1493, 1515), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {}), '()\n', (1513, 1515), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((1517, 1537), 'catboost.CatBoostClassifier', 'CatBoostClassifier', ([], {}), '()\n', (1535, 1537), False, 'from catboost import CatBoostClassifier\n'), ((1308, 1324), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1322, 1324), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1842, 1854), 'imblearn.combine.SMOTETomek', 'SMOTETomek', ([], {}), '()\n', (1852, 1854), False, 'from imblearn.combine import SMOTETomek\n'), ((1991, 2009), 'sklearn.model_selection.KFold', 'KFold', ([], {'n_splits': '(10)'}), '(n_splits=10)\n', (1996, 2009), False, 'from sklearn.model_selection import cross_val_score, KFold\n')] |
blazelibs/blazeweb | tests/apps/newlayout/tasks/init_data.py | b120a6a2e38c8b53da2b73443ff242e2d1438053 | from __future__ import print_function
def action_010():
print('doit')
| [] |
apie/advent-of-code | 2021/d8b_bits.py | c49abec01b044166a688ade40ebb1e642f0e5ce0 | #!/usr/bin/env python3
import pytest
import fileinput
from os.path import splitext, abspath
F_NAME = 'd8'
#implement day8 using bits
def find_ones(d):
'''count number of ones in binary number'''
ones = 0
while d > 0:
ones += d & 1
d >>= 1
return ones
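# For example, find_ones(0b0101101) == 4; an equivalent built-in approach is bin(d).count("1").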
# Assign each segment a 'wire'.
lut = {
'a':0b0000001,
'b':0b0000010,
'c':0b0000100,
'd':0b0001000,
'e':0b0010000,
'f':0b0100000,
'g':0b1000000,
}
def solve_line(line):
def solve_output_val(output_values):
        '''Convert each output pattern to its bit pattern, look it up in the mapping, and append the matched digit to build the decimal output value.'''
output = 0
for o in output_values:
b_val = sum(lut[c] for c in o)
for k,v in mapping.items():
if v == b_val:
output = output*10 + k
break
else:
raise Exception(b_val, 'not found')
return output
def found(digit, bit_pattern):
mapping[digit] = bit_pattern
bpatterns.remove(bit_pattern)
signal_pattern, output_value = line.split(' | ')
# Convert letter string to binary pattern
bpatterns = {
sum(lut[c] for c in p)
for p in signal_pattern.split()
}
## Search for each digit and if found, remove it from bpatterns and add the digit to the mapping.
######################################
mapping = {}
# 1,4,7,8 all have a unique count of segments. Find them.
for bp in list(bpatterns):
if find_ones(bp) == 2:
found(1, bp)
elif find_ones(bp) == 4:
found(4, bp)
elif find_ones(bp) == 3:
found(7, bp)
elif find_ones(bp) == 7:
found(8, bp)
# Find 0, 6, 9. All have 6 segments
for bp in list(bpatterns):
if find_ones(bp) != 6:
continue
#is 4 contained within p, then it is 9
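        # "Contained" here is a bitwise subset test: mapping[4] & bp can never exceed
        # mapping[4], so the >= comparisons below are effectively equality checks,
        # i.e. every segment of the known digit is also lit in bp.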
if mapping[4] & bp >= mapping[4]:
found(9, bp)
#is 1 contained within p, then it is 0
elif mapping[1] & bp >= mapping[1]:
found(0, bp)
else: # 6 is left
found(6, bp)
#is p contained within 6, then it is 5
for bp in bpatterns:
if mapping[6] & bp >= bp:
found(5, bp)
break
#is p contained within 9, and it is not 8 or 5, then it is 3
for bp in bpatterns:
if mapping[9] & bp >= bp:
found(3, bp)
break
assert len(bpatterns) == 1, bpatterns
#what is left is 2
for bp in bpatterns:
found(2, bp)
break
assert len(bpatterns) == 0, bpatterns
return solve_output_val(output_value.split())
def answer(lines):
return sum(solve_line(line) for line in map(str.strip, lines))
@pytest.fixture
def example_input1():
return fileinput.input(F_NAME + '.test.1')
def test_answer1(example_input1):
assert answer(example_input1) == 5353
@pytest.fixture
def example_input():
return fileinput.input(F_NAME + '.test')
def test_answer(example_input):
assert answer(example_input) == 61229
if __name__ == '__main__':
import timeit
start = timeit.default_timer()
filename = fileinput.input(F_NAME + '.input')
ans = answer(filename)
print('Answer:', ans)
duration = timeit.default_timer()-start
print(f'Execution time: {duration:.3f} s')
| [((2843, 2878), 'fileinput.input', 'fileinput.input', (["(F_NAME + '.test.1')"], {}), "(F_NAME + '.test.1')\n", (2858, 2878), False, 'import fileinput\n'), ((3006, 3039), 'fileinput.input', 'fileinput.input', (["(F_NAME + '.test')"], {}), "(F_NAME + '.test')\n", (3021, 3039), False, 'import fileinput\n'), ((3174, 3196), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3194, 3196), False, 'import timeit\n'), ((3212, 3246), 'fileinput.input', 'fileinput.input', (["(F_NAME + '.input')"], {}), "(F_NAME + '.input')\n", (3227, 3246), False, 'import fileinput\n'), ((3315, 3337), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (3335, 3337), False, 'import timeit\n')] |
rizkiailham/two-stream-action-recognition-1 | frame_dataloader/spatial_dataloader.py | 01221f668e62eb26e3593f4ecd3f257b6b6979ab | """
********************************
* Created by mohammed-alaa *
********************************
Spatial dataloader implementing the Sequence API from Keras (defines how to load a single item).
It loads batches of images; each iteration returns [batch_size, height, width, 3] ndarrays.
"""
import copy
import random
import cv2
import numpy as np
import tensorflow.keras as keras
from .UCF_splitting_kernel import *
from .helpers import get_training_augmenter, get_validation_augmenter
class SpatialSequence(keras.utils.Sequence):
def __init__(self, data_to_load, data_root_path, batch_size, is_training, augmenter):
"""get data structure to load data"""
# list of (video names,frame/max_frame,label)
self.data_to_load = copy.deepcopy(data_to_load)
self.batch_size = batch_size
self.is_training = is_training
self.augmenter = copy.deepcopy(augmenter)
self.data_root_path = data_root_path
self.video_names, self.frames, self.labels = [list(one_of_three_tuples) for one_of_three_tuples in zip(*self.data_to_load)] # three lists
def __len__(self):
"""Denotes the number of batches per epoch"""
return (len(self.video_names) + self.batch_size - 1) // self.batch_size # ceiling div
def get_actual_length(self):
"""Denotes the total number of samples"""
return len(self.video_names)
def __getitem__(self, batch_start):
"""Gets one batch"""
batch_video_names = self.video_names[batch_start * self.batch_size:(batch_start + 1) * self.batch_size]
batch_frames = self.frames[batch_start * self.batch_size:(batch_start + 1) * self.batch_size]
batch_y = np.array(self.labels[batch_start * self.batch_size:(batch_start + 1) * self.batch_size])
batch_x = [] # could be less or equal batch size
#
for vid_id, _ in enumerate(batch_y):
if self.is_training: # max frame is given
frame_id = random.randint(1, batch_frames[vid_id]) # random frame (one based)
else:
frame_id = batch_frames[vid_id] # just as selected
batch_x.append(
cv2.cvtColor(cv2.imread(os.path.join(self.data_root_path, "v_" + batch_video_names[vid_id], 'frame{}'.format(str(frame_id).zfill(6)) + '.jpg')), cv2.COLOR_BGR2RGB)
)
if self.is_training:
return np.array(self.augmenter.augment_images(batch_x), dtype=np.float32) / 255.0, batch_y
else:
# no label needed since (test_video_to_label mapping) (dictionary of name to label) is returned
return batch_video_names, np.array(self.augmenter.augment_images(batch_x), dtype=np.float32) / 255.0
def shuffle_and_reset(self):
"""
new data for the next epoch
"""
random.shuffle(self.data_to_load)
self.video_names, self.frames, self.labels = [list(one_of_three_tuples) for one_of_three_tuples in zip(*self.data_to_load)] # shuffle all
class SpatialDataLoader:
def __init__(self, batch_size, testing_samples_per_video, width, height, log_stream=open("/tmp/null.log", "w"), augmenter_level=1, data_root_path='./jpegs_256/', ucf_list_path='./UCF_list/', ucf_split='01'):
"""
get the mapping and initialize the augmenter
"""
self.batch_size = batch_size
self.width, self.height = width, height
self.data_root_path = data_root_path
self.testing_samples_per_video = testing_samples_per_video
self.log_stream = log_stream
# split the training and testing videos
data_util_ = DataUtil(path=ucf_list_path, split=ucf_split)
self.train_video_to_label, self.test_video_to_label = data_util_.get_train_test_video_to_label_mapping() # name without v_ or .avi and small s .. name to numeric label starts at 0
# get video frames
self.video_frame_count = data_util_.get_video_frame_count() # name without v_ or .avi and small s
self.augmenter_level = augmenter_level
def run(self):
"""
get the data structure for training and validation
"""
train_loader = self.get_training_loader()
val_loader = self.get_testing_loader()
return train_loader, val_loader, self.test_video_to_label
def get_training_data_structure(self):
"""
get the data structure for training
"""
training_data_structure = [] # list of (video names,frame/max_frame,label)
for video_name in self.train_video_to_label: # sample from the whole video frames
training_data_structure.append((video_name, self.video_frame_count[video_name], self.train_video_to_label[video_name]))
return training_data_structure
def get_testing_data_structure(self):
"""
get the data structure for validation
"""
test_data_structure = [] # list of (video names,frame/max_frame,label)
for video_name in self.test_video_to_label:
nb_frame = self.video_frame_count[video_name]
interval = nb_frame // self.testing_samples_per_video
if interval == 0: # for videos shorter than self.testing_samples_per_video
interval = 1
# range is exclusive add one to be inclusive
# 1 > self.testing_samples_per_video * interval
for frame_idx in range(1, min(self.testing_samples_per_video * interval, nb_frame) + 1, interval):
test_data_structure.append((video_name, frame_idx, self.test_video_to_label[video_name]))
return test_data_structure
def get_training_loader(self):
"""
an instance of sequence loader for spatial model for parallel dataloading using keras sequence
"""
loader = SpatialSequence(data_to_load=self.get_training_data_structure(),
data_root_path=self.data_root_path,
batch_size=self.batch_size,
is_training=True,
augmenter=get_training_augmenter(height=self.height, width=self.width, augmenter_level=self.augmenter_level),
)
print('==> Training data :', len(loader.data_to_load), 'videos', file=self.log_stream)
print('==> Training data :', len(loader.data_to_load), 'videos')
return loader
def get_testing_loader(self):
"""
an instance of sequence loader for spatial model for parallel dataloading using keras sequence
"""
loader = SpatialSequence(data_to_load=self.get_testing_data_structure(),
data_root_path=self.data_root_path,
batch_size=self.batch_size,
is_training=False,
augmenter=get_validation_augmenter(height=self.height, width=self.width),
)
print('==> Validation data :', len(loader.data_to_load), 'frames', file=self.log_stream)
print('==> Validation data :', len(loader.data_to_load), 'frames')
return loader
if __name__ == '__main__':
    data_loader = SpatialDataLoader(batch_size=64,  # data_root_path="data",
                                      ucf_split='01',
                                      testing_samples_per_video=19, width=224, height=224)
train_loader, test_loader, test_video_level_label = data_loader.run()
print(len(train_loader))
print(len(test_loader))
print(train_loader.get_actual_length())
print(test_loader.get_actual_length())
    print(train_loader[0][0].shape, train_loader[0][1].shape)
# import tqdm
# progress = tqdm.tqdm(train_loader.get_epoch_generator(), total=len(train_loader))
# for (sampled_frame, label) in progress:
# pass
import matplotlib.pyplot as plt
# preview raw data
def preview(data, labels):
# 3 channels
fig, axeslist = plt.subplots(ncols=8, nrows=8, figsize=(10, 10))
for i, sample in enumerate(data):
axeslist.ravel()[i].imshow(data[i])
axeslist.ravel()[i].set_title(labels[i])
axeslist.ravel()[i].set_axis_off()
plt.subplots_adjust(wspace=.4, hspace=.4)
print("train sample")
    for batch in (train_loader[i] for i in range(len(train_loader))):
print(batch[0].shape, batch[1].shape)
print(batch[1])
preview(batch[0], batch[1])
break
print("test sample") # same name will be displayed testing_samples_per_video with no shuffling
    for batch_idx in range(len(test_loader)):
        video_names, images = test_loader[batch_idx]
        labels = [test_video_level_label[name] for name in video_names]
        print(images.shape, len(labels))
        print(video_names, labels)
        preview(images, labels)
        break
| [((760, 787), 'copy.deepcopy', 'copy.deepcopy', (['data_to_load'], {}), '(data_to_load)\n', (773, 787), False, 'import copy\n'), ((890, 914), 'copy.deepcopy', 'copy.deepcopy', (['augmenter'], {}), '(augmenter)\n', (903, 914), False, 'import copy\n'), ((1705, 1798), 'numpy.array', 'np.array', (['self.labels[batch_start * self.batch_size:(batch_start + 1) * self.batch_size]'], {}), '(self.labels[batch_start * self.batch_size:(batch_start + 1) * self\n .batch_size])\n', (1713, 1798), True, 'import numpy as np\n'), ((2837, 2870), 'random.shuffle', 'random.shuffle', (['self.data_to_load'], {}), '(self.data_to_load)\n', (2851, 2870), False, 'import random\n'), ((8155, 8203), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'ncols': '(8)', 'nrows': '(8)', 'figsize': '(10, 10)'}), '(ncols=8, nrows=8, figsize=(10, 10))\n', (8167, 8203), True, 'import matplotlib.pyplot as plt\n'), ((8404, 8447), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'wspace': '(0.4)', 'hspace': '(0.4)'}), '(wspace=0.4, hspace=0.4)\n', (8423, 8447), True, 'import matplotlib.pyplot as plt\n'), ((1990, 2029), 'random.randint', 'random.randint', (['(1)', 'batch_frames[vid_id]'], {}), '(1, batch_frames[vid_id])\n', (2004, 2029), False, 'import random\n')] |
Svolcano/python_exercise | dianhua/worker/crawler/china_mobile/hunan/base_request_param.py | a50e05891cc7f1fbb40ebcae324b09b6a14473d2 | # -*- coding:utf-8 -*-
"""
@version: v1.0
@author: xuelong.liu
@license: Apache Licence
@contact: [email protected]
@software: PyCharm
@file: base_request_param.py
@time: 12/21/16 6:48 PM
"""
class RequestParam(object):
"""
    Request-related URLs and parameters
"""
# URL
START_URL = "https://www.hn.10086.cn/service/static/componant/login.html"
# GET_CAPTCHA_URL = "http://www.hn.10086.cn/service/ics/servlet/ImageServlet"
GET_CAPTCHA_URL = "https://www.hn.10086.cn/service/ics/login/sendSms"
# GET_CAPTCHA_URL = "http://www.hn.10086.cn/newservice/ics/servlet/ImageServlet?random=0.14531555527237483"
LOGIN_URL = "https://www.hn.10086.cn/service/ics/login/SSOLogin"
# GET_SMS_URL = "http://www.hn.10086.cn/newservice/ics/componant/initSendHattedCode?requestTel=%s&ajaxSubmitType=post&ajax_randomcode=0.5158618472543544"
GET_SMS_URL_READY = "https://www.hn.10086.cn/service/ics/componant/initTelQCellCore?tel=%s&ajaxSubmitType=post&ajax_randomcode=0.9461358208494027"
GET_SMS_URL = "https://www.hn.10086.cn/service/ics/componant/initSendHattedCode?requestTel=%s&ajaxSubmitType=post&ajax_randomcode=0.9461358208494027"
# SMS_URL = "http://www.hn.10086.cn/newservice/ics/componant/initSmsCodeAndServicePwd"
SMS_URL = "https://www.hn.10086.cn/service/ics/componant/initSmsCodeAndServicePwd?smsCode=%s&servicePwd=NaN&requestTel=%s&ajaxSubmitType=post&ajax_randomcode=0.012645535304207867"
GET_CAL_LOG = "https://www.hn.10086.cn/service/ics/detailBillQuery/queryDetailBill"
GET_USER_INFO = "https://www.hn.10086.cn/service/ics/basicInfo/queryUserBasicInfo"
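    # Usage sketch (assumption, not from the original module): the %s placeholders
    # above are filled with the target mobile number before the request is sent, e.g.
    #   sms_url = RequestParam.GET_SMS_URL % mobile_number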
| [] |
jack-beach/AdventOfCode2019 | day02/puzzle2.py | a8ac53eaf03cd7595deb2a9aa798a2d17c21c513 | # stdlib imports
import copy
# vendor imports
import click
@click.command()
@click.argument("input_file", type=click.File("r"))
def main(input_file):
"""Put your puzzle execution code here"""
# Convert the comma-delimited string of numbers into a list of ints
masterRegister = list(
map(lambda op: int(op), input_file.read().strip().split(","))
)
def execute(noun, verb):
# Create a local copy of the register for this execution
register = copy.deepcopy(masterRegister)
# Inject the noun and verb
register[1] = noun
register[2] = verb
# We will start reading the opcodes at position 0
pointer = 0
# Loop infinitely until we reach the termination instruction
while True:
# Get the code at the current read position
code = register[pointer]
# Code 99 means immediate termination
if code == 99:
break
# Code 1 is addition
elif code == 1:
# Get register addresses
addendAPointer = register[pointer + 1]
addendBPointer = register[pointer + 2]
sumPointer = register[pointer + 3]
# Perform the addition
register[sumPointer] = (
register[addendAPointer] + register[addendBPointer]
)
# Advance the code position by 4
pointer += 4
# Code 2 is multiplication
elif code == 2:
# Get register addresses
factorAPointer = register[pointer + 1]
factorBPointer = register[pointer + 2]
productPointer = register[pointer + 3]
# Perform the addition
register[productPointer] = (
register[factorAPointer] * register[factorBPointer]
)
# Advance the code position by 4
pointer += 4
# Unknown opcode means there was an error
else:
raise RuntimeError(
f"Unknown opcode {code} at position {pointer}"
)
# Return the result
return register[0]
# Iterate through all the possible combinations until the target is found
target = 19690720
found = None
for noun in range(100):
for verb in range(100):
result = execute(noun, verb)
if result == target:
found = (noun, verb)
break
if found:
break
# Calculate the final result
print("RESULT:", 100 * found[0] + found[1])
# Execute cli function on main
if __name__ == "__main__":
main()
| [((63, 78), 'click.command', 'click.command', ([], {}), '()\n', (76, 78), False, 'import click\n'), ((488, 517), 'copy.deepcopy', 'copy.deepcopy', (['masterRegister'], {}), '(masterRegister)\n', (501, 517), False, 'import copy\n'), ((114, 129), 'click.File', 'click.File', (['"""r"""'], {}), "('r')\n", (124, 129), False, 'import click\n')] |
ionos-cloud/ionos-enterprise-sdk-python | ionosenterprise/items/backupunit.py | 6b601990098ab36289a251406fb093489b647f1d | class BackupUnit(object):
def __init__(self, name, password=None, email=None):
"""
BackupUnit class initializer.
        :param name: A name of that resource (only alphanumeric characters are acceptable)
:type name: ``str``
        :param password: The password associated with that resource.
:type password: ``str``
:param email: The email associated with the backup unit.
            Bear in mind that this email does not have to be the same email as the user's.
:type email: ``str``
"""
self.name = name
self.password = password
self.email = email
def __repr__(self):
return ('<BackupUnit: name=%s, password=%s, email=%s>'
% (self.name, str(self.password), self.email))
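

# Minimal usage sketch (not part of the original module); the name, password and
# email values below are purely illustrative.
if __name__ == "__main__":
    unit = BackupUnit("backupunit01", password="s3cr3tPass", email="admin@example.com")
    print(unit)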
| [] |
X-lab-3D/PANDORA | install.py | 02912a03022e814ff8e0ae8ec52f5075f0e2e381 | import os
dirs = [
'./PANDORA_files', './PANDORA_files/data', './PANDORA_files/data/csv_pkl_files',
'./PANDORA_files/data/csv_pkl_files/mhcseqs', './PANDORA_files/data/PDBs',
'./PANDORA_files/data/PDBs/pMHCI', './PANDORA_files/data/PDBs/pMHCII',
'./PANDORA_files/data/PDBs/Bad', './PANDORA_files/data/PDBs/Bad/pMHCI',
'./PANDORA_files/data/PDBs/Bad/pMHCII', './PANDORA_files/data/PDBs/IMGT_retrieved',
'./PANDORA_files/data/outputs',
'./test/test_data/PDBs/Bad','./test/test_data/PDBs/Bad/pMHCI',
'./test/test_data/PDBs/Bad/pMHCII', './test/test_data/csv_pkl_files'
]
for D in dirs:
try:
os.mkdir(D)
except OSError:
print('Could not make directory: ' + D)
# Install dependenciess
# os.popen("alias KEY_MODELLER='XXXX'").read()
# os.popen("conda install -y -c salilab modeller").read()
# os.popen("conda install -y -c bioconda muscle").read()
# os.popen("pip install -e ./").read()
| [((675, 686), 'os.mkdir', 'os.mkdir', (['D'], {}), '(D)\n', (683, 686), False, 'import os\n')] |
MainaKamau92/apexselftaught | app/auth/views.py | 9f9a3bd1ba23e57a12e173730917fb9bb7003707 | # app/auth/views.py
import os
from flask import flash, redirect, render_template, url_for, request
from flask_login import login_required, login_user, logout_user, current_user
from . import auth
from .forms import (LoginForm, RegistrationForm,
RequestResetForm, ResetPasswordForm)
from .. import db, mail
from ..models import User
from flask_mail import Message
from werkzeug.security import generate_password_hash
@auth.route('/register/', methods=['GET', 'POST'])
def register():
"""
Handle requests to the /register route
    Add a user to the database through the registration form
"""
logout_user()
form = RegistrationForm()
if form.validate_on_submit():
user = User(first_name=form.first_name.data,
last_name=form.last_name.data,
email=form.email.data,
username=form.username.data,
password=form.password.data,
is_freelancer=form.freelancer.data,
is_employer=form.employer.data)
# add user to the database
db.session.add(user)
db.session.commit()
flash(f'You have successfully registered! You may now login', 'success')
# redirect to the login page
return redirect(url_for('auth.login'))
# load registration form
return render_template('auth/register.html', form=form, title='Register')
@auth.route('/login/', methods=['GET', 'POST'])
def login():
"""
Handle requests to the /login route
    Log a user in through the login form
"""
if current_user.is_authenticated:
if current_user.is_freelancer == True and current_user.is_employer == False:
# redirect to the freelancer dashboard page after login
return redirect(url_for('freelancer.dashboard'))
elif current_user.is_employer == True and current_user.is_freelancer == False:
# redirect to the employer dashboard page after login
return redirect(url_for('employer.dashboard'))
elif current_user.is_employer and current_user.is_freelancer:
# redirect to the employer dashboard page after login
return redirect(url_for('employer.dashboard'))
else:
# redirect to the admin dashboard
return redirect(url_for('admin.admin_dashboard'))
form = LoginForm()
if form.validate_on_submit():
# check whether user exists in the database
# the password entered matches the password in the database
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, remember=form.remember.data)
#flash(f'Logged In', 'success')
if user.is_freelancer == True and user.is_employer == False:
# redirect to the freelancer dashboard page after login
return redirect(url_for('freelancer.dashboard'))
elif user.is_employer == True and user.is_freelancer == False:
# redirect to the employer dashboard page after login
return redirect(url_for('employer.dashboard'))
elif user.is_employer and user.is_freelancer:
# redirect to the employer dashboard page after login
return redirect(url_for('employer.dashboard'))
else:
# redirect to the admin dashboard
return redirect(url_for('admin.admin_dashboard'))
flash(f'Invalid Credentials', 'danger')
# load login template
return render_template('auth/login.html', form=form, title='Login')
@auth.route('/logout/', methods=['GET', 'POST'])
@login_required
def logout():
"""
Handle requests to the /logout route
    Log a user out through the logout link
"""
logout_user()
flash(f'You have been logged out', 'success')
# redirect to the login page
return redirect(url_for('auth.login'))
def send_reset_email(user):
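    # Builds a reset token via user.get_reset_token() and emails a link embedding it;
    # reset_password() below recovers the account with User.verify_reset_token(token)
    # before allowing the password change.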
try:
token = user.get_reset_token()
msg = Message('Password Reset Request',
sender='[email protected]',
recipients=[user.email])
msg.body = f''' To reset your password visit the following link
{url_for('auth.reset_password', token=token, _external=True)}
If you did not make this request ignore this email
'''
mail.send(msg)
except Exception as e:
print(e)
@auth.route('/reset-password', methods=['GET', 'POST'])
def request_reset():
if current_user.is_authenticated:
next_page = request.args.get('next')
if current_user.is_freelancer == True and current_user.is_employer == False:
# redirect to the freelancer dashboard page after login
return redirect(next_page) if next_page else redirect(url_for('freelancer.dashboard'))
elif current_user.is_employer == True and current_user.is_freelancer == False:
# redirect to the employer dashboard page after login
return redirect(next_page) if next_page else redirect(url_for('employer.dashboard'))
elif current_user.is_employer and current_user.is_freelancer:
# redirect to the employer dashboard page after login
return redirect(next_page) if next_page else redirect(url_for('employer.dashboard'))
else:
# redirect to the admin dashboard
return redirect(next_page) if next_page else redirect(url_for('admin.admin_dashboard'))
form = RequestResetForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
send_reset_email(user)
flash(f'Email has been sent with password reset instructions', 'info')
return redirect(url_for('auth.login'))
return render_template('auth/reset_request.html', form=form, title='Request Reset Password')
@auth.route('/reset-password/<token>', methods=['GET', 'POST'])
def reset_password(token):
if current_user.is_authenticated:
next_page = request.args.get('next')
if current_user.is_freelancer == True and current_user.is_employer == False:
# redirect to the freelancer dashboard page after login
return redirect(next_page) if next_page else redirect(url_for('freelancer.dashboard'))
        elif current_user.is_employer and not current_user.is_freelancer:
# redirect to the employer dashboard page after login
return redirect(next_page) if next_page else redirect(url_for('employer.dashboard'))
elif current_user.is_employer and current_user.is_freelancer:
# redirect to the employer dashboard page after login
return redirect(next_page) if next_page else redirect(url_for('employer.dashboard'))
else:
# redirect to the admin dashboard
return redirect(next_page) if next_page else redirect(url_for('admin.admin_dashboard'))
user = User.verify_reset_token(token)
if user is None:
flash(f'Invalid token or expired token', 'warning')
return redirect(url_for('auth.request_reset'))
form = ResetPasswordForm()
if form.validate_on_submit():
        # update the user's password hash in the database
hashed_password = generate_password_hash(form.password.data)
user.password_hash = hashed_password
db.session.commit()
flash(
f'Your password has been reset successfully! You may now login', 'success')
return redirect(url_for('auth.login'))
return render_template('auth/reset_password.html', form=form, title='Reset Password')
| [((630, 643), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (641, 643), False, 'from flask_login import login_required, login_user, logout_user, current_user\n'), ((1360, 1426), 'flask.render_template', 'render_template', (['"""auth/register.html"""'], {'form': 'form', 'title': '"""Register"""'}), "('auth/register.html', form=form, title='Register')\n", (1375, 1426), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((3623, 3683), 'flask.render_template', 'render_template', (['"""auth/login.html"""'], {'form': 'form', 'title': '"""Login"""'}), "('auth/login.html', form=form, title='Login')\n", (3638, 3683), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((3874, 3887), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (3885, 3887), False, 'from flask_login import login_required, login_user, logout_user, current_user\n'), ((3892, 3937), 'flask.flash', 'flash', (['f"""You have been logged out"""', '"""success"""'], {}), "(f'You have been logged out', 'success')\n", (3897, 3937), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5892, 5982), 'flask.render_template', 'render_template', (['"""auth/reset_request.html"""'], {'form': 'form', 'title': '"""Request Reset Password"""'}), "('auth/reset_request.html', form=form, title=\n 'Request Reset Password')\n", (5907, 5982), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((7632, 7710), 'flask.render_template', 'render_template', (['"""auth/reset_password.html"""'], {'form': 'form', 'title': '"""Reset Password"""'}), "('auth/reset_password.html', form=form, title='Reset Password')\n", (7647, 7710), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((1161, 1233), 'flask.flash', 'flash', (['f"""You have successfully registered! You may now login"""', '"""success"""'], {}), "(f'You have successfully registered! 
You may now login', 'success')\n", (1166, 1233), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((3546, 3585), 'flask.flash', 'flash', (['f"""Invalid Credentials"""', '"""danger"""'], {}), "(f'Invalid Credentials', 'danger')\n", (3551, 3585), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((3991, 4012), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (3998, 4012), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((4111, 4205), 'flask_mail.Message', 'Message', (['"""Password Reset Request"""'], {'sender': '"""[email protected]"""', 'recipients': '[user.email]'}), "('Password Reset Request', sender='[email protected]',\n recipients=[user.email])\n", (4118, 4205), False, 'from flask_mail import Message\n'), ((4673, 4697), 'flask.request.args.get', 'request.args.get', (['"""next"""'], {}), "('next')\n", (4689, 4697), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5763, 5833), 'flask.flash', 'flash', (['f"""Email has been sent with password reset instructions"""', '"""info"""'], {}), "(f'Email has been sent with password reset instructions', 'info')\n", (5768, 5833), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6129, 6153), 'flask.request.args.get', 'request.args.get', (['"""next"""'], {}), "('next')\n", (6145, 6153), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((7122, 7173), 'flask.flash', 'flash', (['f"""Invalid token or expired token"""', '"""warning"""'], {}), "(f'Invalid token or expired token', 'warning')\n", (7127, 7173), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((7355, 7397), 'werkzeug.security.generate_password_hash', 'generate_password_hash', (['form.password.data'], {}), '(form.password.data)\n', (7377, 7397), False, 'from werkzeug.security import generate_password_hash\n'), ((7479, 7564), 'flask.flash', 'flash', (['f"""Your password has been reset successfully! You may now login"""', '"""success"""'], {}), "(f'Your password has been reset successfully! 
You may now login',\n 'success')\n", (7484, 7564), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((1297, 1318), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (1304, 1318), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((2705, 2750), 'flask_login.login_user', 'login_user', (['user'], {'remember': 'form.remember.data'}), '(user, remember=form.remember.data)\n', (2715, 2750), False, 'from flask_login import login_required, login_user, logout_user, current_user\n'), ((5858, 5879), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (5865, 5879), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((7198, 7227), 'flask.url_for', 'url_for', (['"""auth.request_reset"""'], {}), "('auth.request_reset')\n", (7205, 7227), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((7598, 7619), 'flask.url_for', 'url_for', (['"""auth.login"""'], {}), "('auth.login')\n", (7605, 7619), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((1811, 1842), 'flask.url_for', 'url_for', (['"""freelancer.dashboard"""'], {}), "('freelancer.dashboard')\n", (1818, 1842), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((4347, 4406), 'flask.url_for', 'url_for', (['"""auth.reset_password"""'], {'token': 'token', '_external': '(True)'}), "('auth.reset_password', token=token, _external=True)\n", (4354, 4406), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((4870, 4889), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (4878, 4889), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6326, 6345), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (6334, 6345), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((2025, 2054), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (2032, 2054), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((2972, 3003), 'flask.url_for', 'url_for', (['"""freelancer.dashboard"""'], {}), "('freelancer.dashboard')\n", (2979, 3003), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((4917, 4948), 'flask.url_for', 'url_for', (['"""freelancer.dashboard"""'], {}), "('freelancer.dashboard')\n", (4924, 4948), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5122, 5141), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (5130, 5141), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6373, 6404), 'flask.url_for', 'url_for', (['"""freelancer.dashboard"""'], {}), "('freelancer.dashboard')\n", (6380, 6404), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6578, 6597), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (6586, 6597), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((2220, 2249), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (2227, 2249), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((2339, 2371), 'flask.url_for', 'url_for', (['"""admin.admin_dashboard"""'], {}), "('admin.admin_dashboard')\n", (2346, 2371), False, 'from flask import flash, redirect, 
render_template, url_for, request\n'), ((3182, 3211), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (3189, 3211), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5169, 5198), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (5176, 5198), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5355, 5374), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (5363, 5374), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5512, 5531), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (5520, 5531), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6625, 6654), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (6632, 6654), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6811, 6830), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (6819, 6830), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6968, 6987), 'flask.redirect', 'redirect', (['next_page'], {}), '(next_page)\n', (6976, 6987), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((3373, 3402), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (3380, 3402), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((3504, 3536), 'flask.url_for', 'url_for', (['"""admin.admin_dashboard"""'], {}), "('admin.admin_dashboard')\n", (3511, 3536), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5402, 5431), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (5409, 5431), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((5559, 5591), 'flask.url_for', 'url_for', (['"""admin.admin_dashboard"""'], {}), "('admin.admin_dashboard')\n", (5566, 5591), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((6858, 6887), 'flask.url_for', 'url_for', (['"""employer.dashboard"""'], {}), "('employer.dashboard')\n", (6865, 6887), False, 'from flask import flash, redirect, render_template, url_for, request\n'), ((7015, 7047), 'flask.url_for', 'url_for', (['"""admin.admin_dashboard"""'], {}), "('admin.admin_dashboard')\n", (7022, 7047), False, 'from flask import flash, redirect, render_template, url_for, request\n')] |
devendermishrajio/oslo.messaging | oslo_messaging/_drivers/zmq_driver/client/publishers/zmq_dealer_publisher.py | 9e5fb5697d3f7259f01e3416af0582090d20859a | # Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from oslo_messaging._drivers.zmq_driver.client.publishers\
import zmq_publisher_base
from oslo_messaging._drivers.zmq_driver import zmq_async
from oslo_messaging._drivers.zmq_driver import zmq_names
from oslo_messaging._i18n import _LI, _LW
LOG = logging.getLogger(__name__)
zmq = zmq_async.import_zmq()
class DealerPublisher(zmq_publisher_base.PublisherMultisend):
def __init__(self, conf, matchmaker):
super(DealerPublisher, self).__init__(conf, matchmaker, zmq.DEALER)
def send_request(self, request):
self._check_request_pattern(request)
dealer_socket, hosts = self._check_hosts_connections(request.target)
if not dealer_socket.connections:
            # NOTE(ozamiatin): Here we could keep a queue of messages and
            # send them later, once a listener appears. Such an approach
            # would be more reliable but would consume additional memory.
LOG.warning(_LW("Request %s was dropped because no connection")
% request.msg_type)
return
if request.msg_type in zmq_names.MULTISEND_TYPES:
for _ in range(dealer_socket.connections_count()):
self._send_request(dealer_socket, request)
else:
self._send_request(dealer_socket, request)
def _check_request_pattern(self, request):
if request.msg_type == zmq_names.CALL_TYPE:
raise zmq_publisher_base.UnsupportedSendPattern(request.msg_type)
def _send_request(self, socket, request):
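        # Send an explicit empty delimiter frame first (DEALER sockets do not
        # add one automatically), followed by the pickled request payload.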
socket.send(b'', zmq.SNDMORE)
socket.send_pyobj(request)
LOG.info(_LI("Sending message_id %(message)s to a target %(target)s")
% {"message": request.message_id,
"target": request.target})
def cleanup(self):
super(DealerPublisher, self).cleanup()
class DealerPublisherLight(zmq_publisher_base.PublisherBase):
def __init__(self, conf, address):
super(DealerPublisherLight, self).__init__(conf)
self.socket = self.zmq_context.socket(zmq.DEALER)
self.socket.connect(address)
def send_request(self, request):
if request.msg_type == zmq_names.CALL_TYPE:
raise zmq_publisher_base.UnsupportedSendPattern(request.msg_type)
envelope = request.create_envelope()
self.socket.send(b'', zmq.SNDMORE)
self.socket.send_pyobj(envelope, zmq.SNDMORE)
self.socket.send_pyobj(request)
def cleanup(self):
self.socket.setsockopt(zmq.LINGER, 0)
self.socket.close()
class DealerPublisherProxy(DealerPublisher):
def __init__(self, conf, matchmaker, reply_receiver):
super(DealerPublisherProxy, self).__init__(conf, matchmaker)
self.reply_receiver = reply_receiver
def send_request(self, multipart_message):
envelope = multipart_message[zmq_names.MULTIPART_IDX_ENVELOPE]
LOG.info(_LI("Envelope: %s") % envelope)
target = envelope[zmq_names.FIELD_TARGET]
dealer_socket, hosts = self._check_hosts_connections(target)
if not dealer_socket.connections:
            # NOTE(ozamiatin): Here we could keep a queue of messages and
            # send them later, once a listener appears. Such an approach
            # would be more reliable but would consume additional memory.
LOG.warning(_LW("Request %s was dropped because no connection")
% envelope[zmq_names.FIELD_MSG_TYPE])
return
self.reply_receiver.track_socket(dealer_socket.handle)
LOG.info(_LI("Sending message %(message)s to a target %(target)s")
% {"message": envelope[zmq_names.FIELD_MSG_ID],
"target": envelope[zmq_names.FIELD_TARGET]})
if envelope[zmq_names.FIELD_MSG_TYPE] in zmq_names.MULTISEND_TYPES:
for _ in range(dealer_socket.connections_count()):
self._send_request(dealer_socket, multipart_message)
else:
self._send_request(dealer_socket, multipart_message)
def _send_request(self, socket, multipart_message):
socket.send(b'', zmq.SNDMORE)
socket.send_pyobj(
multipart_message[zmq_names.MULTIPART_IDX_ENVELOPE],
zmq.SNDMORE)
socket.send(multipart_message[zmq_names.MULTIPART_IDX_BODY])
class ReplyReceiver(object):
def __init__(self, poller):
self.poller = poller
LOG.info(_LI("Reply waiter created in broker"))
def _receive_reply(self, socket):
return socket.recv_multipart()
def track_socket(self, socket):
self.poller.register(socket, self._receive_reply)
def cleanup(self):
self.poller.close()
class AcknowledgementReceiver(object):
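    # Runs a background executor that polls registered sockets and logs each
    # acknowledgement message as it arrives.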
def __init__(self):
self.poller = zmq_async.get_poller()
self.thread = zmq_async.get_executor(self.poll_for_acknowledgements)
self.thread.execute()
def _receive_acknowledgement(self, socket):
empty = socket.recv()
assert empty == b"", "Empty delimiter expected"
ack_message = socket.recv_pyobj()
return ack_message
def track_socket(self, socket):
self.poller.register(socket, self._receive_acknowledgement)
def poll_for_acknowledgements(self):
ack_message, socket = self.poller.poll()
LOG.info(_LI("Message %s acknowledged")
% ack_message[zmq_names.FIELD_ID])
def cleanup(self):
self.thread.stop()
self.poller.close()
| [((878, 905), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (895, 905), False, 'import logging\n'), ((913, 935), 'oslo_messaging._drivers.zmq_driver.zmq_async.import_zmq', 'zmq_async.import_zmq', ([], {}), '()\n', (933, 935), False, 'from oslo_messaging._drivers.zmq_driver import zmq_async\n'), ((5465, 5487), 'oslo_messaging._drivers.zmq_driver.zmq_async.get_poller', 'zmq_async.get_poller', ([], {}), '()\n', (5485, 5487), False, 'from oslo_messaging._drivers.zmq_driver import zmq_async\n'), ((5510, 5564), 'oslo_messaging._drivers.zmq_driver.zmq_async.get_executor', 'zmq_async.get_executor', (['self.poll_for_acknowledgements'], {}), '(self.poll_for_acknowledgements)\n', (5532, 5564), False, 'from oslo_messaging._drivers.zmq_driver import zmq_async\n'), ((2074, 2133), 'oslo_messaging._drivers.zmq_driver.client.publishers.zmq_publisher_base.UnsupportedSendPattern', 'zmq_publisher_base.UnsupportedSendPattern', (['request.msg_type'], {}), '(request.msg_type)\n', (2115, 2133), False, 'from oslo_messaging._drivers.zmq_driver.client.publishers import zmq_publisher_base\n'), ((2868, 2927), 'oslo_messaging._drivers.zmq_driver.client.publishers.zmq_publisher_base.UnsupportedSendPattern', 'zmq_publisher_base.UnsupportedSendPattern', (['request.msg_type'], {}), '(request.msg_type)\n', (2909, 2927), False, 'from oslo_messaging._drivers.zmq_driver.client.publishers import zmq_publisher_base\n'), ((5113, 5150), 'oslo_messaging._i18n._LI', '_LI', (['"""Reply waiter created in broker"""'], {}), "('Reply waiter created in broker')\n", (5116, 5150), False, 'from oslo_messaging._i18n import _LI, _LW\n'), ((2273, 2333), 'oslo_messaging._i18n._LI', '_LI', (['"""Sending message_id %(message)s to a target %(target)s"""'], {}), "('Sending message_id %(message)s to a target %(target)s')\n", (2276, 2333), False, 'from oslo_messaging._i18n import _LI, _LW\n'), ((3568, 3587), 'oslo_messaging._i18n._LI', '_LI', (['"""Envelope: %s"""'], {}), "('Envelope: %s')\n", (3571, 3587), False, 'from oslo_messaging._i18n import _LI, _LW\n'), ((4245, 4302), 'oslo_messaging._i18n._LI', '_LI', (['"""Sending message %(message)s to a target %(target)s"""'], {}), "('Sending message %(message)s to a target %(target)s')\n", (4248, 4302), False, 'from oslo_messaging._i18n import _LI, _LW\n'), ((6012, 6042), 'oslo_messaging._i18n._LI', '_LI', (['"""Message %s acknowledged"""'], {}), "('Message %s acknowledged')\n", (6015, 6042), False, 'from oslo_messaging._i18n import _LI, _LW\n'), ((1591, 1642), 'oslo_messaging._i18n._LW', '_LW', (['"""Request %s was dropped because no connection"""'], {}), "('Request %s was dropped because no connection')\n", (1594, 1642), False, 'from oslo_messaging._i18n import _LI, _LW\n'), ((4030, 4081), 'oslo_messaging._i18n._LW', '_LW', (['"""Request %s was dropped because no connection"""'], {}), "('Request %s was dropped because no connection')\n", (4033, 4081), False, 'from oslo_messaging._i18n import _LI, _LW\n')] |
mrucker/banditbenchmark | coba/environments/filters.py | 0365291b3a0cf1d862d294e0386d0ccad3f360f1 | import pickle
import warnings
import collections.abc
from math import isnan
from statistics import mean, median, stdev, mode
from abc import abstractmethod, ABC
from numbers import Number
from collections import defaultdict
from itertools import islice, chain
from typing import Hashable, Optional, Sequence, Union, Iterable, Dict, Any, List, Tuple, Callable, Mapping
from coba.backports import Literal
from coba import pipes
from coba.random import CobaRandom
from coba.exceptions import CobaException
from coba.statistics import iqr
from coba.pipes import Flatten
from coba.environments.primitives import Interaction
from coba.environments.logged.primitives import LoggedInteraction
from coba.environments.simulated.primitives import SimulatedInteraction
class EnvironmentFilter(pipes.Filter[Iterable[Interaction],Iterable[Interaction]], ABC):
"""A filter that can be applied to an Environment."""
@abstractmethod
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
"""Apply a filter to an Environment's interactions."""
...
class Identity(pipes.Identity, EnvironmentFilter):
"""Return whatever interactions are given to the filter."""
pass
class Take(pipes.Take, EnvironmentFilter):
"""Take a fixed number of interactions from an Environment."""
pass
class Shuffle(pipes.Shuffle, EnvironmentFilter):
"""Shuffle a sequence of Interactions in an Environment."""
pass
class Reservoir(pipes.Reservoir, EnvironmentFilter):
"""Take a fixed number of random Interactions from an Environment."""
pass
class Scale(EnvironmentFilter):
"""Shift and scale features to precondition them before learning."""
def __init__(self,
shift: Union[Number,Literal["min","mean","med"]] = 0,
scale: Union[Number,Literal["minmax","std","iqr","maxabs"]] = "minmax",
target: Literal["features","rewards"] = "features",
using: Optional[int] = None):
"""Instantiate a Scale filter.
Args:
shift: The statistic to use to shift each context feature.
scale: The statistic to use to scale each context feature.
target: The target data we wish to scale in the environment.
using: The number of interactions to use when calculating the necessary statistics.
"""
assert isinstance(shift,Number) or shift in ["min","mean","med"]
assert isinstance(scale,Number) or scale in ["minmax","std","iqr","maxabs"]
self._shift = shift
self._scale = scale
self._using = using
self._target = target
@property
def params(self) -> Dict[str, Any]:
return {
"scale_shift": self._shift,
"scale_scale": self._scale,
"scale_using": self._using,
"scale_target": self._target
}
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
iter_interactions = iter(interactions)
fitting_interactions = list(islice(iter_interactions,self._using))
shifts : Dict[Hashable,float] = defaultdict(lambda:0)
scales : Dict[Hashable,float] = defaultdict(lambda:1)
unscaled: Dict[Hashable,List[Any]] = defaultdict(list)
if any([isinstance(i.context,dict) for i in fitting_interactions]) and self._shift != 0:
raise CobaException("Shift is required to be 0 for sparse environments. Otherwise the environment will become dense.")
mixed = set()
had_non_numeric = set()
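        # First pass over the fitting interactions: collect numeric values per
        # feature, and flag any feature that mixes numeric and non-numeric
        # values so it is left unscaled.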
for interaction in fitting_interactions:
if self._target == "features":
for name,value in self._feature_pairs(interaction.context):
if name in mixed: continue
is_numeric = isinstance(value,Number)
is_nan = is_numeric and isnan(value)
if is_nan:
pass
elif (not is_numeric and name in unscaled) or (is_numeric and name in had_non_numeric):
mixed.add(name)
if name in unscaled: del unscaled[name]
if name in had_non_numeric: had_non_numeric.remove(name)
elif not is_numeric:
had_non_numeric.add(name)
elif is_numeric and not is_nan:
unscaled[name].append(value)
if self._target == "rewards":
unscaled["rewards"].extend(interaction.rewards)
if mixed: warnings.warn(f"Some features were not scaled due to having mixed types: {mixed}. ")
has_sparse_zero = set()
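        # Features that are ever absent from a sparse context are implicitly 0
        # there, so a 0 is added to their collected values before computing
        # the shift/scale statistics.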
for interaction in fitting_interactions:
if isinstance(interaction.context,dict):
has_sparse_zero |= unscaled.keys() - interaction.context.keys() - {"rewards"}
for key in has_sparse_zero:
unscaled[key].append(0)
for name, values in unscaled.items():
if isinstance(self._shift, Number):
shift = self._shift
if self._shift == "min":
shift = min(values)
if self._shift == "mean":
shift = mean(values)
if self._shift == "med":
shift = median(values)
if isinstance(self._scale, Number):
scale_num = self._scale
scale_den = 1
if self._scale == "std":
scale_num = 1
scale_den = stdev(values)
if self._scale == "minmax":
scale_num = 1
scale_den = max(values)-min(values)
if self._scale == "iqr":
scale_num = 1
scale_den = iqr(values)
if self._scale == "maxabs":
scale_num = 1
scale_den = max([abs(v-shift) for v in values])
shifts[name] = shift
scales[name] = scale_num/scale_den if round(scale_den,10) != 0 else 1
for interaction in chain(fitting_interactions, iter_interactions):
scaled_values = {}
final_context = interaction.context
final_rewards = None
final_kwargs = interaction.kwargs.copy()
if self._target == "features":
for name,value in self._feature_pairs(interaction.context):
if isinstance(value,Number):
scaled_values[name] = (value-shifts[name])*scales[name]
else:
scaled_values[name] = value
if interaction.context is None:
final_context = None
elif isinstance(interaction.context,dict):
final_context = scaled_values
elif isinstance(interaction.context,tuple):
final_context = tuple(scaled_values[k] for k,_ in self._feature_pairs(interaction.context))
else:
final_context = scaled_values[1]
if self._target == "rewards":
final_rewards = [ (r-shifts['rewards'])*scales['rewards'] for r in interaction.rewards ]
if isinstance(interaction, SimulatedInteraction):
yield SimulatedInteraction(
final_context,
interaction.actions,
final_rewards or interaction.rewards,
**interaction.kwargs
)
elif isinstance(interaction, LoggedInteraction):
yield LoggedInteraction(
final_context,
interaction.action,
interaction.reward,
interaction.probability,
interaction.actions,
**interaction.kwargs
)
else: #pragma: no cover
raise CobaException("Unknown interactions were given to Scale.")
def _feature_pairs(self,context) -> Sequence[Tuple[Hashable,Any]]:
if isinstance(context,dict ): return context.items()
if isinstance(context,tuple): return enumerate(context)
if context is not None : return [(1,context)]
return []
class Impute(EnvironmentFilter):
"""Impute missing values (nan) in Interaction contexts."""
def __init__(self,
stat : Literal["mean","median","mode"] = "mean",
using: Optional[int] = None):
"""Instantiate an Impute filter.
Args:
            stat : The statistic to use for imputation.
using: The number of interactions to use to calculate the imputation statistics.
"""
assert stat in ["mean","median","mode"]
self._stat = stat
self._using = using
@property
def params(self) -> Dict[str, Any]:
return { "impute_stat": self._stat, "impute_using": self._using }
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
iter_interactions = iter(interactions)
train_interactions = list(islice(iter_interactions,self._using))
test_interactions = chain.from_iterable([train_interactions, iter_interactions])
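        # Imputation statistics are estimated from the first `using`
        # interactions and then applied to the full stream (the training
        # interactions are replayed first).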
stats : Dict[Hashable,float] = defaultdict(int)
features: Dict[Hashable,List[Number]] = defaultdict(list)
for interaction in train_interactions:
for name,value in self._context_as_name_values(interaction.context):
if isinstance(value,Number) and not isnan(value):
features[name].append(value)
for feat_name, feat_numeric_values in features.items():
if self._stat == "mean":
stats[feat_name] = mean(feat_numeric_values)
if self._stat == "median":
stats[feat_name] = median(feat_numeric_values)
if self._stat == "mode":
stats[feat_name] = mode(feat_numeric_values)
for interaction in test_interactions:
kv_imputed_context = {}
for name,value in self._context_as_name_values(interaction.context):
kv_imputed_context[name] = stats[name] if isinstance(value,Number) and isnan(value) else value
if interaction.context is None:
final_context = None
elif isinstance(interaction.context,dict):
final_context = kv_imputed_context
elif isinstance(interaction.context,tuple):
final_context = tuple(kv_imputed_context[k] for k,_ in self._context_as_name_values(interaction.context))
else:
final_context = kv_imputed_context[1]
if isinstance(interaction, SimulatedInteraction):
yield SimulatedInteraction(
final_context,
interaction.actions,
interaction.rewards,
**interaction.kwargs
)
elif isinstance(interaction, LoggedInteraction):
yield LoggedInteraction(
final_context,
interaction.action,
interaction.reward,
**interaction.kwargs
)
else: #pragma: no cover
raise CobaException("Unknown interactions were given to Impute.")
def _context_as_name_values(self,context) -> Sequence[Tuple[Hashable,Any]]:
if isinstance(context,dict ): return context.items()
if isinstance(context,tuple): return enumerate(context)
if context is not None : return [(1,context)]
return []
class Sparse(EnvironmentFilter):
"""Sparsify an environment's feature representation.
This has little utility beyond debugging.
"""
def __init__(self, context:bool = True, action:bool = False):
"""Instantiate a Sparse filter.
Args:
context: If True then contexts should be made sparse otherwise leave them alone.
action: If True then actions should be made sparse otherwise leave them alone.
"""
self._context = context
self._action = action
@property
def params(self) -> Dict[str, Any]:
return { "sparse_C": self._context, "sparse_A": self._action }
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
for interaction in interactions:
sparse_context = self._make_sparse(interaction.context) if self._context else interaction.context
if isinstance(interaction, SimulatedInteraction):
sparse_actions = list(map(self._make_sparse,interaction.actions)) if self._action else interaction.actions
yield SimulatedInteraction(
sparse_context,
sparse_actions,
interaction.rewards
)
elif isinstance(interaction, LoggedInteraction):
sparse_action = self._make_sparse(interaction.action) if self._action else interaction.action
yield LoggedInteraction(
sparse_context,
sparse_action,
interaction.reward,
interaction.probability,
interaction.actions,
**interaction.kwargs
)
else: #pragma: no cover
raise CobaException("Unknown interactions were given to Sparse.")
def _make_sparse(self, value) -> Optional[dict]:
if isinstance(value,dict) or value is None:
return value
if isinstance(value,(list,tuple)):
return dict(enumerate(value))
return {0:value}
class Cycle(EnvironmentFilter):
"""Cycle all rewards associated with actions by one place.
    This filter is useful for testing an algorithm's response to a non-stationary shock.
"""
def __init__(self, after:int = 0):
"""Instantiate a Cycle filter.
Args:
after: How many interactions should be seen before applying the cycle filter.
"""
self._after = after
@property
def params(self) -> Dict[str, Any]:
return { "cycle_after": self._after }
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[SimulatedInteraction]:
underlying_iterable = iter(interactions)
sans_cycle_interactions = islice(underlying_iterable, self._after)
with_cycle_interactions = underlying_iterable
for interaction in sans_cycle_interactions:
yield interaction
try:
first_interaction = next(with_cycle_interactions)
action_set = set(first_interaction.actions)
n_actions = len(action_set)
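            # A "featureless" action set is exactly the one-hot encodings of
            # the action indices; cycling rewards is only meaningful then.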
featureless_actions = [tuple([0]*n+[1]+[0]*(n_actions-n-1)) for n in range(n_actions)]
with_cycle_interactions = chain([first_interaction], with_cycle_interactions)
if len(set(action_set) & set(featureless_actions)) != len(action_set):
warnings.warn("Cycle only works for environments without action features. It will be ignored in this case.")
for interaction in with_cycle_interactions:
yield interaction
else:
for interaction in with_cycle_interactions:
rewards = interaction.rewards[-1:] + interaction.rewards[:-1]
yield SimulatedInteraction(interaction.context, interaction.actions, rewards, **interaction.kwargs)
except StopIteration:
pass
class Binary(EnvironmentFilter):
"""Binarize all rewards to either 1 (max rewards) or 0 (all others)."""
@property
def params(self) -> Dict[str, Any]:
return { "binary": True }
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[SimulatedInteraction]:
for interaction in interactions:
max_rwd = max(interaction.rewards)
rewards = [int(r==max_rwd) for r in interaction.rewards]
yield SimulatedInteraction(interaction.context, interaction.actions, rewards, **interaction.kwargs)
class Sort(EnvironmentFilter):
"""Sort a sequence of Interactions in an Environment."""
def __init__(self, *keys: Union[str,int,Sequence[Union[str,int]]]) -> None:
"""Instantiate a Sort filter.
Args:
*keys: The context items that should be sorted on.
"""
self._keys = list(Flatten().filter([list(keys)]))[0]
@property
def params(self) -> Dict[str, Any]:
return { "sort": self._keys or '*' }
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
full_sorter = lambda interaction: tuple(interaction.context )
list_sorter = lambda interaction: tuple(interaction.context[key] for key in self._keys)
dict_sorter = lambda interaction: tuple(interaction.context.get(key,0) for key in self._keys)
interactions = list(interactions)
is_sparse = isinstance(interactions[0].context,dict)
sorter = full_sorter if not self._keys else dict_sorter if is_sparse else list_sorter
return sorted(interactions, key=sorter)
class Where(EnvironmentFilter):
"""Define Environment selection criteria for an Environments pipe."""
def __init__(self, *, n_interactions: Union[int,Tuple[Optional[int],Optional[int]]] = None) -> None:
"""Instantiate a Where filter.
Args:
n_interactions: The minimum, maximum or exact number of interactions Environments must have.
"""
self._n_interactions = n_interactions
@property
def params(self) -> Dict[str, Any]:
params = {}
if self._n_interactions is not None:
params["where_n_interactions"] = self._n_interactions
return params
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
interactions = iter(interactions)
if self._n_interactions is None or self._n_interactions == (None,None):
min_interactions = None
max_interactions = None
take_interactions = 0
elif isinstance(self._n_interactions, int):
min_interactions = self._n_interactions
max_interactions = self._n_interactions
take_interactions = self._n_interactions+1
else:
min_interactions = self._n_interactions[0]
max_interactions = self._n_interactions[1]
take_interactions = max(filter(lambda x: x is not None, list(self._n_interactions)))+1
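        # Take one interaction beyond the largest bound so we can tell whether
        # the environment exceeds the maximum without materializing it fully.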
taken_interactions = list(islice(interactions, take_interactions))
if max_interactions is not None and len(taken_interactions) > max_interactions:
return []
if min_interactions is not None and len(taken_interactions) < min_interactions:
return []
return chain(taken_interactions, interactions)
class Warm(EnvironmentFilter):
"""Turn a SimulatedEnvironment into a WarmStartEnvironment."""
def __init__(self, n_warm:int, seed:int = 1):
"""Instantiate a Warm filter.
Args:
n_warm: The number of interactions that should be turned into LoggedInteractions.
seed: The random number seed that determines the random logging policy for LoggedInteractions.
"""
self._n_warm = n_warm
self._seed = seed
@property
def params(self) -> Dict[str, Any]:
return { "n_warm": self._n_warm }
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[Interaction]:
self._rng = CobaRandom(self._seed)
underlying_iterable = iter(interactions)
logged_interactions = map(self._to_logged_interaction, islice(underlying_iterable, self._n_warm))
simulated_interactions = underlying_iterable
return chain(logged_interactions, simulated_interactions)
def _to_logged_interaction(self, interaction: SimulatedInteraction) -> LoggedInteraction:
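        # The simulated logging policy is uniform random over the action set.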
num_actions = len(interaction.actions)
probabilities = [1/num_actions] * num_actions
idx = self._rng.choice(list(range(num_actions)), probabilities)
actions = interaction.actions
action = interaction.actions[idx]
prob = probabilities[idx]
reward = interaction.rewards[idx]
return LoggedInteraction(interaction.context, action, reward, prob, actions)
class Riffle(EnvironmentFilter):
"""Riffle shuffle Interactions by taking actions from the end and evenly distributing into the beginning."""
def __init__(self, spacing: int = 3, seed=1) -> None:
"""Instantiate a Riffle filter.
Args:
spacing: The number of interactions from the beginning between each interaction shuffled in from the end.
seed: The seed used to determine the location of each ending interaction when placed within its beginning space.
"""
self._spacing = spacing
self._seed = seed
@property
def params(self) -> Dict[str, Any]:
return {"riffle_spacing": self._spacing, "riffle_seed": self._seed}
def filter(self, interactions: Iterable[Interaction]) -> Iterable[Interaction]:
rng = CobaRandom(self._seed)
interactions = list(interactions)
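        # Pop interactions from the end one at a time and insert each at a
        # random position within successive windows of `spacing` interactions
        # from the front.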
for i in range(int(len(interactions)/(self._spacing+1))):
interactions.insert(i*self._spacing+rng.randint(0,self._spacing), interactions.pop())
return interactions
class Noise(EnvironmentFilter):
"""Introduce noise to an environment."""
def __init__(self,
context: Callable[[float,CobaRandom], float] = None,
action : Callable[[float,CobaRandom], float] = None,
reward : Callable[[float,CobaRandom], float] = None,
seed : int = 1) -> None:
"""Instantiate a Noise EnvironmentFilter.
Args:
context: A noise generator for context features.
action : A noise generator for action features.
reward : A noise generator for rewards.
seed : The seed initializing the random state of the noise generators.
"""
self._args = (context,action,reward,seed)
self._no_noise = lambda x, _: x
if context is None and action is None and reward is None:
context = lambda x, rng: x+rng.gauss(0,1)
self._context_noise = context or self._no_noise
self._action_noise = action or self._no_noise
self._reward_noise = reward or self._no_noise
self._seed = seed
def __reduce__(self) -> tuple:
try:
pickle.dumps(self._args)
except Exception:
message = (
"We were unable to pickle the Noise filter. This is likely due to using lambda functions for noise generation. "
"To work around this we recommend you first define your lambda functions as a named function and then pass the "
"named function to Noise."
)
raise CobaException(message)
else:
return (Noise, self._args)
@property
def params(self) -> Dict[str, Any]:
params = {}
if self._context_noise != self._no_noise: params['context_noise'] = True
if self._action_noise != self._no_noise : params['action_noise' ] = True
if self._reward_noise != self._no_noise : params['reward_noise' ] = True
params['noise_seed'] = self._seed
return params
def filter(self, interactions: Iterable[SimulatedInteraction]) -> Iterable[SimulatedInteraction]:
rng = CobaRandom(self._seed)
for interaction in interactions:
if isinstance(interaction, LoggedInteraction):
raise CobaException("We do not currently support adding noise to a LoggedInteraction.")
noisy_context = self._noises(interaction.context, rng, self._context_noise)
noisy_actions = [ self._noises(a, rng, self._action_noise) for a in interaction.actions ]
noisy_rewards = [ self._noises(r, rng, self._reward_noise) for r in interaction.rewards ]
yield SimulatedInteraction(noisy_context, noisy_actions, noisy_rewards, **interaction.kwargs)
def _noises(self, value:Union[None,float,str,Mapping,Sequence], rng: CobaRandom, noiser: Callable[[float,CobaRandom], float]):
if isinstance(value, collections.abc.Mapping):
#we sort so that noise generation is deterministic with respect to seed
return { k:self._noise(v, rng, noiser) for k,v in sorted(value.items()) }
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
return [ self._noise(v, rng, noiser) for v in value ]
return self._noise(value, rng, noiser)
def _noise(self, value:Union[None,float,str], rng: CobaRandom, noiser: Callable[[float,CobaRandom], float]) -> float:
return value if not isinstance(value,(int,float)) else noiser(value, rng)
| [((3136, 3159), 'collections.defaultdict', 'defaultdict', (['(lambda : 0)'], {}), '(lambda : 0)\n', (3147, 3159), False, 'from collections import defaultdict\n'), ((3203, 3226), 'collections.defaultdict', 'defaultdict', (['(lambda : 1)'], {}), '(lambda : 1)\n', (3214, 3226), False, 'from collections import defaultdict\n'), ((3270, 3287), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3281, 3287), False, 'from collections import defaultdict\n'), ((6070, 6116), 'itertools.chain', 'chain', (['fitting_interactions', 'iter_interactions'], {}), '(fitting_interactions, iter_interactions)\n', (6075, 6116), False, 'from itertools import islice, chain\n'), ((9166, 9226), 'itertools.chain.from_iterable', 'chain.from_iterable', (['[train_interactions, iter_interactions]'], {}), '([train_interactions, iter_interactions])\n', (9185, 9226), False, 'from itertools import islice, chain\n'), ((9276, 9292), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (9287, 9292), False, 'from collections import defaultdict\n'), ((9341, 9358), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9352, 9358), False, 'from collections import defaultdict\n'), ((14440, 14480), 'itertools.islice', 'islice', (['underlying_iterable', 'self._after'], {}), '(underlying_iterable, self._after)\n', (14446, 14480), False, 'from itertools import islice, chain\n'), ((19036, 19075), 'itertools.chain', 'chain', (['taken_interactions', 'interactions'], {}), '(taken_interactions, interactions)\n', (19041, 19075), False, 'from itertools import islice, chain\n'), ((19760, 19782), 'coba.random.CobaRandom', 'CobaRandom', (['self._seed'], {}), '(self._seed)\n', (19770, 19782), False, 'from coba.random import CobaRandom\n'), ((20014, 20064), 'itertools.chain', 'chain', (['logged_interactions', 'simulated_interactions'], {}), '(logged_interactions, simulated_interactions)\n', (20019, 20064), False, 'from itertools import islice, chain\n'), ((20517, 20586), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['interaction.context', 'action', 'reward', 'prob', 'actions'], {}), '(interaction.context, action, reward, prob, actions)\n', (20534, 20586), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((21401, 21423), 'coba.random.CobaRandom', 'CobaRandom', (['self._seed'], {}), '(self._seed)\n', (21411, 21423), False, 'from coba.random import CobaRandom\n'), ((23780, 23802), 'coba.random.CobaRandom', 'CobaRandom', (['self._seed'], {}), '(self._seed)\n', (23790, 23802), False, 'from coba.random import CobaRandom\n'), ((3051, 3089), 'itertools.islice', 'islice', (['iter_interactions', 'self._using'], {}), '(iter_interactions, self._using)\n', (3057, 3089), False, 'from itertools import islice, chain\n'), ((3404, 3526), 'coba.exceptions.CobaException', 'CobaException', (['"""Shift is required to be 0 for sparse environments. Otherwise the environment will become dense."""'], {}), "(\n 'Shift is required to be 0 for sparse environments. Otherwise the environment will become dense.'\n )\n", (3417, 3526), False, 'from coba.exceptions import CobaException\n'), ((4584, 4673), 'warnings.warn', 'warnings.warn', (['f"""Some features were not scaled due to having mixed types: {mixed}. """'], {}), "(\n f'Some features were not scaled due to having mixed types: {mixed}. 
')\n", (4597, 4673), False, 'import warnings\n'), ((9098, 9136), 'itertools.islice', 'islice', (['iter_interactions', 'self._using'], {}), '(iter_interactions, self._using)\n', (9104, 9136), False, 'from itertools import islice, chain\n'), ((14959, 15010), 'itertools.chain', 'chain', (['[first_interaction]', 'with_cycle_interactions'], {}), '([first_interaction], with_cycle_interactions)\n', (14964, 15010), False, 'from itertools import islice, chain\n'), ((18757, 18796), 'itertools.islice', 'islice', (['interactions', 'take_interactions'], {}), '(interactions, take_interactions)\n', (18763, 18796), False, 'from itertools import islice, chain\n'), ((19902, 19943), 'itertools.islice', 'islice', (['underlying_iterable', 'self._n_warm'], {}), '(underlying_iterable, self._n_warm)\n', (19908, 19943), False, 'from itertools import islice, chain\n'), ((22792, 22816), 'pickle.dumps', 'pickle.dumps', (['self._args'], {}), '(self._args)\n', (22804, 22816), False, 'import pickle\n'), ((5241, 5253), 'statistics.mean', 'mean', (['values'], {}), '(values)\n', (5245, 5253), False, 'from statistics import mean, median, stdev, mode\n'), ((5316, 5330), 'statistics.median', 'median', (['values'], {}), '(values)\n', (5322, 5330), False, 'from statistics import mean, median, stdev, mode\n'), ((5546, 5559), 'statistics.stdev', 'stdev', (['values'], {}), '(values)\n', (5551, 5559), False, 'from statistics import mean, median, stdev, mode\n'), ((5779, 5790), 'coba.statistics.iqr', 'iqr', (['values'], {}), '(values)\n', (5782, 5790), False, 'from coba.statistics import iqr\n'), ((9741, 9766), 'statistics.mean', 'mean', (['feat_numeric_values'], {}), '(feat_numeric_values)\n', (9745, 9766), False, 'from statistics import mean, median, stdev, mode\n'), ((9842, 9869), 'statistics.median', 'median', (['feat_numeric_values'], {}), '(feat_numeric_values)\n', (9848, 9869), False, 'from statistics import mean, median, stdev, mode\n'), ((9943, 9968), 'statistics.mode', 'mode', (['feat_numeric_values'], {}), '(feat_numeric_values)\n', (9947, 9968), False, 'from statistics import mean, median, stdev, mode\n'), ((15111, 15229), 'warnings.warn', 'warnings.warn', (['"""Cycle only works for environments without action features. It will be ignored in this case."""'], {}), "(\n 'Cycle only works for environments without action features. 
It will be ignored in this case.'\n )\n", (15124, 15229), False, 'import warnings\n'), ((16125, 16223), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['interaction.context', 'interaction.actions', 'rewards'], {}), '(interaction.context, interaction.actions, rewards, **\n interaction.kwargs)\n', (16145, 16223), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((23200, 23222), 'coba.exceptions.CobaException', 'CobaException', (['message'], {}), '(message)\n', (23213, 23222), False, 'from coba.exceptions import CobaException\n'), ((23927, 24013), 'coba.exceptions.CobaException', 'CobaException', (['"""We do not currently support adding noise to a LoggedInteraction."""'], {}), "(\n 'We do not currently support adding noise to a LoggedInteraction.')\n", (23940, 24013), False, 'from coba.exceptions import CobaException\n'), ((24321, 24413), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['noisy_context', 'noisy_actions', 'noisy_rewards'], {}), '(noisy_context, noisy_actions, noisy_rewards, **\n interaction.kwargs)\n', (24341, 24413), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((7292, 7412), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['final_context', 'interaction.actions', '(final_rewards or interaction.rewards)'], {}), '(final_context, interaction.actions, final_rewards or\n interaction.rewards, **interaction.kwargs)\n', (7312, 7412), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((7929, 7987), 'coba.exceptions.CobaException', 'CobaException', (['"""Unknown interactions were given to Scale."""'], {}), "('Unknown interactions were given to Scale.')\n", (7942, 7987), False, 'from coba.exceptions import CobaException\n'), ((10769, 10873), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['final_context', 'interaction.actions', 'interaction.rewards'], {}), '(final_context, interaction.actions, interaction.\n rewards, **interaction.kwargs)\n', (10789, 10873), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((11303, 11362), 'coba.exceptions.CobaException', 'CobaException', (['"""Unknown interactions were given to Impute."""'], {}), "('Unknown interactions were given to Impute.')\n", (11316, 11362), False, 'from coba.exceptions import CobaException\n'), ((12749, 12822), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['sparse_context', 'sparse_actions', 'interaction.rewards'], {}), '(sparse_context, sparse_actions, interaction.rewards)\n', (12769, 12822), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((13430, 13489), 'coba.exceptions.CobaException', 'CobaException', (['"""Unknown interactions were given to Sparse."""'], {}), "('Unknown interactions were given to Sparse.')\n", (13443, 13489), False, 'from coba.exceptions import CobaException\n'), ((3896, 3908), 'math.isnan', 'isnan', (['value'], {}), '(value)\n', (3901, 3908), False, 'from math import isnan\n'), ((7591, 7735), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['final_context', 'interaction.action', 'interaction.reward', 'interaction.probability', 'interaction.actions'], {}), '(final_context, interaction.action, interaction.reward,\n interaction.probability, interaction.actions, **interaction.kwargs)\n', (7608, 
7735), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((9540, 9552), 'math.isnan', 'isnan', (['value'], {}), '(value)\n', (9545, 9552), False, 'from math import isnan\n'), ((10222, 10234), 'math.isnan', 'isnan', (['value'], {}), '(value)\n', (10227, 10234), False, 'from math import isnan\n'), ((11051, 11150), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['final_context', 'interaction.action', 'interaction.reward'], {}), '(final_context, interaction.action, interaction.reward, **\n interaction.kwargs)\n', (11068, 11150), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((13096, 13236), 'coba.environments.logged.primitives.LoggedInteraction', 'LoggedInteraction', (['sparse_context', 'sparse_action', 'interaction.reward', 'interaction.probability', 'interaction.actions'], {}), '(sparse_context, sparse_action, interaction.reward,\n interaction.probability, interaction.actions, **interaction.kwargs)\n', (13113, 13236), False, 'from coba.environments.logged.primitives import LoggedInteraction\n'), ((15504, 15602), 'coba.environments.simulated.primitives.SimulatedInteraction', 'SimulatedInteraction', (['interaction.context', 'interaction.actions', 'rewards'], {}), '(interaction.context, interaction.actions, rewards, **\n interaction.kwargs)\n', (15524, 15602), False, 'from coba.environments.simulated.primitives import SimulatedInteraction\n'), ((16548, 16557), 'coba.pipes.Flatten', 'Flatten', ([], {}), '()\n', (16555, 16557), False, 'from coba.pipes import Flatten\n')] |
egoolish/cuml | python/cuml/preprocessing/LabelEncoder.py | 5320eff78890b3e9129e04e13437496c0424820d | #
# Copyright (c) 2019, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import cudf
import nvcategory
from librmm_cffi import librmm
import numpy as np
def _enforce_str(y: cudf.Series) -> cudf.Series:
''' Ensure that nvcategory is being given strings
'''
if y.dtype != "object":
return y.astype("str")
return y
def _enforce_npint32(y: cudf.Series) -> cudf.Series:
if y.dtype != np.int32:
return y.astype(np.int32)
return y
class LabelEncoder(object):
"""
An nvcategory based implementation of ordinal label encoding
Examples
--------
Converting a categorical implementation to a numerical one
.. code-block:: python
from cudf import DataFrame, Series
data = DataFrame({'category': ['a', 'b', 'c', 'd']})
# There are two functionally equivalent ways to do this
le = LabelEncoder()
le.fit(data.category) # le = le.fit(data.category) also works
encoded = le.transform(data.category)
print(encoded)
# This method is preferred
le = LabelEncoder()
encoded = le.fit_transform(data.category)
print(encoded)
# We can assign this to a new column
data = data.assign(encoded=encoded)
print(data.head())
# We can also encode more data
test_data = Series(['c', 'a'])
encoded = le.transform(test_data)
print(encoded)
# After train, ordinal label can be inverse_transform() back to
# string labels
ord_label = cudf.Series([0, 0, 1, 2, 1])
str_label = le.inverse_transform(ord_label)
print(str_label)
Output:
.. code-block:: python
0 0
1 1
2 2
3 3
dtype: int64
0 0
1 1
2 2
3 3
dtype: int32
category encoded
0 a 0
1 b 1
2 c 2
3 d 3
0 2
1 0
dtype: int64
0 a
1 a
2 b
3 c
4 b
dtype: object
"""
def __init__(self, *args, **kwargs):
self._cats: nvcategory.nvcategory = None
self._dtype = None
self._fitted: bool = False
def _check_is_fitted(self):
if not self._fitted:
raise RuntimeError("Model must first be .fit()")
def fit(self, y: cudf.Series) -> "LabelEncoder":
"""
Fit a LabelEncoder (nvcategory) instance to a set of categories
Parameters
---------
y : cudf.Series
            Series containing the categories to be encoded. Its elements
may or may not be unique
Returns
-------
self : LabelEncoder
A fitted instance of itself to allow method chaining
"""
self._dtype = y.dtype
y = _enforce_str(y)
self._cats = nvcategory.from_strings(y.data)
self._fitted = True
return self
def transform(self, y: cudf.Series) -> cudf.Series:
"""
Transform an input into its categorical keys.
This is intended for use with small inputs relative to the size of the
dataset. For fitting and transforming an entire dataset, prefer
`fit_transform`.
Parameters
----------
y : cudf.Series
Input keys to be transformed. Its values should match the
categories given to `fit`
Returns
------
encoded : cudf.Series
The ordinally encoded input series
Raises
------
KeyError
if a category appears that was not seen in `fit`
"""
self._check_is_fitted()
y = _enforce_str(y)
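        # nvcategory maps values onto the fitted key set; any value not seen
        # during fit comes back as -1 and is rejected below.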
encoded = cudf.Series(
nvcategory.from_strings(y.data)
.set_keys(self._cats.keys())
.values()
)
if -1 in encoded:
raise KeyError("Attempted to encode unseen key")
return encoded
def fit_transform(self, y: cudf.Series) -> cudf.Series:
"""
Simultaneously fit and transform an input
This is functionally equivalent to (but faster than)
`LabelEncoder().fit(y).transform(y)`
"""
self._dtype = y.dtype
# Convert y to nvstrings series, if it isn't one
y = _enforce_str(y)
# Bottleneck is here, despite everything being done on the device
self._cats = nvcategory.from_strings(y.data)
self._fitted = True
arr: librmm.device_array = librmm.device_array(
y.data.size(), dtype=np.int32
)
self._cats.values(devptr=arr.device_ctypes_pointer.value)
return cudf.Series(arr)
def inverse_transform(self, y: cudf.Series) -> cudf.Series:
''' Revert ordinal label to original label
Parameters
----------
y : cudf.Series, dtype=int32
Ordinal labels to be reverted
Returns
-------
reverted : cudf.Series
Reverted labels
'''
# check LabelEncoder is fitted
self._check_is_fitted()
# check input type is cudf.Series
if not isinstance(y, cudf.Series):
raise TypeError(
'Input of type {} is not cudf.Series'.format(type(y)))
# check if y's dtype is np.int32, otherwise convert it
y = _enforce_npint32(y)
        # check that every ordinal label is within the range of fitted categories
ord_label = y.unique()
category_num = len(self._cats.keys())
for ordi in ord_label:
if ordi < 0 or ordi >= category_num:
raise ValueError(
'y contains previously unseen label {}'.format(ordi))
# convert ordinal label to string label
reverted = cudf.Series(self._cats.gather_strings(
y.data.mem.device_ctypes_pointer.value, len(y)))
return reverted
| [((3543, 3574), 'nvcategory.from_strings', 'nvcategory.from_strings', (['y.data'], {}), '(y.data)\n', (3566, 3574), False, 'import nvcategory\n'), ((5096, 5127), 'nvcategory.from_strings', 'nvcategory.from_strings', (['y.data'], {}), '(y.data)\n', (5119, 5127), False, 'import nvcategory\n'), ((5346, 5362), 'cudf.Series', 'cudf.Series', (['arr'], {}), '(arr)\n', (5357, 5362), False, 'import cudf\n'), ((4427, 4458), 'nvcategory.from_strings', 'nvcategory.from_strings', (['y.data'], {}), '(y.data)\n', (4450, 4458), False, 'import nvcategory\n')] |
jhamrick/cogsci-proceedings-analysis | cleaning.py | c3c8b0abd8b9ce639f6de0aea52aec46c2c8abca | import re
import difflib
import pandas as pd
import numpy as np
from nameparser import HumanName
from nameparser.config import CONSTANTS
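# Drop "gen" and "prin" from nameparser's recognized title set so author name
# parts such as "Gen" are not stripped as honorific titles.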
CONSTANTS.titles.remove("gen")
CONSTANTS.titles.remove("prin")
def parse_paper_type(section_name):
section_name = section_name.strip().lower()
if section_name == '':
paper_type = None
elif re.match('.*workshop.*', section_name):
paper_type = 'workshop'
elif re.match('.*symposi.*', section_name):
paper_type = 'symposium'
elif re.match('.*poster.*', section_name):
paper_type = 'poster'
elif re.match('.*tutorial.*', section_name):
paper_type = 'workshop'
elif re.match('.*abstract.*', section_name):
paper_type = 'poster'
elif re.match('.*addenda.*', section_name):
paper_type = 'other'
else:
paper_type = 'talk'
return paper_type
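# For example, given the patterns above: a section named "Poster Session" maps to
# 'poster', "Symposium: ..." to 'symposium', an empty section name to None, and any
# other non-matching section falls through to 'talk'.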
def clean_authors(authors):
cleaned_authors = []
authors = authors.lower()
# get rid of commas where there are suffixes, like Jr. or III
authors = authors.replace(", jr.", " jr.")
authors = authors.replace(", iii", " iii")
authors = authors.replace(", ph.d", "")
# special cases
authors = authors.replace("organizer:", "")
authors = authors.replace("roel m,", "roel m.")
if authors == 'kozue miyashiro, etsuko harada, t.':
author_list = ['kozue miyashiro', 'etsuko harada, t.']
else:
author_list = authors.split(",")
for author in author_list:
author = HumanName(author.lower())
if author.first == '' or author.last == '':
raise ValueError("invalid author name: {}".format(author))
author.capitalize()
author.string_format = u"{last}, {title} {first} {middle}, {suffix}"
cleaned_authors.append(unicode(author))
return cleaned_authors
def extract_authors(papers):
author_papers = []
for i, paper in papers.iterrows():
authors = clean_authors(paper['authors'])
for author in authors:
entry = paper.copy().drop('authors')
entry['author'] = author
author_papers.append(entry)
author_papers = pd.DataFrame(author_papers)
return author_papers
def fix_author_misspellings(papers, G):
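    # Heuristic clean-up of near-duplicate author names: each author is compared
    # against the next ~20 names in sorted order; two spellings are merged
    # automatically when their parsed first and last names match, or when they are
    # >90% similar and share at least one co-author in graph G, and otherwise the
    # user is asked interactively. The rarer spelling is rewritten to the more
    # frequent one, and the co-authorship graph G is updated to match.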
authors = np.sort(papers['author'].unique())
for i in xrange(len(authors)):
window = 20
lower = i + 1
upper = min(i + 1 + window, len(authors) - 1)
for j in xrange(len(authors[lower:upper])):
author1 = authors[i]
author2 = authors[lower + j]
if author1 == author2:
continue
author1_hn = HumanName(author1)
author2_hn = HumanName(author2)
same_first = author1_hn.first == author2_hn.first
same_last = author1_hn.last == author2_hn.last
if same_first and same_last:
replace = True
else:
ratio = difflib.SequenceMatcher(None, author1, author2).ratio()
if ratio > 0.9:
coauthors = set(G[author1].keys()) & set(G[author2].keys())
if len(coauthors) > 0:
replace = True
else:
print u"\nPossible match: '{}' vs '{}' (r={})".format(
author1, author2, ratio)
print sorted(G[author1].keys())
print sorted(G[author2].keys())
accept = ""
while accept not in ("y", "n"):
accept = raw_input("Accept? (y/n) ")
replace = accept == "y"
else:
replace = False
if replace:
num1 = len(papers.groupby('author').get_group(author1))
num2 = len(papers.groupby('author').get_group(author2))
if num1 > num2:
oldname = author2
newname = author1
else:
oldname = author1
newname = author2
print u"Replacing '{}' with '{}'".format(oldname, newname)
papers.loc[papers['author'] == oldname, 'author'] = newname
authors[authors == oldname] = newname
for neighbor in G[oldname]:
if neighbor not in G[newname]:
G.add_edge(newname, neighbor)
G[newname][neighbor]['weight'] = 0
weight = G[oldname][neighbor]['weight']
G[newname][neighbor]['weight'] += weight
G.remove_node(oldname)
return papers, G
if __name__ == "__main__":
import graph
papers = pd.read_csv("cogsci_proceedings_raw.csv")
papers['type'] = papers['section'].apply(parse_paper_type)
papers = extract_authors(papers)
G = graph.make_author_graph(papers)
papers, G = fix_author_misspellings(papers, G)
papers.to_csv("cogsci_proceedings.csv", encoding='utf-8')
| [] |
CamilaBodack/template-projeto-selecao | quem_foi_para_mar_core/migrations/0004_auto_20200811_1945.py | b0a0cf6070bf8abab626a17af5c315c82368b010 | # Generated by Django 3.1 on 2020-08-11 19:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('quem_foi_para_mar_core', '0003_auto_20200811_1944'),
]
operations = [
migrations.RenameField(
model_name='contato',
old_name='pescador_id',
new_name='pescador',
),
]
| [((240, 333), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""contato"""', 'old_name': '"""pescador_id"""', 'new_name': '"""pescador"""'}), "(model_name='contato', old_name='pescador_id',\n new_name='pescador')\n", (262, 333), False, 'from django.db import migrations\n')] |
MJ-SEO/py_fuzz | examples/tinytag/fuzz.py | 789fbfea21bf644ba4d00554fe4141694b0a190a | from pythonfuzz.main import PythonFuzz
from tinytag import TinyTag
import io
@PythonFuzz
def fuzz(buf):
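    # Fuzzing harness: write the fuzzer-provided bytes to a temporary .mp4 file and
    # ask TinyTag to parse it. UnicodeDecodeError is swallowed because it is an
    # expected rejection of malformed input rather than a crash worth reporting.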
try:
f = open('temp.mp4', "wb")
f.write(buf)
f.seek(0)
tag = TinyTag.get(f.name)
except UnicodeDecodeError:
pass
if __name__ == '__main__':
fuzz()
| [((175, 194), 'tinytag.TinyTag.get', 'TinyTag.get', (['f.name'], {}), '(f.name)\n', (186, 194), False, 'from tinytag import TinyTag\n')] |
GiulianaPola/select_repeats | venv/lib/python3.8/site-packages/requests/compat.py | 17a0d053d4f874e42cf654dd142168c2ec8fbd11 | /home/runner/.cache/pip/pool/d1/fc/c7/6cbbdf9c58b6591d28ed792bbd7944946d3f56042698e822a2869787f6 | [] |
StatMixedML/GPBoost | examples/python-guide/cross_validation_example.py | 786d8be61c5c28da0690e167af636a6d777bf9e1 | # coding: utf-8
# pylint: disable = invalid-name, C0111
import gpboost as gpb
import numpy as np
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
plt.style.use('ggplot')
#--------------------Cross validation for tree-boosting without GP or random effects----------------
print('Simulating data...')
# Simulate and create your dataset
def f1d(x):
"""Non-linear function for simulation"""
return (1.7 * (1 / (1 + np.exp(-(x - 0.5) * 20)) + 0.75 * x))
x = np.linspace(0, 1, 200, endpoint=True)
plt.plot(x, f1d(x), linewidth=2, color="r")
plt.title("Mean function")
plt.show()
def sim_data(n):
"""Function that simulates data. Two covariates of which only one has an effect"""
X = np.random.rand(n, 2)
# mean function plus noise
y = f1d(X[:, 0]) + np.random.normal(scale=0.1, size=n)
return ([X, y])
# Simulate data
n = 1000
data = sim_data(2 * n)
# create dataset for gpb.train
data_train = gpb.Dataset(data[0][0:n, :], data[1][0:n])
# specify your configurations as a dict
params = {
'objective': 'regression_l2',
'metric': {'l2', 'l1'},
'learning_rate': 0.1,
'max_depth': 6,
'min_data_in_leaf': 5,
'verbose': 0
}
print('Starting cross-validation...')
# do cross-validation
cvbst = gpb.cv(params=params, train_set=data_train,
num_boost_round=100, early_stopping_rounds=5,
nfold=2, verbose_eval=True, show_stdv=False, seed=1)
print("Best number of iterations: " + str(np.argmin(cvbst['l2-mean'])))
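# A possible follow-up step (a sketch, not part of the original example): retrain on
# the full training data using the iteration count selected by cross-validation, e.g.
# best_iter = int(np.argmin(cvbst['l2-mean']))
# bst = gpb.train(params=params, train_set=data_train, num_boost_round=best_iter)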
# --------------------Combine tree-boosting and grouped random effects model----------------
print('Simulating data...')
# Simulate data
def f1d(x):
"""Non-linear function for simulation"""
return (1.7 * (1 / (1 + np.exp(-(x - 0.5) * 20)) + 0.75 * x))
x = np.linspace(0, 1, 200, endpoint=True)
plt.figure("Mean function")
plt.plot(x, f1d(x), linewidth=2, color="r")
plt.title("Mean function")
plt.show()
n = 1000 # number of samples
np.random.seed(1)
X = np.random.rand(n, 2)
F = f1d(X[:, 0])
# Simulate grouped random effects
m = 25 # number of categories / levels for grouping variable
group = np.arange(n) # grouping variable
for i in range(m):
group[int(i * n / m):int((i + 1) * n / m)] = i
# incidence matrix relating grouped random effects to samples
Z1 = np.zeros((n, m))
for i in range(m):
Z1[np.where(group == i), i] = 1
sigma2_1 = 1 ** 2 # random effect variance
sigma2 = 0.1 ** 2 # error variance
b1 = np.sqrt(sigma2_1) * np.random.normal(size=m) # simulate random effects
eps = Z1.dot(b1)
xi = np.sqrt(sigma2) * np.random.normal(size=n) # simulate error term
y = F + eps + xi # observed data
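# In other words, the simulated response follows y = f1d(X[:, 0]) + Z1.dot(b1) + xi:
# a non-linear fixed effect, a grouped random effect with variance sigma2_1, and
# i.i.d. Gaussian noise with variance sigma2.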
# define GPModel
gp_model = gpb.GPModel(group_data=group)
gp_model.set_optim_params(params={"optimizer_cov": "fisher_scoring"})
# create dataset for gpb.train
data_train = gpb.Dataset(X, y)
# specify your configurations as a dict
params = {
'objective': 'regression_l2',
'learning_rate': 0.05,
'max_depth': 6,
'min_data_in_leaf': 5,
'verbose': 0
}
print('Starting cross-validation...')
# do cross-validation
cvbst = gpb.cv(params=params, train_set=data_train,
gp_model=gp_model, use_gp_model_for_validation=False,
num_boost_round=100, early_stopping_rounds=5,
nfold=2, verbose_eval=True, show_stdv=False, seed=1)
print("Best number of iterations: " + str(np.argmin(cvbst['l2-mean'])))
# Include random effect predictions for validation (observe the lower test error)
gp_model = gpb.GPModel(group_data=group)
print("Running cross validation for GPBoost model and use_gp_model_for_validation = TRUE")
cvbst = gpb.cv(params=params, train_set=data_train,
gp_model=gp_model, use_gp_model_for_validation=True,
num_boost_round=100, early_stopping_rounds=5,
               nfold=2, verbose_eval=True, show_stdv=False, seed=1)
print("Best number of iterations: " + str(np.argmin(cvbst['l2-mean'])))
cvbst.best_iteration
| [((176, 199), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (189, 199), True, 'import matplotlib.pyplot as plt\n'), ((492, 529), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(200)'], {'endpoint': '(True)'}), '(0, 1, 200, endpoint=True)\n', (503, 529), True, 'import numpy as np\n'), ((574, 600), 'matplotlib.pyplot.title', 'plt.title', (['"""Mean function"""'], {}), "('Mean function')\n", (583, 600), True, 'import matplotlib.pyplot as plt\n'), ((601, 611), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (609, 611), True, 'import matplotlib.pyplot as plt\n'), ((948, 990), 'gpboost.Dataset', 'gpb.Dataset', (['data[0][0:n, :]', 'data[1][0:n]'], {}), '(data[0][0:n, :], data[1][0:n])\n', (959, 990), True, 'import gpboost as gpb\n'), ((1266, 1416), 'gpboost.cv', 'gpb.cv', ([], {'params': 'params', 'train_set': 'data_train', 'num_boost_round': '(100)', 'early_stopping_rounds': '(5)', 'nfold': '(2)', 'verbose_eval': '(True)', 'show_stdv': '(False)', 'seed': '(1)'}), '(params=params, train_set=data_train, num_boost_round=100,\n early_stopping_rounds=5, nfold=2, verbose_eval=True, show_stdv=False,\n seed=1)\n', (1272, 1416), True, 'import gpboost as gpb\n'), ((1777, 1814), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(200)'], {'endpoint': '(True)'}), '(0, 1, 200, endpoint=True)\n', (1788, 1814), True, 'import numpy as np\n'), ((1815, 1842), 'matplotlib.pyplot.figure', 'plt.figure', (['"""Mean function"""'], {}), "('Mean function')\n", (1825, 1842), True, 'import matplotlib.pyplot as plt\n'), ((1887, 1913), 'matplotlib.pyplot.title', 'plt.title', (['"""Mean function"""'], {}), "('Mean function')\n", (1896, 1913), True, 'import matplotlib.pyplot as plt\n'), ((1914, 1924), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1922, 1924), True, 'import matplotlib.pyplot as plt\n'), ((1955, 1972), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (1969, 1972), True, 'import numpy as np\n'), ((1977, 1997), 'numpy.random.rand', 'np.random.rand', (['n', '(2)'], {}), '(n, 2)\n', (1991, 1997), True, 'import numpy as np\n'), ((2119, 2131), 'numpy.arange', 'np.arange', (['n'], {}), '(n)\n', (2128, 2131), True, 'import numpy as np\n'), ((2290, 2306), 'numpy.zeros', 'np.zeros', (['(n, m)'], {}), '((n, m))\n', (2298, 2306), True, 'import numpy as np\n'), ((2670, 2699), 'gpboost.GPModel', 'gpb.GPModel', ([], {'group_data': 'group'}), '(group_data=group)\n', (2681, 2699), True, 'import gpboost as gpb\n'), ((2814, 2831), 'gpboost.Dataset', 'gpb.Dataset', (['X', 'y'], {}), '(X, y)\n', (2825, 2831), True, 'import gpboost as gpb\n'), ((3079, 3287), 'gpboost.cv', 'gpb.cv', ([], {'params': 'params', 'train_set': 'data_train', 'gp_model': 'gp_model', 'use_gp_model_for_validation': '(False)', 'num_boost_round': '(100)', 'early_stopping_rounds': '(5)', 'nfold': '(2)', 'verbose_eval': '(True)', 'show_stdv': '(False)', 'seed': '(1)'}), '(params=params, train_set=data_train, gp_model=gp_model,\n use_gp_model_for_validation=False, num_boost_round=100,\n early_stopping_rounds=5, nfold=2, verbose_eval=True, show_stdv=False,\n seed=1)\n', (3085, 3287), True, 'import gpboost as gpb\n'), ((3487, 3516), 'gpboost.GPModel', 'gpb.GPModel', ([], {'group_data': 'group'}), '(group_data=group)\n', (3498, 3516), True, 'import gpboost as gpb\n'), ((3616, 3824), 'gpboost.cv', 'gpb.cv', ([], {'params': 'params', 'train_set': 'data_train', 'gp_model': 'gp_model', 'use_gp_model_for_validation': '(True)', 'num_boost_round': '(100)', 'early_stopping_rounds': '(5)', 'nfold': 
'(2)', 'verbose_eval': '(True)', 'show_stdv': 'Falsem', 'seed': '(1)'}), '(params=params, train_set=data_train, gp_model=gp_model,\n use_gp_model_for_validation=True, num_boost_round=100,\n early_stopping_rounds=5, nfold=2, verbose_eval=True, show_stdv=Falsem,\n seed=1)\n', (3622, 3824), True, 'import gpboost as gpb\n'), ((724, 744), 'numpy.random.rand', 'np.random.rand', (['n', '(2)'], {}), '(n, 2)\n', (738, 744), True, 'import numpy as np\n'), ((2447, 2464), 'numpy.sqrt', 'np.sqrt', (['sigma2_1'], {}), '(sigma2_1)\n', (2454, 2464), True, 'import numpy as np\n'), ((2467, 2491), 'numpy.random.normal', 'np.random.normal', ([], {'size': 'm'}), '(size=m)\n', (2483, 2491), True, 'import numpy as np\n'), ((2541, 2556), 'numpy.sqrt', 'np.sqrt', (['sigma2'], {}), '(sigma2)\n', (2548, 2556), True, 'import numpy as np\n'), ((2559, 2583), 'numpy.random.normal', 'np.random.normal', ([], {'size': 'n'}), '(size=n)\n', (2575, 2583), True, 'import numpy as np\n'), ((799, 834), 'numpy.random.normal', 'np.random.normal', ([], {'scale': '(0.1)', 'size': 'n'}), '(scale=0.1, size=n)\n', (815, 834), True, 'import numpy as np\n'), ((1481, 1508), 'numpy.argmin', 'np.argmin', (["cvbst['l2-mean']"], {}), "(cvbst['l2-mean'])\n", (1490, 1508), True, 'import numpy as np\n'), ((3363, 3390), 'numpy.argmin', 'np.argmin', (["cvbst['l2-mean']"], {}), "(cvbst['l2-mean'])\n", (3372, 3390), True, 'import numpy as np\n'), ((3900, 3927), 'numpy.argmin', 'np.argmin', (["cvbst['l2-mean']"], {}), "(cvbst['l2-mean'])\n", (3909, 3927), True, 'import numpy as np\n'), ((2333, 2353), 'numpy.where', 'np.where', (['(group == i)'], {}), '(group == i)\n', (2341, 2353), True, 'import numpy as np\n'), ((450, 473), 'numpy.exp', 'np.exp', (['(-(x - 0.5) * 20)'], {}), '(-(x - 0.5) * 20)\n', (456, 473), True, 'import numpy as np\n'), ((1735, 1758), 'numpy.exp', 'np.exp', (['(-(x - 0.5) * 20)'], {}), '(-(x - 0.5) * 20)\n', (1741, 1758), True, 'import numpy as np\n')] |
Florian-Sabonchi/synapse | synapse/rest/synapse/client/unsubscribe.py | c95b04bb0e719d3f5de1714b442f95a39c6e3634 | # Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from synapse.api.errors import StoreError
from synapse.http.server import DirectServeHtmlResource, respond_with_html_bytes
from synapse.http.servlet import parse_string
from synapse.http.site import SynapseRequest
if TYPE_CHECKING:
from synapse.server import HomeServer
class UnsubscribeResource(DirectServeHtmlResource):
"""
    To allow a pusher to be deleted by clicking a link (i.e. a GET request)
"""
    SUCCESS_HTML = b"<html><body>You have been unsubscribed</body></html>"
def __init__(self, hs: "HomeServer"):
super().__init__()
self.notifier = hs.get_notifier()
self.auth = hs.get_auth()
self.pusher_pool = hs.get_pusherpool()
self.macaroon_generator = hs.get_macaroon_generator()
async def _async_render_GET(self, request: SynapseRequest) -> None:
token = parse_string(request, "access_token", required=True)
app_id = parse_string(request, "app_id", required=True)
pushkey = parse_string(request, "pushkey", required=True)
user_id = self.macaroon_generator.verify_delete_pusher_token(
token, app_id, pushkey
)
try:
await self.pusher_pool.remove_pusher(
app_id=app_id, pushkey=pushkey, user_id=user_id
)
except StoreError as se:
if se.code != 404:
# This is fine: they're already unsubscribed
raise
self.notifier.on_new_replication_data()
respond_with_html_bytes(
request,
200,
UnsubscribeResource.SUCCESS_HTML,
)
| [((1465, 1517), 'synapse.http.servlet.parse_string', 'parse_string', (['request', '"""access_token"""'], {'required': '(True)'}), "(request, 'access_token', required=True)\n", (1477, 1517), False, 'from synapse.http.servlet import parse_string\n'), ((1535, 1581), 'synapse.http.servlet.parse_string', 'parse_string', (['request', '"""app_id"""'], {'required': '(True)'}), "(request, 'app_id', required=True)\n", (1547, 1581), False, 'from synapse.http.servlet import parse_string\n'), ((1600, 1647), 'synapse.http.servlet.parse_string', 'parse_string', (['request', '"""pushkey"""'], {'required': '(True)'}), "(request, 'pushkey', required=True)\n", (1612, 1647), False, 'from synapse.http.servlet import parse_string\n'), ((2111, 2182), 'synapse.http.server.respond_with_html_bytes', 'respond_with_html_bytes', (['request', '(200)', 'UnsubscribeResource.SUCCESS_HTML'], {}), '(request, 200, UnsubscribeResource.SUCCESS_HTML)\n', (2134, 2182), False, 'from synapse.http.server import DirectServeHtmlResource, respond_with_html_bytes\n')] |
ravihammond/hanabi-convention-adaptation | pyhanabi/act_group.py | 5dafa91742de8e8d5810e8213e0e2771818b2f54 | import set_path
import sys
import torch
set_path.append_sys_path()
import rela
import hanalearn
import utils
assert rela.__file__.endswith(".so")
assert hanalearn.__file__.endswith(".so")
class ActGroup:
def __init__(
self,
devices,
agent,
partner_weight,
seed,
num_thread,
num_game_per_thread,
num_player,
explore_eps,
trinary,
replay_buffer,
max_len,
gamma,
convention,
convention_act_override,
):
self.devices = devices.split(",")
self.seed = seed
self.num_thread = num_thread
self.num_player = num_player
self.num_game_per_thread = num_game_per_thread
self.explore_eps = explore_eps
self.trinary = trinary
self.replay_buffer = replay_buffer
self.max_len = max_len
self.gamma = gamma
self.load_partner_model(partner_weight)
self.model_runners = []
for dev in self.devices:
runner = rela.BatchRunner(agent.clone(dev), dev)
runner.add_method("act", 5000)
runner.add_method("compute_priority", 100)
runner.add_method("compute_target", 5000)
partner_runner = rela.BatchRunner(
self._partner_agent.clone(dev), dev)
partner_runner.add_method("act", 5000)
self.model_runners.append([runner, partner_runner])
self.num_runners = len(self.model_runners)
self.convention = convention
self.convention_act_override = convention_act_override
self.create_r2d2_actors()
def load_partner_model(self, weight_file):
try:
state_dict = torch.load(weight_file)
except:
sys.exit(f"weight_file {weight_file} can't be loaded")
overwrite = {}
overwrite["vdn"] = False
overwrite["device"] = "cuda:0"
overwrite["boltzmann_act"] = False
if "fc_v.weight" in state_dict.keys():
agent, cfg = utils.load_agent(weight_file, overwrite)
self._partner_sad = cfg["sad"] if "sad" in cfg else cfg["greedy_extra"]
self._partner_hide_action = bool(cfg["hide_action"])
else:
agent = utils.load_supervised_agent(weight_file, "cuda:0")
self._partner_sad = False
self._partner_hide_action = False
agent.train(False)
self._partner_agent = agent
def create_r2d2_actors(self):
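        # Build one pair of actors per game per thread: seat 0 is driven by the
        # learning agent's runner and writes its experience to the replay buffer,
        # seat 1 is the fixed pre-trained partner. Each actor is then handed
        # references to its partners (with its own slot set to None).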
convention_act_override = [0, 0]
convention_sender = [1, 0]
if self.convention_act_override:
convention_act_override = [0, 1]
convention_sender = [1, 0]
actors = []
for i in range(self.num_thread):
thread_actors = []
for j in range(self.num_game_per_thread):
game_actors = []
actor = hanalearn.R2D2Actor(
self.model_runners[i % self.num_runners][0],
self.seed,
self.num_player,
0,
self.explore_eps,
[0], # boltzmann_act
False,
0, # sad
0, # shuffle_color
0, # hide_action
self.trinary,
self.replay_buffer,
1, # multi-step
self.max_len,
self.gamma,
self.convention,
1,
0,
True, # convention_fict_act_override
True, # use_experience
)
game_actors.append(actor)
self.seed += 1
actor = hanalearn.R2D2Actor(
self.model_runners[i % self.num_runners][1], # runner
self.num_player, # numPlayer
1, # playerIdx
False, # vdn
self._partner_sad, # sad
self._partner_hide_action, # hideAction
self.convention, # convention
0, # conventionSender
1) # conventionOverride
game_actors.append(actor)
for k in range(self.num_player):
partners = game_actors[:]
partners[k] = None
game_actors[k].set_partners(partners)
thread_actors.append(game_actors)
actors.append(thread_actors)
self.actors = actors
print("ActGroup created")
def start(self):
for runners in self.model_runners:
for runner in runners:
runner.start()
def update_model(self, agent):
for runner in self.model_runners:
runner[0].update_model(agent)
| [((41, 67), 'set_path.append_sys_path', 'set_path.append_sys_path', ([], {}), '()\n', (65, 67), False, 'import set_path\n'), ((119, 148), 'rela.__file__.endswith', 'rela.__file__.endswith', (['""".so"""'], {}), "('.so')\n", (141, 148), False, 'import rela\n'), ((156, 190), 'hanalearn.__file__.endswith', 'hanalearn.__file__.endswith', (['""".so"""'], {}), "('.so')\n", (183, 190), False, 'import hanalearn\n'), ((1722, 1745), 'torch.load', 'torch.load', (['weight_file'], {}), '(weight_file)\n', (1732, 1745), False, 'import torch\n'), ((2040, 2080), 'utils.load_agent', 'utils.load_agent', (['weight_file', 'overwrite'], {}), '(weight_file, overwrite)\n', (2056, 2080), False, 'import utils\n'), ((2264, 2314), 'utils.load_supervised_agent', 'utils.load_supervised_agent', (['weight_file', '"""cuda:0"""'], {}), "(weight_file, 'cuda:0')\n", (2291, 2314), False, 'import utils\n'), ((1774, 1828), 'sys.exit', 'sys.exit', (['f"""weight_file {weight_file} can\'t be loaded"""'], {}), '(f"weight_file {weight_file} can\'t be loaded")\n', (1782, 1828), False, 'import sys\n'), ((2903, 3147), 'hanalearn.R2D2Actor', 'hanalearn.R2D2Actor', (['self.model_runners[i % self.num_runners][0]', 'self.seed', 'self.num_player', '(0)', 'self.explore_eps', '[0]', '(False)', '(0)', '(0)', '(0)', 'self.trinary', 'self.replay_buffer', '(1)', 'self.max_len', 'self.gamma', 'self.convention', '(1)', '(0)', '(True)', '(True)'], {}), '(self.model_runners[i % self.num_runners][0], self.seed,\n self.num_player, 0, self.explore_eps, [0], False, 0, 0, 0, self.trinary,\n self.replay_buffer, 1, self.max_len, self.gamma, self.convention, 1, 0,\n True, True)\n', (2922, 3147), False, 'import hanalearn\n'), ((3766, 3935), 'hanalearn.R2D2Actor', 'hanalearn.R2D2Actor', (['self.model_runners[i % self.num_runners][1]', 'self.num_player', '(1)', '(False)', 'self._partner_sad', 'self._partner_hide_action', 'self.convention', '(0)', '(1)'], {}), '(self.model_runners[i % self.num_runners][1], self.\n num_player, 1, False, self._partner_sad, self._partner_hide_action,\n self.convention, 0, 1)\n', (3785, 3935), False, 'import hanalearn\n')] |
vanHoek-dgnm/CARBON-DISC | A_source_code/carbon/code/make_mask.py | 3ecd5f4efba5e032d43679ee977064d6b25154a9 | # ******************************************************
## Copyright 2019, PBL Netherlands Environmental Assessment Agency and Utrecht University.
## Reuse permitted under Gnu Public License, GPL v3.
# ******************************************************
from netCDF4 import Dataset
import numpy as np
import general_path
import accuflux
import ascraster
import get_surrounding_cells
import make_np_grid
def do(mask_asc_fn, mask_id, dum_asc, logical = "EQ", mask_type='np_grid'):
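    # Build a mask from the ASCII grid mask_asc_fn: cells whose value compares to
    # mask_id under `logical` (e.g. "EQ") are selected, and the selection is
    # returned as (row, col) pairs ("rowcol"), flat indices ("index"), coordinates
    # ("latlon"), or a boolean numpy grid in which selected cells are False
    # ("np_grid", the default).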
dum_mask = ascraster.create_mask(mask_asc_fn, mask_id, logical = logical, numtype=int)
mask=[]
if mask_type=="rowcol":
for i in dum_mask:
mask.append(dum_asc.get_row_col_from_index(i))
elif mask_type=="index":
for i in dum_mask:
mask.append(i)
elif mask_type=="latlon":
for i in dum_mask:
mask.append(dum_asc.get_coord_from_index(i))
elif mask_type=="np_grid":
mask = np.zeros((dum_asc.nrows, dum_asc.ncols), dtype=bool)
mask[:,:] = True
for i in dum_mask:
row, col = dum_asc.get_row_col_from_index(i)
mask[row,col]=False
return mask
| [((503, 576), 'ascraster.create_mask', 'ascraster.create_mask', (['mask_asc_fn', 'mask_id'], {'logical': 'logical', 'numtype': 'int'}), '(mask_asc_fn, mask_id, logical=logical, numtype=int)\n', (524, 576), False, 'import ascraster\n'), ((930, 982), 'numpy.zeros', 'np.zeros', (['(dum_asc.nrows, dum_asc.ncols)'], {'dtype': 'bool'}), '((dum_asc.nrows, dum_asc.ncols), dtype=bool)\n', (938, 982), True, 'import numpy as np\n')] |
gitFloyd/AAI-Project-2 | Code/Dataset.py | c6bb4d389248c3385e58a0c399343322a6dd887f | from io import TextIOWrapper
import math
from typing import TypeVar
import random
import os
from Settings import Settings
class Dataset:
DataT = TypeVar('DataT')
WIN_NL = "\r\n"
LINUX_NL = "\n"
def __init__(self, path:str, filename:str, newline:str = WIN_NL) -> None:
self.path_ = path
self.filename_ = filename
self.loaded_ = False
self.parsed_ = False
self.data_ = None
self.nl = newline
self.classes_ = set()
self.attributes_ = []
self.types_ = []
self.data_ = []
def Data(self) -> list:
return self.data_
def Attributes(self) -> list:
return self.attributes_
def Types(self) -> list:
return self.types_
def Classes(self) -> list:
return self.classes_
def Load(self, reload:bool = False) -> DataT:
if not self.loaded_ or reload:
self.file_ = open(os.sep.join([self.path_, self.filename_]))
self.loaded_ = True
# If we reload, then we want to reparse as well.
return self.Parse_(reload)
def Parse_(self, reparse:bool = False) -> DataT:
if not self.loaded_:
# Silently return instead of raising an exception because
# this method is not intended to be used outside of the
# class. Although, it can be used that way if needed.
return
if not self.parsed_ or reparse:
self.Parse_Hook_(self.file_.read())
return self.data_
def Parse_Hook_(self, data:str) -> None:
self.data_ = data
def __del__(self):
if self.loaded_:
self.file_.close()
class ArffRow:
ATTR_LABEL = '@ATTRIBUTE ' # need the space at the end here
DATA_LABEL = '@DATA'
ATTR_LEN = len(ATTR_LABEL)
DATA_LEN = len(DATA_LABEL)
Attributes = []
Types = []
Data = []
Classes = set()
IsCollecting_ = False
@classmethod
def Reset(cls):
cls.Attributes = []
cls.Types = []
cls.Data = []
cls.Classes = set()
cls.IsCollecting_ = False
def __init__(self, line:str, nl:str) -> None:
self.line_ = line
self.len_ = len(line)
self.nl_ = nl
def Len(self) -> str:
return self.len_
def HasAttributeLabel(self) -> bool:
return self.len_ >= ArffRow.ATTR_LEN and self.line_[0:ArffRow.ATTR_LEN] == ArffRow.ATTR_LABEL
def HasDataLabel(self) -> bool:
return self.len_ >= ArffRow.DATA_LEN and self.line_[0:ArffRow.DATA_LEN] == ArffRow.DATA_LABEL
def GetAttributeData(self) -> tuple[str, str]:
namePosition = 0
for (i, char) in enumerate(self.line_[ArffRow.ATTR_LEN:]):
if char == '\t':
namePosition = i + ArffRow.ATTR_LEN
break
return (self.line_[ArffRow.ATTR_LEN:namePosition], self.line_[namePosition + 1:])
def Parse(self):
if ArffRow.IsCollecting_ and self.len_ > 1:
ArffRow.Data.append(self.line_.split(','))
ArffRow.Classes.add(ArffRow.Data[-1][-1])
elif self.HasDataLabel():
ArffRow.IsCollecting_ = True
elif self.HasAttributeLabel():
attrData = self.GetAttributeData()
ArffRow.Attributes.append(attrData[0])
ArffRow.Types.append(attrData[1])
class ArffDataset(Dataset):
# ARFF (Attribute-Relation File Format)
#def __init__(self, path:str, filename:str, newline:str = Dataset.WIN_NL) -> None:
# super().__init__(path, filename, newline)
#
# self.parser_ = {
# 'attributesLoaded': False,
# }
def Parse_Hook_(self, data:str) -> None:
ArffRow.Reset()
rows = [ArffRow(line, self.nl) for line in data.split(self.nl)]
for row in rows:
row.Parse()
for attribute in ArffRow.Attributes:
self.attributes_.append(attribute)
for typeName in ArffRow.Types:
self.types_.append(typeName)
for datum in ArffRow.Data:
self.data_.append(datum)
self.classes_ = self.classes_.union(ArffRow.Classes)
classes = list(self.classes_)
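        # Post-process the parsed rows: the class column is replaced by a one-hot
        # vector over the observed classes, REAL/INTEGER attributes are divided by
        # the value with the largest magnitude seen in their column, and the rows
        # are then sorted lexicographically.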
attribute_maxes = {}
for row in self.data_:
classIndex = classes.index(row[-1])
row[-1] = [1 if i == classIndex else 0 for (i, value) in enumerate(classes)]
for i in range(len(row)):
if self.types_[i] == 'REAL':
row[i] = float(row[i])
elif self.types_[i] == 'INTEGER':
row[i] = int(row[i])
else:
continue
if i not in attribute_maxes:
attribute_maxes[i] = 0
if abs(row[i]) > attribute_maxes[i]:
attribute_maxes[i] = row[i]
for i in range(len(row)):
if self.types_[i] == 'REAL' or self.types_[i] == 'INTEGER':
row[i] = row[i] / attribute_maxes[i]
self.data_ = self.RowSort(self.data_)
def LexOrder(self, item1, item2):
num_fields = len(item1)
for i in range(num_fields):
if item1[i] != item2[i]:
if item1[i] < item2[i]:
return -1
else:
return 1
return 0
def RowSort(self, rows):
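        # Recursive merge sort over the rows, using LexOrder for comparisons;
        # equal rows are emitted left half first, so the ordering is stable.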
rows_len = len(rows)
if rows_len > 2:
result1 = self.RowSort(rows[0: math.floor(rows_len * 0.5)])
result2 = self.RowSort(rows[math.floor(rows_len * 0.5):])
sorted_rows = []
item1 = None
item2 = None
while len(result1) > 0 or len(result2) > 0:
if len(result1) > 0 and len(result2) > 0 and item1 == None and item2 == None:
item1 = result1.pop(0)
item2 = result2.pop(0)
elif len(result1) > 0 and item1 == None:
item1 = result1.pop(0)
elif len(result2) > 0 and item2 == None:
item2 = result2.pop(0)
order = 0
if item1 == None and item2 != None:
order = 1
elif item1 != None and item2 == None:
order = -1
else:
order = self.LexOrder(item1, item2)
if order == -1:
sorted_rows.append(item1)
item1 = None
elif order == 1:
sorted_rows.append(item2)
item2 = None
else:
sorted_rows.append(item1)
sorted_rows.append(item2)
item1 = None
item2 = None
if item1 != None:
sorted_rows.append(item1)
if item2 != None:
sorted_rows.append(item2)
return sorted_rows
elif rows_len == 1:
return rows
else:
order = self.LexOrder(rows[0], rows[1])
if order == 1:
rows.reverse()
return rows
def Fetch(self, *fields:list[str], limit:int = None, offset:int = 0):
cols = []
data = []
# iterate over the field names and find the column indices
# for names that match the requested field names
for (i, field) in enumerate(fields):
try:
cols.append(self.attributes_.index(field))
except ValueError:
pass
end = limit
if limit != None:
end += offset
for row in self.data_[offset:end]:
data.append([row[i] for i in cols])
return data
def FetchFilter_(self, i, value):
# Not used any more
#if self.types_[i] == 'REAL':
# return float(value)
#elif self.types_[i] == 'INTEGER':
# return int(value)
#else:
# return value
pass
def Size(self):
length = len(self.data_)
if length == 0:
return (len(self.data_), None)
return (len(self.data_), len(self.data_[0]))
def Shuffle(self):
random.shuffle(self.data_)
class Pistachio(ArffDataset):
SettingsKey = 'PistachioDataset'
def __init__(self, newline:str = Dataset.WIN_NL) -> None:
settings = Settings.Data()
super().__init__(
path = settings[Pistachio.SettingsKey]['Path'],
filename = settings[Pistachio.SettingsKey]['FileName'],
newline = newline
)
#pist = Pistachio(Dataset.LINUX_NL)
#
#for row in pist.Load()[0:10]:
# print(row)
| [((157, 173), 'typing.TypeVar', 'TypeVar', (['"""DataT"""'], {}), "('DataT')\n", (164, 173), False, 'from typing import TypeVar\n'), ((6860, 6886), 'random.shuffle', 'random.shuffle', (['self.data_'], {}), '(self.data_)\n', (6874, 6886), False, 'import random\n'), ((7037, 7052), 'Settings.Settings.Data', 'Settings.Data', ([], {}), '()\n', (7050, 7052), False, 'from Settings import Settings\n'), ((841, 882), 'os.sep.join', 'os.sep.join', (['[self.path_, self.filename_]'], {}), '([self.path_, self.filename_])\n', (852, 882), False, 'import os\n'), ((4738, 4764), 'math.floor', 'math.floor', (['(rows_len * 0.5)'], {}), '(rows_len * 0.5)\n', (4748, 4764), False, 'import math\n'), ((4799, 4825), 'math.floor', 'math.floor', (['(rows_len * 0.5)'], {}), '(rows_len * 0.5)\n', (4809, 4825), False, 'import math\n')] |
wahuneke/django-stripe-payments | payments/models.py | 5d4b26b025fc3fa75d3a0aeaafd67fb825325c94 | import datetime
import decimal
import json
import traceback
from django.conf import settings
from django.core.mail import EmailMessage
from django.db import models
from django.utils import timezone
from django.template.loader import render_to_string
from django.contrib.sites.models import Site
import stripe
from jsonfield.fields import JSONField
from .managers import CustomerManager, ChargeManager, TransferManager
from .settings import (
DEFAULT_PLAN,
INVOICE_FROM_EMAIL,
PAYMENTS_PLANS,
plan_from_stripe_id,
SEND_EMAIL_RECEIPTS,
TRIAL_PERIOD_FOR_USER_CALLBACK,
PLAN_QUANTITY_CALLBACK
)
from .signals import (
cancelled,
card_changed,
subscription_made,
webhook_processing_error,
WEBHOOK_SIGNALS,
)
from .utils import convert_tstamp
stripe.api_key = settings.STRIPE_SECRET_KEY
stripe.api_version = getattr(settings, "STRIPE_API_VERSION", "2012-11-07")
class StripeObject(models.Model):
stripe_id = models.CharField(max_length=255, unique=True)
created_at = models.DateTimeField(default=timezone.now)
class Meta: # pylint: disable=E0012,C1001
abstract = True
class EventProcessingException(models.Model):
event = models.ForeignKey("Event", null=True)
data = models.TextField()
message = models.CharField(max_length=500)
traceback = models.TextField()
created_at = models.DateTimeField(default=timezone.now)
@classmethod
def log(cls, data, exception, event):
cls.objects.create(
event=event,
data=data or "",
message=str(exception),
traceback=traceback.format_exc()
)
def __unicode__(self):
return u"<%s, pk=%s, Event=%s>" % (self.message, self.pk, self.event)
class Event(StripeObject):
kind = models.CharField(max_length=250)
livemode = models.BooleanField()
customer = models.ForeignKey("Customer", null=True)
webhook_message = JSONField()
validated_message = JSONField(null=True)
valid = models.NullBooleanField(null=True)
processed = models.BooleanField(default=False)
stripe_connect = models.ForeignKey('ConnectUser', null=True)
@property
def message(self):
return self.validated_message
def __unicode__(self):
return "%s - %s" % (self.kind, self.stripe_id)
def link_customer(self):
cus_id = None
customer_crud_events = [
"customer.created",
"customer.updated",
"customer.deleted"
]
if self.kind in customer_crud_events:
cus_id = self.message["data"]["object"]["id"]
else:
cus_id = self.message["data"]["object"].get("customer", None)
if cus_id is not None:
try:
self.customer = Customer.objects.get(stripe_id=cus_id)
self.save()
except Customer.DoesNotExist:
pass
def link_stripe_connect(self):
connect_id = self.message["data"]["object"].get("user_id", None)
if connect_id is not None:
try:
self.stripe_connect = ConnectUser.objects.get(account_id=connect_id)
self.save()
except ConnectUser.DoesNotExist:
pass
def validate(self):
evt = stripe.Event.retrieve(self.stripe_id)
self.validated_message = json.loads(
json.dumps(
evt.to_dict(),
sort_keys=True,
cls=stripe.StripeObjectEncoder
)
)
if self.webhook_message["data"] == self.validated_message["data"]:
self.valid = True
else:
self.valid = False
self.save()
    def process(self):
        """
        "account.updated",
        "account.application.deauthorized",
        "charge.succeeded",
        "charge.failed",
        "charge.refunded",
        "charge.dispute.created",
        "charge.dispute.updated",
        "charge.dispute.closed",
"customer.created",
"customer.updated",
"customer.deleted",
"customer.subscription.created",
"customer.subscription.updated",
"customer.subscription.deleted",
"customer.subscription.trial_will_end",
"customer.discount.created",
"customer.discount.updated",
"customer.discount.deleted",
"invoice.created",
"invoice.updated",
"invoice.payment_succeeded",
"invoice.payment_failed",
"invoiceitem.created",
"invoiceitem.updated",
"invoiceitem.deleted",
"plan.created",
"plan.updated",
"plan.deleted",
"coupon.created",
"coupon.updated",
"coupon.deleted",
"transfer.created",
"transfer.updated",
"transfer.failed",
"ping"
"""
if self.valid and not self.processed:
try:
if not self.kind.startswith("plan.") and \
not self.kind.startswith("transfer."):
self.link_customer()
if not self.stripe_connect:
self.link_stripe_connect()
if self.kind.startswith("invoice."):
Invoice.handle_event(self)
elif self.kind.startswith("charge."):
if not self.customer:
self.link_customer()
self.customer.record_charge(
self.message["data"]["object"]["id"]
)
elif self.kind.startswith("transfer."):
Transfer.process_transfer(
self,
self.message["data"]["object"]
)
elif self.kind.startswith("customer.subscription."):
if not self.customer:
self.link_customer()
if self.customer:
self.customer.sync_current_subscription()
elif self.kind == "customer.deleted":
if not self.customer:
self.link_customer()
self.customer.purge()
self.send_signal()
self.processed = True
self.save()
except stripe.StripeError, e:
EventProcessingException.log(
data=e.http_body,
exception=e,
event=self
)
webhook_processing_error.send(
sender=Event,
data=e.http_body,
exception=e
)
def send_signal(self):
signal = WEBHOOK_SIGNALS.get(self.kind)
if signal:
return signal.send(sender=Event, event=self)
class Transfer(StripeObject):
# pylint: disable=C0301
event = models.ForeignKey(Event, related_name="transfers")
amount = models.DecimalField(decimal_places=2, max_digits=9)
status = models.CharField(max_length=25)
date = models.DateTimeField()
description = models.TextField(null=True, blank=True)
adjustment_count = models.IntegerField(null=True)
adjustment_fees = models.DecimalField(decimal_places=2, max_digits=7, null=True)
adjustment_gross = models.DecimalField(decimal_places=2, max_digits=7, null=True)
charge_count = models.IntegerField(null=True)
charge_fees = models.DecimalField(decimal_places=2, max_digits=7, null=True)
charge_gross = models.DecimalField(decimal_places=2, max_digits=9, null=True)
collected_fee_count = models.IntegerField(null=True)
collected_fee_gross = models.DecimalField(decimal_places=2, max_digits=7, null=True)
net = models.DecimalField(decimal_places=2, max_digits=9, null=True)
refund_count = models.IntegerField(null=True)
refund_fees = models.DecimalField(decimal_places=2, max_digits=7, null=True)
refund_gross = models.DecimalField(decimal_places=2, max_digits=7, null=True)
validation_count = models.IntegerField(null=True)
validation_fees = models.DecimalField(decimal_places=2, max_digits=7, null=True)
stripe_connect = models.ForeignKey('ConnectUser', null=True)
objects = TransferManager()
def update_status(self):
self.status = stripe.Transfer.retrieve(self.stripe_id).status
self.save()
@classmethod
def process_transfer(cls, event, transfer):
defaults = {
"amount": transfer["amount"] / decimal.Decimal("100"),
"status": transfer["status"],
"date": convert_tstamp(transfer, "date"),
"description": transfer.get("description", "")
}
summary = transfer.get("summary")
if summary:
defaults.update({
"adjustment_count": summary.get("adjustment_count"),
"adjustment_fees": summary.get("adjustment_fees"),
"adjustment_gross": summary.get("adjustment_gross"),
"charge_count": summary.get("charge_count"),
"charge_fees": summary.get("charge_fees"),
"charge_gross": summary.get("charge_gross"),
"collected_fee_count": summary.get("collected_fee_count"),
"collected_fee_gross": summary.get("collected_fee_gross"),
"refund_count": summary.get("refund_count"),
"refund_fees": summary.get("refund_fees"),
"refund_gross": summary.get("refund_gross"),
"validation_count": summary.get("validation_count"),
"validation_fees": summary.get("validation_fees"),
"net": summary.get("net") / decimal.Decimal("100")
})
for field in defaults:
if field.endswith("fees") or field.endswith("gross"):
defaults[field] = defaults[field] / decimal.Decimal("100")
if event.kind == "transfer.paid":
defaults.update({"event": event})
obj, created = Transfer.objects.get_or_create(
stripe_id=transfer["id"],
defaults=defaults
)
else:
obj, created = Transfer.objects.get_or_create(
stripe_id=transfer["id"],
event=event,
defaults=defaults
)
if event.stripe_connect:
obj.stripe_connect = event.stripe_connect
if created and summary:
for fee in summary.get("charge_fee_details", []):
obj.charge_fee_details.create(
amount=fee["amount"] / decimal.Decimal("100"),
application=fee.get("application", ""),
description=fee.get("description", ""),
kind=fee["type"]
)
else:
obj.status = transfer["status"]
obj.save()
if event.kind == "transfer.updated":
obj.update_status()
class TransferChargeFee(models.Model):
transfer = models.ForeignKey(Transfer, related_name="charge_fee_details")
amount = models.DecimalField(decimal_places=2, max_digits=7)
application = models.TextField(null=True, blank=True)
description = models.TextField(null=True, blank=True)
kind = models.CharField(max_length=150)
created_at = models.DateTimeField(default=timezone.now)
class Customer(StripeObject):
user = models.OneToOneField(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True
)
card_fingerprint = models.CharField(max_length=200, blank=True)
card_last_4 = models.CharField(max_length=4, blank=True)
card_kind = models.CharField(max_length=50, blank=True)
date_purged = models.DateTimeField(null=True, editable=False)
objects = CustomerManager()
def __unicode__(self):
return unicode(self.user)
@property
def stripe_customer(self):
return stripe.Customer.retrieve(self.stripe_id)
def purge(self):
try:
self.stripe_customer.delete()
except stripe.InvalidRequestError as e:
if e.message.startswith("No such customer:"):
# The exception was thrown because the customer was already
# deleted on the stripe side, ignore the exception
pass
else:
# The exception was raised for another reason, re-raise it
raise
self.user = None
self.card_fingerprint = ""
self.card_last_4 = ""
self.card_kind = ""
self.date_purged = timezone.now()
self.save()
def delete(self, using=None):
# Only way to delete a customer is to use SQL
self.purge()
def can_charge(self):
return self.card_fingerprint and \
self.card_last_4 and \
self.card_kind and \
self.date_purged is None
def has_active_subscription(self):
try:
return self.current_subscription.is_valid()
except CurrentSubscription.DoesNotExist:
return False
def cancel(self, at_period_end=True):
try:
current = self.current_subscription
except CurrentSubscription.DoesNotExist:
return
sub = self.stripe_customer.cancel_subscription(
at_period_end=at_period_end
)
current.status = sub.status
current.cancel_at_period_end = sub.cancel_at_period_end
current.current_period_end = convert_tstamp(sub, "current_period_end")
current.save()
cancelled.send(sender=self, stripe_response=sub)
@classmethod
def create(cls, user, card=None, plan=None, charge_immediately=True):
if card and plan:
plan = PAYMENTS_PLANS[plan]["stripe_plan_id"]
elif DEFAULT_PLAN:
plan = PAYMENTS_PLANS[DEFAULT_PLAN]["stripe_plan_id"]
else:
plan = None
trial_end = None
if TRIAL_PERIOD_FOR_USER_CALLBACK and plan:
trial_days = TRIAL_PERIOD_FOR_USER_CALLBACK(user)
trial_end = datetime.datetime.utcnow() + datetime.timedelta(
days=trial_days
)
stripe_customer = stripe.Customer.create(
email=user.email,
card=card,
plan=plan or DEFAULT_PLAN,
trial_end=trial_end
)
if stripe_customer.active_card:
cus = cls.objects.create(
user=user,
stripe_id=stripe_customer.id,
card_fingerprint=stripe_customer.active_card.fingerprint,
card_last_4=stripe_customer.active_card.last4,
card_kind=stripe_customer.active_card.type
)
else:
cus = cls.objects.create(
user=user,
stripe_id=stripe_customer.id,
)
if plan:
if stripe_customer.subscription:
cus.sync_current_subscription(cu=stripe_customer)
if charge_immediately:
cus.send_invoice()
return cus
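    # Illustrative call site (plan name and token variable are hypothetical):
    #   Customer.create(request.user, card=stripe_js_token, plan="monthly")
    # creates the Stripe customer, starts the plan's subscription and, with
    # charge_immediately=True (the default), invoices it right away.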
def update_card(self, token):
cu = self.stripe_customer
cu.card = token
cu.save()
self.save_card(cu)
def save_card(self, cu=None):
cu = cu or self.stripe_customer
active_card = cu.active_card
self.card_fingerprint = active_card.fingerprint
self.card_last_4 = active_card.last4
self.card_kind = active_card.type
self.save()
card_changed.send(sender=self, stripe_response=cu)
def retry_unpaid_invoices(self):
self.sync_invoices()
for inv in self.invoices.filter(paid=False, closed=False):
try:
inv.retry() # Always retry unpaid invoices
except stripe.InvalidRequestError, error:
if error.message != "Invoice is already paid":
raise error
def send_invoice(self):
try:
invoice = stripe.Invoice.create(customer=self.stripe_id)
if invoice.amount_due > 0:
invoice.pay()
return True
except stripe.InvalidRequestError:
return False # There was nothing to invoice
def sync(self, cu=None):
cu = cu or self.stripe_customer
updated = False
if hasattr(cu, "active_card") and cu.active_card:
# Test to make sure the card has changed, otherwise do not update it
# (i.e. refrain from sending any signals)
if (self.card_last_4 != cu.active_card.last4 or
self.card_fingerprint != cu.active_card.fingerprint or
self.card_kind != cu.active_card.type):
updated = True
self.card_last_4 = cu.active_card.last4
self.card_fingerprint = cu.active_card.fingerprint
self.card_kind = cu.active_card.type
else:
updated = True
self.card_fingerprint = ""
self.card_last_4 = ""
self.card_kind = ""
if updated:
self.save()
card_changed.send(sender=self, stripe_response=cu)
def sync_invoices(self, cu=None):
cu = cu or self.stripe_customer
for invoice in cu.invoices().data:
Invoice.sync_from_stripe_data(invoice, send_receipt=False)
def sync_charges(self, cu=None):
cu = cu or self.stripe_customer
for charge in cu.charges().data:
self.record_charge(charge.id)
def sync_current_subscription(self, cu=None):
cu = cu or self.stripe_customer
sub = getattr(cu, "subscription", None)
if sub is None:
try:
self.current_subscription.delete()
except CurrentSubscription.DoesNotExist:
pass
else:
try:
sub_obj = self.current_subscription
sub_obj.plan = plan_from_stripe_id(sub.plan.id)
sub_obj.current_period_start = convert_tstamp(
sub.current_period_start
)
sub_obj.current_period_end = convert_tstamp(
sub.current_period_end
)
sub_obj.amount = (sub.plan.amount / decimal.Decimal("100"))
sub_obj.status = sub.status
sub_obj.cancel_at_period_end = sub.cancel_at_period_end
sub_obj.start = convert_tstamp(sub.start)
sub_obj.quantity = sub.quantity
sub_obj.save()
except CurrentSubscription.DoesNotExist:
sub_obj = CurrentSubscription.objects.create(
customer=self,
plan=plan_from_stripe_id(sub.plan.id),
current_period_start=convert_tstamp(
sub.current_period_start
),
current_period_end=convert_tstamp(
sub.current_period_end
),
amount=(sub.plan.amount / decimal.Decimal("100")),
status=sub.status,
cancel_at_period_end=sub.cancel_at_period_end,
start=convert_tstamp(sub.start),
quantity=sub.quantity
)
if sub.trial_start and sub.trial_end:
sub_obj.trial_start = convert_tstamp(sub.trial_start)
sub_obj.trial_end = convert_tstamp(sub.trial_end)
sub_obj.save()
return sub_obj
def update_plan_quantity(self, quantity, charge_immediately=False):
self.subscribe(
plan=plan_from_stripe_id(
self.stripe_customer.subscription.plan.id
),
quantity=quantity,
charge_immediately=charge_immediately
)
def subscribe(self, plan, quantity=None, trial_days=None,
charge_immediately=True, token=None, coupon=None):
if quantity is None:
if PLAN_QUANTITY_CALLBACK is not None:
quantity = PLAN_QUANTITY_CALLBACK(self)
else:
quantity = 1
cu = self.stripe_customer
subscription_params = {}
if trial_days:
subscription_params["trial_end"] = \
datetime.datetime.utcnow() + datetime.timedelta(days=trial_days)
if token:
subscription_params["card"] = token
subscription_params["plan"] = PAYMENTS_PLANS[plan]["stripe_plan_id"]
subscription_params["quantity"] = quantity
subscription_params["coupon"] = coupon
resp = cu.update_subscription(**subscription_params)
if token:
# Refetch the stripe customer so we have the updated card info
cu = self.stripe_customer
self.save_card(cu)
self.sync_current_subscription(cu)
if charge_immediately:
self.send_invoice()
subscription_made.send(sender=self, plan=plan, stripe_response=resp)
return resp
def charge(self, amount, currency="usd", description=None,
send_receipt=True, application_fee=None,
stripe_connect_user=None):
"""
This method expects `amount` and 'application_fee' to be a Decimal type representing a
dollar amount. It will be converted to cents so any decimals beyond
two will be ignored.
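
        For example, charge(decimal.Decimal("10.00")) results in a 1000-cent
        ($10.00) Stripe charge, and decimal.Decimal("0.506") is truncated to 50
        cents by the int() conversion.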
"""
if not isinstance(amount, decimal.Decimal) or (not application_fee is None and not isinstance(application_fee, decimal.Decimal)):
raise ValueError(
"You must supply a decimal value representing dollars for amount and for application_fee (if supplied)."
)
charge_args = {
'amount': int(amount * 100),
'currency': currency,
'description': description,
}
if stripe_connect_user and isinstance(stripe_connect_user, ConnectUser):
charge_args['card'] = stripe.Token.create(customer=self.stripe_id, api_key=stripe_connect_user.stripe_access_token)
charge_args['api_key'] = stripe_connect_user.stripe_access_token
else:
charge_args['customer'] = self.stripe_id
if application_fee:
charge_args['application_fee'] = int(application_fee * 100)
resp = stripe.Charge.create(**charge_args)
obj = self.record_charge(resp["id"], stripe_connect_user)
if send_receipt:
obj.send_receipt()
return obj
def record_charge(self, charge_id, stripe_connect_user=None):
if stripe_connect_user and isinstance(stripe_connect_user, ConnectUser):
data = stripe.Charge.retrieve(charge_id, api_key=stripe_connect_user.stripe_access_token)
else:
data = stripe.Charge.retrieve(charge_id)
return Charge.sync_from_stripe_data(data)
class ConnectUser(models.Model):
"""
A user in your system who you may be routing payments to through "Stripe Connect"
"""
user = models.OneToOneField(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True
)
# when a webhook is received for an action related to a ConnectUser, a 'user_id' will be provided
# This is the same as an account id
account_id = models.CharField(max_length=100)
stripe_access_token = models.CharField(max_length=100)
stripe_publishable_key = models.CharField(max_length=100)
@staticmethod
def account_id_lookup(stripe_access_token):
data = stripe.Account.retrieve(api_key=stripe_access_token)
return data.get('id', None)
def __unicode__(self):
return unicode(self.user)
class CurrentSubscription(models.Model):
customer = models.OneToOneField(
Customer,
related_name="current_subscription",
null=True
)
plan = models.CharField(max_length=100)
quantity = models.IntegerField()
start = models.DateTimeField()
# trialing, active, past_due, canceled, or unpaid
status = models.CharField(max_length=25)
cancel_at_period_end = models.BooleanField(default=False)
canceled_at = models.DateTimeField(blank=True, null=True)
current_period_end = models.DateTimeField(blank=True, null=True)
current_period_start = models.DateTimeField(blank=True, null=True)
ended_at = models.DateTimeField(blank=True, null=True)
trial_end = models.DateTimeField(blank=True, null=True)
trial_start = models.DateTimeField(blank=True, null=True)
amount = models.DecimalField(decimal_places=2, max_digits=7)
created_at = models.DateTimeField(default=timezone.now)
@property
def total_amount(self):
return self.amount * self.quantity
def plan_display(self):
return PAYMENTS_PLANS[self.plan]["name"]
def status_display(self):
return self.status.replace("_", " ").title()
def is_period_current(self):
return self.current_period_end > timezone.now()
def is_status_current(self):
return self.status in ["trialing", "active"]
def is_valid(self):
if not self.is_status_current():
return False
if self.cancel_at_period_end and not self.is_period_current():
return False
return True
def delete(self, using=None): # pylint: disable=E1002
"""
Set values to None while deleting the object so that any lingering
references will not show previous values (such as when an Event
signal is triggered after a subscription has been deleted)
"""
super(CurrentSubscription, self).delete(using=using)
self.plan = None
self.status = None
self.quantity = 0
self.amount = 0
class Invoice(models.Model):
stripe_id = models.CharField(max_length=255)
customer = models.ForeignKey(Customer, related_name="invoices")
attempted = models.NullBooleanField()
attempts = models.PositiveIntegerField(null=True)
closed = models.BooleanField(default=False)
paid = models.BooleanField(default=False)
period_end = models.DateTimeField()
period_start = models.DateTimeField()
subtotal = models.DecimalField(decimal_places=2, max_digits=7)
total = models.DecimalField(decimal_places=2, max_digits=7)
date = models.DateTimeField()
charge = models.CharField(max_length=50, blank=True)
created_at = models.DateTimeField(default=timezone.now)
stripe_connect = models.ForeignKey(ConnectUser, null=True)
class Meta: # pylint: disable=E0012,C1001
ordering = ["-date"]
def retry(self):
if not self.paid and not self.closed:
inv = stripe.Invoice.retrieve(self.stripe_id)
inv.pay()
return True
return False
def status(self):
if self.paid:
return "Paid"
return "Open"
@classmethod
def sync_from_stripe_data(cls, stripe_invoice, send_receipt=True, stripe_connect=None):
c = Customer.objects.get(stripe_id=stripe_invoice["customer"])
period_end = convert_tstamp(stripe_invoice, "period_end")
period_start = convert_tstamp(stripe_invoice, "period_start")
date = convert_tstamp(stripe_invoice, "date")
invoice, created = cls.objects.get_or_create(
stripe_id=stripe_invoice["id"],
defaults=dict(
customer=c,
attempted=stripe_invoice["attempted"],
attempts=stripe_invoice["attempt_count"],
closed=stripe_invoice["closed"],
paid=stripe_invoice["paid"],
period_end=period_end,
period_start=period_start,
subtotal=stripe_invoice["subtotal"] / decimal.Decimal("100"),
total=stripe_invoice["total"] / decimal.Decimal("100"),
date=date,
charge=stripe_invoice.get("charge") or "",
stripe_connect=stripe_connect
)
)
if not created:
# pylint: disable=C0301
invoice.attempted = stripe_invoice["attempted"]
invoice.attempts = stripe_invoice["attempt_count"]
invoice.closed = stripe_invoice["closed"]
invoice.paid = stripe_invoice["paid"]
invoice.period_end = period_end
invoice.period_start = period_start
invoice.subtotal = stripe_invoice["subtotal"] / decimal.Decimal("100")
invoice.total = stripe_invoice["total"] / decimal.Decimal("100")
invoice.date = date
invoice.charge = stripe_invoice.get("charge") or ""
invoice.stripe_connect = stripe_connect
invoice.save()
for item in stripe_invoice["lines"].get("data", []):
period_end = convert_tstamp(item["period"], "end")
period_start = convert_tstamp(item["period"], "start")
if item.get("plan"):
plan = plan_from_stripe_id(item["plan"]["id"])
else:
plan = ""
inv_item, inv_item_created = invoice.items.get_or_create(
stripe_id=item["id"],
defaults=dict(
amount=(item["amount"] / decimal.Decimal("100")),
currency=item["currency"],
proration=item["proration"],
description=item.get("description") or "",
line_type=item["type"],
plan=plan,
period_start=period_start,
period_end=period_end,
quantity=item.get("quantity")
)
)
if not inv_item_created:
inv_item.amount = (item["amount"] / decimal.Decimal("100"))
inv_item.currency = item["currency"]
inv_item.proration = item["proration"]
inv_item.description = item.get("description") or ""
inv_item.line_type = item["type"]
inv_item.plan = plan
inv_item.period_start = period_start
inv_item.period_end = period_end
inv_item.quantity = item.get("quantity")
inv_item.save()
if stripe_invoice.get("charge"):
obj = c.record_charge(stripe_invoice["charge"])
obj.invoice = invoice
obj.save()
if send_receipt:
obj.send_receipt()
return invoice
@classmethod
def handle_event(cls, event, send_receipt=SEND_EMAIL_RECEIPTS):
valid_events = ["invoice.payment_failed", "invoice.payment_succeeded"]
if event.kind in valid_events:
invoice_data = event.message["data"]["object"]
stripe_invoice = stripe.Invoice.retrieve(invoice_data["id"])
cls.sync_from_stripe_data(stripe_invoice, send_receipt=send_receipt, stripe_connect=event.stripe_connect)
class InvoiceItem(models.Model):
stripe_id = models.CharField(max_length=255)
created_at = models.DateTimeField(default=timezone.now)
invoice = models.ForeignKey(Invoice, related_name="items")
amount = models.DecimalField(decimal_places=2, max_digits=7)
currency = models.CharField(max_length=10)
period_start = models.DateTimeField()
period_end = models.DateTimeField()
proration = models.BooleanField(default=False)
line_type = models.CharField(max_length=50)
description = models.CharField(max_length=200, blank=True)
plan = models.CharField(max_length=100, blank=True)
quantity = models.IntegerField(null=True)
def plan_display(self):
return PAYMENTS_PLANS[self.plan]["name"]
class Charge(StripeObject):
customer = models.ForeignKey(Customer, related_name="charges", null=True)
invoice = models.ForeignKey(Invoice, null=True, related_name="charges")
card_last_4 = models.CharField(max_length=4, blank=True)
card_kind = models.CharField(max_length=50, blank=True)
amount = models.DecimalField(decimal_places=2, max_digits=7, null=True)
amount_refunded = models.DecimalField(
decimal_places=2,
max_digits=7,
null=True
)
description = models.TextField(blank=True)
paid = models.NullBooleanField(null=True)
disputed = models.NullBooleanField(null=True)
refunded = models.NullBooleanField(null=True)
fee = models.DecimalField(decimal_places=2, max_digits=7, null=True)
receipt_sent = models.BooleanField(default=False)
charge_created = models.DateTimeField(null=True, blank=True)
stripe_connect = models.ForeignKey(ConnectUser, null=True)
objects = ChargeManager()
def calculate_refund_amount(self, amount=None):
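        # Refund at most what has not already been refunded; Stripe expects
        # amounts in cents, hence the int(... * 100) conversion below.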
eligible_to_refund = self.amount - (self.amount_refunded or 0)
if amount:
amount_to_refund = min(eligible_to_refund, amount)
else:
amount_to_refund = eligible_to_refund
return int(amount_to_refund * 100)
def refund(self, amount=None):
# pylint: disable=E1121
charge_obj = stripe.Charge.retrieve(
self.stripe_id
).refund(
amount=self.calculate_refund_amount(amount=amount)
)
Charge.sync_from_stripe_data(charge_obj)
@classmethod
def sync_from_stripe_data(cls, data):
obj, _ = Charge.objects.get_or_create(
stripe_id=data["id"]
)
        customer_id = data.get("customer", None)
customer = Customer.objects.get(stripe_id=customer_id) if customer_id else None
obj.customer = customer
invoice_id = data.get("invoice", None)
if Invoice.objects.filter(stripe_id=invoice_id).exists():
obj.invoice = obj.customer.invoices.get(stripe_id=invoice_id)
obj.card_last_4 = data["card"]["last4"]
obj.card_kind = data["card"]["type"]
obj.amount = (data["amount"] / decimal.Decimal("100"))
obj.paid = data["paid"]
obj.refunded = data["refunded"]
obj.fee = (data["fee"] / decimal.Decimal("100"))
obj.disputed = data["dispute"] is not None
obj.charge_created = convert_tstamp(data, "created")
if data.get("description"):
obj.description = data["description"]
if data.get("amount_refunded"):
# pylint: disable=C0301
obj.amount_refunded = (data["amount_refunded"] / decimal.Decimal("100"))
if data["refunded"]:
obj.amount_refunded = (data["amount"] / decimal.Decimal("100"))
user_id = data.get("user_id", None)
if user_id and ConnectUser.objects.filter(account_id=user_id).exists():
obj.stripe_connect = ConnectUser.objects.get(account_id=user_id)
obj.save()
return obj
def send_receipt(self):
if not self.receipt_sent and self.customer:
site = Site.objects.get_current()
protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
ctx = {
"charge": self,
"site": site,
"protocol": protocol,
}
subject = render_to_string("payments/email/subject.txt", ctx)
subject = subject.strip()
message = render_to_string("payments/email/body.txt", ctx)
num_sent = EmailMessage(
subject,
message,
to=[self.customer.user.email],
from_email=INVOICE_FROM_EMAIL
).send()
self.receipt_sent = num_sent > 0
self.save()
@classmethod
def create(cls, card, amount, currency="usd", description=None, application_fee=None, stripe_connect_user=None):
"""
        This method expects `amount` and `application_fee` to be Decimal values
        representing dollar amounts. They will be converted to cents, so any
        decimals beyond two will be ignored.
"""
        if not isinstance(amount, decimal.Decimal) or (application_fee is not None and not isinstance(application_fee, decimal.Decimal)):
raise ValueError(
"You must supply a decimal value representing dollars for amount and for application_fee (if supplied)."
)
charge_args = {
'amount': int(amount * 100),
'currency': currency,
'description': description,
'card': card,
}
if stripe_connect_user and isinstance(stripe_connect_user, ConnectUser):
charge_args['api_key'] = stripe_connect_user.stripe_access_token
elif stripe_connect_user:
charge_args['api_key'] = stripe_connect_user
if application_fee:
charge_args['application_fee'] = int(application_fee * 100)
resp = stripe.Charge.create(**charge_args)
return Charge.sync_from_stripe_data(resp)
| [] |
tomzhang/mars-1 | mars/tensor/base/flip.py | 6f1d85e37eb1b383251314cb0ba13e06288af03d | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..datasource import tensor as astensor
def flip(m, axis):
"""
Reverse the order of elements in a tensor along the given axis.
The shape of the array is preserved, but the elements are reordered.
Parameters
----------
m : array_like
Input tensor.
axis : integer
        Axis of the tensor along which entries are reversed.
Returns
-------
out : array_like
A view of `m` with the entries of axis reversed. Since a view is
returned, this operation is done in constant time.
See Also
--------
flipud : Flip a tensor vertically (axis=0).
fliplr : Flip a tensor horizontally (axis=1).
Notes
-----
flip(m, 0) is equivalent to flipud(m).
flip(m, 1) is equivalent to fliplr(m).
flip(m, n) corresponds to ``m[...,::-1,...]`` with ``::-1`` at position n.
Examples
--------
>>> import mars.tensor as mt
>>> A = mt.arange(8).reshape((2,2,2))
>>> A.execute()
array([[[0, 1],
[2, 3]],
[[4, 5],
[6, 7]]])
>>> mt.flip(A, 0).execute()
array([[[4, 5],
[6, 7]],
[[0, 1],
[2, 3]]])
>>> mt.flip(A, 1).execute()
array([[[2, 3],
[0, 1]],
[[6, 7],
[4, 5]]])
>>> A = mt.random.randn(3,4,5)
>>> mt.all(mt.flip(A,2) == A[:,:,::-1,...]).execute()
True
"""
m = astensor(m)
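    # Build an index that slices every axis fully except the requested one,
    # which gets a reversing slice -- i.e. m[..., ::-1, ...] at position `axis`.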
sl = [slice(None)] * m.ndim
try:
sl[axis] = slice(None, None, -1)
except IndexError:
raise ValueError("axis=%i is invalid for the %i-dimensional input tensor"
% (axis, m.ndim))
return m[tuple(sl)]
| [] |
imabackstabber/mmcv | tests/test_ops/test_upfirdn2d.py | b272c09b463f00fd7fdd455f7bd4a055f9995521 | # Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch
_USING_PARROTS = True
try:
from parrots.autograd import gradcheck
except ImportError:
from torch.autograd import gradcheck, gradgradcheck
_USING_PARROTS = False
class TestUpFirDn2d:
"""Unit test for UpFirDn2d.
    Here, we just test the basic case of the upsample version. More general
    tests will be included in other unit tests for the UpFirDnUpsample and
UpFirDnDownSample modules.
"""
@classmethod
def setup_class(cls):
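        # Build a separable 2D blur kernel as the outer product of a 1D kernel,
        # then normalize it so its entries sum to 1.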
kernel_1d = torch.tensor([1., 3., 3., 1.])
cls.kernel = kernel_1d[:, None] * kernel_1d[None, :]
cls.kernel = cls.kernel / cls.kernel.sum()
cls.factor = 2
pad = cls.kernel.shape[0] - cls.factor
cls.pad = ((pad + 1) // 2 + cls.factor - 1, pad // 2)
cls.input_tensor = torch.randn((2, 3, 4, 4), requires_grad=True)
@pytest.mark.skipif(not torch.cuda.is_available(), reason='requires cuda')
def test_upfirdn2d(self):
from mmcv.ops import upfirdn2d
if _USING_PARROTS:
gradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
delta=1e-4,
pt_atol=1e-3)
else:
gradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
eps=1e-4,
atol=1e-3)
gradgradcheck(
upfirdn2d,
(self.input_tensor.cuda(),
self.kernel.type_as(
self.input_tensor).cuda(), self.factor, 1, self.pad),
eps=1e-4,
atol=1e-3)
| [((550, 584), 'torch.tensor', 'torch.tensor', (['[1.0, 3.0, 3.0, 1.0]'], {}), '([1.0, 3.0, 3.0, 1.0])\n', (562, 584), False, 'import torch\n'), ((853, 898), 'torch.randn', 'torch.randn', (['(2, 3, 4, 4)'], {'requires_grad': '(True)'}), '((2, 3, 4, 4), requires_grad=True)\n', (864, 898), False, 'import torch\n'), ((928, 953), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (951, 953), False, 'import torch\n')] |
rmorain/kirby | dataset_creation/description_task2.py | ef115dbaed4acd1b23c3e10ca3b496f05b9a2382 | import pandas as pd
from tqdm import tqdm
data_list = []
def get_questions(row):
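    # Build one multiple-choice question per row: the row's description is the
    # correct answer, and the distractors are descriptions sampled at random
    # from the rest of the dataframe.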
global data_list
random_samples = df.sample(n=num_choices - 1)
distractors = random_samples["description"].tolist()
data = {
"question": "What is " + row["label"] + "?",
"correct": row["description"],
"distractors": distractors,
"knowledge": "{" + row["label"] + " : " + row["description"] + "}",
}
data_list.append(data)
debug = False
num_choices = 4
tqdm.pandas(desc="Progress")
df = pd.read_pickle("data/augmented_datasets/pickle/label_description.pkl")
if debug:
df = df.iloc[:10]
df.progress_apply(get_questions, axis=1)
new_df = pd.DataFrame(data_list)
if not debug:
new_df.to_pickle("data/augmented_datasets/pickle/description_qa_knowledge.pkl")
else:
__import__("pudb").set_trace()
| [((495, 523), 'tqdm.tqdm.pandas', 'tqdm.pandas', ([], {'desc': '"""Progress"""'}), "(desc='Progress')\n", (506, 523), False, 'from tqdm import tqdm\n'), ((529, 599), 'pandas.read_pickle', 'pd.read_pickle', (['"""data/augmented_datasets/pickle/label_description.pkl"""'], {}), "('data/augmented_datasets/pickle/label_description.pkl')\n", (543, 599), True, 'import pandas as pd\n'), ((689, 712), 'pandas.DataFrame', 'pd.DataFrame', (['data_list'], {}), '(data_list)\n', (701, 712), True, 'import pandas as pd\n')] |
gonzoua/scarab | scarab/commands/attach.py | b86474527b7b2ec30710ae79ea3f1cf5b7a93005 | # vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
"""
'attach' command implementation
"""
from base64 import b64encode
import argparse
import magic
from ..bugzilla import BugzillaError
from ..context import bugzilla_instance
from .. import ui
from .base import Base
class Command(Base):
"""Attach file to the existing PR"""
def register(self, subparsers):
"""Register 'attach' parser"""
parser = subparsers.add_parser('attach')
parser.set_defaults(func=self.run)
parser.add_argument('attachment', type=str, help='path to the attachment')
parser.add_argument('pr', type=int, help='PR number')
parser.add_argument('-b', '--batch', action='store_true', \
help='batch mode, only print newly created attachment\'s id')
parser.add_argument('-s', '--summary', dest='summary', help='summary for the attachment')
comment_group = parser.add_mutually_exclusive_group()
comment_group.add_argument('-c', '--comment', dest='comment', help='comment text')
comment_group.add_argument('-F', '--comment-file', dest='comment_file', \
type=argparse.FileType('r'), help='file with comment text')
parser.add_argument('-t', '--content-type', dest='content_type', help='file content type')
def run(self, args):
"""Run 'attach' command"""
bugzilla = bugzilla_instance()
content_type = args.content_type
# Read data and encode it to base64
try:
with open(args.attachment, 'rb') as attach_file:
data = attach_file.read()
except IOError as ex:
ui.fatal('error reading file: {}'.format(str(ex)))
comment = args.comment
if comment is None:
if args.comment_file:
comment = args.comment_file.read()
if comment is None:
if args.batch:
comment = ''
else:
comment = ui.edit_message()
# Try and guess file content type
if content_type is None:
mime = magic.Magic(mime=True)
content_type = mime.from_file(args.attachment)
try:
attachment = bugzilla.add_attachment(args.pr, args.attachment, data, \
summary=args.summary, comment=comment, content_type=content_type)
except BugzillaError as ex:
ui.fatal('Bugzilla error: {}'.format(ex.message))
if args.batch:
ui.output('{}'.format(attachment))
else:
ui.output('New attachment {} has been added to bug {}'.format(attachment, args.pr))
ui.output('Attachment URL: {}'.format(bugzilla.attachment_url(attachment)))
ui.output('Bug URL: {}'.format(bugzilla.bug_url(args.pr)))
| [((2079, 2101), 'magic.Magic', 'magic.Magic', ([], {'mime': '(True)'}), '(mime=True)\n', (2090, 2101), False, 'import magic\n'), ((1145, 1167), 'argparse.FileType', 'argparse.FileType', (['"""r"""'], {}), "('r')\n", (1162, 1167), False, 'import argparse\n')] |
chabotsi/pygmsh | test/test_airfoil.py | f2c26d9193c63efd9fa7676ea0860a18de7e8b52 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import numpy
import pygmsh
from helpers import compute_volume
def test():
# Airfoil coordinates
airfoil_coordinates = numpy.array([
[1.000000, 0.000000, 0.0],
[0.999023, 0.000209, 0.0],
[0.996095, 0.000832, 0.0],
[0.991228, 0.001863, 0.0],
[0.984438, 0.003289, 0.0],
[0.975752, 0.005092, 0.0],
[0.965201, 0.007252, 0.0],
[0.952825, 0.009744, 0.0],
[0.938669, 0.012538, 0.0],
[0.922788, 0.015605, 0.0],
[0.905240, 0.018910, 0.0],
[0.886092, 0.022419, 0.0],
[0.865417, 0.026096, 0.0],
[0.843294, 0.029903, 0.0],
[0.819807, 0.033804, 0.0],
[0.795047, 0.037760, 0.0],
[0.769109, 0.041734, 0.0],
[0.742094, 0.045689, 0.0],
[0.714107, 0.049588, 0.0],
[0.685258, 0.053394, 0.0],
[0.655659, 0.057071, 0.0],
[0.625426, 0.060584, 0.0],
[0.594680, 0.063897, 0.0],
[0.563542, 0.066977, 0.0],
[0.532136, 0.069789, 0.0],
[0.500587, 0.072303, 0.0],
[0.469022, 0.074486, 0.0],
[0.437567, 0.076312, 0.0],
[0.406350, 0.077752, 0.0],
[0.375297, 0.078743, 0.0],
[0.344680, 0.079180, 0.0],
[0.314678, 0.079051, 0.0],
[0.285418, 0.078355, 0.0],
[0.257025, 0.077096, 0.0],
[0.229618, 0.075287, 0.0],
[0.203313, 0.072945, 0.0],
[0.178222, 0.070096, 0.0],
[0.154449, 0.066770, 0.0],
[0.132094, 0.063005, 0.0],
[0.111248, 0.058842, 0.0],
[0.091996, 0.054325, 0.0],
[0.074415, 0.049504, 0.0],
[0.058573, 0.044427, 0.0],
[0.044532, 0.039144, 0.0],
[0.032343, 0.033704, 0.0],
[0.022051, 0.028152, 0.0],
[0.013692, 0.022531, 0.0],
[0.007292, 0.016878, 0.0],
[0.002870, 0.011224, 0.0],
[0.000439, 0.005592, 0.0],
[0.000000, 0.000000, 0.0],
[0.001535, -0.005395, 0.0],
[0.005015, -0.010439, 0.0],
[0.010421, -0.015126, 0.0],
[0.017725, -0.019451, 0.0],
[0.026892, -0.023408, 0.0],
[0.037880, -0.026990, 0.0],
[0.050641, -0.030193, 0.0],
[0.065120, -0.033014, 0.0],
[0.081257, -0.035451, 0.0],
[0.098987, -0.037507, 0.0],
[0.118239, -0.039185, 0.0],
[0.138937, -0.040493, 0.0],
[0.161004, -0.041444, 0.0],
[0.184354, -0.042054, 0.0],
[0.208902, -0.042343, 0.0],
[0.234555, -0.042335, 0.0],
[0.261221, -0.042058, 0.0],
[0.288802, -0.041541, 0.0],
[0.317197, -0.040817, 0.0],
[0.346303, -0.039923, 0.0],
[0.376013, -0.038892, 0.0],
[0.406269, -0.037757, 0.0],
[0.437099, -0.036467, 0.0],
[0.468187, -0.035009, 0.0],
[0.499413, -0.033414, 0.0],
[0.530654, -0.031708, 0.0],
[0.561791, -0.029917, 0.0],
[0.592701, -0.028066, 0.0],
[0.623264, -0.026176, 0.0],
[0.653358, -0.024269, 0.0],
[0.682867, -0.022360, 0.0],
[0.711672, -0.020466, 0.0],
[0.739659, -0.018600, 0.0],
[0.766718, -0.016774, 0.0],
[0.792738, -0.014999, 0.0],
[0.817617, -0.013284, 0.0],
[0.841253, -0.011637, 0.0],
[0.863551, -0.010068, 0.0],
[0.884421, -0.008583, 0.0],
[0.903777, -0.007191, 0.0],
[0.921540, -0.005900, 0.0],
[0.937637, -0.004717, 0.0],
[0.952002, -0.003650, 0.0],
[0.964576, -0.002708, 0.0],
[0.975305, -0.001896, 0.0],
[0.984145, -0.001222, 0.0],
[0.991060, -0.000691, 0.0],
[0.996020, -0.000308, 0.0],
[0.999004, -0.000077, 0.0]
])
# Scale airfoil to input coord
coord = 1.0
airfoil_coordinates *= coord
# Instantiate geometry object
geom = pygmsh.built_in.Geometry()
# Create polygon for airfoil
char_length = 1.0e-1
airfoil = geom.add_polygon(
airfoil_coordinates,
char_length,
make_surface=False
)
# Create surface for numerical domain with an airfoil-shaped hole
left_dist = 1.0
right_dist = 3.0
top_dist = 1.0
bottom_dist = 1.0
xmin = airfoil_coordinates[:, 0].min() - left_dist*coord
xmax = airfoil_coordinates[:, 0].max() + right_dist*coord
ymin = airfoil_coordinates[:, 1].min() - bottom_dist*coord
ymax = airfoil_coordinates[:, 1].max() + top_dist*coord
domainCoordinates = numpy.array([
[xmin, ymin, 0.0],
[xmax, ymin, 0.0],
[xmax, ymax, 0.0],
[xmin, ymax, 0.0],
])
polygon = geom.add_polygon(
domainCoordinates,
char_length,
holes=[airfoil]
)
geom.add_raw_code('Recombine Surface {%s};' % polygon.surface.id)
ref = 10.525891646546
points, cells, _, _, _ = pygmsh.generate_mesh(geom)
assert abs(compute_volume(points, cells) - ref) < 1.0e-2 * ref
return points, cells
if __name__ == '__main__':
import meshio
meshio.write('airfoil.vtu', *test())
| [((177, 3082), 'numpy.array', 'numpy.array', (['[[1.0, 0.0, 0.0], [0.999023, 0.000209, 0.0], [0.996095, 0.000832, 0.0], [\n 0.991228, 0.001863, 0.0], [0.984438, 0.003289, 0.0], [0.975752, \n 0.005092, 0.0], [0.965201, 0.007252, 0.0], [0.952825, 0.009744, 0.0], [\n 0.938669, 0.012538, 0.0], [0.922788, 0.015605, 0.0], [0.90524, 0.01891,\n 0.0], [0.886092, 0.022419, 0.0], [0.865417, 0.026096, 0.0], [0.843294, \n 0.029903, 0.0], [0.819807, 0.033804, 0.0], [0.795047, 0.03776, 0.0], [\n 0.769109, 0.041734, 0.0], [0.742094, 0.045689, 0.0], [0.714107, \n 0.049588, 0.0], [0.685258, 0.053394, 0.0], [0.655659, 0.057071, 0.0], [\n 0.625426, 0.060584, 0.0], [0.59468, 0.063897, 0.0], [0.563542, 0.066977,\n 0.0], [0.532136, 0.069789, 0.0], [0.500587, 0.072303, 0.0], [0.469022, \n 0.074486, 0.0], [0.437567, 0.076312, 0.0], [0.40635, 0.077752, 0.0], [\n 0.375297, 0.078743, 0.0], [0.34468, 0.07918, 0.0], [0.314678, 0.079051,\n 0.0], [0.285418, 0.078355, 0.0], [0.257025, 0.077096, 0.0], [0.229618, \n 0.075287, 0.0], [0.203313, 0.072945, 0.0], [0.178222, 0.070096, 0.0], [\n 0.154449, 0.06677, 0.0], [0.132094, 0.063005, 0.0], [0.111248, 0.058842,\n 0.0], [0.091996, 0.054325, 0.0], [0.074415, 0.049504, 0.0], [0.058573, \n 0.044427, 0.0], [0.044532, 0.039144, 0.0], [0.032343, 0.033704, 0.0], [\n 0.022051, 0.028152, 0.0], [0.013692, 0.022531, 0.0], [0.007292, \n 0.016878, 0.0], [0.00287, 0.011224, 0.0], [0.000439, 0.005592, 0.0], [\n 0.0, 0.0, 0.0], [0.001535, -0.005395, 0.0], [0.005015, -0.010439, 0.0],\n [0.010421, -0.015126, 0.0], [0.017725, -0.019451, 0.0], [0.026892, -\n 0.023408, 0.0], [0.03788, -0.02699, 0.0], [0.050641, -0.030193, 0.0], [\n 0.06512, -0.033014, 0.0], [0.081257, -0.035451, 0.0], [0.098987, -\n 0.037507, 0.0], [0.118239, -0.039185, 0.0], [0.138937, -0.040493, 0.0],\n [0.161004, -0.041444, 0.0], [0.184354, -0.042054, 0.0], [0.208902, -\n 0.042343, 0.0], [0.234555, -0.042335, 0.0], [0.261221, -0.042058, 0.0],\n [0.288802, -0.041541, 0.0], [0.317197, -0.040817, 0.0], [0.346303, -\n 0.039923, 0.0], [0.376013, -0.038892, 0.0], [0.406269, -0.037757, 0.0],\n [0.437099, -0.036467, 0.0], [0.468187, -0.035009, 0.0], [0.499413, -\n 0.033414, 0.0], [0.530654, -0.031708, 0.0], [0.561791, -0.029917, 0.0],\n [0.592701, -0.028066, 0.0], [0.623264, -0.026176, 0.0], [0.653358, -\n 0.024269, 0.0], [0.682867, -0.02236, 0.0], [0.711672, -0.020466, 0.0],\n [0.739659, -0.0186, 0.0], [0.766718, -0.016774, 0.0], [0.792738, -\n 0.014999, 0.0], [0.817617, -0.013284, 0.0], [0.841253, -0.011637, 0.0],\n [0.863551, -0.010068, 0.0], [0.884421, -0.008583, 0.0], [0.903777, -\n 0.007191, 0.0], [0.92154, -0.0059, 0.0], [0.937637, -0.004717, 0.0], [\n 0.952002, -0.00365, 0.0], [0.964576, -0.002708, 0.0], [0.975305, -\n 0.001896, 0.0], [0.984145, -0.001222, 0.0], [0.99106, -0.000691, 0.0],\n [0.99602, -0.000308, 0.0], [0.999004, -7.7e-05, 0.0]]'], {}), '([[1.0, 0.0, 0.0], [0.999023, 0.000209, 0.0], [0.996095, \n 0.000832, 0.0], [0.991228, 0.001863, 0.0], [0.984438, 0.003289, 0.0], [\n 0.975752, 0.005092, 0.0], [0.965201, 0.007252, 0.0], [0.952825, \n 0.009744, 0.0], [0.938669, 0.012538, 0.0], [0.922788, 0.015605, 0.0], [\n 0.90524, 0.01891, 0.0], [0.886092, 0.022419, 0.0], [0.865417, 0.026096,\n 0.0], [0.843294, 0.029903, 0.0], [0.819807, 0.033804, 0.0], [0.795047, \n 0.03776, 0.0], [0.769109, 0.041734, 0.0], [0.742094, 0.045689, 0.0], [\n 0.714107, 0.049588, 0.0], [0.685258, 0.053394, 0.0], [0.655659, \n 0.057071, 0.0], [0.625426, 0.060584, 0.0], [0.59468, 0.063897, 0.0], [\n 0.563542, 0.066977, 0.0], [0.532136, 0.069789, 
0.0], [0.500587, \n 0.072303, 0.0], [0.469022, 0.074486, 0.0], [0.437567, 0.076312, 0.0], [\n 0.40635, 0.077752, 0.0], [0.375297, 0.078743, 0.0], [0.34468, 0.07918, \n 0.0], [0.314678, 0.079051, 0.0], [0.285418, 0.078355, 0.0], [0.257025, \n 0.077096, 0.0], [0.229618, 0.075287, 0.0], [0.203313, 0.072945, 0.0], [\n 0.178222, 0.070096, 0.0], [0.154449, 0.06677, 0.0], [0.132094, 0.063005,\n 0.0], [0.111248, 0.058842, 0.0], [0.091996, 0.054325, 0.0], [0.074415, \n 0.049504, 0.0], [0.058573, 0.044427, 0.0], [0.044532, 0.039144, 0.0], [\n 0.032343, 0.033704, 0.0], [0.022051, 0.028152, 0.0], [0.013692, \n 0.022531, 0.0], [0.007292, 0.016878, 0.0], [0.00287, 0.011224, 0.0], [\n 0.000439, 0.005592, 0.0], [0.0, 0.0, 0.0], [0.001535, -0.005395, 0.0],\n [0.005015, -0.010439, 0.0], [0.010421, -0.015126, 0.0], [0.017725, -\n 0.019451, 0.0], [0.026892, -0.023408, 0.0], [0.03788, -0.02699, 0.0], [\n 0.050641, -0.030193, 0.0], [0.06512, -0.033014, 0.0], [0.081257, -\n 0.035451, 0.0], [0.098987, -0.037507, 0.0], [0.118239, -0.039185, 0.0],\n [0.138937, -0.040493, 0.0], [0.161004, -0.041444, 0.0], [0.184354, -\n 0.042054, 0.0], [0.208902, -0.042343, 0.0], [0.234555, -0.042335, 0.0],\n [0.261221, -0.042058, 0.0], [0.288802, -0.041541, 0.0], [0.317197, -\n 0.040817, 0.0], [0.346303, -0.039923, 0.0], [0.376013, -0.038892, 0.0],\n [0.406269, -0.037757, 0.0], [0.437099, -0.036467, 0.0], [0.468187, -\n 0.035009, 0.0], [0.499413, -0.033414, 0.0], [0.530654, -0.031708, 0.0],\n [0.561791, -0.029917, 0.0], [0.592701, -0.028066, 0.0], [0.623264, -\n 0.026176, 0.0], [0.653358, -0.024269, 0.0], [0.682867, -0.02236, 0.0],\n [0.711672, -0.020466, 0.0], [0.739659, -0.0186, 0.0], [0.766718, -\n 0.016774, 0.0], [0.792738, -0.014999, 0.0], [0.817617, -0.013284, 0.0],\n [0.841253, -0.011637, 0.0], [0.863551, -0.010068, 0.0], [0.884421, -\n 0.008583, 0.0], [0.903777, -0.007191, 0.0], [0.92154, -0.0059, 0.0], [\n 0.937637, -0.004717, 0.0], [0.952002, -0.00365, 0.0], [0.964576, -\n 0.002708, 0.0], [0.975305, -0.001896, 0.0], [0.984145, -0.001222, 0.0],\n [0.99106, -0.000691, 0.0], [0.99602, -0.000308, 0.0], [0.999004, -\n 7.7e-05, 0.0]])\n', (188, 3082), False, 'import numpy\n'), ((3881, 3907), 'pygmsh.built_in.Geometry', 'pygmsh.built_in.Geometry', ([], {}), '()\n', (3905, 3907), False, 'import pygmsh\n'), ((4509, 4602), 'numpy.array', 'numpy.array', (['[[xmin, ymin, 0.0], [xmax, ymin, 0.0], [xmax, ymax, 0.0], [xmin, ymax, 0.0]]'], {}), '([[xmin, ymin, 0.0], [xmax, ymin, 0.0], [xmax, ymax, 0.0], [xmin,\n ymax, 0.0]])\n', (4520, 4602), False, 'import numpy\n'), ((4882, 4908), 'pygmsh.generate_mesh', 'pygmsh.generate_mesh', (['geom'], {}), '(geom)\n', (4902, 4908), False, 'import pygmsh\n'), ((4924, 4953), 'helpers.compute_volume', 'compute_volume', (['points', 'cells'], {}), '(points, cells)\n', (4938, 4953), False, 'from helpers import compute_volume\n')] |
ZintrulCre/LeetCode_Archiver | LeetCode/python3/287.py | de23e16ead29336b5ee7aa1898a392a5d6463d27 | class Solution:
def findDuplicate(self, nums: List[int]) -> int:
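        # Floyd's cycle detection: treat nums as the function i -> nums[i].
        # Phase 1 advances a slow and a fast pointer until they meet inside the
        # cycle; phase 2 restarts one pointer from index 0 so the two meet again
        # at the cycle entrance, whose value is the duplicate.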
p1, p2 = nums[0], nums[nums[0]]
while nums[p1] != nums[p2]:
p1 = nums[p1]
p2 = nums[nums[p2]]
p2 = 0
while nums[p1] != nums[p2]:
p1 = nums[p1]
p2 = nums[p2]
return nums[p1]
| [] |
mathieui/twisted | src/twisted/test/myrebuilder1.py | 35546d2b50742a32edba54719ce3e752dc50dd2a |
class A:
def a(self):
return 'a'
class B(A, object):
def b(self):
return 'b'
class Inherit(A):
def a(self):
return 'c'
| [] |
MateuszG/django_auth | examples/test_yield_8.py | 4cda699c1b6516ffaa26329f545a674a7c849a16 | import pytest
@pytest.yield_fixture
def passwd():
print ("\nsetup before yield")
f = open("/etc/passwd")
yield f.readlines()
print ("teardown after yield")
f.close()
def test_has_lines(passwd):
print ("test called")
assert passwd
| [] |
BuddyVolly/sepal | modules/google-earth-engine/docker/src/sepalinternal/gee.py | 6a2356a88940a36568b1d83ba3aeaae4283d5445 | import json
from threading import Semaphore
import ee
from flask import request
from google.auth import crypt
from google.oauth2 import service_account
from google.oauth2.credentials import Credentials
service_account_credentials = None
import logging
export_semaphore = Semaphore(5)
get_info_semaphore = Semaphore(2)
def init_service_account_credentials(args):
global service_account_credentials
with open(args['gee_key_path'], 'r') as file_:
key_data = file_.read()
signer = crypt.RSASigner.from_string(key_data)
service_account_credentials = service_account.Credentials(
signer=signer,
service_account_email=args['gee_email'],
token_uri=ee.oauth.TOKEN_URI,
scopes=ee.oauth.SCOPES + ['https://www.googleapis.com/auth/drive']
)
def init_ee():
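    # Prefer the calling user's Google OAuth token (passed via the sepal-user
    # header); otherwise fall back to the service account credentials.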
credentials = service_account_credentials
if 'sepal-user' in request.headers:
user = json.loads(request.headers['sepal-user'])
googleTokens = user.get('googleTokens', None)
if googleTokens:
credentials = Credentials(googleTokens['accessToken'])
ee.InitializeThread(credentials)
def to_asset_id(asset_path):
asset_roots = ee.data.getAssetRoots()
if not asset_roots:
raise Exception('User has no GEE asset roots')
return asset_roots[0]['id'] + '/' + asset_path
def delete_asset_collection(asset_id):
logging.info('Recursively deleting ' + asset_id)
if ee.data.getInfo(asset_id):
images = ee.data.getList({
'id': asset_id,
'fields': 'id'
})
for image in images:
ee.data.deleteAsset(image['id'])
logging.info('Deleted ' + image['id'])
ee.data.deleteAsset(asset_id)
logging.info('Deleted ' + asset_id)
def create_asset_image_collection(asset_id):
delete_asset_collection(asset_id)
ee.data.create_assets(
asset_ids=[asset_id],
asset_type=ee.data.ASSET_TYPE_IMAGE_COLL,
mk_parents=True
)
def create_asset_folder(asset_id):
ee.data.create_assets(
asset_ids=[asset_id],
asset_type=ee.data.ASSET_TYPE_FOLDER,
mk_parents=True
)
def get_info(ee_object):
try:
get_info_semaphore.acquire()
return ee_object.getInfo()
finally:
get_info_semaphore.release()
| [((274, 286), 'threading.Semaphore', 'Semaphore', (['(5)'], {}), '(5)\n', (283, 286), False, 'from threading import Semaphore\n'), ((308, 320), 'threading.Semaphore', 'Semaphore', (['(2)'], {}), '(2)\n', (317, 320), False, 'from threading import Semaphore\n'), ((503, 540), 'google.auth.crypt.RSASigner.from_string', 'crypt.RSASigner.from_string', (['key_data'], {}), '(key_data)\n', (530, 540), False, 'from google.auth import crypt\n'), ((575, 766), 'google.oauth2.service_account.Credentials', 'service_account.Credentials', ([], {'signer': 'signer', 'service_account_email': "args['gee_email']", 'token_uri': 'ee.oauth.TOKEN_URI', 'scopes': "(ee.oauth.SCOPES + ['https://www.googleapis.com/auth/drive'])"}), "(signer=signer, service_account_email=args[\n 'gee_email'], token_uri=ee.oauth.TOKEN_URI, scopes=ee.oauth.SCOPES + [\n 'https://www.googleapis.com/auth/drive'])\n", (602, 766), False, 'from google.oauth2 import service_account\n'), ((1105, 1137), 'ee.InitializeThread', 'ee.InitializeThread', (['credentials'], {}), '(credentials)\n', (1124, 1137), False, 'import ee\n'), ((1187, 1210), 'ee.data.getAssetRoots', 'ee.data.getAssetRoots', ([], {}), '()\n', (1208, 1210), False, 'import ee\n'), ((1386, 1434), 'logging.info', 'logging.info', (["('Recursively deleting ' + asset_id)"], {}), "('Recursively deleting ' + asset_id)\n", (1398, 1434), False, 'import logging\n'), ((1442, 1467), 'ee.data.getInfo', 'ee.data.getInfo', (['asset_id'], {}), '(asset_id)\n', (1457, 1467), False, 'import ee\n'), ((1866, 1973), 'ee.data.create_assets', 'ee.data.create_assets', ([], {'asset_ids': '[asset_id]', 'asset_type': 'ee.data.ASSET_TYPE_IMAGE_COLL', 'mk_parents': '(True)'}), '(asset_ids=[asset_id], asset_type=ee.data.\n ASSET_TYPE_IMAGE_COLL, mk_parents=True)\n', (1887, 1973), False, 'import ee\n'), ((2040, 2143), 'ee.data.create_assets', 'ee.data.create_assets', ([], {'asset_ids': '[asset_id]', 'asset_type': 'ee.data.ASSET_TYPE_FOLDER', 'mk_parents': '(True)'}), '(asset_ids=[asset_id], asset_type=ee.data.\n ASSET_TYPE_FOLDER, mk_parents=True)\n', (2061, 2143), False, 'import ee\n'), ((913, 954), 'json.loads', 'json.loads', (["request.headers['sepal-user']"], {}), "(request.headers['sepal-user'])\n", (923, 954), False, 'import json\n'), ((1486, 1535), 'ee.data.getList', 'ee.data.getList', (["{'id': asset_id, 'fields': 'id'}"], {}), "({'id': asset_id, 'fields': 'id'})\n", (1501, 1535), False, 'import ee\n'), ((1703, 1732), 'ee.data.deleteAsset', 'ee.data.deleteAsset', (['asset_id'], {}), '(asset_id)\n', (1722, 1732), False, 'import ee\n'), ((1741, 1776), 'logging.info', 'logging.info', (["('Deleted ' + asset_id)"], {}), "('Deleted ' + asset_id)\n", (1753, 1776), False, 'import logging\n'), ((1060, 1100), 'google.oauth2.credentials.Credentials', 'Credentials', (["googleTokens['accessToken']"], {}), "(googleTokens['accessToken'])\n", (1071, 1100), False, 'from google.oauth2.credentials import Credentials\n'), ((1611, 1643), 'ee.data.deleteAsset', 'ee.data.deleteAsset', (["image['id']"], {}), "(image['id'])\n", (1630, 1643), False, 'import ee\n'), ((1656, 1694), 'logging.info', 'logging.info', (["('Deleted ' + image['id'])"], {}), "('Deleted ' + image['id'])\n", (1668, 1694), False, 'import logging\n')] |
mirontoli/tolle-rasp | micropython/007_boat_sink.py | 020638e86c167aedd7b556d8515a3adef70724af | #https://microbit-micropython.readthedocs.io/en/latest/tutorials/images.html#animation
from microbit import *
boat1 = Image("05050:05050:05050:99999:09990")
boat2 = Image("00000:05050:05050:05050:99999")
boat3 = Image("00000:00000:05050:05050:05050")
boat4 = Image("00000:00000:00000:05050:05050")
boat5 = Image("00000:00000:00000:00000:05050")
boat6 = Image("00000:00000:00000:00000:00000")
all_boats = [boat1, boat2, boat3, boat4, boat5, boat6]
display.show(all_boats, delay=200) | [] |
groupdocs-legacy-sdk/python | examples/api-samples/inc_samples/convert_callback.py | 80e5ef5a9a14ac4a7815c6cf933b5b2997381455 | import os
import json
import shutil
import time
from pyramid.renderers import render_to_response
from pyramid.response import Response
from groupdocs.ApiClient import ApiClient
from groupdocs.AsyncApi import AsyncApi
from groupdocs.StorageApi import StorageApi
from groupdocs.GroupDocsRequestSigner import GroupDocsRequestSigner
# Check that a value is neither None nor empty
def IsNotNull(value):
return value is not None and len(value) > 0
def convert_callback(request):
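    # Handle a conversion callback: read API credentials from user_info.txt,
    # look up the finished job's output document by the posted SourceId and
    # download it into the local downloads folder.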
currentDir = os.path.dirname(os.path.realpath(__file__))
if os.path.exists(currentDir + '/../user_info.txt'):
f = open(currentDir + '/../user_info.txt')
lines = f.readlines()
f.close()
clientId = lines[0].replace("\r\n", "")
privateKey = lines[1]
if IsNotNull(request.json_body):
jsonPostData = request.json_body
jobId = jsonPostData['SourceId']
# Create signer object
signer = GroupDocsRequestSigner(privateKey)
# Create apiClient object
apiClient = ApiClient(signer)
# Create AsyncApi object
        async_api = AsyncApi(apiClient)
# Create Storage object
api = StorageApi(apiClient)
if jobId != '':
time.sleep(5)
            # Request the document info for this job id from the API
            jobs = async_api.GetJobDocuments(clientId, jobId)
if jobs.status == 'Ok':
# Get file guid
resultGuid = jobs.result.inputs[0].outputs[0].guid
name = jobs.result.inputs[0].outputs[0].name
currentDir = os.path.dirname(os.path.realpath(__file__))
downloadFolder = currentDir + '/../downloads/'
if not os.path.isdir(downloadFolder):
os.makedirs(downloadFolder)
                # Download the file
                fs = api.GetFile(clientId, resultGuid)
if fs:
filePath = downloadFolder + name
with open(filePath, 'wb') as fp:
shutil.copyfileobj(fs.inputStream, fp)
| [] |
AliShug/EvoArm | PyIK/src/litearm.py | a5dea204914ee1e25867e4412e88d245329316f2 | from __future__ import print_function
import numpy as np
import struct
import solvers
import pid
from util import *
MOTORSPEED = 0.9
MOTORMARGIN = 1
MOTORSLOPE = 30
ERRORLIM = 5.0
class ArmConfig:
"""Holds an arm's proportions, limits and other configuration data"""
def __init__(self,
main_length = 148.4,
forearm_length = 160,
linkage_length = 155,
lower_actuator_length = 65,
upper_actuator_length = 54.4,
wrist_length = 90.52,
shoulder_offset = [-9.7, 18.71]):
self.main_length = main_length
self.forearm_length = forearm_length
self.linkage_length = linkage_length
self.lower_actuator_length = lower_actuator_length
self.upper_actuator_length = upper_actuator_length
        self.wrist_length = wrist_length
self.shoulder_offset = shoulder_offset
class ArmPose:
"""
Defines a physical configuration of a LiteArm robot arm.
Internal angles are relative to vertical (elevator/actuator) or straight
forward (swing), and are stored in radians. Extracted servo angles range
0-300 and are measured in degrees.
Provides methods for:
- finding the required servo angles to reach the pose
- checking the validity of the pose
"""
structFormat = 'fffff'
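    # The calc* helpers below convert raw servo readings (0-300 degrees) into
    # the internal convention: radians measured from vertical (elevator and
    # actuator) or from straight ahead (swing), each with its own fixed offset.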
@staticmethod
def calcElevatorAngle(servoAngle):
return radians(178.21 - servoAngle)
@staticmethod
def calcSwingAngle(servoAngle):
return radians(150.0 - servoAngle)
@staticmethod
def calcActuatorAngle(servoAngle):
return radians(servoAngle - 204.78)
@staticmethod
def calcWristXAngle(servoAngle):
return radians(150.0 - servoAngle)
@staticmethod
def calcWristYAngle(servoAngle):
return radians(servoAngle - 147.0)
def __init__(self,
arm_config,
swing_angle,
shoulder_angle,
actuator_angle,
elbow_angle,
elbow2D,
wrist2D,
effector2D,
effector,
wrist_x,
wrist_y):
self.cfg = arm_config
self.swing_angle = swing_angle
self.shoulder_angle = shoulder_angle
self.actuator_angle = actuator_angle
self.elbow_angle = elbow_angle
# Joints in the arm
shoulder = rotate(self.cfg.shoulder_offset, swing_angle)
self.shoulder2D = [self.cfg.shoulder_offset[1], 0]
self.shoulder = [shoulder[0], 0, shoulder[1]]
self.wrist2D = wrist2D
self.effector2D = effector2D
self.effector = effector
# Construct the 3D elbow & wrist positions from the 2D (planar) IK
# solution
arm_vec = effector - self.shoulder
arm_vec[1] = 0
self.elbow2D = elbow2D
self.elbow = self.shoulder + normalize(arm_vec)*elbow2D[0]
self.elbow[1] = elbow2D[1]
self.wrist = self.effector - normalize(arm_vec)*arm_config.wrist_length
# Wrist pose
self.wristXAngle = wrist_x
self.wristYAngle = wrist_y
def getServoElevator(self):
return 178.21 - degrees(self.shoulder_angle)
def getServoActuator(self):
return degrees(self.actuator_angle) + 204.78
def getServoSwing(self):
return 150 - degrees(self.swing_angle)
def getServoWristX(self):
return 150 - degrees(self.wristXAngle)
def getServoWristY(self):
return 147 + degrees(self.wristYAngle)
def armDiffAngle(self):
return degrees(self.shoulder_angle - self.actuator_angle)
def checkActuator(self):
angle = self.getServoActuator()
return angle >= 95 and angle <= 250
def checkDiff(self):
angle = self.armDiffAngle()
return angle >= 44 and angle <= 175
def checkElevator(self):
angle = self.getServoElevator()
return angle >= 60 and angle <= 210
def checkForearm(self):
angle = degrees(self.elbow_angle + self.shoulder_angle)
return angle < 200 and angle > 80
def checkSwing(self):
angle = self.getServoSwing()
return angle >= 60 and angle <= 240
def checkWristX(self):
angle = self.getServoWristX()
return angle >= 60 and angle <= 240
def checkWristY(self):
angle = self.getServoWristY()
return angle >= 60 and angle <= 160
def checkPositioning(self):
# When Y>0 Forearm always faces outwards
if self.wrist2D[1] > 0 and self.wrist2D[0] < self.elbow2D[0]:
return False
# No valid positions X<=0
if self.wrist2D[0] <= 0:
return False
# Effector height range
if self.effector[1] > 180 or self.effector[1] < -200:
return False
return True
def checkClearance(self):
return (self.checkDiff() and self.checkActuator() and
self.checkElevator() and self.checkSwing() and
self.checkWristX() and self.checkWristY() and
self.checkPositioning() and self.checkForearm())
def serialize(self):
"""Returns a packed struct holding the pose information"""
return struct.pack(
ArmPose.structFormat,
self.swing_angle,
self.shoulder_angle,
self.elbow_angle,
self.wristXAngle,
self.wristYAngle
)
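    # For reference, a packed pose can be recovered with
    # struct.unpack(ArmPose.structFormat, data), yielding (swing_angle,
    # shoulder_angle, elbow_angle, wristXAngle, wristYAngle) in radians.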
class ArmController:
def __init__(self,
servo_swing,
servo_shoulder,
servo_elbow,
servo_wrist_x,
servo_wrist_y,
arm_config,
motion_enable = False):
# Solvers are responsible for calculating the target servo positions to
# reach a given goal position
self.ik = solvers.IKSolver(
arm_config.main_length,
arm_config.forearm_length,
arm_config.wrist_length,
arm_config.shoulder_offset)
self.physsolver = solvers.PhysicalSolver(
arm_config.main_length,
arm_config.linkage_length,
arm_config.lower_actuator_length,
arm_config.upper_actuator_length)
# Servos
self.servos = {}
self.servos["swing"] = servo_swing
self.servos["shoulder"] = servo_shoulder
self.servos["elbow"] = servo_elbow
self.servos["wrist_x"] = servo_wrist_x
self.servos["wrist_y"] = servo_wrist_y
for key, servo in self.servos.iteritems():
if servo is None:
print ("Warning: {0} servo not connected".format(key))
else:
# Initialise a PID controller for the servo
if servo.protocol == 1:
servo.setGoalSpeed(-MOTORSPEED)
servo.data['pid'] = pid.PIDControl(2.4, 0, 0.4)
else:
servo.setGoalSpeed(0)
servo.data['error'] = 0.0
# Make sure the goal speed is set
servo.setTorqueEnable(1)
if servo.protocol == 1:
print("Setting slope")
servo.setCWMargin(MOTORMARGIN)
servo.setCCWMargin(MOTORMARGIN)
servo.setCWSlope(MOTORSLOPE)
servo.setCCWSlope(MOTORSLOPE)
# Store parameters
self.motion_enable = True
self.enableMovement(False)
self.cfg = arm_config
# Dirty flags for stored poses
self.ik_pose = None
self.ik_dirty = True
self.real_pose = None
self.real_dirty = True
# Current target pose
self.target_pose = None
def enableMovement(self, enable):
changed = False
if enable and not self.motion_enable:
print ("Warning: Arm enabled")
self.motion_enable = True
changed = True
elif not enable:
self.motion_enable = False
changed = True
if changed:
# Set servos on/off
if self.servos['swing'] is not None:
self.servos['swing'].setTorqueEnable(self.motion_enable)
if self.servos['shoulder'] is not None:
self.servos['shoulder'].setTorqueEnable(self.motion_enable)
if self.servos['elbow'] is not None:
self.servos['elbow'].setTorqueEnable(self.motion_enable)
if self.servos['wrist_x'] is not None:
self.servos['wrist_x'].setTorqueEnable(self.motion_enable)
if self.servos['wrist_y'] is not None:
self.servos['wrist_y'].setTorqueEnable(self.motion_enable)
def setWristGoalPosition(self, pos):
self.ik.setGoal(pos)
self.ik_dirty = True
def setWristGoalDirection(self, normal):
self.ik.setWristDir(normal)
self.ik_dirty = True
def getIKPose(self):
if self.ik_dirty and self.ik.valid:
# Construct geometry of arm from IK state
main_arm = self.ik.elbow - self.ik.originpl
arm_vert_angle = sigangle(main_arm, vertical)
forearm = self.ik.wristpl - self.ik.elbow
elbow_angle = angle_between(main_arm, forearm)
# Solve actuator angle for given elbow angle
# Base angle is between the main arm and actuator
base_angle = self.physsolver.inverse_forearm(elbow_angle)
actuator_angle = arm_vert_angle - base_angle
self.ik_pose = ArmPose(
self.cfg,
swing_angle = self.ik.swing,
# angles from vertical
shoulder_angle = arm_vert_angle,
actuator_angle = actuator_angle,
# angle between the main arm and forearm
elbow_angle = elbow_angle,
elbow2D = self.ik.elbow,
wrist2D = self.ik.wristpl,
effector2D = self.ik.goalpl,
effector = self.ik.goal,
wrist_x = self.ik.wrist_x,
wrist_y = self.ik.wrist_y
)
return self.ik_pose
def pollServos(self):
"""Poll the real-world servo positions"""
for servo in self.servos.itervalues():
if servo is not None:
newPos = servo.getPosition()
if type(newPos) is float:
servo.data['pos'] = newPos
def clearPositionError(self):
"""Clears the servo's position-error accumulators"""
for servo in self.servos.itervalues():
if servo is not None and servo.protocol == 1:
servo.data['error'] = 0.0
def getRealPose(self):
"""Retrieve the real-world arm pose, or None if not all servos are
connected.
"""
if any([servo is None for servo in self.servos.itervalues()]):
return None
# This whole function is essentially just FK based on the known servo
# angles
swing_servo = self.servos['swing'].data['pos']
elevator_servo = self.servos['shoulder'].data['pos']
actuator_servo = self.servos['elbow'].data['pos']
wrist_x_servo = self.servos['wrist_x'].data['pos']
wrist_y_servo = self.servos['wrist_y'].data['pos']
# Find the internal arm-pose angles for the given servo positions
swing_angle = ArmPose.calcSwingAngle(swing_servo)
elevator_angle = ArmPose.calcElevatorAngle(elevator_servo)
actuator_angle = ArmPose.calcActuatorAngle(actuator_servo)
wrist_x_angle = ArmPose.calcWristXAngle(wrist_x_servo)
wrist_y_angle = ArmPose.calcWristYAngle(wrist_y_servo)
# Solve elbow angle for given actuator and elevator angles
# (this is the angle from the elevator arm's direction to the forearm's)
elbow_angle = self.physsolver.solve_forearm(elevator_angle, actuator_angle)
# FK positions from config and angles
offset = self.cfg.shoulder_offset
shoulder2D = np.array([offset[1], 0])
elbow2D = shoulder2D + rotate(vertical, elevator_angle)*self.cfg.main_length
wrist2D = elbow2D + rotate(vertical, elevator_angle + elbow_angle)*self.cfg.forearm_length
effector2D = wrist2D + [self.cfg.wrist_length, 0]
# 3D Effector calculation is a little more involved
td = rotate([offset[0], effector2D[0]], swing_angle)
effector = np.array([td[0], effector2D[1], td[1]])
pose = ArmPose(
self.cfg,
swing_angle, elevator_angle, actuator_angle,
elbow_angle, elbow2D, wrist2D, effector2D,
effector, wrist_x_angle, wrist_y_angle)
return pose
def setTargetPose(self, new_pose):
self.target_pose = new_pose
def tick(self):
if self.target_pose is not None:
if self.motion_enable:
# Drive servos
gain = 0.1
if self.servos['swing'] is not None:
s = self.servos['swing']
pos = s.data['pos']
target = self.target_pose.getServoSwing()
# err = min(10, pos-target)
# s.data['error'] += err*gain
s.setGoalPosition(target)
if self.servos['shoulder'] is not None:
s = self.servos['shoulder']
# cumulative error
pos = s.data['pos']
target = self.target_pose.getServoElevator()
err = min(10, pos-target)
s.data['error'] += err*gain
s.data['error'] = np.clip(s.data['error'], -ERRORLIM, ERRORLIM)
s.setGoalPosition(target - s.data['error'])
if self.servos['elbow'] is not None:
s = self.servos['elbow']
pos = s.data['pos']
target = self.target_pose.getServoActuator()
err = min(10, pos-target)
s.data['error'] += err*gain
s.data['error'] = np.clip(s.data['error'], -ERRORLIM, ERRORLIM)
s.setGoalPosition(target - s.data['error'])
if self.servos['wrist_x'] is not None:
self.servos['wrist_x'].setGoalPosition(self.target_pose.getServoWristX())
if self.servos['wrist_y'] is not None:
self.servos['wrist_y'].setGoalPosition(self.target_pose.getServoWristY())
| [((5285, 5415), 'struct.pack', 'struct.pack', (['ArmPose.structFormat', 'self.swing_angle', 'self.shoulder_angle', 'self.elbow_angle', 'self.wristXAngle', 'self.wristYAngle'], {}), '(ArmPose.structFormat, self.swing_angle, self.shoulder_angle,\n self.elbow_angle, self.wristXAngle, self.wristYAngle)\n', (5296, 5415), False, 'import struct\n'), ((5902, 6026), 'solvers.IKSolver', 'solvers.IKSolver', (['arm_config.main_length', 'arm_config.forearm_length', 'arm_config.wrist_length', 'arm_config.shoulder_offset'], {}), '(arm_config.main_length, arm_config.forearm_length,\n arm_config.wrist_length, arm_config.shoulder_offset)\n', (5918, 6026), False, 'import solvers\n'), ((6098, 6243), 'solvers.PhysicalSolver', 'solvers.PhysicalSolver', (['arm_config.main_length', 'arm_config.linkage_length', 'arm_config.lower_actuator_length', 'arm_config.upper_actuator_length'], {}), '(arm_config.main_length, arm_config.linkage_length,\n arm_config.lower_actuator_length, arm_config.upper_actuator_length)\n', (6120, 6243), False, 'import solvers\n'), ((12098, 12122), 'numpy.array', 'np.array', (['[offset[1], 0]'], {}), '([offset[1], 0])\n', (12106, 12122), True, 'import numpy as np\n'), ((12505, 12544), 'numpy.array', 'np.array', (['[td[0], effector2D[1], td[1]]'], {}), '([td[0], effector2D[1], td[1]])\n', (12513, 12544), True, 'import numpy as np\n'), ((6922, 6949), 'pid.PIDControl', 'pid.PIDControl', (['(2.4)', '(0)', '(0.4)'], {}), '(2.4, 0, 0.4)\n', (6936, 6949), False, 'import pid\n'), ((13731, 13776), 'numpy.clip', 'np.clip', (["s.data['error']", '(-ERRORLIM)', 'ERRORLIM'], {}), "(s.data['error'], -ERRORLIM, ERRORLIM)\n", (13738, 13776), True, 'import numpy as np\n'), ((14176, 14221), 'numpy.clip', 'np.clip', (["s.data['error']", '(-ERRORLIM)', 'ERRORLIM'], {}), "(s.data['error'], -ERRORLIM, ERRORLIM)\n", (14183, 14221), True, 'import numpy as np\n')] |
jakobabesser/piano_aug | create_augmented_versions.py | 37f78c77465749c80d7aa91d9e804b89024eb278 | from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard
import soundfile as sf
if __name__ == '__main__':
    # replace with the path of the unprocessed piano file if necessary
fn_wav_source = 'live_grand_piano.wav'
# augmentation settings using Pedalboard library
settings = {'rev-': [Reverb(room_size=.4)],
'rev+': [Reverb(room_size=.8)],
'comp+': [Compressor(threshold_db=-15, ratio=20)],
'comp-': [Compressor(threshold_db=-10, ratio=10)],
'gain+': [Gain(gain_db=15)], # clipping
'gain-': [Gain(gain_db=5)],
'lpf-': [LowpassFilter(cutoff_frequency_hz=50)],
'lpf+': [LowpassFilter(cutoff_frequency_hz=250)]}
# create augmented versions
for s in settings.keys():
# load unprocessed piano recording
audio, sample_rate = sf.read(fn_wav_source)
# create Pedalboard object
board = Pedalboard(settings[s])
# create augmented audio
effected = board(audio, sample_rate)
# save it
fn_target = fn_wav_source.replace('.wav', f'_{s}.wav')
sf.write(fn_target, effected, sample_rate)
| [((883, 905), 'soundfile.read', 'sf.read', (['fn_wav_source'], {}), '(fn_wav_source)\n', (890, 905), True, 'import soundfile as sf\n'), ((958, 981), 'pedalboard.Pedalboard', 'Pedalboard', (['settings[s]'], {}), '(settings[s])\n', (968, 981), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((1151, 1193), 'soundfile.write', 'sf.write', (['fn_target', 'effected', 'sample_rate'], {}), '(fn_target, effected, sample_rate)\n', (1159, 1193), True, 'import soundfile as sf\n'), ((310, 331), 'pedalboard.Reverb', 'Reverb', ([], {'room_size': '(0.4)'}), '(room_size=0.4)\n', (316, 331), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((358, 379), 'pedalboard.Reverb', 'Reverb', ([], {'room_size': '(0.8)'}), '(room_size=0.8)\n', (364, 379), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((407, 445), 'pedalboard.Compressor', 'Compressor', ([], {'threshold_db': '(-15)', 'ratio': '(20)'}), '(threshold_db=-15, ratio=20)\n', (417, 445), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((474, 512), 'pedalboard.Compressor', 'Compressor', ([], {'threshold_db': '(-10)', 'ratio': '(10)'}), '(threshold_db=-10, ratio=10)\n', (484, 512), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((541, 557), 'pedalboard.Gain', 'Gain', ([], {'gain_db': '(15)'}), '(gain_db=15)\n', (545, 557), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((598, 613), 'pedalboard.Gain', 'Gain', ([], {'gain_db': '(5)'}), '(gain_db=5)\n', (602, 613), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((641, 678), 'pedalboard.LowpassFilter', 'LowpassFilter', ([], {'cutoff_frequency_hz': '(50)'}), '(cutoff_frequency_hz=50)\n', (654, 678), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n'), ((706, 744), 'pedalboard.LowpassFilter', 'LowpassFilter', ([], {'cutoff_frequency_hz': '(250)'}), '(cutoff_frequency_hz=250)\n', (719, 744), False, 'from pedalboard import Reverb, Compressor, Gain, LowpassFilter, Pedalboard\n')] |
siq/flux | flux/migrations/versions/9ba67b798fa_add_request_system.py | ca7563deb9ebef14840bbf0cb7bab4d9478b2470 | """add_request_system
Revision: 9ba67b798fa
Revises: 31b92bf6506d
Created: 2013-07-23 02:49:09.342814
"""
revision = '9ba67b798fa'
down_revision = '31b92bf6506d'
from alembic import op
from spire.schema.fields import *
from spire.mesh import SurrogateType
from sqlalchemy import (Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint,
CheckConstraint, UniqueConstraint)
from sqlalchemy.dialects import postgresql
def upgrade():
op.create_table('request',
Column('id', UUIDType(), nullable=False),
Column('name', TextType(), nullable=False),
Column('status', EnumerationType(), nullable=False),
Column('originator', TokenType(), nullable=False),
Column('assignee', TokenType(), nullable=False),
PrimaryKeyConstraint('id'),
UniqueConstraint('name'),
)
op.create_table('request_slot',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('token', TokenType(), nullable=False),
Column('title', TextType(), nullable=True),
Column('slot', TokenType(), nullable=False),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
UniqueConstraint('request_id','token'),
)
op.create_table('request_attachment',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('token', TokenType(), nullable=True),
Column('title', TextType(), nullable=True),
Column('attachment', SurrogateType(), nullable=False),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
)
op.create_table('request_product',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('token', TokenType(), nullable=False),
Column('title', TextType(), nullable=True),
Column('product', SurrogateType(), nullable=False),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
UniqueConstraint('request_id','token'),
)
op.create_table('message',
Column('id', UUIDType(), nullable=False),
Column('request_id', UUIDType(), nullable=False),
Column('author', TokenType(), nullable=False),
Column('occurrence', DateTimeType(timezone=True), nullable=False),
Column('message', TextType(), nullable=True),
ForeignKeyConstraint(['request_id'], ['request.id'], ondelete='CASCADE'),
PrimaryKeyConstraint('id'),
)
def downgrade():
op.drop_table('message')
op.drop_table('request_product')
op.drop_table('request_attachment')
op.drop_table('request_slot')
op.drop_table('request')
| [((2702, 2726), 'alembic.op.drop_table', 'op.drop_table', (['"""message"""'], {}), "('message')\n", (2715, 2726), False, 'from alembic import op\n'), ((2731, 2763), 'alembic.op.drop_table', 'op.drop_table', (['"""request_product"""'], {}), "('request_product')\n", (2744, 2763), False, 'from alembic import op\n'), ((2768, 2803), 'alembic.op.drop_table', 'op.drop_table', (['"""request_attachment"""'], {}), "('request_attachment')\n", (2781, 2803), False, 'from alembic import op\n'), ((2808, 2837), 'alembic.op.drop_table', 'op.drop_table', (['"""request_slot"""'], {}), "('request_slot')\n", (2821, 2837), False, 'from alembic import op\n'), ((2842, 2866), 'alembic.op.drop_table', 'op.drop_table', (['"""request"""'], {}), "('request')\n", (2855, 2866), False, 'from alembic import op\n'), ((763, 789), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (783, 789), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((799, 823), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""name"""'], {}), "('name')\n", (815, 823), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1142, 1214), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (1162, 1214), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1224, 1250), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1244, 1250), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1260, 1299), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""request_id"""', '"""token"""'], {}), "('request_id', 'token')\n", (1276, 1299), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1632, 1704), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (1652, 1704), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1714, 1740), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (1734, 1740), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2069, 2141), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (2089, 2141), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2151, 2177), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2171, 2177), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2187, 2226), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""request_id"""', '"""token"""'], {}), "('request_id', 'token')\n", (2203, 2226), False, 
'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2564, 2636), 'sqlalchemy.ForeignKeyConstraint', 'ForeignKeyConstraint', (["['request_id']", "['request.id']"], {'ondelete': '"""CASCADE"""'}), "(['request_id'], ['request.id'], ondelete='CASCADE')\n", (2584, 2636), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((2646, 2672), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (2666, 2672), False, 'from sqlalchemy import Column, ForeignKey, ForeignKeyConstraint, PrimaryKeyConstraint, CheckConstraint, UniqueConstraint\n'), ((1590, 1605), 'spire.mesh.SurrogateType', 'SurrogateType', ([], {}), '()\n', (1603, 1605), False, 'from spire.mesh import SurrogateType\n'), ((2027, 2042), 'spire.mesh.SurrogateType', 'SurrogateType', ([], {}), '()\n', (2040, 2042), False, 'from spire.mesh import SurrogateType\n')] |
clalancette/ign-math | src/python/Vector2_TEST.py | 84eb1bfe470d00d335c048f102b56c49a15b56be | # Copyright (C) 2021 Open Source Robotics Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import math
from ignition.math import Vector2d
from ignition.math import Vector2f
class TestVector2(unittest.TestCase):
def test_construction(self):
v = Vector2d()
self.assertAlmostEqual(0.0, v.x())
self.assertAlmostEqual(0.0, v.y())
vec = Vector2d(1, 0)
self.assertEqual(vec.x(), 1)
self.assertEqual(vec.y(), 0)
vec2 = Vector2d(vec)
self.assertEqual(vec2, vec)
# Copy
vec3 = vec
self.assertEqual(vec3, vec)
# Inequality
vec4 = Vector2d()
self.assertNotEqual(vec, vec4)
def test_vector2(self):
v = Vector2d(1, 2)
# Distance
self.assertAlmostEqual(2.236, v.distance(Vector2d()), delta=1e-2)
# Normalize
v.normalize()
self.assertTrue(v.equal(Vector2d(0.447214, 0.894427), 1e-4))
# Set
v.set(4, 5)
self.assertTrue(v.equal(Vector2d(4, 5), 1e-4))
# Abs
v.set(-1, -2)
self.assertTrue(v.abs().equal(Vector2d(1, 2), 1e-4))
        # __eq__
v = Vector2d(6, 7)
self.assertTrue(v.equal(Vector2d(6, 7), 1e-4))
        # __add__
v = v + Vector2d(1, 2)
self.assertTrue(v.equal(Vector2d(7, 9), 1e-4))
v += Vector2d(5, 6)
self.assertTrue(v.equal(Vector2d(12, 15), 1e-4))
# __sub__
v = v - Vector2d(2, 4)
self.assertTrue(v.equal(Vector2d(10, 11), 1e-4))
v.set(2, 4)
v -= Vector2d(1, 6)
self.assertTrue(v.equal(Vector2d(1, -2), 1e-4))
# __truediv__
v.set(10, 6)
v = v / Vector2d(2, 3)
self.assertTrue(v.equal(Vector2d(5, 2), 1e-4))
v.set(10, 6)
v /= Vector2d(2, 3)
self.assertTrue(v.equal(Vector2d(5, 2), 1e-4))
# __truediv__ int
v.set(10, 6)
v = v / 2
self.assertTrue(v.equal(Vector2d(5, 3), 1e-4))
v.set(10, 6)
v /= 2
self.assertTrue(v.equal(Vector2d(5, 3), 1e-4))
# __mul__
v.set(10, 6)
v = v * Vector2d(2, 4)
self.assertTrue(v.equal(Vector2d(20, 24), 1e-4))
v.set(10, 6)
v *= Vector2d(2, 4)
self.assertTrue(v.equal(Vector2d(20, 24), 1e-4))
# __mul__ int
v.set(10, 6)
v = v * 2
self.assertTrue(v.equal(Vector2d(20, 12), 1e-4))
v.set(10, 6)
v *= 2
self.assertTrue(v.equal(Vector2d(20, 12), 1e-4))
# is_finite
self.assertTrue(v.is_finite())
def test_max(self):
vec1 = Vector2d(0.1, 0.2)
vec2 = Vector2d(0.3, 0.5)
vec3 = Vector2d(0.4, 0.2)
self.assertAlmostEqual(vec1.max(), 0.2)
self.assertAlmostEqual(vec3.max(), 0.4)
vec1.max(vec2)
self.assertAlmostEqual(vec1, Vector2d(0.3, 0.5))
vec1.max(vec3)
self.assertAlmostEqual(vec1, Vector2d(0.4, 0.5))
def test_min(self):
vec1 = Vector2d(0.3, 0.5)
vec2 = Vector2d(0.1, 0.2)
vec3 = Vector2d(0.05, 0.1)
self.assertAlmostEqual(vec1.min(), 0.3)
self.assertAlmostEqual(vec3.min(), 0.05)
vec1.min(vec2)
self.assertAlmostEqual(vec1, Vector2d(0.1, 0.2))
vec1.min(vec3)
self.assertAlmostEqual(vec1, Vector2d(0.05, 0.1))
def test_equal_tolerance(self):
# Test Equal function with specified tolerance
self.assertFalse(Vector2d.ZERO.equal(Vector2d.ONE, 1e-6))
self.assertFalse(Vector2d.ZERO.equal(Vector2d.ONE, 1e-3))
self.assertFalse(Vector2d.ZERO.equal(Vector2d.ONE, 1e-1))
self.assertTrue(Vector2d.ZERO.equal(Vector2d.ONE, 1))
self.assertTrue(Vector2d.ZERO.equal(Vector2d.ONE, 1.1))
def test_dot(self):
v = Vector2d(1, 2)
self.assertAlmostEqual(v.dot(Vector2d(3, 4)), 11.0)
self.assertAlmostEqual(v.dot(Vector2d(0, 0)), 0.0)
self.assertAlmostEqual(v.dot(Vector2d(1, 0)), 1.0)
self.assertAlmostEqual(v.dot(Vector2d(0, 1)), 2.0)
def test_correct(self):
vec1 = Vector2d(0, float("nan"))
vec2 = Vector2d(float("inf"), -1)
vec3 = Vector2d(10, -2)
vec1.correct()
vec2.correct()
vec3.correct()
self.assertAlmostEqual(vec1, Vector2d(0, 0))
self.assertAlmostEqual(vec2, Vector2d(0, -1))
self.assertAlmostEqual(vec3, Vector2d(10, -2))
def test_abs_dot(self):
v = Vector2d(1, -2)
self.assertAlmostEqual(v.abs_dot(Vector2d(3, 4)), 11.0)
self.assertAlmostEqual(v.abs_dot(Vector2d(0, 0)), 0.0)
self.assertAlmostEqual(v.abs_dot(Vector2d(1, 0)), 1.0)
self.assertAlmostEqual(v.abs_dot(Vector2d(0, 1)), 2.0)
def test_add(self):
vec1 = Vector2d(0.1, 0.2)
vec2 = Vector2d(1.1, 2.2)
vec3 = vec1
vec3 += vec2
self.assertAlmostEqual(vec1 + vec2, Vector2d(1.2, 2.4))
self.assertAlmostEqual(vec3, Vector2d(1.2, 2.4))
# Add zero
# Scalar right
self.assertEqual(vec1 + 0, vec1)
# Vector left and right
self.assertAlmostEqual(Vector2d.ZERO + vec1, vec1)
self.assertAlmostEqual(vec1 + Vector2d.ZERO, vec1)
# Addition assigment
vec4 = Vector2d(vec1)
vec4 += 0
self.assertEqual(vec4, vec1)
vec4 += Vector2d.ZERO
self.assertAlmostEqual(vec4, vec1)
# Add non-trivial scalar values left and right
self.assertEqual(vec1 + 2.5, Vector2d(2.6, 2.7))
vec1 = vec4
vec4 += 2.5
self.assertEqual(vec4, Vector2d(2.6, 2.7))
def test_sub(self):
vec1 = Vector2d(0.1, 0.2)
vec2 = Vector2d(1.1, 2.2)
vec3 = vec2
vec3 -= vec1
self.assertAlmostEqual(vec2 - vec1, Vector2d(1.0, 2.0))
self.assertAlmostEqual(vec3, Vector2d(1.0, 2.0))
# Subtraction with zeros
# Scalar right
self.assertEqual(vec1 - 0, vec1)
# Vector left and right
self.assertAlmostEqual(Vector2d.ZERO - vec1, -vec1)
self.assertAlmostEqual(vec1 - Vector2d.ZERO, vec1)
# Subtraction assignment
vec4 = Vector2d(vec1)
vec4 -= 0
self.assertEqual(vec4, vec1)
vec4 -= Vector2d.ZERO
self.assertAlmostEqual(vec4, vec1)
# Subtract non-trivial scalar values left and right
self.assertEqual(vec1 - 2.5, -Vector2d(2.4, 2.3))
vec4 = vec1
vec4 -= 2.5
self.assertEqual(vec4, -Vector2d(2.4, 2.3))
def test_multiply(self):
v = Vector2d(0.1, -4.2)
vec2 = v * 2.0
self.assertEqual(vec2, Vector2d(0.2, -8.4))
vec2 *= 4.0
self.assertEqual(vec2, Vector2d(0.8, -33.6))
# Multiply by zero
# Scalar right
self.assertEqual(v * 0, Vector2d.ZERO)
# Element-wise vector multiplication
self.assertEqual(v * Vector2d.ZERO, Vector2d.ZERO)
# Multiply by one
# Scalar right
self.assertEqual(v * 1, v)
# Element-wise vector multiplication
self.assertEqual(v * Vector2d.ONE, v)
# Multiply by non-trivial scalar value
scalar = 2.5
expect = Vector2d(0.25, -10.5)
self.assertEqual(v * scalar, expect)
# Multiply by itself element-wise
v.set(0.1, 0.5)
self.assertAlmostEqual(v * v, Vector2d(0.01, 0.25))
    def test_length(self):
# Zero vector
self.assertAlmostEqual(Vector2d.ZERO.length(), 0.0)
self.assertAlmostEqual(Vector2d.ZERO.squared_length(), 0.0)
# One vector
self.assertAlmostEqual(Vector2d.ONE.length(),
math.sqrt(2), delta=1e-10)
self.assertAlmostEqual(Vector2d.ONE.squared_length(), 2.0)
# Arbitrary vector
v = Vector2d(0.1, -4.2)
self.assertAlmostEqual(v.length(), 4.20119030752, delta=1e-10)
self.assertAlmostEqual(v.squared_length(), 17.65)
# Integer vector
v = Vector2d(3, 4)
self.assertAlmostEqual(v.length(), 5)
self.assertAlmostEqual(v.squared_length(), 25)
def test_nan(self):
nanVec = Vector2d.NAN
self.assertFalse(nanVec.is_finite())
self.assertTrue(math.isnan(nanVec.x()))
self.assertTrue(math.isnan(nanVec.y()))
nanVec.correct()
self.assertEqual(Vector2d.ZERO, nanVec)
self.assertTrue(nanVec.is_finite())
nanVecF = Vector2f.NAN
self.assertFalse(nanVecF.is_finite())
self.assertTrue(math.isnan(nanVecF.x()))
self.assertTrue(math.isnan(nanVecF.y()))
nanVecF.correct()
self.assertEqual(Vector2f.ZERO, nanVecF)
self.assertTrue(nanVecF.is_finite())
if __name__ == '__main__':
unittest.main()
| [((9317, 9332), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9330, 9332), False, 'import unittest\n'), ((786, 796), 'ignition.math.Vector2d', 'Vector2d', ([], {}), '()\n', (794, 796), False, 'from ignition.math import Vector2d\n'), ((898, 912), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(0)'], {}), '(1, 0)\n', (906, 912), False, 'from ignition.math import Vector2d\n'), ((1003, 1016), 'ignition.math.Vector2d', 'Vector2d', (['vec'], {}), '(vec)\n', (1011, 1016), False, 'from ignition.math import Vector2d\n'), ((1161, 1171), 'ignition.math.Vector2d', 'Vector2d', ([], {}), '()\n', (1169, 1171), False, 'from ignition.math import Vector2d\n'), ((1252, 1266), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (1260, 1266), False, 'from ignition.math import Vector2d\n'), ((1689, 1703), 'ignition.math.Vector2d', 'Vector2d', (['(6)', '(7)'], {}), '(6, 7)\n', (1697, 1703), False, 'from ignition.math import Vector2d\n'), ((1876, 1890), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(6)'], {}), '(5, 6)\n', (1884, 1890), False, 'from ignition.math import Vector2d\n'), ((2089, 2103), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(6)'], {}), '(1, 6)\n', (2097, 2103), False, 'from ignition.math import Vector2d\n'), ((2325, 2339), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(3)'], {}), '(2, 3)\n', (2333, 2339), False, 'from ignition.math import Vector2d\n'), ((2771, 2785), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(4)'], {}), '(2, 4)\n', (2779, 2785), False, 'from ignition.math import Vector2d\n'), ((3156, 3174), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (3164, 3174), False, 'from ignition.math import Vector2d\n'), ((3190, 3208), 'ignition.math.Vector2d', 'Vector2d', (['(0.3)', '(0.5)'], {}), '(0.3, 0.5)\n', (3198, 3208), False, 'from ignition.math import Vector2d\n'), ((3224, 3242), 'ignition.math.Vector2d', 'Vector2d', (['(0.4)', '(0.2)'], {}), '(0.4, 0.2)\n', (3232, 3242), False, 'from ignition.math import Vector2d\n'), ((3542, 3560), 'ignition.math.Vector2d', 'Vector2d', (['(0.3)', '(0.5)'], {}), '(0.3, 0.5)\n', (3550, 3560), False, 'from ignition.math import Vector2d\n'), ((3576, 3594), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (3584, 3594), False, 'from ignition.math import Vector2d\n'), ((3610, 3629), 'ignition.math.Vector2d', 'Vector2d', (['(0.05)', '(0.1)'], {}), '(0.05, 0.1)\n', (3618, 3629), False, 'from ignition.math import Vector2d\n'), ((4344, 4358), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (4352, 4358), False, 'from ignition.math import Vector2d\n'), ((4723, 4739), 'ignition.math.Vector2d', 'Vector2d', (['(10)', '(-2)'], {}), '(10, -2)\n', (4731, 4739), False, 'from ignition.math import Vector2d\n'), ((5014, 5029), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(-2)'], {}), '(1, -2)\n', (5022, 5029), False, 'from ignition.math import Vector2d\n'), ((5324, 5342), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (5332, 5342), False, 'from ignition.math import Vector2d\n'), ((5358, 5376), 'ignition.math.Vector2d', 'Vector2d', (['(1.1)', '(2.2)'], {}), '(1.1, 2.2)\n', (5366, 5376), False, 'from ignition.math import Vector2d\n'), ((5821, 5835), 'ignition.math.Vector2d', 'Vector2d', (['vec1'], {}), '(vec1)\n', (5829, 5835), False, 'from ignition.math import Vector2d\n'), ((6209, 6227), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (6217, 6227), False, 'from ignition.math import 
Vector2d\n'), ((6243, 6261), 'ignition.math.Vector2d', 'Vector2d', (['(1.1)', '(2.2)'], {}), '(1.1, 2.2)\n', (6251, 6261), False, 'from ignition.math import Vector2d\n'), ((6725, 6739), 'ignition.math.Vector2d', 'Vector2d', (['vec1'], {}), '(vec1)\n', (6733, 6739), False, 'from ignition.math import Vector2d\n'), ((7122, 7141), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(-4.2)'], {}), '(0.1, -4.2)\n', (7130, 7141), False, 'from ignition.math import Vector2d\n'), ((7758, 7779), 'ignition.math.Vector2d', 'Vector2d', (['(0.25)', '(-10.5)'], {}), '(0.25, -10.5)\n', (7766, 7779), False, 'from ignition.math import Vector2d\n'), ((8371, 8390), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(-4.2)'], {}), '(0.1, -4.2)\n', (8379, 8390), False, 'from ignition.math import Vector2d\n'), ((8558, 8572), 'ignition.math.Vector2d', 'Vector2d', (['(3)', '(4)'], {}), '(3, 4)\n', (8566, 8572), False, 'from ignition.math import Vector2d\n'), ((1792, 1806), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (1800, 1806), False, 'from ignition.math import Vector2d\n'), ((1983, 1997), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(4)'], {}), '(2, 4)\n', (1991, 1997), False, 'from ignition.math import Vector2d\n'), ((2220, 2234), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(3)'], {}), '(2, 3)\n', (2228, 2234), False, 'from ignition.math import Vector2d\n'), ((2664, 2678), 'ignition.math.Vector2d', 'Vector2d', (['(2)', '(4)'], {}), '(2, 4)\n', (2672, 2678), False, 'from ignition.math import Vector2d\n'), ((3401, 3419), 'ignition.math.Vector2d', 'Vector2d', (['(0.3)', '(0.5)'], {}), '(0.3, 0.5)\n', (3409, 3419), False, 'from ignition.math import Vector2d\n'), ((3482, 3500), 'ignition.math.Vector2d', 'Vector2d', (['(0.4)', '(0.5)'], {}), '(0.4, 0.5)\n', (3490, 3500), False, 'from ignition.math import Vector2d\n'), ((3789, 3807), 'ignition.math.Vector2d', 'Vector2d', (['(0.1)', '(0.2)'], {}), '(0.1, 0.2)\n', (3797, 3807), False, 'from ignition.math import Vector2d\n'), ((3870, 3889), 'ignition.math.Vector2d', 'Vector2d', (['(0.05)', '(0.1)'], {}), '(0.05, 0.1)\n', (3878, 3889), False, 'from ignition.math import Vector2d\n'), ((4008, 4048), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(1e-06)'], {}), '(Vector2d.ONE, 1e-06)\n', (4027, 4048), False, 'from ignition.math import Vector2d\n'), ((4074, 4114), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(0.001)'], {}), '(Vector2d.ONE, 0.001)\n', (4093, 4114), False, 'from ignition.math import Vector2d\n'), ((4140, 4178), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(0.1)'], {}), '(Vector2d.ONE, 0.1)\n', (4159, 4178), False, 'from ignition.math import Vector2d\n'), ((4205, 4241), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(1)'], {}), '(Vector2d.ONE, 1)\n', (4224, 4241), False, 'from ignition.math import Vector2d\n'), ((4267, 4305), 'ignition.math.Vector2d.ZERO.equal', 'Vector2d.ZERO.equal', (['Vector2d.ONE', '(1.1)'], {}), '(Vector2d.ONE, 1.1)\n', (4286, 4305), False, 'from ignition.math import Vector2d\n'), ((4848, 4862), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(0)'], {}), '(0, 0)\n', (4856, 4862), False, 'from ignition.math import Vector2d\n'), ((4901, 4916), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(-1)'], {}), '(0, -1)\n', (4909, 4916), False, 'from ignition.math import Vector2d\n'), ((4955, 4971), 'ignition.math.Vector2d', 'Vector2d', (['(10)', '(-2)'], {}), '(10, -2)\n', (4963, 4971), 
False, 'from ignition.math import Vector2d\n'), ((5464, 5482), 'ignition.math.Vector2d', 'Vector2d', (['(1.2)', '(2.4)'], {}), '(1.2, 2.4)\n', (5472, 5482), False, 'from ignition.math import Vector2d\n'), ((5521, 5539), 'ignition.math.Vector2d', 'Vector2d', (['(1.2)', '(2.4)'], {}), '(1.2, 2.4)\n', (5529, 5539), False, 'from ignition.math import Vector2d\n'), ((6057, 6075), 'ignition.math.Vector2d', 'Vector2d', (['(2.6)', '(2.7)'], {}), '(2.6, 2.7)\n', (6065, 6075), False, 'from ignition.math import Vector2d\n'), ((6149, 6167), 'ignition.math.Vector2d', 'Vector2d', (['(2.6)', '(2.7)'], {}), '(2.6, 2.7)\n', (6157, 6167), False, 'from ignition.math import Vector2d\n'), ((6349, 6367), 'ignition.math.Vector2d', 'Vector2d', (['(1.0)', '(2.0)'], {}), '(1.0, 2.0)\n', (6357, 6367), False, 'from ignition.math import Vector2d\n'), ((6406, 6424), 'ignition.math.Vector2d', 'Vector2d', (['(1.0)', '(2.0)'], {}), '(1.0, 2.0)\n', (6414, 6424), False, 'from ignition.math import Vector2d\n'), ((7197, 7216), 'ignition.math.Vector2d', 'Vector2d', (['(0.2)', '(-8.4)'], {}), '(0.2, -8.4)\n', (7205, 7216), False, 'from ignition.math import Vector2d\n'), ((7270, 7290), 'ignition.math.Vector2d', 'Vector2d', (['(0.8)', '(-33.6)'], {}), '(0.8, -33.6)\n', (7278, 7290), False, 'from ignition.math import Vector2d\n'), ((7930, 7950), 'ignition.math.Vector2d', 'Vector2d', (['(0.01)', '(0.25)'], {}), '(0.01, 0.25)\n', (7938, 7950), False, 'from ignition.math import Vector2d\n'), ((8033, 8055), 'ignition.math.Vector2d.ZERO.length', 'Vector2d.ZERO.length', ([], {}), '()\n', (8053, 8055), False, 'from ignition.math import Vector2d\n'), ((8093, 8123), 'ignition.math.Vector2d.ZERO.squared_length', 'Vector2d.ZERO.squared_length', ([], {}), '()\n', (8121, 8123), False, 'from ignition.math import Vector2d\n'), ((8183, 8204), 'ignition.math.Vector2d.ONE.length', 'Vector2d.ONE.length', ([], {}), '()\n', (8202, 8204), False, 'from ignition.math import Vector2d\n'), ((8237, 8249), 'math.sqrt', 'math.sqrt', (['(2)'], {}), '(2)\n', (8246, 8249), False, 'import math\n'), ((8295, 8324), 'ignition.math.Vector2d.ONE.squared_length', 'Vector2d.ONE.squared_length', ([], {}), '()\n', (8322, 8324), False, 'from ignition.math import Vector2d\n'), ((1336, 1346), 'ignition.math.Vector2d', 'Vector2d', ([], {}), '()\n', (1344, 1346), False, 'from ignition.math import Vector2d\n'), ((1436, 1464), 'ignition.math.Vector2d', 'Vector2d', (['(0.447214)', '(0.894427)'], {}), '(0.447214, 0.894427)\n', (1444, 1464), False, 'from ignition.math import Vector2d\n'), ((1540, 1554), 'ignition.math.Vector2d', 'Vector2d', (['(4)', '(5)'], {}), '(4, 5)\n', (1548, 1554), False, 'from ignition.math import Vector2d\n'), ((1638, 1652), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(2)'], {}), '(1, 2)\n', (1646, 1652), False, 'from ignition.math import Vector2d\n'), ((1736, 1750), 'ignition.math.Vector2d', 'Vector2d', (['(6)', '(7)'], {}), '(6, 7)\n', (1744, 1750), False, 'from ignition.math import Vector2d\n'), ((1839, 1853), 'ignition.math.Vector2d', 'Vector2d', (['(7)', '(9)'], {}), '(7, 9)\n', (1847, 1853), False, 'from ignition.math import Vector2d\n'), ((1923, 1939), 'ignition.math.Vector2d', 'Vector2d', (['(12)', '(15)'], {}), '(12, 15)\n', (1931, 1939), False, 'from ignition.math import Vector2d\n'), ((2030, 2046), 'ignition.math.Vector2d', 'Vector2d', (['(10)', '(11)'], {}), '(10, 11)\n', (2038, 2046), False, 'from ignition.math import Vector2d\n'), ((2136, 2151), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(-2)'], {}), '(1, -2)\n', (2144, 2151), False, 
'from ignition.math import Vector2d\n'), ((2267, 2281), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(2)'], {}), '(5, 2)\n', (2275, 2281), False, 'from ignition.math import Vector2d\n'), ((2372, 2386), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(2)'], {}), '(5, 2)\n', (2380, 2386), False, 'from ignition.math import Vector2d\n'), ((2493, 2507), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(3)'], {}), '(5, 3)\n', (2501, 2507), False, 'from ignition.math import Vector2d\n'), ((2585, 2599), 'ignition.math.Vector2d', 'Vector2d', (['(5)', '(3)'], {}), '(5, 3)\n', (2593, 2599), False, 'from ignition.math import Vector2d\n'), ((2711, 2727), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(24)'], {}), '(20, 24)\n', (2719, 2727), False, 'from ignition.math import Vector2d\n'), ((2818, 2834), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(24)'], {}), '(20, 24)\n', (2826, 2834), False, 'from ignition.math import Vector2d\n'), ((2937, 2953), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(12)'], {}), '(20, 12)\n', (2945, 2953), False, 'from ignition.math import Vector2d\n'), ((3031, 3047), 'ignition.math.Vector2d', 'Vector2d', (['(20)', '(12)'], {}), '(20, 12)\n', (3039, 3047), False, 'from ignition.math import Vector2d\n'), ((4396, 4410), 'ignition.math.Vector2d', 'Vector2d', (['(3)', '(4)'], {}), '(3, 4)\n', (4404, 4410), False, 'from ignition.math import Vector2d\n'), ((4456, 4470), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(0)'], {}), '(0, 0)\n', (4464, 4470), False, 'from ignition.math import Vector2d\n'), ((4515, 4529), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(0)'], {}), '(1, 0)\n', (4523, 4529), False, 'from ignition.math import Vector2d\n'), ((4574, 4588), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(1)'], {}), '(0, 1)\n', (4582, 4588), False, 'from ignition.math import Vector2d\n'), ((5072, 5086), 'ignition.math.Vector2d', 'Vector2d', (['(3)', '(4)'], {}), '(3, 4)\n', (5080, 5086), False, 'from ignition.math import Vector2d\n'), ((5136, 5150), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(0)'], {}), '(0, 0)\n', (5144, 5150), False, 'from ignition.math import Vector2d\n'), ((5199, 5213), 'ignition.math.Vector2d', 'Vector2d', (['(1)', '(0)'], {}), '(1, 0)\n', (5207, 5213), False, 'from ignition.math import Vector2d\n'), ((5262, 5276), 'ignition.math.Vector2d', 'Vector2d', (['(0)', '(1)'], {}), '(0, 1)\n', (5270, 5276), False, 'from ignition.math import Vector2d\n'), ((6967, 6985), 'ignition.math.Vector2d', 'Vector2d', (['(2.4)', '(2.3)'], {}), '(2.4, 2.3)\n', (6975, 6985), False, 'from ignition.math import Vector2d\n'), ((7060, 7078), 'ignition.math.Vector2d', 'Vector2d', (['(2.4)', '(2.3)'], {}), '(2.4, 2.3)\n', (7068, 7078), False, 'from ignition.math import Vector2d\n')] |
sodre/filesystem_spec | fsspec/tests/test_mapping.py | 5fe51c5e85366b57a11ed66637a940970372ea4b | import os
import fsspec
from fsspec.implementations.memory import MemoryFileSystem
import pickle
import pytest
def test_mapping_prefix(tmpdir):
tmpdir = str(tmpdir)
os.makedirs(os.path.join(tmpdir, "afolder"))
open(os.path.join(tmpdir, "afile"), "w").write("test")
open(os.path.join(tmpdir, "afolder", "anotherfile"), "w").write("test2")
m = fsspec.get_mapper("file://" + tmpdir)
assert "afile" in m
assert m["afolder/anotherfile"] == b"test2"
fs = fsspec.filesystem("file")
m2 = fs.get_mapper(tmpdir)
m3 = fs.get_mapper("file://" + tmpdir)
assert m == m2 == m3
def test_ops():
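    # Exercise basic dict-style operations (get/set/pop/clear) on an in-memory mapper.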
MemoryFileSystem.store.clear()
m = fsspec.get_mapper("memory://")
assert not m
assert list(m) == []
with pytest.raises(KeyError):
m["hi"]
assert m.pop("key", 0) == 0
m["key0"] = b"data"
assert list(m) == ["key0"]
assert m["key0"] == b"data"
m.clear()
assert list(m) == []
def test_pickle():
m = fsspec.get_mapper("memory://")
assert isinstance(m.fs, MemoryFileSystem)
m["key"] = b"data"
m2 = pickle.loads(pickle.dumps(m))
assert list(m) == list(m2)
def test_keys_view():
# https://github.com/intake/filesystem_spec/issues/186
m = fsspec.get_mapper("memory://")
m["key"] = b"data"
keys = m.keys()
assert len(keys) == 1
# check that we don't consume the keys
assert len(keys) == 1
| [((365, 402), 'fsspec.get_mapper', 'fsspec.get_mapper', (["('file://' + tmpdir)"], {}), "('file://' + tmpdir)\n", (382, 402), False, 'import fsspec\n'), ((485, 510), 'fsspec.filesystem', 'fsspec.filesystem', (['"""file"""'], {}), "('file')\n", (502, 510), False, 'import fsspec\n'), ((633, 663), 'fsspec.implementations.memory.MemoryFileSystem.store.clear', 'MemoryFileSystem.store.clear', ([], {}), '()\n', (661, 663), False, 'from fsspec.implementations.memory import MemoryFileSystem\n'), ((672, 702), 'fsspec.get_mapper', 'fsspec.get_mapper', (['"""memory://"""'], {}), "('memory://')\n", (689, 702), False, 'import fsspec\n'), ((987, 1017), 'fsspec.get_mapper', 'fsspec.get_mapper', (['"""memory://"""'], {}), "('memory://')\n", (1004, 1017), False, 'import fsspec\n'), ((1248, 1278), 'fsspec.get_mapper', 'fsspec.get_mapper', (['"""memory://"""'], {}), "('memory://')\n", (1265, 1278), False, 'import fsspec\n'), ((187, 218), 'os.path.join', 'os.path.join', (['tmpdir', '"""afolder"""'], {}), "(tmpdir, 'afolder')\n", (199, 218), False, 'import os\n'), ((755, 778), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (768, 778), False, 'import pytest\n'), ((1109, 1124), 'pickle.dumps', 'pickle.dumps', (['m'], {}), '(m)\n', (1121, 1124), False, 'import pickle\n'), ((229, 258), 'os.path.join', 'os.path.join', (['tmpdir', '"""afile"""'], {}), "(tmpdir, 'afile')\n", (241, 258), False, 'import os\n'), ((288, 334), 'os.path.join', 'os.path.join', (['tmpdir', '"""afolder"""', '"""anotherfile"""'], {}), "(tmpdir, 'afolder', 'anotherfile')\n", (300, 334), False, 'import os\n')] |
EderReisS/pythonChallenges | testedome/questions/quest_5.py | a880358c2cb4de0863f4b4cada36b3d439a8a018 | """
A
/ |
B C
'B, C'
"""
class CategoryTree:
def __init__(self):
self.root = {}
self.all_categories = []
def add_category(self, category, parent):
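        # Register `category` under `parent` (use parent=None for a top-level category);
        # raises KeyError for a duplicate category or an unknown parent.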
if category in self.all_categories:
raise KeyError(f"{category} exists")
if parent is None:
self.root[category] = set()
if parent:
if parent not in self.root:
raise KeyError(f"{parent} invalid")
self.root[category] = set()
self.root[parent].add(category)
self.all_categories.append(category)
def get_children(self, parent):
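        # Return the direct children of `parent`; raises KeyError for an unknown parent.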
if parent and parent not in self.root:
raise KeyError(f"{parent} invalid")
return list(self.root[parent])
if __name__ == "__main__":
c = CategoryTree()
c.add_category('A', None)
c.add_category('B', 'A')
c.add_category('C', 'A')
print(','.join(c.get_children('A') or []))
print(','.join(c.get_children('E') or []))
| [] |
mirfan899/MTTS | sppas/sppas/src/anndata/aio/__init__.py | 3167b65f576abcc27a8767d24c274a04712bd948 | # -*- coding: UTF-8 -*-
"""
..
---------------------------------------------------------------------
___ __ __ __ ___
/ | \ | \ | \ / the automatic
\__ |__/ |__/ |___| \__ annotation and
\ | | | | \ analysis
___/ | | | | ___/ of speech
http://www.sppas.org/
Use of this software is governed by the GNU Public License, version 3.
SPPAS is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SPPAS is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SPPAS. If not, see <http://www.gnu.org/licenses/>.
This banner notice must not be removed.
---------------------------------------------------------------------
anndata.aio
~~~~~~~~~~~
Readers and writers of annotated data.
:author: Brigitte Bigi
:organization: Laboratoire Parole et Langage, Aix-en-Provence, France
:contact: [email protected]
:license: GPL, v3
:copyright: Copyright (C) 2011-2018 Brigitte Bigi
"""
from .annotationpro import sppasANT
from .annotationpro import sppasANTX
from .anvil import sppasAnvil
from .audacity import sppasAudacity
from .elan import sppasEAF
from .htk import sppasLab
from .phonedit import sppasMRK
from .phonedit import sppasSignaix
from .praat import sppasTextGrid
from .praat import sppasIntensityTier
from .praat import sppasPitchTier
from .sclite import sppasCTM
from .sclite import sppasSTM
from .subtitle import sppasSubRip
from .subtitle import sppasSubViewer
from .text import sppasRawText
from .text import sppasCSV
from .weka import sppasARFF
from .weka import sppasXRFF
from .xtrans import sppasTDF
from .xra import sppasXRA
# ----------------------------------------------------------------------------
# Variables
# ----------------------------------------------------------------------------
# TODO: get extension from the "default_extension" member of each class
ext_sppas = ['.xra', '.[Xx][Rr][Aa]']
ext_praat = ['.TextGrid', '.PitchTier', '.[Tt][eE][xX][tT][Gg][Rr][Ii][dD]','.[Pp][Ii][tT][cC][hH][Tt][Ii][Ee][rR]']
ext_transcriber = ['.trs','.[tT][rR][sS]']
ext_elan = ['.eaf', '.[eE][aA][fF]']
ext_ascii = ['.txt', '.csv', '.[cC][sS][vV]', '.[tT][xX][Tt]', '.info']
ext_phonedit = ['.mrk', '.[mM][rR][kK]']
ext_signaix = ['.hz', '.[Hh][zZ]']
ext_sclite = ['.stm', '.ctm', '.[sScC][tT][mM]']
ext_htk = ['.lab', '.mlf']
ext_subtitles = ['.sub', '.srt', '.[sS][uU][bB]', '.[sS][rR][tT]']
ext_anvil = ['.anvil', '.[aA][nN][vV][iI][lL]']
ext_annotationpro = ['.antx', '.[aA][nN][tT][xX]']
ext_xtrans = ['.tdf', '.[tT][dD][fF]']
ext_audacity = ['.aup']
ext_weka = ['.arff', '.xrff']
primary_in = ['.hz', '.PitchTier']
annotations_in = ['.xra', '.TextGrid', '.eaf', '.csv', '.mrk', '.txt', '.stm', '.ctm', '.lab', '.mlf', '.sub', '.srt', '.antx', '.anvil', '.aup', '.trs', '.tdf']
extensions = ['.xra', '.textgrid', '.pitchtier', '.hz', '.eaf', '.trs', '.csv', '.mrk', '.txt', '.mrk', '.stm', '.ctm', '.lab', '.mlf', '.sub', '.srt', 'anvil', '.antx', '.tdf', '.arff', '.xrff']
extensionsul = ext_sppas + ext_praat + ext_transcriber + ext_elan + ext_ascii + ext_phonedit + ext_signaix + ext_sclite + ext_htk + ext_subtitles + ext_anvil + ext_annotationpro + ext_xtrans + ext_audacity + ext_weka
extensions_in = primary_in + annotations_in
extensions_out = ['.xra', '.TextGrid', '.eaf', '.csv', '.mrk', '.txt', '.stm', '.ctm', '.lab', '.mlf', '.sub', '.srt', '.antx', '.arff', '.xrff']
extensions_out_multitiers = ['.xra', '.TextGrid', '.eaf', '.csv', '.mrk', '.antx', '.arff', '.xrff']
# ----------------------------------------------------------------------------
__all__ = (
"sppasANT",
"sppasANTX",
"sppasAnvil",
"sppasAudacity",
"sppasEAF",
"sppasLab",
"sppasMRK",
"sppasSignaix",
"sppasTextGrid",
"sppasIntensityTier",
"sppasPitchTier",
"sppasCTM",
"sppasSTM",
"sppasSubRip",
"sppasSubViewer",
"sppasRawText",
"sppasCSV",
"sppasARFF",
"sppasXRFF",
"sppasTDF",
"sppasXRA",
"extensions",
"extensions_in",
"extensions_out"
)
| [] |
dapengchen123/hfsoftmax | models/__init__.py | 467bd90814abdf3e5ad8384e6e05749172b68ae6 | from .resnet import *
from .hynet import *
from .classifier import Classifier, HFClassifier, HNSWClassifier
from .ext_layers import ParameterClient
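# Map each sampler name to the classifier class to use with it.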
samplerClassifier = {
'hf': HFClassifier,
'hnsw': HNSWClassifier,
}
| [] |
AgnirudraSil/tetris | scripts/multiplayer/server.py | 2a4f4c26190fc8b669f98c116af343f7f1ac51bf | import pickle
import socket
import _thread
from scripts.multiplayer import game, board, tetriminos
server = "192.168.29.144"
port = 5555
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind((server, port))
except socket.error as e:
print(e)
s.listen()
print("Waiting for connection")
connected = set()
games = {}
idCount = 0
def threaded_client(conn, p, gameId):
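    # Serve a single client in its own thread: send the player's index first, then for
    # every request update the shared Game with the received data and reply with the
    # pickled game state.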
global idCount
conn.send(str.encode(str(p)))
reply = ""
while True:
try:
data = conn.recv(4096).decode()
if gameId in games:
game = games[gameId]
if not data:
break
else:
game.update(p, data)
reply = game
conn.sendall(pickle.dumps(reply))
else:
break
except:
break
print("Lost Connection!")
try:
del games[gameId]
print("Closing Game", gameId)
except:
pass
idCount -= 1
conn.close()
while True:
conn, addr = s.accept()
print("Connected to: ", addr)
idCount += 1
p = 0
game_id = (idCount - 1) // 2
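    # Two connections share one game: the first player creates it, the second joins and marks it ready.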
if idCount % 2 == 1:
games[game_id] = game.Game((0, 0, 0), None, board)
else:
games[game_id].ready = True
p = 1
_thread.start_new_thread(threaded_client, (conn, p, game_id))
| [((143, 192), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (156, 192), False, 'import socket\n'), ((1335, 1396), '_thread.start_new_thread', '_thread.start_new_thread', (['threaded_client', '(conn, p, game_id)'], {}), '(threaded_client, (conn, p, game_id))\n', (1359, 1396), False, 'import _thread\n'), ((1236, 1269), 'scripts.multiplayer.game.Game', 'game.Game', (['(0, 0, 0)', 'None', 'board'], {}), '((0, 0, 0), None, board)\n', (1245, 1269), False, 'from scripts.multiplayer import game, board, tetriminos\n'), ((701, 721), 'scripts.multiplayer.game.update', 'game.update', (['p', 'data'], {}), '(p, data)\n', (712, 721), False, 'from scripts.multiplayer import game, board, tetriminos\n'), ((789, 808), 'pickle.dumps', 'pickle.dumps', (['reply'], {}), '(reply)\n', (801, 808), False, 'import pickle\n')] |
smaranjitghose/PyProjectEuler | solutions/6-sum-suqare-difference.py | d1303d18a0d90acf885ab5ac54b3ea91d99e83db | def sum_of_squares(n):
return sum(i ** 2 for i in range(1, n+1))
def square_of_sum(n):
return sum(range(1, n+1)) ** 2
| [] |
AustinTSchaffer/DailyProgrammer | AdventOfCode/2018/src/day-03/app.py | b16d9babb298ac5e879c514f9c4646b99c6860a8 | import os
import re
from collections import defaultdict
class Claim(object):
def __init__(self, data_row):
match = re.match(r'#(\d+) @ (\d+),(\d+): (\d+)x(\d+)', data_row)
self.id = int(match[1])
self.x = int(match[2])
self.y = int(match[3])
self.width = int(match[4])
self.height = int(match[5])
def all_locations(self):
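        # Yield every (x, y) square of fabric covered by this claim.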
for x in range(self.width):
for y in range(self.height):
yield (self.x + x, self.y + y)
CURRENT_DIR, _ = os.path.split(__file__)
DATA_FILE = os.path.join(CURRENT_DIR, 'data.txt')
def data_file_iter(data_file) -> Claim:
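    # Yield a Claim parsed from each non-empty line of the data file.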
with open(data_file, 'r') as data:
for claim in data:
claim = claim.strip()
if (claim):
yield Claim(claim)
def part1(claims):
"""
This is basically a single-threaded collision detection method,
implemented in pure python. Computation complexity is obviously
not a consideration.
"""
# Determines how many times each locations was claimed
claimed_space_registry = defaultdict(int)
for claim in claims:
for location in claim.all_locations():
claimed_space_registry[location] += 1
# Generates the set of all locations that were claimed more than once
multi_claimed_spaces = {
location
for location,count in claimed_space_registry.items()
if count > 1
}
# Prints the number of locations that are claimed more than once
# and returns the set of locations that were claimed more than once
print('Multi-Claimed Spaces:', len(multi_claimed_spaces))
return multi_claimed_spaces
def part2(claims, multi_claimed_spaces):
"""
Might not be the optimal solution, but it runs fast enough, and uses
components that were already calculated in part 1.
"""
for claim in claims:
all_locations_are_non_overlapping = all(map(
lambda loc: loc not in multi_claimed_spaces,
claim.all_locations()
))
if all_locations_are_non_overlapping:
print('Non-overlapping claim:', claim.id)
return claim
if __name__ == '__main__':
    claims = list(data_file_iter(DATA_FILE))
mcs = part1(claims)
santas_suit_material = part2(claims, mcs)
| [((522, 545), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (535, 545), False, 'import os\n'), ((558, 595), 'os.path.join', 'os.path.join', (['CURRENT_DIR', '"""data.txt"""'], {}), "(CURRENT_DIR, 'data.txt')\n", (570, 595), False, 'import os\n'), ((1082, 1098), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1093, 1098), False, 'from collections import defaultdict\n'), ((128, 188), 're.match', 're.match', (['"""#(\\\\d+) @ (\\\\d+),(\\\\d+): (\\\\d+)x(\\\\d+)"""', 'data_row'], {}), "('#(\\\\d+) @ (\\\\d+),(\\\\d+): (\\\\d+)x(\\\\d+)', data_row)\n", (136, 188), False, 'import re\n')] |
ArianeFire/HaniCam | facerec-master/py/facerec/distance.py | 8a940486a613d680a0b556209a596cdf3eb71f53 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) Philipp Wagner. All rights reserved.
# Licensed under the BSD license. See LICENSE file in the project root for full license information.
import numpy as np
class AbstractDistance(object):
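    """Base class for distance measures; subclasses must implement __call__(p, q)."""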
def __init__(self, name):
self._name = name
def __call__(self,p,q):
raise NotImplementedError("Every AbstractDistance must implement the __call__ method.")
@property
def name(self):
return self._name
def __repr__(self):
return self._name
class EuclideanDistance(AbstractDistance):
def __init__(self):
AbstractDistance.__init__(self,"EuclideanDistance")
def __call__(self, p, q):
p = np.asarray(p).flatten()
q = np.asarray(q).flatten()
return np.sqrt(np.sum(np.power((p-q),2)))
class CosineDistance(AbstractDistance):
"""
Negated Mahalanobis Cosine Distance.
Literature:
"Studies on sensitivity of face recognition performance to eye location accuracy.". Master Thesis (2004), Wang
"""
def __init__(self):
AbstractDistance.__init__(self,"CosineDistance")
def __call__(self, p, q):
p = np.asarray(p).flatten()
q = np.asarray(q).flatten()
return -np.dot(p.T,q) / (np.sqrt(np.dot(p,p.T)*np.dot(q,q.T)))
class NormalizedCorrelation(AbstractDistance):
"""
Calculates the NormalizedCorrelation Coefficient for two vectors.
Literature:
"Multi-scale Local Binary Pattern Histogram for Face Recognition". PhD (2008). Chi Ho Chan, University Of Surrey.
"""
def __init__(self):
AbstractDistance.__init__(self,"NormalizedCorrelation")
def __call__(self, p, q):
p = np.asarray(p).flatten()
q = np.asarray(q).flatten()
pmu = p.mean()
qmu = q.mean()
pm = p - pmu
qm = q - qmu
return 1.0 - (np.dot(pm, qm) / (np.sqrt(np.dot(pm, pm)) * np.sqrt(np.dot(qm, qm))))
class ChiSquareDistance(AbstractDistance):
"""
    Calculates the Chi-square distance between two histograms:
        chi2(p, q) = sum_i (p_i - q_i)^2 / (p_i + q_i)
"""
def __init__(self):
AbstractDistance.__init__(self,"ChiSquareDistance")
def __call__(self, p, q):
p = np.asarray(p).flatten()
q = np.asarray(q).flatten()
bin_dists = (p-q)**2 / (p+q+np.finfo('float').eps)
return np.sum(bin_dists)
class HistogramIntersection(AbstractDistance):
def __init__(self):
AbstractDistance.__init__(self,"HistogramIntersection")
def __call__(self, p, q):
p = np.asarray(p).flatten()
q = np.asarray(q).flatten()
return np.sum(np.minimum(p,q))
class BinRatioDistance(AbstractDistance):
"""
Calculates the Bin Ratio Dissimilarity.
Literature:
"Use Bin-Ratio Information for Category and Scene Classification" (2010), Xie et.al.
"""
def __init__(self):
AbstractDistance.__init__(self,"BinRatioDistance")
def __call__(self, p, q):
p = np.asarray(p).flatten()
q = np.asarray(q).flatten()
a = np.abs(1-np.dot(p,q.T)) # NumPy needs np.dot instead of * for reducing to tensor
b = ((p-q)**2 + 2*a*(p*q))/((p+q)**2+np.finfo('float').eps)
return np.abs(np.sum(b))
class L1BinRatioDistance(AbstractDistance):
"""
Calculates the L1-Bin Ratio Dissimilarity.
Literature:
"Use Bin-Ratio Information for Category and Scene Classification" (2010), Xie et.al.
"""
def __init__(self):
AbstractDistance.__init__(self,"L1-BinRatioDistance")
def __call__(self, p, q):
p = np.asarray(p, dtype=np.float).flatten()
q = np.asarray(q, dtype=np.float).flatten()
a = np.abs(1-np.dot(p,q.T)) # NumPy needs np.dot instead of * for reducing to tensor
b = ((p-q)**2 + 2*a*(p*q)) * abs(p-q) / ((p+q)**2+np.finfo('float').eps)
return np.abs(np.sum(b))
class ChiSquareBRD(AbstractDistance):
"""
Calculates the ChiSquare-Bin Ratio Dissimilarity.
Literature:
"Use Bin-Ratio Information for Category and Scene Classification" (2010), Xie et.al.
"""
def __init__(self):
AbstractDistance.__init__(self,"ChiSquare-BinRatioDistance")
def __call__(self, p, q):
p = np.asarray(p, dtype=np.float).flatten()
q = np.asarray(q, dtype=np.float).flatten()
a = np.abs(1-np.dot(p,q.T)) # NumPy needs np.dot instead of * for reducing to tensor
b = ((p-q)**2 + 2*a*(p*q)) * (p-q)**2 / ((p+q)**3+np.finfo('float').eps)
return np.abs(np.sum(b))
| [((2543, 2560), 'numpy.sum', 'np.sum', (['bin_dists'], {}), '(bin_dists)\n', (2549, 2560), True, 'import numpy as np\n'), ((2822, 2838), 'numpy.minimum', 'np.minimum', (['p', 'q'], {}), '(p, q)\n', (2832, 2838), True, 'import numpy as np\n'), ((3420, 3429), 'numpy.sum', 'np.sum', (['b'], {}), '(b)\n', (3426, 3429), True, 'import numpy as np\n'), ((4069, 4078), 'numpy.sum', 'np.sum', (['b'], {}), '(b)\n', (4075, 4078), True, 'import numpy as np\n'), ((4726, 4735), 'numpy.sum', 'np.sum', (['b'], {}), '(b)\n', (4732, 4735), True, 'import numpy as np\n'), ((742, 755), 'numpy.asarray', 'np.asarray', (['p'], {}), '(p)\n', (752, 755), True, 'import numpy as np\n'), ((778, 791), 'numpy.asarray', 'np.asarray', (['q'], {}), '(q)\n', (788, 791), True, 'import numpy as np\n'), ((832, 850), 'numpy.power', 'np.power', (['(p - q)', '(2)'], {}), '(p - q, 2)\n', (840, 850), True, 'import numpy as np\n'), ((1226, 1239), 'numpy.asarray', 'np.asarray', (['p'], {}), '(p)\n', (1236, 1239), True, 'import numpy as np\n'), ((1262, 1275), 'numpy.asarray', 'np.asarray', (['q'], {}), '(q)\n', (1272, 1275), True, 'import numpy as np\n'), ((1302, 1316), 'numpy.dot', 'np.dot', (['p.T', 'q'], {}), '(p.T, q)\n', (1308, 1316), True, 'import numpy as np\n'), ((1781, 1794), 'numpy.asarray', 'np.asarray', (['p'], {}), '(p)\n', (1791, 1794), True, 'import numpy as np\n'), ((1817, 1830), 'numpy.asarray', 'np.asarray', (['q'], {}), '(q)\n', (1827, 1830), True, 'import numpy as np\n'), ((1951, 1965), 'numpy.dot', 'np.dot', (['pm', 'qm'], {}), '(pm, qm)\n', (1957, 1965), True, 'import numpy as np\n'), ((2409, 2422), 'numpy.asarray', 'np.asarray', (['p'], {}), '(p)\n', (2419, 2422), True, 'import numpy as np\n'), ((2445, 2458), 'numpy.asarray', 'np.asarray', (['q'], {}), '(q)\n', (2455, 2458), True, 'import numpy as np\n'), ((2740, 2753), 'numpy.asarray', 'np.asarray', (['p'], {}), '(p)\n', (2750, 2753), True, 'import numpy as np\n'), ((2776, 2789), 'numpy.asarray', 'np.asarray', (['q'], {}), '(q)\n', (2786, 2789), True, 'import numpy as np\n'), ((3177, 3190), 'numpy.asarray', 'np.asarray', (['p'], {}), '(p)\n', (3187, 3190), True, 'import numpy as np\n'), ((3213, 3226), 'numpy.asarray', 'np.asarray', (['q'], {}), '(q)\n', (3223, 3226), True, 'import numpy as np\n'), ((3258, 3272), 'numpy.dot', 'np.dot', (['p', 'q.T'], {}), '(p, q.T)\n', (3264, 3272), True, 'import numpy as np\n'), ((3781, 3810), 'numpy.asarray', 'np.asarray', (['p'], {'dtype': 'np.float'}), '(p, dtype=np.float)\n', (3791, 3810), True, 'import numpy as np\n'), ((3833, 3862), 'numpy.asarray', 'np.asarray', (['q'], {'dtype': 'np.float'}), '(q, dtype=np.float)\n', (3843, 3862), True, 'import numpy as np\n'), ((3894, 3908), 'numpy.dot', 'np.dot', (['p', 'q.T'], {}), '(p, q.T)\n', (3900, 3908), True, 'import numpy as np\n'), ((4438, 4467), 'numpy.asarray', 'np.asarray', (['p'], {'dtype': 'np.float'}), '(p, dtype=np.float)\n', (4448, 4467), True, 'import numpy as np\n'), ((4490, 4519), 'numpy.asarray', 'np.asarray', (['q'], {'dtype': 'np.float'}), '(q, dtype=np.float)\n', (4500, 4519), True, 'import numpy as np\n'), ((4551, 4565), 'numpy.dot', 'np.dot', (['p', 'q.T'], {}), '(p, q.T)\n', (4557, 4565), True, 'import numpy as np\n'), ((1327, 1341), 'numpy.dot', 'np.dot', (['p', 'p.T'], {}), '(p, p.T)\n', (1333, 1341), True, 'import numpy as np\n'), ((1341, 1355), 'numpy.dot', 'np.dot', (['q', 'q.T'], {}), '(q, q.T)\n', (1347, 1355), True, 'import numpy as np\n'), ((2505, 2522), 'numpy.finfo', 'np.finfo', (['"""float"""'], {}), "('float')\n", (2513, 2522), True, 'import 
numpy as np\n'), ((3375, 3392), 'numpy.finfo', 'np.finfo', (['"""float"""'], {}), "('float')\n", (3383, 3392), True, 'import numpy as np\n'), ((4024, 4041), 'numpy.finfo', 'np.finfo', (['"""float"""'], {}), "('float')\n", (4032, 4041), True, 'import numpy as np\n'), ((4681, 4698), 'numpy.finfo', 'np.finfo', (['"""float"""'], {}), "('float')\n", (4689, 4698), True, 'import numpy as np\n'), ((1977, 1991), 'numpy.dot', 'np.dot', (['pm', 'pm'], {}), '(pm, pm)\n', (1983, 1991), True, 'import numpy as np\n'), ((2003, 2017), 'numpy.dot', 'np.dot', (['qm', 'qm'], {}), '(qm, qm)\n', (2009, 2017), True, 'import numpy as np\n')] |
elina8013/android_demo | pgyer_uploader.py | d8cef19d06a4f21f7cf2c277bbabba8cf10a8608 | #!/usr/bin/python
#coding=utf-8
import os
import requests
import time
import re
from datetime import datetime
import urllib2
import json
import mimetypes
import smtplib
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
# configuration for pgyer
USER_KEY = "f605b7c7826690f796078e3dd23a60d5"
API_KEY = "8bdd05df986d598f01456914e51fc889"
PGYER_UPLOAD_URL = "https://www.pgyer.com/apiv1/app/upload"
repo_path = 'C:/Users/Administrator/.jenkins/workspace/Demo/app'
repo_url = 'https://github.com/r17171709/iite_test'
ipa_path = "C:/Users/Administrator/.jenkins/workspace/Demo/app/build/outputs/apk/app-release.apk"
update_description = "Version update test"
def parseUploadResult(jsonResult):
print 'post response: %s' % jsonResult
resultCode = jsonResult['code']
send_Email(jsonResult)
if resultCode != 0:
print "Upload Fail!"
raise Exception("Reason: %s" % jsonResult['message'])
print "Upload Success"
appKey = jsonResult['data']['appKey']
appDownloadPageURL = "https://www.pgyer.com/%s" % appKey
print "appDownloadPage: %s" % appDownloadPageURL
return appDownloadPageURL
def uploadIpaToPgyer(ipaPath, updateDescription):
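    # Upload the package to Pgyer's upload API, retrying up to 5 times on failures,
    # and return the app's download-page URL parsed from the response.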
print "Begin to upload ipa to Pgyer: %s" % ipaPath
headers = {'enctype': 'multipart/form-data'}
payload = {
'uKey': USER_KEY,
'_api_key': API_KEY,
        'publishRange': '2', # publish immediately
        'isPublishToPublic': '2', # do not publish to the public square
        'updateDescription': updateDescription # description of this release
}
try_times = 0
while try_times < 5:
try:
print "uploading ... %s" % datetime.now()
ipa_file = {'file': open(ipaPath, 'rb')}
r = requests.post(PGYER_UPLOAD_URL,
headers = headers,
files = ipa_file,
data = payload
)
assert r.status_code == requests.codes.ok
result = r.json()
appDownloadPageURL = parseUploadResult(result)
return appDownloadPageURL
except requests.exceptions.ConnectionError:
print "requests.exceptions.ConnectionError occured!"
time.sleep(60)
print "try again ... %s" % datetime.now()
try_times += 1
except Exception as e:
print "Exception occured: %s" % str(e)
time.sleep(60)
print "try again ... %s" % datetime.now()
try_times += 1
if try_times >= 5:
raise Exception("Failed to upload ipa to Pgyer, retried 5 times.")
def parseQRCodeImageUrl(appDownloadPageURL):
try_times = 0
while try_times < 3:
try:
response = requests.get(appDownloadPageURL)
regex = '<img src=\"(.*?)\" style='
m = re.search(regex, response.content)
assert m is not None
appQRCodeURL = m.group(1)
print "appQRCodeURL: %s" % appQRCodeURL
return appQRCodeURL
except AssertionError:
try_times += 1
time.sleep(60)
print "Can not locate QRCode image. retry ... %s: %s" % (try_times, datetime.now())
if try_times >= 3:
raise Exception("Failed to locate QRCode image in download page, retried 3 times.")
def saveQRCodeImage(appDownloadPageURL, output_folder):
appQRCodeURL = parseQRCodeImageUrl(appDownloadPageURL)
response = requests.get(appQRCodeURL)
qr_image_file_path = os.path.join(output_folder, 'QRCode.png')
if response.status_code == 200:
with open(qr_image_file_path, 'wb') as f:
f.write(response.content)
print 'Save QRCode image to file: %s' % qr_image_file_path
def main():
appDownloadPageURL = uploadIpaToPgyer(ipa_path, update_description)
try:
output_folder = os.path.dirname(ipa_path)
saveQRCodeImage(appDownloadPageURL, output_folder)
except Exception as e:
print "Exception occured: %s" % str(e)
# Get the info of the last git commit
def getCommitInfo():
    # Option 1: use a python library; requires the current branch to exist on the remote
# repo = Gittle(repo_path, origin_uri=repo_url)
# commitInfo = repo.commit_info(start=0, end=1)
# lastCommitInfo = commitInfo[0]
    # Option 2: cd into the repo directory and run `git log -1` to print the last commit
os.chdir(repo_path);
lastCommitInfo = run_cmd('git log -1')
return lastCommitInfo
# Send the notification email
def send_Email(json_result):
print '*******start to send mail****'
appName = json_result['data']['appName']
appKey = json_result['data']['appKey']
appVersion = json_result['data']['appVersion']
appBuildVersion = json_result['data']['appBuildVersion']
appShortcutUrl = json_result['data']['appShortcutUrl']
    # Email recipients
mail_receiver = ['[email protected]']
    # Configure host, user and password according to your mail provider
mail_host = 'smtp.139.com'
mail_port = 465
mail_user = '[email protected]'
mail_pwd = 'xxx'
mail_to = ','.join(mail_receiver)
msg = MIMEMultipart()
    environsString = '<p><h3>Information about this build</h3><p>'
    # environsString += '<p>ipa package download URL : ' + 'wudizhi' + '<p>'
    environsString += '<p>Pgyer install page : ' + 'http://www.pgyer.com/' + str(appShortcutUrl) + '<p><p><p><p>'
    # environsString += '<li><a href="itms-services://?action=download-manifest&url=https://ssl.pgyer.com/app/plist/' + str(appKey) + '"></a>Tap to install directly</li>'
    environsString += '<p><h3>Information about the latest git commit</h3><p>'
    # Get the last git commit info
    lastCommitInfo = getCommitInfo()
    # # committer
    # committer = lastCommitInfo['committer']['raw']
    # # commit message
    # description = lastCommitInfo['description']
environsString += '<p>' + '<font color="red">' + lastCommitInfo + '</font>' + '<p>'
# environsString += '<p>Description:' + '<font color="red">' + description + '</font>' + '<p>'
message = environsString
body = MIMEText(message, _subtype='html', _charset='utf-8')
msg["Accept-Language"]="zh-CN"
msg["Accept-Charset"]="ISO-8859-1,utf-8"
msg.attach(body)
msg['To'] = mail_to
msg['from'] = '[email protected]'
    msg['subject'] = 'Latest Android APP build'
try:
s = smtplib.SMTP()
        # Enable debug mode so the SMTP session output is printed
        s.set_debuglevel(1)
        s.connect(mail_host)
        s.starttls() # upgrade the connection to TLS encryption
s.login(mail_user, mail_pwd)
s.sendmail(mail_user, mail_receiver, msg.as_string())
s.close()
print '*******mail send ok****'
except Exception, e:
print e
def run_cmd(cmd):
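    # Run a shell command and return its stripped stdout; falls back to os.popen3
    # when the subprocess module is unavailable.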
try:
import subprocess
except ImportError:
_, result_f, error_f = os.popen3(cmd)
else:
process = subprocess.Popen(cmd, shell = True,
stdout = subprocess.PIPE, stderr = subprocess.PIPE)
result_f, error_f = process.stdout, process.stderr
errors = error_f.read()
if errors: pass
result_str = result_f.read().strip()
if result_f : result_f.close()
if error_f : error_f.close()
return result_str
if __name__ == '__main__':
main()
| [] |